Actual source code: precon.c


  2: /*
  3:     The PC (preconditioner) interface routines, callable by users.
  4: */
  5: #include <petsc/private/pcimpl.h>
  6: #include <petscdm.h>

  8: /* Logging support */
  9: PetscClassId  PC_CLASSID;
 10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
 11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
 12: PetscInt      PetscMGLevelId;

 14: PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
 15: {
 17:   PetscMPIInt    size;
 18:   PetscBool      hasop,flg1,flg2,set,flg3;

 21:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
 22:   if (pc->pmat) {
 23:     MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
 24:     if (size == 1) {
 25:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
 26:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
 27:       MatIsSymmetricKnown(pc->pmat,&set,&flg3);
 28:       if (flg1 && (!flg2 || (set && flg3))) {
 29:         *type = PCICC;
 30:       } else if (flg2) {
 31:         *type = PCILU;
 32:       } else if (hasop) { /* likely is a parallel matrix run on one processor */
 33:         *type = PCBJACOBI;
 34:       } else {
 35:         *type = PCNONE;
 36:       }
 37:     } else {
 38:        if (hasop) {
 39:         *type = PCBJACOBI;
 40:       } else {
 41:         *type = PCNONE;
 42:       }
 43:     }
 44:   } else {
 45:     if (size == 1) {
 46:       *type = PCILU;
 47:     } else {
 48:       *type = PCBJACOBI;
 49:     }
 50:   }
 51:   return(0);
 52: }

 54: /*@
 55:    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

 57:    Collective on PC

 59:    Input Parameter:
 60: .  pc - the preconditioner context

 62:    Level: developer

 64:    Notes:
 65:     This allows a PC to be reused for a linear system of a different size while keeping the same options that were previously set in the PC

 67: .seealso: PCCreate(), PCSetUp()
 68: @*/
 69: PetscErrorCode  PCReset(PC pc)
 70: {

 75:   if (pc->ops->reset) {
 76:     (*pc->ops->reset)(pc);
 77:   }
 78:   VecDestroy(&pc->diagonalscaleright);
 79:   VecDestroy(&pc->diagonalscaleleft);
 80:   MatDestroy(&pc->pmat);
 81:   MatDestroy(&pc->mat);

 83:   pc->setupcalled = 0;
 84:   return(0);
 85: }

 87: /*@C
 88:    PCDestroy - Destroys PC context that was created with PCCreate().

 90:    Collective on PC

 92:    Input Parameter:
 93: .  pc - the preconditioner context

 95:    Level: developer

 97: .seealso: PCCreate(), PCSetUp()
 98: @*/
 99: PetscErrorCode  PCDestroy(PC *pc)
100: {

104:   if (!*pc) return(0);
106:   if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; return(0);}

108:   PCReset(*pc);

110:   /* if memory was published with SAWs then destroy it */
111:   PetscObjectSAWsViewOff((PetscObject)*pc);
112:   if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
113:   DMDestroy(&(*pc)->dm);
114:   PetscHeaderDestroy(pc);
115:   return(0);
116: }

118: /*@C
119:    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
120:       scaling as needed by certain time-stepping codes.

122:    Logically Collective on PC

124:    Input Parameter:
125: .  pc - the preconditioner context

127:    Output Parameter:
128: .  flag - PETSC_TRUE if it applies the scaling

130:    Level: developer

132:    Notes:
133:     If this returns PETSC_TRUE then the system solved via the Krylov method is
134: $           D M A D^{-1} y = D M b  for left preconditioning or
135: $           D A M D^{-1} z = D b for right preconditioning

137: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
138: @*/
139: PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
140: {
144:   *flag = pc->diagonalscale;
145:   return(0);
146: }

148: /*@
149:    PCSetDiagonalScale - Sets the left scaling vector used to apply an additional left and right
150:       scaling as needed by certain time-stepping codes.

152:    Logically Collective on PC

154:    Input Parameters:
155: +  pc - the preconditioner context
156: -  s - scaling vector

158:    Level: intermediate

160:    Notes:
161:     The system solved via the Krylov method is
162: $           D M A D^{-1} y = D M b  for left preconditioning or
163: $           D A M D^{-1} z = D b for right preconditioning

165:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.
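
   Sample usage (a minimal sketch; the solution-sized vector x used to build the
   scaling vector is illustrative and error checking is omitted):
.vb
      Vec d;
      VecDuplicate(x,&d);        // create a vector with the layout of the solution
      VecSet(d,2.0);             // fill in the desired diagonal scaling D
      PCSetDiagonalScale(pc,d);  // pc now applies D ... D^{-1} as described above
      VecDestroy(&d);            // pc keeps its own reference to the vector
.ve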

167: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
168: @*/
169: PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
170: {

176:   pc->diagonalscale     = PETSC_TRUE;

178:   PetscObjectReference((PetscObject)s);
179:   VecDestroy(&pc->diagonalscaleleft);

181:   pc->diagonalscaleleft = s;

183:   VecDuplicate(s,&pc->diagonalscaleright);
184:   VecCopy(s,pc->diagonalscaleright);
185:   VecReciprocal(pc->diagonalscaleright);
186:   return(0);
187: }

189: /*@
190:    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

192:    Logically Collective on PC

194:    Input Parameters:
195: +  pc - the preconditioner context
196: .  in - input vector
197: -  out - scaled vector (may be the same as in)

199:    Level: intermediate

201:    Notes:
202:     The system solved via the Krylov method is
203: $           D M A D^{-1} y = D M b  for left preconditioning or
204: $           D A M D^{-1} z = D b for right preconditioning

206:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

208:    If diagonal scaling is turned off and in is not out, then in is copied to out.

210: .seealso: PCCreate(), PCSetUp(), PCSetDiagonalScale(), PCDiagonalScaleRight(), PCGetDiagonalScale()
211: @*/
212: PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
213: {

220:   if (pc->diagonalscale) {
221:     VecPointwiseMult(out,pc->diagonalscaleleft,in);
222:   } else if (in != out) {
223:     VecCopy(in,out);
224:   }
225:   return(0);
226: }

228: /*@
229:    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

231:    Logically Collective on PC

233:    Input Parameters:
234: +  pc - the preconditioner context
235: .  in - input vector
236: -  out - scaled vector (may be the same as in)

238:    Level: intermediate

240:    Notes:
241:     The system solved via the Krylov method is
242: $           D M A D^{-1} y = D M b  for left preconditioning or
243: $           D A M D^{-1} z = D b for right preconditioning

245:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

247:    If diagonal scaling is turned off and in is not out, then in is copied to out.

249: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCSetDiagonalScale(), PCGetDiagonalScale()
250: @*/
251: PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
252: {

259:   if (pc->diagonalscale) {
260:     VecPointwiseMult(out,pc->diagonalscaleright,in);
261:   } else if (in != out) {
262:     VecCopy(in,out);
263:   }
264:   return(0);
265: }

267: /*@
268:    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
269:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
270:    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.

272:    Logically Collective on PC

274:    Input Parameters:
275: +  pc - the preconditioner context
276: -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

278:    Options Database Key:
279: .  -pc_use_amat <true,false>

281:    Notes:
282:    For the common case in which the linear system matrix and the matrix used to construct the
283:    preconditioner are identical, this routine does nothing.

285:    Level: intermediate
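
   Sample usage (a minimal sketch, assuming the PC is obtained from a KSP named ksp;
   error checking is omitted):
.vb
      PC pc;
      KSPGetPC(ksp,&pc);
      PCSetUseAmat(pc,PETSC_TRUE);   // apply Amat, not Pmat, inside the preconditioner
.ve
   The same effect is obtained at runtime with -pc_use_amat.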

287: .seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
288: @*/
289: PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
290: {
293:   pc->useAmat = flg;
294:   return(0);
295: }

297: /*@
298:    PCSetErrorIfFailure - Causes PC to generate an error if an FPE, for example a zero pivot, is detected.

300:    Logically Collective on PC

302:    Input Parameters:
303: +  pc - iterative context obtained from PCCreate()
304: -  flg - PETSC_TRUE indicates you want the error generated

306:    Level: advanced

308:    Notes:
309:     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner; you can call KSPGetConvergedReason() after a KSPSolve()
310:     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
311:     detected.

313:     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs
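
   Sample usage (a minimal sketch, assuming a KSP named ksp and vectors b and x;
   error checking is omitted):
.vb
      PC pc;
      KSPGetPC(ksp,&pc);
      PCSetErrorIfFailure(pc,PETSC_TRUE);  // a zero pivot etc. now generates an error
      KSPSolve(ksp,b,x);                   // otherwise check KSPGetConvergedReason() afterwards
.ve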

315: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
316: @*/
317: PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
318: {
322:   pc->erroriffailure = flg;
323:   return(0);
324: }

326: /*@
327:    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
328:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
329:    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.

331:    Logically Collective on PC

333:    Input Parameter:
334: .  pc - the preconditioner context

336:    Output Parameter:
337: .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

339:    Notes:
340:    For the common case in which the linear system matrix and the matrix used to construct the
341:    preconditioner are identical, this routine does nothing.

343:    Level: intermediate

345: .seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
346: @*/
347: PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
348: {
351:   *flg = pc->useAmat;
352:   return(0);
353: }

355: /*@
356:    PCCreate - Creates a preconditioner context.

358:    Collective

360:    Input Parameter:
361: .  comm - MPI communicator

363:    Output Parameter:
364: .  pc - location to put the preconditioner context

366:    Notes:
367:    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
368:    in parallel. For dense matrices it is always PCNONE.
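
   Sample usage (a minimal sketch of the stand-alone PC lifecycle; the matrix A and
   the vectors x and y are assumed to exist and error checking is omitted):
.vb
      PC pc;
      PCCreate(PETSC_COMM_WORLD,&pc);
      PCSetOperators(pc,A,A);     // same matrix for the system and the preconditioner
      PCSetType(pc,PCJACOBI);     // or leave unset to get the default described above
      PCSetUp(pc);
      PCApply(pc,x,y);            // y = B x
      PCDestroy(&pc);
.ve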

370:    Level: developer

372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
374: PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
375: {
376:   PC             pc;

381:   *newpc = NULL;
382:   PCInitializePackage();

384:   PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);

386:   pc->mat                  = NULL;
387:   pc->pmat                 = NULL;
388:   pc->setupcalled          = 0;
389:   pc->setfromoptionscalled = 0;
390:   pc->data                 = NULL;
391:   pc->diagonalscale        = PETSC_FALSE;
392:   pc->diagonalscaleleft    = NULL;
393:   pc->diagonalscaleright   = NULL;

395:   pc->modifysubmatrices  = NULL;
396:   pc->modifysubmatricesP = NULL;

398:   *newpc = pc;
399:   return(0);

401: }

403: /* -------------------------------------------------------------------------------*/

405: /*@
406:    PCApply - Applies the preconditioner to a vector.

408:    Collective on PC

410:    Input Parameters:
411: +  pc - the preconditioner context
412: -  x - input vector

414:    Output Parameter:
415: .  y - output vector

417:    Level: developer
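
   Sample usage (a minimal sketch; the operator A given to the PC is illustrative
   and error checking is omitted):
.vb
      Vec x,y;
      MatCreateVecs(A,&y,&x);   // x has the row layout of A, y its column layout
      VecSet(x,1.0);
      PCApply(pc,x,y);          // y = B x, where B is the preconditioner
.ve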

419: .seealso: PCApplyTranspose(), PCApplyBAorAB()
420: @*/
421: PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
422: {
424:   PetscInt       m,n,mv,nv;

430:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
431:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
432:   /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
433:   MatGetLocalSize(pc->pmat,&m,&n);
434:   VecGetLocalSize(x,&mv);
435:   VecGetLocalSize(y,&nv);
436:   /* check pmat * y = x is feasible */
437:   if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv);
438:   if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv);
439:   VecSetErrorIfLocked(y,3);

441:   PCSetUp(pc);
442:   if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
443:   VecLockReadPush(x);
444:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
445:   (*pc->ops->apply)(pc,x,y);
446:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
447:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
448:   VecLockReadPop(x);
449:   return(0);
450: }

452: /*@
453:    PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

455:    Collective on PC

457:    Input Parameters:
458: +  pc - the preconditioner context
459: -  X - block of input vectors

461:    Output Parameter:
462: .  Y - block of output vectors

464:    Level: developer
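
   Sample usage (a minimal sketch; the number of columns k is illustrative and error
   checking is omitted):
.vb
      Mat      A,X,Y;
      PetscInt m,n,M,N,k = 10;
      MPI_Comm comm = PetscObjectComm((PetscObject)pc);
      PCGetOperators(pc,NULL,&A);
      MatGetLocalSize(A,&m,&n);
      MatGetSize(A,&M,&N);
      MatCreateDense(comm,m,PETSC_DECIDE,M,k,NULL,&X);  // input block, row layout of Pmat
      MatCreateDense(comm,n,PETSC_DECIDE,N,k,NULL,&Y);  // output block, column layout of Pmat
      MatSetRandom(X,NULL);
      PCMatApply(pc,X,Y);                               // Y = B X, column by column if needed
.ve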

466: .seealso: PCApply(), KSPMatSolve()
467: @*/
468: PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
469: {
470:   Mat            A;
471:   Vec            cy, cx;
472:   PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
473:   PetscBool      match;

482:   if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
483:   PCGetOperators(pc, NULL, &A);
484:   MatGetLocalSize(A, &m3, &n3);
485:   MatGetLocalSize(X, &m2, &n2);
486:   MatGetLocalSize(Y, &m1, &n1);
487:   MatGetSize(A, &M3, &N3);
488:   MatGetSize(X, &M2, &N2);
489:   MatGetSize(Y, &M1, &N1);
490:   if (n1 != n2 || N1 != N2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%D,%D) and block of output vectors (n,N) = (%D,%D)", n2, N2, n1, N1);
491:   if (m2 != m3 || M2 != M3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m2, M2, m3, M3, n3, N3);
492:   if (m1 != n3 || M1 != N3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m1, M1, m3, M3, n3, N3);
493:   PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");
494:   if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
495:   PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");
496:   if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
497:   PCSetUp(pc);
498:   if (pc->ops->matapply) {
499:     PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);
500:     (*pc->ops->matapply)(pc, X, Y);
501:     PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);
502:   } else {
503:     PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);
504:     for (n1 = 0; n1 < N1; ++n1) {
505:       MatDenseGetColumnVecRead(X, n1, &cx);
506:       MatDenseGetColumnVecWrite(Y, n1, &cy);
507:       PCApply(pc, cx, cy);
508:       MatDenseRestoreColumnVecWrite(Y, n1, &cy);
509:       MatDenseRestoreColumnVecRead(X, n1, &cx);
510:     }
511:   }
512:   return(0);
513: }

515: /*@
516:    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

518:    Collective on PC

520:    Input Parameters:
521: +  pc - the preconditioner context
522: -  x - input vector

524:    Output Parameter:
525: .  y - output vector

527:    Notes:
528:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

530:    Level: developer

532: .seealso: PCApply(), PCApplySymmetricRight()
533: @*/
534: PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
535: {

542:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
543:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
544:   PCSetUp(pc);
545:   if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
546:   VecLockReadPush(x);
547:   PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
548:   (*pc->ops->applysymmetricleft)(pc,x,y);
549:   PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
550:   VecLockReadPop(x);
551:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
552:   return(0);
553: }

555: /*@
556:    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

558:    Collective on PC

560:    Input Parameters:
561: +  pc - the preconditioner context
562: -  x - input vector

564:    Output Parameter:
565: .  y - output vector

567:    Level: developer

569:    Notes:
570:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

572: .seealso: PCApply(), PCApplySymmetricLeft()
573: @*/
574: PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
575: {

582:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
583:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
584:   PCSetUp(pc);
585:   if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have right symmetric apply");
586:   VecLockReadPush(x);
587:   PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
588:   (*pc->ops->applysymmetricright)(pc,x,y);
589:   PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
590:   VecLockReadPop(x);
591:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
592:   return(0);
593: }

595: /*@
596:    PCApplyTranspose - Applies the transpose of preconditioner to a vector.

598:    Collective on PC

600:    Input Parameters:
601: +  pc - the preconditioner context
602: -  x - input vector

604:    Output Parameter:
605: .  y - output vector

607:    Notes:
608:     For complex numbers this applies the non-Hermitian transpose.

610:    Developer Notes:
611:     We need to implement a PCApplyHermitianTranspose()

613:    Level: developer

615: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
616: @*/
617: PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
618: {

625:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
626:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
627:   PCSetUp(pc);
628:   if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
629:   VecLockReadPush(x);
630:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
631:   (*pc->ops->applytranspose)(pc,x,y);
632:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
633:   VecLockReadPop(x);
634:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
635:   return(0);
636: }

638: /*@
639:    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

641:    Collective on PC

643:    Input Parameters:
644: .  pc - the preconditioner context

646:    Output Parameter:
647: .  flg - PETSC_TRUE if a transpose operation is defined

649:    Level: developer

651: .seealso: PCApplyTranspose()
652: @*/
653: PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
654: {
658:   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
659:   else *flg = PETSC_FALSE;
660:   return(0);
661: }

663: /*@
664:    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

666:    Collective on PC

668:    Input Parameters:
669: +  pc - the preconditioner context
670: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
671: .  x - input vector
672: -  work - work vector

674:    Output Parameter:
675: .  y - output vector

677:    Level: developer

679:    Notes:
680:     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} for left preconditioning or D A M D^{-1} for right preconditioning is actually applied. Note that the
681:    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.
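
   Sample usage (a minimal sketch of right preconditioning with a square operator A,
   which is illustrative; error checking is omitted):
.vb
      Vec x,y,work;
      MatCreateVecs(A,&x,NULL);
      VecDuplicate(x,&y);
      VecDuplicate(x,&work);
      VecSet(x,1.0);
      PCApplyBAorAB(pc,PC_RIGHT,x,y,work);   // y = A*B*x
.ve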

683: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
684: @*/
685: PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
686: {

698:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
699:   if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
700:   if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
701:   if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}

703:   PCSetUp(pc);
704:   if (pc->diagonalscale) {
705:     if (pc->ops->applyBA) {
706:       Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
707:       VecDuplicate(x,&work2);
708:       PCDiagonalScaleRight(pc,x,work2);
709:       (*pc->ops->applyBA)(pc,side,work2,y,work);
710:       PCDiagonalScaleLeft(pc,y,y);
711:       VecDestroy(&work2);
712:     } else if (side == PC_RIGHT) {
713:       PCDiagonalScaleRight(pc,x,y);
714:       PCApply(pc,y,work);
715:       MatMult(pc->mat,work,y);
716:       PCDiagonalScaleLeft(pc,y,y);
717:     } else if (side == PC_LEFT) {
718:       PCDiagonalScaleRight(pc,x,y);
719:       MatMult(pc->mat,y,work);
720:       PCApply(pc,work,y);
721:       PCDiagonalScaleLeft(pc,y,y);
722:     } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
723:   } else {
724:     if (pc->ops->applyBA) {
725:       (*pc->ops->applyBA)(pc,side,x,y,work);
726:     } else if (side == PC_RIGHT) {
727:       PCApply(pc,x,work);
728:       MatMult(pc->mat,work,y);
729:     } else if (side == PC_LEFT) {
730:       MatMult(pc->mat,x,work);
731:       PCApply(pc,work,y);
732:     } else if (side == PC_SYMMETRIC) {
733:       /* There's an extra copy here; maybe should provide 2 work vectors instead? */
734:       PCApplySymmetricRight(pc,x,work);
735:       MatMult(pc->mat,work,y);
736:       VecCopy(y,work);
737:       PCApplySymmetricLeft(pc,work,y);
738:     }
739:   }
740:   if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
741:   return(0);
742: }

744: /*@
745:    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
746:    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
747:    NOT tr(B*A) = tr(A)*tr(B).

749:    Collective on PC

751:    Input Parameters:
752: +  pc - the preconditioner context
753: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
754: .  x - input vector
755: -  work - work vector

757:    Output Parameter:
758: .  y - output vector

760:    Notes:
761:     This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
762:       defined by B'. This is why it has the funny form of computing tr(B) * tr(A).

764:     Level: developer

766: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
767: @*/
768: PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
769: {

777:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
778:   if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}
779:   if (pc->ops->applyBAtranspose) {
780:     (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
781:     if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
782:     return(0);
783:   }
784:   if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

786:   PCSetUp(pc);
787:   if (side == PC_RIGHT) {
788:     PCApplyTranspose(pc,x,work);
789:     MatMultTranspose(pc->mat,work,y);
790:   } else if (side == PC_LEFT) {
791:     MatMultTranspose(pc->mat,x,work);
792:     PCApplyTranspose(pc,work,y);
793:   }
794:   /* add support for PC_SYMMETRIC */
795:   if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
796:   return(0);
797: }

799: /* -------------------------------------------------------------------------------*/

801: /*@
802:    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
803:    built-in fast application of Richardson's method.

805:    Not Collective

807:    Input Parameter:
808: .  pc - the preconditioner

810:    Output Parameter:
811: .  exists - PETSC_TRUE or PETSC_FALSE

813:    Level: developer

815: .seealso: PCApplyRichardson()
816: @*/
817: PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
818: {
822:   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
823:   else *exists = PETSC_FALSE;
824:   return(0);
825: }

827: /*@
828:    PCApplyRichardson - Applies several steps of Richardson iteration with
829:    the particular preconditioner. This routine is usually used by the
830:    Krylov solvers and not the application code directly.

832:    Collective on PC

834:    Input Parameters:
835: +  pc  - the preconditioner context
836: .  b   - the right hand side
837: .  w   - one work vector
838: .  rtol - relative decrease in residual norm convergence criteria
839: .  abstol - absolute residual norm convergence criteria
840: .  dtol - divergence residual norm increase criteria
841: .  its - the number of iterations to apply.
842: -  guesszero - PETSC_TRUE if the initial guess in y is zero, PETSC_FALSE if y contains a nonzero initial guess

844:    Output Parameters:
845: +  outits - number of iterations actually used (for SOR this always equals its)
846: .  reason - the reason the apply terminated
847: -  y - the solution (also contains initial guess if guesszero is PETSC_FALSE)

849:    Notes:
850:    Most preconditioners do not support this function. Use the command
851:    PCApplyRichardsonExists() to determine if one does.

853:    Except for the multigrid PC this routine ignores the convergence tolerances
854:    and always runs for the number of iterations
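
   Sample usage (a minimal sketch; the vectors b, y, and w are assumed to match the
   operator layout and error checking is omitted):
.vb
      PetscBool                   exists;
      PetscInt                    outits;
      PCRichardsonConvergedReason reason;
      PCApplyRichardsonExists(pc,&exists);
      if (exists) {
        PCApplyRichardson(pc,b,y,w,1.e-8,1.e-50,1.e5,10,PETSC_TRUE,&outits,&reason);
      }
.ve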

856:    Level: developer

858: .seealso: PCApplyRichardsonExists()
859: @*/
860: PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
861: {

869:   if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
870:   PCSetUp(pc);
871:   if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
872:   (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
873:   return(0);
874: }

876: /*@
877:    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail

879:    Logically Collective on PC

881:    Input Parameters:
882: +  pc - the preconditioner context
883: -  reason - the reason it failed

885:    Level: advanced

887: .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason
888: @*/
889: PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
890: {
892:   pc->failedreason = reason;
893:   return(0);
894: }

896: /*@
897:    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail

899:    Logically Collective on PC

901:    Input Parameter:
902: .  pc - the preconditioner context

904:    Output Parameter:
905: .  reason - the reason it failed

907:    Level: advanced

909:    Notes: This is the maximum reason over all ranks in the PC communicator. It is only valid after
910:    a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
911:    or PCApply(); in that case use PCGetFailedReasonRank()

913: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason()
914: @*/
915: PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
916: {
918:   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
919:   else *reason = pc->failedreason;
920:   return(0);
921: }

923: /*@
924:    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank

926:   Not Collective on PC

928:    Input Parameter:
929: .  pc - the preconditioner context

931:    Output Parameter:
932: .  reason - the reason it failed

934:    Notes:
935:      Different ranks may have different reasons or no reason, see PCGetFailedReason()

937:    Level: advanced

939: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason()
940: @*/
941: PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
942: {
944:   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
945:   else *reason = pc->failedreason;
946:   return(0);
947: }

949: /*  Next line needed to deactivate KSP_Solve logging */
950: #include <petsc/private/kspimpl.h>

952: /*
953:       a setupcalled of 0 indicates never setup,
954:                      1 indicates has been previously setup
955:                     -1 indicates a PCSetUp() was attempted and failed
956: */
957: /*@
958:    PCSetUp - Prepares for the use of a preconditioner.

960:    Collective on PC

962:    Input Parameter:
963: .  pc - the preconditioner context

965:    Level: developer

967: .seealso: PCCreate(), PCApply(), PCDestroy()
968: @*/
969: PetscErrorCode  PCSetUp(PC pc)
970: {
971:   PetscErrorCode   ierr;
972:   const char       *def;
973:   PetscObjectState matstate, matnonzerostate;

977:   if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

979:   if (pc->setupcalled && pc->reusepreconditioner) {
980:     PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");
981:     return(0);
982:   }

984:   PetscObjectStateGet((PetscObject)pc->pmat,&matstate);
985:   MatGetNonzeroState(pc->pmat,&matnonzerostate);
986:   if (!pc->setupcalled) {
987:     PetscInfo(pc,"Setting up PC for first time\n");
988:     pc->flag = DIFFERENT_NONZERO_PATTERN;
989:   } else if (matstate == pc->matstate) {
990:     PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");
991:     return(0);
992:   } else {
993:     if (matnonzerostate > pc->matnonzerostate) {
994:       PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
995:       pc->flag = DIFFERENT_NONZERO_PATTERN;
996:     } else {
997:       PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
998:       pc->flag = SAME_NONZERO_PATTERN;
999:     }
1000:   }
1001:   pc->matstate        = matstate;
1002:   pc->matnonzerostate = matnonzerostate;

1004:   if (!((PetscObject)pc)->type_name) {
1005:     PCGetDefaultType_Private(pc,&def);
1006:     PCSetType(pc,def);
1007:   }

1009:   MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);
1010:   MatSetErrorIfFailure(pc->mat,pc->erroriffailure);
1011:   PetscLogEventBegin(PC_SetUp,pc,0,0,0);
1012:   if (pc->ops->setup) {
1013:     /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
1014:     PetscLogEventDeactivatePush(KSP_Solve);
1015:     PetscLogEventDeactivatePush(PC_Apply);
1016:     (*pc->ops->setup)(pc);
1017:     PetscLogEventDeactivatePop(KSP_Solve);
1018:     PetscLogEventDeactivatePop(PC_Apply);
1019:   }
1020:   PetscLogEventEnd(PC_SetUp,pc,0,0,0);
1021:   if (!pc->setupcalled) pc->setupcalled = 1;
1022:   return(0);
1023: }

1025: /*@
1026:    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1027:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1028:    methods.

1030:    Collective on PC

1032:    Input Parameters:
1033: .  pc - the preconditioner context

1035:    Level: developer

1037: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
1038: @*/
1039: PetscErrorCode  PCSetUpOnBlocks(PC pc)
1040: {

1045:   if (!pc->ops->setuponblocks) return(0);
1046:   PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
1047:   (*pc->ops->setuponblocks)(pc);
1048:   PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
1049:   return(0);
1050: }

1052: /*@C
1053:    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1054:    submatrices that arise within certain subdomain-based preconditioners.
1055:    The basic submatrices are extracted from the preconditioner matrix as
1056:    usual; the user can then alter these (for example, to set different boundary
1057:    conditions for each submatrix) before they are used for the local solves.

1059:    Logically Collective on PC

1061:    Input Parameters:
1062: +  pc - the preconditioner context
1063: .  func - routine for modifying the submatrices
1064: -  ctx - optional user-defined context (may be null)

1066:    Calling sequence of func:
1067: $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

1069: +  row - an array of index sets that contain the global row numbers
1070:          that comprise each local submatrix
1071: .  col - an array of index sets that contain the global column numbers
1072:          that comprise each local submatrix
1073: .  submat - array of local submatrices
1074: -  ctx - optional user-defined context for private data for the
1075:          user-defined func routine (may be null)

1077:    Notes:
1078:    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1079:    KSPSolve().

1081:    A routine set by PCSetModifySubMatrices() is currently called within
1082:    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1083:    preconditioners.  All other preconditioners ignore this routine.
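
   Sample usage (a minimal sketch of a user routine; the modification shown, shifting
   each submatrix diagonal, is purely illustrative and error checking is omitted):
.vb
      PetscErrorCode MyModify(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
      {
        PetscInt i;
        for (i=0; i<nsub; i++) {
          MatShift(submat[i],1.0);    // e.g. alter boundary conditions per subdomain
        }
        return 0;
      }

      ...
      PCSetModifySubMatrices(pc,MyModify,NULL);
.ve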

1085:    Level: advanced

1087: .seealso: PCModifySubMatrices()
1088: @*/
1089: PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1090: {
1093:   pc->modifysubmatrices  = func;
1094:   pc->modifysubmatricesP = ctx;
1095:   return(0);
1096: }

1098: /*@C
1099:    PCModifySubMatrices - Calls an optional user-defined routine within
1100:    certain preconditioners if one has been set with PCSetModifySubMatrices().

1102:    Collective on PC

1104:    Input Parameters:
1105: +  pc - the preconditioner context
1106: .  nsub - the number of local submatrices
1107: .  row - an array of index sets that contain the global row numbers
1108:          that comprise each local submatrix
1109: .  col - an array of index sets that contain the global column numbers
1110:          that comprise each local submatrix
1111: .  submat - array of local submatrices
1112: -  ctx - optional user-defined context for private data for the
1113:          user-defined routine (may be null)

1115:    Output Parameter:
1116: .  submat - array of local submatrices (the entries of which may
1117:             have been modified)

1119:    Notes:
1120:    The user should NOT generally call this routine, as it will
1121:    automatically be called within certain preconditioners (currently
1122:    block Jacobi, additive Schwarz) if set.

1124:    The basic submatrices are extracted from the preconditioner matrix
1125:    as usual; the user can then alter these (for example, to set different
1126:    boundary conditions for each submatrix) before they are used for the
1127:    local solves.

1129:    Level: developer

1131: .seealso: PCSetModifySubMatrices()
1132: @*/
1133: PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1134: {

1139:   if (!pc->modifysubmatrices) return(0);
1140:   PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
1141:   (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
1142:   PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
1143:   return(0);
1144: }

1146: /*@
1147:    PCSetOperators - Sets the matrix associated with the linear system and
1148:    a (possibly) different one associated with the preconditioner.

1150:    Logically Collective on PC

1152:    Input Parameters:
1153: +  pc - the preconditioner context
1154: .  Amat - the matrix that defines the linear system
1155: -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

1157:    Notes:
1158:     Passing a NULL for Amat or Pmat removes the matrix that is currently used.

1160:     If you wish to replace either Amat or Pmat but leave the other one untouched then
1161:     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1162:     on it, and then pass it back in your call to KSPSetOperators(), as sketched below.
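
    A minimal sketch of replacing Pmat while keeping the current Amat (newPmat is an
    illustrative name for the replacement matrix; error checking is omitted):
.vb
      Mat Amat;
      PCGetOperators(pc,&Amat,NULL);
      PetscObjectReference((PetscObject)Amat);   // keep the matrix you wish to preserve
      PCSetOperators(pc,Amat,newPmat);
      MatDestroy(&Amat);                         // drop the reference taken above
.ve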

1164:    More Notes about Repeated Solution of Linear Systems:
1165:    PETSc does NOT reset the matrix entries of either Amat or Pmat
1166:    to zero after a linear solve; the user is completely responsible for
1167:    matrix assembly.  See the routine MatZeroEntries() if desiring to
1168:    zero all elements of a matrix.

1170:    Level: intermediate

1172: .seealso: PCGetOperators(), MatZeroEntries()
1173:  @*/
1174: PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
1175: {
1176:   PetscErrorCode   ierr;
1177:   PetscInt         m1,n1,m2,n2;

1185:   if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
1186:     MatGetLocalSize(Amat,&m1,&n1);
1187:     MatGetLocalSize(pc->mat,&m2,&n2);
1188:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1189:     MatGetLocalSize(Pmat,&m1,&n1);
1190:     MatGetLocalSize(pc->pmat,&m2,&n2);
1191:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1192:   }

1194:   if (Pmat != pc->pmat) {
1195:     /* the operator that defines the preconditioner has changed, so clear the current state so that a new preconditioner is built */
1196:     pc->matnonzerostate = -1;
1197:     pc->matstate        = -1;
1198:   }

1200:   /* reference first in case the matrices are the same */
1201:   if (Amat) {PetscObjectReference((PetscObject)Amat);}
1202:   MatDestroy(&pc->mat);
1203:   if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1204:   MatDestroy(&pc->pmat);
1205:   pc->mat  = Amat;
1206:   pc->pmat = Pmat;
1207:   return(0);
1208: }

1210: /*@
1211:    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

1213:    Logically Collective on PC

1215:    Input Parameters:
1216: +  pc - the preconditioner context
1217: -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1219:     Level: intermediate

1221: .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
1222:  @*/
1223: PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
1224: {
1228:   pc->reusepreconditioner = flag;
1229:   return(0);
1230: }

1232: /*@
1233:    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

1235:    Not Collective

1237:    Input Parameter:
1238: .  pc - the preconditioner context

1240:    Output Parameter:
1241: .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1243:    Level: intermediate

1245: .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
1246:  @*/
1247: PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
1248: {
1252:   *flag = pc->reusepreconditioner;
1253:   return(0);
1254: }

1256: /*@
1257:    PCGetOperators - Gets the matrix associated with the linear system and
1258:    possibly a different one associated with the preconditioner.

1260:    Not collective, though parallel Mats are returned if the PC is parallel

1262:    Input Parameter:
1263: .  pc - the preconditioner context

1265:    Output Parameters:
1266: +  Amat - the matrix defining the linear system
1267: -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

1269:    Level: intermediate

1271:    Notes:
1272:     Does not increase the reference count of the matrices, so you should not destroy them

1274:    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1275:       are created in PC and returned to the user. In this case, if both operators
1276:       Amat and Pmat are requested, two DIFFERENT operators will be returned. If
1277:       only one is requested both operators in the PC will be the same (i.e. as
1278:       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1279:       The user must set the sizes of the returned matrices and their type etc just
1280:       as if the user created them with MatCreate(). For example,

1282: $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1283: $           set size, type, etc of Amat

1285: $         MatCreate(comm,&Amat);
1286: $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1287: $         PetscObjectDereference((PetscObject)Amat);
1288: $           set size, type, etc of Amat

1290:      and

1292: $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1293: $           set size, type, etc of Amat and Pmat

1295: $         MatCreate(comm,&Amat);
1296: $         MatCreate(comm,&Pmat);
1297: $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1298: $         PetscObjectDereference((PetscObject)Amat);
1299: $         PetscObjectDereference((PetscObject)Pmat);
1300: $           set size, type, etc of Amat and Pmat

1302:     The rationale for this support is that when creating a TS, SNES, or KSP, the hierarchy
1303:     of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1304:     managed by the top most level object (i.e. the TS, SNES, or KSP). Another way to look
1305:     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1306:     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1307:     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1308:     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1309:     it can be created for you?

1311: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1312: @*/
1313: PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
1314: {

1319:   if (Amat) {
1320:     if (!pc->mat) {
1321:       if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use it for Amat */
1322:         pc->mat = pc->pmat;
1323:         PetscObjectReference((PetscObject)pc->mat);
1324:       } else {                  /* both Amat and Pmat are empty */
1325:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1326:         if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1327:           pc->pmat = pc->mat;
1328:           PetscObjectReference((PetscObject)pc->pmat);
1329:         }
1330:       }
1331:     }
1332:     *Amat = pc->mat;
1333:   }
1334:   if (Pmat) {
1335:     if (!pc->pmat) {
1336:       if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
1337:         pc->pmat = pc->mat;
1338:         PetscObjectReference((PetscObject)pc->pmat);
1339:       } else {
1340:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1341:         if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1342:           pc->mat = pc->pmat;
1343:           PetscObjectReference((PetscObject)pc->mat);
1344:         }
1345:       }
1346:     }
1347:     *Pmat = pc->pmat;
1348:   }
1349:   return(0);
1350: }

1352: /*@C
1353:    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1354:    possibly a different one associated with the preconditioner have been set in the PC.

1356:    Not collective, though the results on all processes should be the same

1358:    Input Parameter:
1359: .  pc - the preconditioner context

1361:    Output Parameters:
1362: +  mat - the matrix associated with the linear system was set
1363: -  pmat - matrix associated with the preconditioner was set, usually the same

1365:    Level: intermediate

1367: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1368: @*/
1369: PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1370: {
1373:   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1374:   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1375:   return(0);
1376: }

1378: /*@
1379:    PCFactorGetMatrix - Gets the factored matrix from the
1380:    preconditioner context.  This routine is valid only for the LU,
1381:    incomplete LU, Cholesky, and incomplete Cholesky methods.

1383:    Not Collective on PC though Mat is parallel if PC is parallel

1385:    Input Parameters:
1386: .  pc - the preconditioner context

1388:    Output parameters:
1389: .  mat - the factored matrix

1391:    Level: advanced

1393:    Notes:
1394:     Does not increase the reference count for the matrix so DO NOT destroy it

1396: @*/
1397: PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1398: {

1404:   if (pc->ops->getfactoredmatrix) {
1405:     (*pc->ops->getfactoredmatrix)(pc,mat);
1406:   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1407:   return(0);
1408: }

1410: /*@C
1411:    PCSetOptionsPrefix - Sets the prefix used for searching for all
1412:    PC options in the database.

1414:    Logically Collective on PC

1416:    Input Parameters:
1417: +  pc - the preconditioner context
1418: -  prefix - the prefix string to prepend to all PC option requests

1420:    Notes:
1421:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1422:    The first character of all runtime options is AUTOMATICALLY the
1423:    hyphen.
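
   Sample usage (a minimal sketch; the prefix name is illustrative):
.vb
      PCSetOptionsPrefix(pc,"sys1_");
      PCSetFromOptions(pc);        // now honors options such as -sys1_pc_type ilu
.ve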

1425:    Level: advanced

1427: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1428: @*/
1429: PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1430: {

1435:   PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1436:   return(0);
1437: }

1439: /*@C
1440:    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1441:    PC options in the database.

1443:    Logically Collective on PC

1445:    Input Parameters:
1446: +  pc - the preconditioner context
1447: -  prefix - the prefix string to prepend to all PC option requests

1449:    Notes:
1450:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1451:    The first character of all runtime options is AUTOMATICALLY the
1452:    hyphen.

1454:    Level: advanced

1456: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1457: @*/
1458: PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1459: {

1464:   PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1465:   return(0);
1466: }

1468: /*@C
1469:    PCGetOptionsPrefix - Gets the prefix used for searching for all
1470:    PC options in the database.

1472:    Not Collective

1474:    Input Parameters:
1475: .  pc - the preconditioner context

1477:    Output Parameters:
1478: .  prefix - pointer to the prefix string used, is returned

1480:    Notes:
1481:     On the Fortran side, the user should pass in a string 'prefix' of
1482:    sufficient length to hold the prefix.

1484:    Level: advanced

1486: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1487: @*/
1488: PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1489: {

1495:   PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1496:   return(0);
1497: }

1499: /*
1500:    Indicates that the right-hand side will be changed by KSPSolve(); this occurs for a few
1501:   preconditioners, including BDDC and Eisenstat, that transform the equations before applying
1502:   the Krylov methods
1503: */
1504: PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
1505: {

1511:   *change = PETSC_FALSE;
1512:   PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
1513:   return(0);
1514: }

1516: /*@
1517:    PCPreSolve - Optional pre-solve phase, intended for any
1518:    preconditioner-specific actions that must be performed before
1519:    the iterative solve itself.

1521:    Collective on PC

1523:    Input Parameters:
1524: +  pc - the preconditioner context
1525: -  ksp - the Krylov subspace context

1527:    Level: developer

1529:    Sample of Usage:
1530: .vb
1531:     PCPreSolve(pc,ksp);
1532:     KSPSolve(ksp,b,x);
1533:     PCPostSolve(pc,ksp);
1534: .ve

1536:    Notes:
1537:    The pre-solve phase is distinct from the PCSetUp() phase.

1539:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1541: .seealso: PCPostSolve()
1542: @*/
1543: PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1544: {
1546:   Vec            x,rhs;

1551:   pc->presolvedone++;
1552:   if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1553:   KSPGetSolution(ksp,&x);
1554:   KSPGetRhs(ksp,&rhs);

1556:   if (pc->ops->presolve) {
1557:     (*pc->ops->presolve)(pc,ksp,rhs,x);
1558:   } else if (pc->presolve) {
1559:     (pc->presolve)(pc,ksp);
1560:   }
1561:   return(0);
1562: }

1564: /*@C
1565:    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1566:    preconditioner-specific actions that must be performed before
1567:    the iterative solve itself.

1569:    Logically Collective on pc

1571:    Input Parameters:
1572: +   pc - the preconditioner object
1573: -   presolve - the function to call before the solve

1575:    Calling sequence of presolve:
1576: $  func(PC pc,KSP ksp)

1578: +  pc - the PC context
1579: -  ksp - the KSP context
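
   Sample usage (a minimal sketch of a user presolve function; what it does is purely
   illustrative and error checking is omitted):
.vb
      PetscErrorCode MyPreSolve(PC pc,KSP ksp)
      {
        PetscInfo(pc,"User actions before the KSPSolve()\n");
        return 0;
      }

      ...
      PCSetPreSolve(pc,MyPreSolve);
.ve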

1581:    Level: developer

1583: .seealso: PC, PCSetUp(), PCPreSolve()
1584: @*/
1585: PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
1586: {
1589:   pc->presolve = presolve;
1590:   return(0);
1591: }

1593: /*@
1594:    PCPostSolve - Optional post-solve phase, intended for any
1595:    preconditioner-specific actions that must be performed after
1596:    the iterative solve itself.

1598:    Collective on PC

1600:    Input Parameters:
1601: +  pc - the preconditioner context
1602: -  ksp - the Krylov subspace context

1604:    Sample of Usage:
1605: .vb
1606:     PCPreSolve(pc,ksp);
1607:     KSPSolve(ksp,b,x);
1608:     PCPostSolve(pc,ksp);
1609: .ve

1611:    Note:
1612:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1614:    Level: developer

1616: .seealso: PCPreSolve(), KSPSolve()
1617: @*/
1618: PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1619: {
1621:   Vec            x,rhs;

1626:   pc->presolvedone--;
1627:   KSPGetSolution(ksp,&x);
1628:   KSPGetRhs(ksp,&rhs);
1629:   if (pc->ops->postsolve) {
1630:      (*pc->ops->postsolve)(pc,ksp,rhs,x);
1631:   }
1632:   return(0);
1633: }

1635: /*@C
1636:   PCLoad - Loads a PC that has been stored in binary  with PCView().

1638:   Collective on PetscViewer

1640:   Input Parameters:
1641: + newdm - the newly loaded PC; this needs to have been created with PCCreate() or
1642:            some related function before a call to PCLoad().
1643: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

1645:    Level: intermediate

1647:   Notes:
1648:    The type is determined by the data in the file; any type set into the PC before this call is ignored.
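
  Sample usage (a minimal sketch; the file name is illustrative and error checking is
  omitted):
.vb
      PC          pc;
      PetscViewer viewer;
      PetscViewerBinaryOpen(PETSC_COMM_WORLD,"pc.dat",FILE_MODE_READ,&viewer);
      PCCreate(PETSC_COMM_WORLD,&pc);
      PCLoad(pc,viewer);
      PetscViewerDestroy(&viewer);
.ve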

1650:   Notes for advanced users:
1651:   Most users should not need to know the details of the binary storage
1652:   format, since PCLoad() and PCView() completely hide these details.
1653:   But for anyone who's interested, the standard binary matrix storage
1654:   format is
1655: .vb
1656:      has not yet been determined
1657: .ve

1659: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1660: @*/
1661: PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1662: {
1664:   PetscBool      isbinary;
1665:   PetscInt       classid;
1666:   char           type[256];

1671:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1672:   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

1674:   PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);
1675:   if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1676:   PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);
1677:   PCSetType(newdm, type);
1678:   if (newdm->ops->load) {
1679:     (*newdm->ops->load)(newdm,viewer);
1680:   }
1681:   return(0);
1682: }

1684: #include <petscdraw.h>
1685: #if defined(PETSC_HAVE_SAWS)
1686: #include <petscviewersaws.h>
1687: #endif

1689: /*@C
1690:    PCViewFromOptions - Views a PC based on values in the options database

1692:    Collective on PC

1694:    Input Parameters:
1695: +  A - the PC context
1696: .  obj - Optional object
1697: -  name - command line option

1699:    Level: intermediate
1700: .seealso:  PC, PCView, PetscObjectViewFromOptions(), PCCreate()
1701: @*/
1702: PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
1703: {

1708:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
1709:   return(0);
1710: }

1712: /*@C
1713:    PCView - Prints the PC data structure.

1715:    Collective on PC

1717:    Input Parameters:
1718: +  pc - the PC context
1719: -  viewer - optional visualization context

1721:    Note:
1722:    The available visualization contexts include
1723: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1724: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1725:          output where only the first processor opens
1726:          the file.  All other processors send their
1727:          data to the first processor to print.

1729:    The user can open an alternative visualization context with
1730:    PetscViewerASCIIOpen() (output to a specified file).
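
   Sample usage (a minimal sketch; the file name is illustrative and error checking
   is omitted):
.vb
      PetscViewer viewer;
      PCView(pc,PETSC_VIEWER_STDOUT_WORLD);            // print to standard output
      PetscViewerASCIIOpen(PETSC_COMM_WORLD,"pc.txt",&viewer);
      PCView(pc,viewer);                               // print to a file instead
      PetscViewerDestroy(&viewer);
.ve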

1732:    Level: developer

1734: .seealso: KSPView(), PetscViewerASCIIOpen()
1735: @*/
1736: PetscErrorCode  PCView(PC pc,PetscViewer viewer)
1737: {
1738:   PCType         cstr;
1740:   PetscBool      iascii,isstring,isbinary,isdraw;
1741: #if defined(PETSC_HAVE_SAWS)
1742:   PetscBool      issaws;
1743: #endif

1747:   if (!viewer) {
1748:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);
1749:   }

1753:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1754:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1755:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1756:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1757: #if defined(PETSC_HAVE_SAWS)
1758:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);
1759: #endif

1761:   if (iascii) {
1762:     PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);
1763:     if (!pc->setupcalled) {
1764:       PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");
1765:     }
1766:     if (pc->ops->view) {
1767:       PetscViewerASCIIPushTab(viewer);
1768:       (*pc->ops->view)(pc,viewer);
1769:       PetscViewerASCIIPopTab(viewer);
1770:     }
1771:     if (pc->mat) {
1772:       PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1773:       if (pc->pmat == pc->mat) {
1774:         PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");
1775:         PetscViewerASCIIPushTab(viewer);
1776:         MatView(pc->mat,viewer);
1777:         PetscViewerASCIIPopTab(viewer);
1778:       } else {
1779:         if (pc->pmat) {
1780:           PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");
1781:         } else {
1782:           PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");
1783:         }
1784:         PetscViewerASCIIPushTab(viewer);
1785:         MatView(pc->mat,viewer);
1786:         if (pc->pmat) {MatView(pc->pmat,viewer);}
1787:         PetscViewerASCIIPopTab(viewer);
1788:       }
1789:       PetscViewerPopFormat(viewer);
1790:     }
1791:   } else if (isstring) {
1792:     PCGetType(pc,&cstr);
1793:     PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);
1794:     if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1795:     if (pc->mat) {MatView(pc->mat,viewer);}
1796:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1797:   } else if (isbinary) {
1798:     PetscInt    classid = PC_FILE_CLASSID;
1799:     MPI_Comm    comm;
1800:     PetscMPIInt rank;
1801:     char        type[256];

1803:     PetscObjectGetComm((PetscObject)pc,&comm);
1804:     MPI_Comm_rank(comm,&rank);
1805:     if (rank == 0) {
1806:       PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);
1807:       PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1808:       PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);
1809:     }
1810:     if (pc->ops->view) {
1811:       (*pc->ops->view)(pc,viewer);
1812:     }
1813:   } else if (isdraw) {
1814:     PetscDraw draw;
1815:     char      str[25];
1816:     PetscReal x,y,bottom,h;
1817:     PetscInt  n;

1819:     PetscViewerDrawGetDraw(viewer,0,&draw);
1820:     PetscDrawGetCurrentPoint(draw,&x,&y);
1821:     if (pc->mat) {
1822:       MatGetSize(pc->mat,&n,NULL);
1823:       PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1824:     } else {
1825:       PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1826:     }
1827:     PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1828:     bottom = y - h;
1829:     PetscDrawPushCurrentPoint(draw,x,bottom);
1830:     if (pc->ops->view) {
1831:       (*pc->ops->view)(pc,viewer);
1832:     }
1833:     PetscDrawPopCurrentPoint(draw);
1834: #if defined(PETSC_HAVE_SAWS)
1835:   } else if (issaws) {
1836:     PetscMPIInt rank;

1838:     PetscObjectName((PetscObject)pc);
1839:     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1840:     if (!((PetscObject)pc)->amsmem && rank == 0) {
1841:       PetscObjectViewSAWs((PetscObject)pc,viewer);
1842:     }
1843:     if (pc->mat) {MatView(pc->mat,viewer);}
1844:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1845: #endif
1846:   }
1847:   return(0);
1848: }
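
/*
   Editor's note: a brief usage sketch for PCView(); the file name pcinfo.txt is
   hypothetical.  Passing a NULL viewer uses the standard output viewer, as described
   in the manual page above.

     PetscViewer viewer;

     PCView(pc,PETSC_VIEWER_STDOUT_WORLD);                        print to standard output
     PetscViewerASCIIOpen(PETSC_COMM_WORLD,"pcinfo.txt",&viewer);
     PCView(pc,viewer);                                           print the same information to a file
     PetscViewerDestroy(&viewer);
*/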

1850: /*@C
1851:   PCRegister - Adds a method to the preconditioner package.

1853:    Not Collective

1855:    Input Parameters:
1856: +  sname - name of a new user-defined solver
1857: -  function - routine to create the method context

1859:    Notes:
1860:    PCRegister() may be called multiple times to add several user-defined preconditioners.

1862:    Sample usage:
1863: .vb
1864:    PCRegister("my_solver", MySolverCreate);
1865: .ve

1867:    Then, your solver can be chosen with the procedural interface via
1868: $     PCSetType(pc,"my_solver")
1869:    or at runtime via the option
1870: $     -pc_type my_solver

1872:    Level: advanced

1874: .seealso: PCRegisterAll()
1875: @*/
1876: PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1877: {

1881:   PCInitializePackage();
1882:   PetscFunctionListAdd(&PCList,sname,function);
1883:   return(0);
1884: }
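
/*
   Editor's note: a sketch of the creation routine that PCRegister() expects.  The
   names MySolverCreate and PCApply_MySolver are hypothetical; setting pc->ops->apply
   directly requires including the private header <petsc/private/pcimpl.h>.

     static PetscErrorCode PCApply_MySolver(PC pc,Vec x,Vec y)
     {
       VecCopy(x,y);                  placeholder apply: behaves like PCNONE
       return(0);
     }

     PetscErrorCode MySolverCreate(PC pc)
     {
       pc->ops->apply = PCApply_MySolver;
       return(0);
     }

   After PCRegister("my_solver",MySolverCreate) the new type is available to
   PCSetType() and to the -pc_type my_solver option, as described above.
*/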

1886: static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1887: {
1888:   PC             pc;

1892:   MatShellGetContext(A,&pc);
1893:   PCApply(pc,X,Y);
1894:   return(0);
1895: }

1897: /*@
1898:     PCComputeOperator - Computes the explicit preconditioned operator.

1900:     Collective on PC

1902:     Input Parameters:
1903: +   pc - the preconditioner object
1904: -   mattype - the matrix type to be used for the operator

1906:     Output Parameter:
1907: .   mat - the explicit preconditioned operator

1909:     Notes:
1910:     This computation is done by applying the operators to columns of the identity matrix.
1911:     This routine is costly in general, and is recommended for use only with relatively small systems.
1912:     Currently, this routine uses a dense matrix format when mattype == NULL.

1914:     Level: advanced

1916: .seealso: KSPComputeOperator(), MatType

1918: @*/
1919: PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1920: {
1922:   PetscInt       N,M,m,n;
1923:   Mat            A,Apc;

1928:   PCGetOperators(pc,&A,NULL);
1929:   MatGetLocalSize(A,&m,&n);
1930:   MatGetSize(A,&M,&N);
1931:   MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);
1932:   MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);
1933:   MatComputeOperator(Apc,mattype,mat);
1934:   MatDestroy(&Apc);
1935:   return(0);
1936: }
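
/*
   Editor's note: a usage sketch for PCComputeOperator() on a small problem; pc is
   assumed to be already set up.  Passing NULL instead of MATDENSE gives the default
   dense format mentioned in the Notes above.

     Mat B;

     PCComputeOperator(pc,MATDENSE,&B);            assemble the action of the preconditioner column by column
     MatView(B,PETSC_VIEWER_STDOUT_WORLD);
     MatDestroy(&B);
*/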

1938: /*@
1939:    PCSetCoordinates - Sets the coordinates of all the nodes on the local process

1941:    Collective on PC

1943:    Input Parameters:
1944: +  pc - the solver context
1945: .  dim - the dimension of the coordinates; 1, 2, or 3
1946: .  nloc - the blocked size of the coordinates array
1947: -  coords - the coordinates array

1949:    Level: intermediate

1951:    Notes:
1952:    coords is an array of the dim coordinates for the nodes on
1953:    the local processor, of size dim*nloc.
1954:    If there are 108 equations on a processor
1955:    for a displacement finite element discretization of elasticity (so
1956:    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1957:    double precision values (i.e., 3 * 36).  These x y z coordinates
1958:    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1959:    ... , N-1.z ].

1961: .seealso: MatSetNearNullSpace()
1962: @*/
1963: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1964: {

1970:   PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1971:   return(0);
1972: }
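
/*
   Editor's note: a sketch of the interleaved coordinate layout described in the
   Notes above, for a hypothetical 3d problem with 36 local nodes; the coordinate
   values themselves are placeholders.

     PetscInt  i,dim = 3,nloc = 36;
     PetscReal *coords;

     PetscMalloc1(dim*nloc,&coords);
     for (i=0; i<nloc; i++) {
       coords[dim*i+0] = 0.0;         x coordinate of node i
       coords[dim*i+1] = 0.0;         y coordinate of node i
       coords[dim*i+2] = 0.0;         z coordinate of node i
     }
     PCSetCoordinates(pc,dim,nloc,coords);
     PetscFree(coords);
*/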

1974: /*@
1975:    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

1977:    Logically Collective on PC

1979:    Input Parameter:
1980: .  pc - the preconditioner context

1982:    Output Parameters:
1983: +  num_levels - the number of levels
1984: -  interpolations - the interpolation matrices (array of length num_levels-1)

1986:    Level: advanced

1988: .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

1990: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
1991: @*/
1992: PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1993: {

2000:   PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
2001:   return(0);
2002: }

2004: /*@
2005:    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

2007:    Logically Collective on PC

2009:    Input Parameter:
2010: .  pc - the preconditioner context

2012:    Output Parameters:
2013: +  num_levels - the number of levels
2014: -  coarseOperators - the coarse operator matrices (array of length num_levels-1)

2016:    Level: advanced

2018: .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

2020: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
2021: @*/
2022: PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
2023: {

2030:   PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
2031:   return(0);
2032: }
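
/*
   Editor's note: a combined usage sketch for PCGetInterpolations() and
   PCGetCoarseOperators(), assuming pc is a set up multigrid-style preconditioner
   (for example PCGAMG) that provides these methods.  Ownership of the returned
   matrices may depend on the implementation; the cleanup below is an assumption.

     PetscInt nlevels,i;
     Mat      *P,*Ac;

     PCGetInterpolations(pc,&nlevels,&P);          P holds nlevels-1 interpolation matrices
     PCGetCoarseOperators(pc,&nlevels,&Ac);        Ac holds nlevels-1 coarse operators
     for (i=0; i<nlevels-1; i++) {
       MatDestroy(&P[i]);
       MatDestroy(&Ac[i]);
     }
     PetscFree(P);
     PetscFree(Ac);
*/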