Actual source code: ex10.c


  2: static char help[] = "Solve a small system and a large system through preloading\n\
  3:   Input arguments are:\n\
  4:   -permute <natural,rcm,nd,...> : solve system in permuted indexing\n\
  5:   -f0 <small_sys_binary> -f1 <large_sys_binary> \n\n";
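     /* Example usage (a sketch; the file names below are placeholders for PETSc
        binary matrix/vector files):
          mpiexec -n 2 ./ex10 -f0 small_sys.bin -f1 large_sys.bin -permute rcm -log_view
      */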

  7: /*
  8:   Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  9:   automatically includes:
 10:      petscsys.h       - base PETSc routines   petscvec.h - vectors
 11:      petscmat.h - matrices
 12:      petscis.h     - index sets            petscksp.h - Krylov subspace methods
 13:      petscviewer.h - viewers               petscpc.h  - preconditioners
 14: */
 15: #include <petscksp.h>

 17: typedef enum {
 18:   RHS_FILE,
 19:   RHS_ONE,
 20:   RHS_RANDOM
 21: } RHSType;
 22: const char *const RHSTypes[] = {"FILE", "ONE", "RANDOM", "RHSType", "RHS_", NULL};
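     /* Per the PetscOptionsEnum()/PetscEnum convention, this list contains the enum
        value names, followed by the enum type name ("RHSType"), the common prefix of
        the values ("RHS_"), and a terminating NULL */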

 24: PetscErrorCode CheckResult(KSP *ksp, Mat *A, Vec *b, Vec *x, IS *rowperm)
 25: {
 26:   PetscReal         norm;        /* norm of solution error */
 27:   PetscInt          its;
 28:   KSPGetTotalIterations(*ksp,&its);
 29:   PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %" PetscInt_FMT "\n",its);

 31:   KSPGetResidualNorm(*ksp,&norm);
 32:   if (norm < 1.e-12) {
 33:     PetscPrintf(PETSC_COMM_WORLD,"Residual norm < 1.e-12\n");
 34:   } else {
 35:     PetscPrintf(PETSC_COMM_WORLD,"Residual norm %e\n",(double)norm);
 36:   }

 38:   KSPDestroy(ksp);
 39:   MatDestroy(A);
 40:   VecDestroy(x);
 41:   VecDestroy(b);
 42:   ISDestroy(rowperm);
 43:   return 0;
 44: }

 46: PetscErrorCode CreateSystem(const char filename[PETSC_MAX_PATH_LEN], RHSType rhstype, MatOrderingType ordering, PetscBool permute, IS *rowperm_out, Mat *A_out, Vec *b_out, Vec *x_out)
 47: {

 49:   Vec               x,b,b2;
 50:   Mat               A;           /* linear system matrix */
 51:   PetscViewer       viewer;      /* viewer */
 52:   PetscBool         same;
 53:   PetscInt          j,len,start,idx,n1,n2;
 54:   const PetscScalar *val;
 55:   IS                rowperm=NULL,colperm=NULL;

 57:   /* open binary file. Note that we use FILE_MODE_READ to indicate reading from this file */
 58:   PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);

 60:   /* load the matrix and vector; then destroy the viewer */
 61:   MatCreate(PETSC_COMM_WORLD,&A);
 62:   MatSetFromOptions(A);
 63:   MatLoad(A,viewer);
 64:   switch (rhstype) {
 65:   case RHS_FILE:
 66:     /* Vectors in the file might be a different size than the matrix, so we need a
 67:      * Vec whose size hasn't been set yet; it gets fixed below. Otherwise we
 68:      * can create a Vec of the correct size directly. */
 69:     VecCreate(PETSC_COMM_WORLD,&b);
 70:     VecLoad(b,viewer);
 71:     break;
 72:   case RHS_ONE:
 73:     MatCreateVecs(A,&b,NULL);
 74:     VecSet(b,1.0);
 75:     break;
 76:   case RHS_RANDOM:
 77:     MatCreateVecs(A,&b,NULL);
 78:     VecSetRandom(b,NULL);
 79:     break;
 80:   }
 81:   PetscViewerDestroy(&viewer);

 83:   /* if the loaded matrix is larger than the vector (due to being padded
 84:      to match the block size of the system), then create a new padded vector
 85:    */
 86:   MatGetLocalSize(A,NULL,&n1);
 87:   VecGetLocalSize(b,&n2);
 88:   same = (n1 == n2)? PETSC_TRUE : PETSC_FALSE;
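       /* combine the local comparisons so that every rank agrees on whether padding
          is needed; the vector creation in the branch below is collective */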
 89:   MPIU_Allreduce(MPI_IN_PLACE,&same,1,MPIU_BOOL,MPI_LAND,PETSC_COMM_WORLD);

 91:   if (!same) { /* create a new vector b by padding the old one */
 92:     VecCreate(PETSC_COMM_WORLD,&b2);
 93:     VecSetSizes(b2,n1,PETSC_DECIDE);
 94:     VecSetFromOptions(b2);
 95:     VecGetOwnershipRange(b,&start,NULL);
 96:     VecGetLocalSize(b,&len);
 97:     VecGetArrayRead(b,&val);
 98:     for (j=0; j<len; j++) {
 99:       idx = start+j;
100:       VecSetValues(b2,1,&idx,val+j,INSERT_VALUES);
101:     }
102:     VecRestoreArrayRead(b,&val);
103:     VecDestroy(&b);
104:     VecAssemblyBegin(b2);
105:     VecAssemblyEnd(b2);
106:     b    = b2;
107:   }
108:   VecDuplicate(b,&x);

110:   if (permute) {
111:     Mat Aperm;
112:     MatGetOrdering(A,ordering,&rowperm,&colperm);
113:     MatPermute(A,rowperm,colperm,&Aperm);
114:     VecPermute(b,colperm,PETSC_FALSE);
115:     MatDestroy(&A);
116:     A    = Aperm;               /* Replace original operator with permuted version */
117:     ISDestroy(&colperm);
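         /* colperm is no longer needed; rowperm is returned to the caller so the
            computed solution can later be mapped back to the original ordering */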
118:   }

120:   *b_out = b;
121:   *x_out = x;
122:   *A_out = A;
123:   *rowperm_out = rowperm;

125:   return 0;
126: }

128: /* ATTENTION: this is the example used in the Profiling chapter of the PETSc manual,
129:    which references its profiling stages, preloading, output, etc.
130:    When you modify it, please make sure it stays consistent with the manual.
131:  */
132: int main(int argc,char **args)
133: {
134:   PetscErrorCode    ierr;
135:   Vec               x,b;
136:   Mat               A;           /* linear system matrix */
137:   KSP               ksp;         /* Krylov subspace method context */
138:   char              file[2][PETSC_MAX_PATH_LEN],ordering[256]=MATORDERINGRCM;
139:   RHSType           rhstype = RHS_FILE;
140:   PetscBool         flg,preload=PETSC_FALSE,trans=PETSC_FALSE,permute=PETSC_FALSE;
141:   IS                rowperm=NULL;

143:   ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;

145:   PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"Preloading example options","");
146:   {
147:     /*
148:        Determine files from which we read the two linear systems
149:        (matrix and right-hand-side vector).
150:     */
151:     PetscOptionsBool("-trans","Solve transpose system instead","",trans,&trans,&flg);
152:     PetscOptionsString("-f","First file to load (small system)","",file[0],file[0],sizeof(file[0]),&flg);
153:     PetscOptionsFList("-permute","Permute matrix and vector to solve in new ordering","",MatOrderingList,ordering,ordering,sizeof(ordering),&permute);

155:     if (flg) {
156:       PetscStrcpy(file[1],file[0]);
157:       preload = PETSC_FALSE;
158:     } else {
159:       PetscOptionsString("-f0","First file to load (small system)","",file[0],file[0],sizeof(file[0]),&flg);
161:       PetscOptionsString("-f1","Second file to load (larger system)","",file[1],file[1],sizeof(file[1]),&flg);
162:       if (!flg) preload = PETSC_FALSE;   /* don't bother with second system */
163:     }

165:     PetscOptionsEnum("-rhs","Right hand side","",RHSTypes,(PetscEnum)rhstype,(PetscEnum*)&rhstype,NULL);
166:   }
167:   PetscOptionsEnd();

169:   /*
170:     To use preloading, one usually has code like the following:

172:     PetscPreLoadBegin(preload,"first stage");
173:       lines of code
174:     PetscPreLoadStage("second stage");
175:       lines of code
176:     PetscPreLoadEnd();

178:     The two macros PetscPreLoadBegin() and PetscPreLoadEnd() implicitly form a
179:     loop with at most two iterations, depending on whether preloading is turned
180:     on or not. If it is, either through the preload argument of PetscPreLoadBegin()
181:     or through the -preload command line option, the trip count is 2; otherwise it
182:     is 1. One can use the predefined variable PetscPreLoadIt within the loop body
183:     to get the current iteration number, which is 0 or 1. If preloading is turned
184:     on, the runtime does not profile the first iteration, but it does profile the
185:     second iteration.

187:     One can solve a small system in the first iteration and a large system in
188:     the second iteration. The first solve preloads the instructions with the
189:     small system so that more accurate performance monitoring (via -log_view) can
190:     be done with the large one, which is the system actually of interest.

192:     In this example, however, we turn off preloading and duplicate the code for
193:     the large system. In general this is bad practice and one should not duplicate
194:     code; we do it here only because we want to show separate profiling stages for
195:     both the small system and the large system (see the sketch below).
196:   */
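       /*
          For illustration only (not part of the original example): a minimal sketch of
          how this program could be written with preloading actually enabled, reusing
          the CreateSystem() and CheckResult() helpers defined above; PetscPreLoadIt
          selects the small system (0) on the first pass and the large system (1) on
          the profiled second pass:

            PetscPreLoadBegin(PETSC_TRUE,"Load");
              CreateSystem(file[PetscPreLoadIt],rhstype,ordering,permute,&rowperm,&A,&b,&x);
            PetscPreLoadStage("KSPSetUp");
              KSPCreate(PETSC_COMM_WORLD,&ksp);
              KSPSetOperators(ksp,A,A);
              KSPSetFromOptions(ksp);
              KSPSetUp(ksp);
              KSPSetUpOnBlocks(ksp);
            PetscPreLoadStage("KSPSolve");
              if (trans) KSPSolveTranspose(ksp,b,x);
              else       KSPSolve(ksp,b,x);
              if (permute) VecPermute(x,rowperm,PETSC_TRUE);
              CheckResult(&ksp,&A,&b,&x,&rowperm);
            PetscPreLoadEnd();
       */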

198:   /*=========================
199:       solve a small system
200:     =========================*/

202:   PetscPreLoadBegin(preload,"Load System 0");
203:   CreateSystem(file[0],rhstype,ordering,permute,&rowperm,&A,&b,&x);

205:   PetscPreLoadStage("KSPSetUp 0");
206:   KSPCreate(PETSC_COMM_WORLD,&ksp);
207:   KSPSetOperators(ksp,A,A);
208:   KSPSetFromOptions(ksp);

210:   /*
211:     Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
212:     enable more precise profiling of setting up the preconditioner.
213:     These calls are optional, since both will be called within
214:     KSPSolve() if they haven't been called already.
215:   */
216:   KSPSetUp(ksp);
217:   KSPSetUpOnBlocks(ksp);

219:   PetscPreLoadStage("KSPSolve 0");
220:   if (trans) KSPSolveTranspose(ksp,b,x);
221:   else       KSPSolve(ksp,b,x);

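       /* undo the reordering of the computed solution; PETSC_TRUE makes
          VecPermute() apply the inverse of rowperm */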
223:   if (permute) VecPermute(x,rowperm,PETSC_TRUE);

225:   CheckResult(&ksp,&A,&b,&x,&rowperm);

227:   /*=========================
228:     solve a large system
229:     =========================*/

231:   PetscPreLoadStage("Load System 1");

233:   CreateSystem(file[1],rhstype,ordering,permute,&rowperm,&A,&b,&x);

235:   PetscPreLoadStage("KSPSetUp 1");
236:   KSPCreate(PETSC_COMM_WORLD,&ksp);
237:   KSPSetOperators(ksp,A,A);
238:   KSPSetFromOptions(ksp);

240:   /*
241:     Here we explicitly call KSPSetUp() and KSPSetUpOnBlocks() to
242:     enable more precise profiling of setting up the preconditioner.
243:     These calls are optional, since both will be called within
244:     KSPSolve() if they haven't been called already.
245:   */
246:   KSPSetUp(ksp);
247:   KSPSetUpOnBlocks(ksp);

249:   PetscPreLoadStage("KSPSolve 1");
250:   if (trans) KSPSolveTranspose(ksp,b,x);
251:   else       KSPSolve(ksp,b,x);

253:   if (permute) VecPermute(x,rowperm,PETSC_TRUE);

255:   CheckResult(&ksp,&A,&b,&x,&rowperm);

257:   PetscPreLoadEnd();
258:   /*
259:      Always call PetscFinalize() before exiting a program.  This routine
260:        - finalizes the PETSc libraries as well as MPI
261:        - provides summary and diagnostic information if certain runtime
262:          options are chosen (e.g., -log_view).
263:   */
264:   PetscFinalize();
265:   return 0;
266: }

268: /*TEST

270:    test:
271:       TODO: Matrix row/column sizes are not compatible with block size
272:       suffix: 1
273:       nsize: 4
274:       output_file: output/ex10_1.out
275:       requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
276:       args: -f0 ${DATAFILESPATH}/matrices/medium -f1 ${DATAFILESPATH}/matrices/arco6 -ksp_gmres_classicalgramschmidt -mat_type baij -matload_block_size 3 -pc_type bjacobi

278:    test:
279:       TODO: Matrix row/column sizes are not compatible with block size
280:       suffix: 2
281:       nsize: 4
282:       output_file: output/ex10_2.out
283:       requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)
284:       args: -f0 ${DATAFILESPATH}/matrices/medium -f1 ${DATAFILESPATH}/matrices/arco6 -ksp_gmres_classicalgramschmidt -mat_type baij -matload_block_size 3 -pc_type bjacobi -trans

286:    test:
287:       suffix: 3
288:       requires: double complex !defined(PETSC_USE_64BIT_INDICES)
289:       args: -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/nh-complex-int32-float64 -ksp_type bicg

291:    test:
292:       suffix: 4
293:       args: -f ${DATAFILESPATH}/matrices/medium -ksp_type bicg -permute rcm
294:       requires: datafilespath double !complex !defined(PETSC_USE_64BIT_INDICES)

296: TEST*/