Actual source code: wb.c

#include <petscdmda.h>
#include <petsc/private/pcmgimpl.h>
#include <petsc/private/hashmapi.h>

typedef struct {
  PCExoticType type;
  Mat          P;           /* the constructed interpolation matrix */
  PetscBool    directSolve; /* use direct LU factorization to construct interpolation */
  KSP          ksp;
} PC_Exotic;

const char *const PCExoticTypes[] = {"face", "wirebasket", "PCExoticType", "PC_Exotic", NULL};

/*
      DMDAGetWireBasketInterpolation - Gets the interpolation for a wirebasket based coarse space
*/
static PetscErrorCode DMDAGetWireBasketInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[26], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht = NULL;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;

  /*
    the columns of P are the interpolation of each coarse (wirebasket) grid point (one for each face, vertex and edge)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xface are the interpolation onto faces but not into the interior

    Xsurf are the interpolation onto the vertices and edges (the wirebasket)
                                      Xint
    Symbolically one could write P = (Xface) after interchanging the rows to match the natural ordering on the domain
                                      Xsurf
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
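
  /*
     As a quick check of these formulas: a local patch with m - istart = n - jstart = p - kstart = 4
     has N = 64 nodes, of which Nint = 8 are interior, Nface = 24 lie on face interiors, and
     Nwire = 32 lie on edges and vertices, so Nsurf = 56 = N - Nint
  */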
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 26, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 26, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;

  xsurf[cnt++] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + Nsurf] = 1;
  xsurf[cnt++ + 2 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 3 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
    xsurf[cnt++ + 5 * Nsurf] = 1;
  }

  xsurf[cnt++ + 6 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 7 * Nsurf] = 1;
  xsurf[cnt++ + 8 * Nsurf] = 1;

  for (k = 1; k < p - 1 - kstart; k++) {
    xsurf[cnt++ + 9 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 10 * Nsurf] = 1;
    xsurf[cnt++ + 11 * Nsurf] = 1;

    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 12 * Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 13 * Nsurf] = 1;
    }

    xsurf[cnt++ + 14 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 15 * Nsurf] = 1;
    xsurf[cnt++ + 16 * Nsurf] = 1;
  }

  xsurf[cnt++ + 17 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 18 * Nsurf] = 1;
  xsurf[cnt++ + 19 * Nsurf] = 1;

  for (j = 1; j < n - 1 - jstart; j++) {
    xsurf[cnt++ + 20 * Nsurf] = 1;
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 21 * Nsurf] = 1;
    xsurf[cnt++ + 22 * Nsurf] = 1;
  }

  xsurf[cnt++ + 23 * Nsurf] = 1;
  for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 24 * Nsurf] = 1;
  xsurf[cnt++ + 25 * Nsurf] = 1;
  /* the interpolations only sum to 1 when using the direct solver; an iterative solver computes the interior values only to the KSP tolerance, so the sums are then only approximate */
#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += xsurf[i + j * Nsurf];
    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       II are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (these index just the part of the global matrix obtained with MatCreateSubMatrix(), they are
             NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint and Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
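/* Endpoint(a, start, b) is true when index a lies on the boundary of the local patch, which has b - start points in that direction (recall start is 0 or -1 after the adjustment above) */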
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior, Xint = -inv(Aii) Ais Xsurf, so that each coarse
     basis function is extended into the interior as a discrete harmonic function
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));
  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
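    /* solve Aii x = b one column at a time; x and b are work vectors whose arrays are pointed directly at the corresponding columns of the dense matrices Xint and Xint_tmp */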
    for (i = 0; i < 26; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 26; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total vertices             total faces                                  total edges */
  Ntotal = (mp + 1) * (np + 1) * (pp + 1) + mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1) + mp * (np + 1) * (pp + 1) + np * (mp + 1) * (pp + 1) + pp * (mp + 1) * (np + 1);
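  /* for example, a 2 x 2 x 2 process grid (mp = np = pp = 2) gives 27 vertices + 36 faces + 54 edges = 117 coarse grid points */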

  /*
      For each vertex, edge, face on process (in the same orderings as used above) determine its local number including ghost points
  */
  cnt = 0;

  gl[cnt++] = 0;
  {
    gl[cnt++] = 1;
  }
  gl[cnt++] = m - istart - 1;
  {
    gl[cnt++] = mwidth;
    {
      gl[cnt++] = mwidth + 1;
    }
    gl[cnt++] = mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * (n - jstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth;
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth + m - istart - 1;
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1);
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
    gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + m - istart - 1;
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth;
    {
      gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
    }
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + m - istart - 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1);
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + 1;
  }
  gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth * (n - jstart - 1) + m - istart - 1;

  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 26, gl, gl));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(26 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 26, MPIU_INT, globals, 26, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 26 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
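  /* the hash values were stored starting at 1, so the default of 0 flags a missing key; the decrement recovers the 0-based coarse grid numbering */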
  for (i = 0; i < 26; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(26,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint + Nsurf, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
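  /* the dense arrays of Xint and Xsurf are stored column-major, so the values are handed to MatSetValues() as column-oriented blocks */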
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 26, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face based coarse space
*/
static PetscErrorCode DMDAGetFaceInterpolation(PC pc, DM da, PC_Exotic *exotic, Mat Aglobal, MatReuse reuse, Mat *P)
{
  PetscInt               dim, i, j, k, m, n, p, dof, Nint, Nface, Nwire, Nsurf, *Iint, *Isurf, cint = 0, csurf = 0, istart, jstart, kstart, *II, N, c = 0;
  PetscInt               mwidth, nwidth, pwidth, cnt, mp, np, pp, Ntotal, gl[6], *globals, Ng, *IIint, *IIsurf;
  Mat                    Xint, Xsurf, Xint_tmp;
  IS                     isint, issurf, is, row, col;
  ISLocalToGlobalMapping ltg;
  MPI_Comm               comm;
  Mat                    A, Aii, Ais, Asi, *Aholder, iAii;
  MatFactorInfo          info;
  PetscScalar           *xsurf, *xint;
  const PetscScalar     *rxint;
#if defined(PETSC_USE_DEBUG_foo)
  PetscScalar tmp;
#endif
  PetscHMapI ht;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, &mp, &np, &pp, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCheck(dof == 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only for single field problems");
  PetscCheck(dim == 3, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Only coded for 3d problems");
  PetscCall(DMDAGetCorners(da, NULL, NULL, NULL, &m, &n, &p));
  PetscCall(DMDAGetGhostCorners(da, &istart, &jstart, &kstart, &mwidth, &nwidth, &pwidth));
  istart = istart ? -1 : 0;
  jstart = jstart ? -1 : 0;
  kstart = kstart ? -1 : 0;
  /*
    the columns of P are the interpolation of each coarse (face) grid point (one for each face)
    to all the local degrees of freedom (this includes the vertices, edges and faces).

    Xint are the subset of the interpolation into the interior

    Xsurf are the interpolation onto the surface; for the face based coarse space only the
    face points carry coarse grid values (the edge and vertex values are ignored)
                                      Xint
    Symbolically one could write P = (Xsurf) after interchanging the rows to match the natural ordering on the domain
  */
  N     = (m - istart) * (n - jstart) * (p - kstart);
  Nint  = (m - 2 - istart) * (n - 2 - jstart) * (p - 2 - kstart);
  Nface = 2 * ((m - 2 - istart) * (n - 2 - jstart) + (m - 2 - istart) * (p - 2 - kstart) + (n - 2 - jstart) * (p - 2 - kstart));
  Nwire = 4 * ((m - 2 - istart) + (n - 2 - jstart) + (p - 2 - kstart)) + 8;
  Nsurf = Nface + Nwire;
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nint, 6, NULL, &Xint));
  PetscCall(MatCreateSeqDense(MPI_COMM_SELF, Nsurf, 6, NULL, &Xsurf));
  PetscCall(MatDenseGetArray(Xsurf, &xsurf));

  /*
     Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
     Xsurf will be all zero (thus making the coarse matrix singular).
  */
  PetscCheck(m - istart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in X direction must be at least 3");
  PetscCheck(n - jstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Y direction must be at least 3");
  PetscCheck(p - kstart >= 3, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of grid points per process in Z direction must be at least 3");

  cnt = 0;
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 0 * Nsurf] = 1;
  }

  for (k = 1; k < p - 1 - kstart; k++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 1 * Nsurf] = 1;
    for (j = 1; j < n - 1 - jstart; j++) {
      xsurf[cnt++ + 2 * Nsurf] = 1;
      /* these are the interior nodes */
      xsurf[cnt++ + 3 * Nsurf] = 1;
    }
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 4 * Nsurf] = 1;
  }
  for (j = 1; j < n - 1 - jstart; j++) {
    for (i = 1; i < m - istart - 1; i++) xsurf[cnt++ + 5 * Nsurf] = 1;
  }
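
  /* exactly Nface entries are set above, one for each face grid point; the remaining Nwire rows of Xsurf stay zero since the face based coarse space has no degrees of freedom on the edges and vertices */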

#if defined(PETSC_USE_DEBUG_foo)
  for (i = 0; i < Nsurf; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += xsurf[i + j * Nsurf];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xsurf interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
#endif
  PetscCall(MatDenseRestoreArray(Xsurf, &xsurf));
  /* PetscCall(MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD));*/

  /*
       II are the indices for all the needed vertices (in global numbering)
       Iint are the indices for the interior values, Isurf for the surface values
            (these index just the part of the global matrix obtained with MatCreateSubMatrix(), they are
             NOT the local DMDA ordering.)
       IIint and IIsurf are the same as Iint and Isurf except they are in the global numbering
  */
#define Endpoint(a, start, b) (a == 0 || a == (b - 1 - start))
  PetscCall(PetscMalloc3(N, &II, Nint, &Iint, Nsurf, &Isurf));
  PetscCall(PetscMalloc2(Nint, &IIint, Nsurf, &IIsurf));
  for (k = 0; k < p - kstart; k++) {
    for (j = 0; j < n - jstart; j++) {
      for (i = 0; i < m - istart; i++) {
        II[c++] = i + j * mwidth + k * mwidth * nwidth;

        if (!Endpoint(i, istart, m) && !Endpoint(j, jstart, n) && !Endpoint(k, kstart, p)) {
          IIint[cint]  = i + j * mwidth + k * mwidth * nwidth;
          Iint[cint++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        } else {
          IIsurf[csurf]  = i + j * mwidth + k * mwidth * nwidth;
          Isurf[csurf++] = i + j * (m - istart) + k * (m - istart) * (n - jstart);
        }
      }
    }
  }
#undef Endpoint
  PetscCheck(c == N, PETSC_COMM_SELF, PETSC_ERR_PLIB, "c != N");
  PetscCheck(cint == Nint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "cint != Nint");
  PetscCheck(csurf == Nsurf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "csurf != Nsurf");
  PetscCall(DMGetLocalToGlobalMapping(da, &ltg));
  PetscCall(ISLocalToGlobalMappingApply(ltg, N, II, II));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nint, IIint, IIint));
  PetscCall(ISLocalToGlobalMappingApply(ltg, Nsurf, IIsurf, IIsurf));
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCall(ISCreateGeneral(comm, N, II, PETSC_COPY_VALUES, &is));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nint, Iint, PETSC_COPY_VALUES, &isint));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, Nsurf, Isurf, PETSC_COPY_VALUES, &issurf));
  PetscCall(PetscFree3(II, Iint, Isurf));

  PetscCall(ISSort(is));
  PetscCall(MatCreateSubMatrices(Aglobal, 1, &is, &is, MAT_INITIAL_MATRIX, &Aholder));
  A = *Aholder;
  PetscCall(PetscFree(Aholder));

  PetscCall(MatCreateSubMatrix(A, isint, isint, MAT_INITIAL_MATRIX, &Aii));
  PetscCall(MatCreateSubMatrix(A, isint, issurf, MAT_INITIAL_MATRIX, &Ais));
  PetscCall(MatCreateSubMatrix(A, issurf, isint, MAT_INITIAL_MATRIX, &Asi));

  /*
     Solve for the interpolation onto the interior, Xint = -inv(Aii) Ais Xsurf, so that each coarse
     basis function is extended into the interior as a discrete harmonic function
  */
  PetscCall(MatMatMult(Ais, Xsurf, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &Xint_tmp));
  PetscCall(MatScale(Xint_tmp, -1.0));

  if (exotic->directSolve) {
    PetscCall(MatGetFactor(Aii, MATSOLVERPETSC, MAT_FACTOR_LU, &iAii));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetOrdering(Aii, MATORDERINGND, &row, &col));
    PetscCall(MatLUFactorSymbolic(iAii, Aii, row, col, &info));
    PetscCall(ISDestroy(&row));
    PetscCall(ISDestroy(&col));
    PetscCall(MatLUFactorNumeric(iAii, Aii, &info));
    PetscCall(MatMatSolve(iAii, Xint_tmp, Xint));
    PetscCall(MatDestroy(&iAii));
  } else {
    Vec          b, x;
    PetscScalar *xint_tmp;

    PetscCall(MatDenseGetArray(Xint, &xint));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &x));
    PetscCall(MatDenseGetArray(Xint_tmp, &xint_tmp));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, Nint, NULL, &b));
    PetscCall(KSPSetOperators(exotic->ksp, Aii, Aii));
    for (i = 0; i < 6; i++) {
      PetscCall(VecPlaceArray(x, xint + i * Nint));
      PetscCall(VecPlaceArray(b, xint_tmp + i * Nint));
      PetscCall(KSPSolve(exotic->ksp, b, x));
      PetscCall(KSPCheckSolve(exotic->ksp, pc, x));
      PetscCall(VecResetArray(x));
      PetscCall(VecResetArray(b));
    }
    PetscCall(MatDenseRestoreArray(Xint, &xint));
    PetscCall(MatDenseRestoreArray(Xint_tmp, &xint_tmp));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&b));
  }
  PetscCall(MatDestroy(&Xint_tmp));

#if defined(PETSC_USE_DEBUG_foo)
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  for (i = 0; i < Nint; i++) {
    tmp = 0.0;
    for (j = 0; j < 6; j++) tmp += rxint[i + j * Nint];

    PetscCheck(PetscAbsScalar(tmp - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong Xint interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(tmp));
  }
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  /* PetscCall(MatView(Xint,PETSC_VIEWER_STDOUT_WORLD)); */
#endif

  /*         total faces    */
  Ntotal = mp * np * (pp + 1) + mp * pp * (np + 1) + np * pp * (mp + 1);

  /*
      For each face on process (in the same ordering as used above) determine its local number including ghost points
  */
  cnt = 0;
  {
    gl[cnt++] = mwidth + 1;
  }
  {
    {
      gl[cnt++] = mwidth * nwidth + 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth; /* these are the interior nodes */
      gl[cnt++] = mwidth * nwidth + mwidth + m - istart - 1;
    }
    {
      gl[cnt++] = mwidth * nwidth + mwidth * (n - jstart - 1) + 1;
    }
  }
  {
    gl[cnt++] = mwidth * nwidth * (p - kstart - 1) + mwidth + 1;
  }

  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  /* convert that to global numbering and get them on all processes */
  PetscCall(ISLocalToGlobalMappingApply(ltg, 6, gl, gl));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
  PetscCall(PetscMalloc1(6 * mp * np * pp, &globals));
  PetscCallMPI(MPI_Allgather(gl, 6, MPIU_INT, globals, 6, MPIU_INT, PetscObjectComm((PetscObject)da)));

  /* Number the coarse grid points from 0 to Ntotal */
  PetscCall(PetscHMapICreateWithSize(Ntotal / 3, &ht));
  for (i = 0, cnt = 0; i < 6 * mp * np * pp; i++) {
    PetscHashIter it      = 0;
    PetscBool     missing = PETSC_TRUE;

    PetscCall(PetscHMapIPut(ht, globals[i] + 1, &it, &missing));
    if (missing) {
      ++cnt;
      PetscCall(PetscHMapIIterSet(ht, it, cnt));
    }
  }
  PetscCheck(cnt == Ntotal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Hash table size %" PetscInt_FMT " not equal to total number coarse grid points %" PetscInt_FMT, cnt, Ntotal);
  PetscCall(PetscFree(globals));
  for (i = 0; i < 6; i++) {
    PetscCall(PetscHMapIGetWithDefault(ht, gl[i] + 1, 0, gl + i));
    --(gl[i]);
  }
  PetscCall(PetscHMapIDestroy(&ht));
  /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

  /* construct global interpolation matrix */
  PetscCall(MatGetLocalSize(Aglobal, &Ng, NULL));
  if (reuse == MAT_INITIAL_MATRIX) {
    PetscCall(MatCreateAIJ(PetscObjectComm((PetscObject)da), Ng, PETSC_DECIDE, PETSC_DECIDE, Ntotal, Nint + Nsurf, NULL, Nint, NULL, P));
  } else {
    PetscCall(MatZeroEntries(*P));
  }
  PetscCall(MatSetOption(*P, MAT_ROW_ORIENTED, PETSC_FALSE));
  PetscCall(MatDenseGetArrayRead(Xint, &rxint));
  PetscCall(MatSetValues(*P, Nint, IIint, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xint, &rxint));
  PetscCall(MatDenseGetArrayRead(Xsurf, &rxint));
  PetscCall(MatSetValues(*P, Nsurf, IIsurf, 6, gl, rxint, INSERT_VALUES));
  PetscCall(MatDenseRestoreArrayRead(Xsurf, &rxint));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscFree2(IIint, IIsurf));

#if defined(PETSC_USE_DEBUG_foo)
  {
    Vec          x, y;
    PetscScalar *yy;
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), Ng, PETSC_DETERMINE, &y));
    PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)da), PETSC_DETERMINE, Ntotal, &x));
    PetscCall(VecSet(x, 1.0));
    PetscCall(MatMult(*P, x, y));
    PetscCall(VecGetArray(y, &yy));
    for (i = 0; i < Ng; i++) PetscCheck(PetscAbsScalar(yy[i] - 1.0) <= 1.e-10, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Wrong p interpolation at i %" PetscInt_FMT " value %g", i, (double)PetscAbsScalar(yy[i]));
    PetscCall(VecRestoreArray(y, &yy));
    PetscCall(VecDestroy(&x));
    PetscCall(VecDestroy(&y));
  }
#endif

  PetscCall(MatDestroy(&Aii));
  PetscCall(MatDestroy(&Ais));
  PetscCall(MatDestroy(&Asi));
  PetscCall(MatDestroy(&A));
  PetscCall(ISDestroy(&is));
  PetscCall(ISDestroy(&isint));
  PetscCall(ISDestroy(&issurf));
  PetscCall(MatDestroy(&Xint));
  PetscCall(MatDestroy(&Xsurf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCExoticSetType - Sets the type of coarse grid interpolation to use

  Logically Collective

  Input Parameters:
+ pc   - the preconditioner context
- type - either `PC_EXOTIC_FACE` or `PC_EXOTIC_WIREBASKET` (defaults to face)

  Options Database Key:
. -pc_exotic_type <face,wirebasket> - use a coarse grid point for each face, or edge and vertex

  Notes:
  The face based interpolation has 1 degree of freedom per face and ignores the
  edge and vertex values completely in the coarse problem. For any seven-point
  stencil the interpolation of a constant on all faces into the interior is that constant.

  The wirebasket interpolation has 1 degree of freedom per vertex, per edge and
  per face. A constant on the subdomain boundary is interpolated as that constant
  in the interior of the domain.

  The coarse grid matrix is obtained via the Galerkin computation $A_c = R A R^T$, hence
  if $A$ is nonsingular $A_c$ is also nonsingular.

  Both interpolations are suitable only for scalar problems.
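
  Example Usage:
  A call sequence might look like
.vb
  PCSetType(pc, PCEXOTIC);
  PCExoticSetType(pc, PC_EXOTIC_WIREBASKET);
.ve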

  Level: intermediate

.seealso: [](ch_ksp), `PCEXOTIC`, `PCExoticType`
@*/
PetscErrorCode PCExoticSetType(PC pc, PCExoticType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  PetscTryMethod(pc, "PCExoticSetType_C", (PC, PCExoticType), (pc, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCExoticSetType_Exotic(PC pc, PCExoticType type)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  ctx->type = type;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_Exotic(PC pc)
{
  Mat        A;
  PC_MG     *mg    = (PC_MG *)pc->data;
  PC_Exotic *ex    = (PC_Exotic *)mg->innerctx;
  MatReuse   reuse = (ex->P) ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  PetscCheck(pc->dm, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "Need to call PCSetDM() before using this PC");
  PetscCall(PCGetOperators(pc, NULL, &A));
  PetscCheck(ex->type == PC_EXOTIC_FACE || ex->type == PC_EXOTIC_WIREBASKET, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Unknown exotic coarse space %d", ex->type);
  if (ex->type == PC_EXOTIC_FACE) {
    PetscCall(DMDAGetFaceInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  } else /* if (ex->type == PC_EXOTIC_WIREBASKET) */ {
    PetscCall(DMDAGetWireBasketInterpolation(pc, pc->dm, ex, A, reuse, &ex->P));
  }
  PetscCall(PCMGSetInterpolation(pc, 1, ex->P));
  /* if PC has attached DM we must remove it or the PCMG will use it to compute incorrect sized vectors and interpolations */
  PetscCall(PCSetDM(pc, NULL));
  PetscCall(PCSetUp_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCDestroy_Exotic(PC pc)
{
  PC_MG     *mg  = (PC_MG *)pc->data;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(MatDestroy(&ctx->P));
  PetscCall(KSPDestroy(&ctx->ksp));
  PetscCall(PetscFree(ctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", NULL));
  PetscCall(PCDestroy_MG(pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCView_Exotic(PC pc, PetscViewer viewer)
{
  PC_MG     *mg = (PC_MG *)pc->data;
  PetscBool  iascii;
  PC_Exotic *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "    Exotic type = %s\n", PCExoticTypes[ctx->type]));
    if (ctx->directSolve) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using direct solver to construct interpolation\n"));
    } else {
      PetscViewer sviewer;
      PetscMPIInt rank;

      PetscCall(PetscViewerASCIIPrintf(viewer, "      Using iterative solver to construct interpolation\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PetscViewerASCIIPushTab(viewer)); /* should not need to push this twice? */
      PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));
      if (rank == 0) PetscCall(KSPView(ctx->ksp, sviewer));
      PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
    }
  }
  PetscCall(PCView_MG(pc, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetFromOptions_Exotic(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PetscBool    flg;
  PC_MG       *mg = (PC_MG *)pc->data;
  PCExoticType mgctype;
  PC_Exotic   *ctx = (PC_Exotic *)mg->innerctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "Exotic coarse space options");
  PetscCall(PetscOptionsEnum("-pc_exotic_type", "face or wirebasket", "PCExoticSetType", PCExoticTypes, (PetscEnum)ctx->type, (PetscEnum *)&mgctype, &flg));
  if (flg) PetscCall(PCExoticSetType(pc, mgctype));
  PetscCall(PetscOptionsBool("-pc_exotic_direct_solver", "use direct solver to construct interpolation", "None", ctx->directSolve, &ctx->directSolve, NULL));
  if (!ctx->directSolve) {
    if (!ctx->ksp) {
      const char *prefix;
      PetscCall(KSPCreate(PETSC_COMM_SELF, &ctx->ksp));
      PetscCall(KSPSetNestLevel(ctx->ksp, pc->kspnestlevel));
      PetscCall(KSPSetErrorIfNotConverged(ctx->ksp, pc->erroriffailure));
      PetscCall(PetscObjectIncrementTabLevel((PetscObject)ctx->ksp, (PetscObject)pc, 1));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(ctx->ksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(ctx->ksp, "exotic_"));
    }
    PetscCall(KSPSetFromOptions(ctx->ksp));
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCEXOTIC - Two level overlapping Schwarz preconditioner with exotic (non-standard) coarse grid spaces

   This uses the `PCMG` infrastructure restricted to two levels and the face and wirebasket based coarse
   grid spaces.

   Options Database Keys:
+  -pc_exotic_type <face,wirebasket> - use a coarse grid point for each face, or edge and vertex
-  -pc_exotic_direct_solver          - use a direct solver to construct interpolation instead of an iterative solver

   Level: advanced

   Notes:
   Must be used with `DMDA` in three dimensions

   By default this uses `KSPGMRES` on the fine grid smoother, so it should be used with `KSPFGMRES` as the outer solver, or the smoother should be changed to not use `KSPGMRES`

   These coarse grid spaces originate in the work of Bramble, Pasciak and Schatz {cite}`bramble1989construction`.
   They were generalized slightly in Barry Smith's Ph.D. thesis, "Domain Decomposition Method for Linear Elasticity" {cite}`smith1990domain`,
   and then explored in great detail in Dryja, Smith, Widlund {cite}`dryja1994schwarz`. These spaces were developed in the context of iterative substructuring preconditioners.

   They were then ingeniously applied as coarse grid spaces for overlapping Schwarz methods by Dohrmann and Widlund.
   They refer to them as GDSW (generalized Dryja, Smith, Widlund preconditioners). See, for example, {cite}`dohrmann2008extending`, {cite}`dohrmann2008family`,
   {cite}`dohrmann2008domain`, {cite}`dohrmann2009overlapping`.

   In this code the wirebasket includes a constant for each face, as well as the true "wirebasket". Other wirebasket algorithms exist that
   only have constants for edges and vertices.

   The usual `PCMG` options are supported, such as `-mg_levels_pc_type <type>`, `-mg_coarse_pc_type <type>`, `-mg_fine_pc_type <type>`, and `-pc_mg_type <type>`
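
   Example Usage:
   One might, for example, select the wirebasket space and control the interpolation solver through its `exotic_` options prefix with
.vb
   -pc_type exotic -pc_exotic_type wirebasket -exotic_ksp_type cg -exotic_ksp_rtol 1e-10
.ve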

.seealso: [](ch_ksp), `PCMG`, `PCSetDM()`, `PCExoticType`, `PCExoticSetType()`
M*/

PETSC_EXTERN PetscErrorCode PCCreate_Exotic(PC pc)
{
  PC_Exotic *ex;
  PC_MG     *mg;

  PetscFunctionBegin;
  /* if type was previously mg, we must manually destroy it because the call to PCSetType(pc, PCMG) below will not destroy it */
  PetscTryTypeMethod(pc, destroy);
  pc->data = NULL;

  PetscCall(PetscFree(((PetscObject)pc)->type_name));
  ((PetscObject)pc)->type_name = NULL;

  PetscCall(PCSetType(pc, PCMG));
  PetscCall(PCMGSetLevels(pc, 2, NULL));
  PetscCall(PCMGSetGalerkin(pc, PC_MG_GALERKIN_PMAT));
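  /* with PC_MG_GALERKIN_PMAT the coarse grid operator is computed by a Galerkin product from the interpolation supplied in PCSetUp_Exotic() */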
  PetscCall(PetscNew(&ex));
  ex->type     = PC_EXOTIC_FACE;
  mg           = (PC_MG *)pc->data;
  mg->innerctx = ex;

  pc->ops->setfromoptions = PCSetFromOptions_Exotic;
  pc->ops->view           = PCView_Exotic;
  pc->ops->destroy        = PCDestroy_Exotic;
  pc->ops->setup          = PCSetUp_Exotic;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCExoticSetType_C", PCExoticSetType_Exotic));
  PetscFunctionReturn(PETSC_SUCCESS);
}