Actual source code: dmproject.c

#include <petsc/private/dmimpl.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmplex.h>
#include <petscdmswarm.h>
#include <petscksp.h>
#include <petscblaslapack.h>

#include <petsc/private/dmswarmimpl.h>
#include "../src/dm/impls/swarm/data_bucket.h" // For DataBucket internals
#include "petscmath.h"

typedef struct _projectConstraintsCtx {
  DM  dm;
  Vec mask;
} projectConstraintsCtx;
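
/* Shell MatMult for the normal operator used by DMGlobalToLocalSolve(): y = L^T D L x, where L is the
   global-to-local map and D is the optional diagonal mask stored in the context */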

static PetscErrorCode MatMult_GlobalToLocalNormal(Mat CtC, Vec x, Vec y)
{
  DM                     dm;
  Vec                    local, mask;
  projectConstraintsCtx *ctx;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(CtC, &ctx));
  dm   = ctx->dm;
  mask = ctx->mask;
  PetscCall(DMGetLocalVector(dm, &local));
  PetscCall(DMGlobalToLocalBegin(dm, x, INSERT_VALUES, local));
  PetscCall(DMGlobalToLocalEnd(dm, x, INSERT_VALUES, local));
  if (mask) PetscCall(VecPointwiseMult(local, mask, local));
  PetscCall(VecSet(y, 0.));
  PetscCall(DMLocalToGlobalBegin(dm, local, ADD_VALUES, y));
  PetscCall(DMLocalToGlobalEnd(dm, local, ADD_VALUES, y));
  PetscCall(DMRestoreLocalVector(dm, &local));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalSolve_project1(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar u[], void *ctx)
{
  PetscInt f;

  PetscFunctionBegin;
  for (f = 0; f < Nf; f++) u[f] = 1.;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMGlobalToLocalSolve - Solve for the global vector that is mapped to a given local vector by `DMGlobalToLocalBegin()`/`DMGlobalToLocalEnd()` with mode
  `INSERT_VALUES`.

  Collective

  Input Parameters:
+ dm - The `DM` object
. x  - The local vector
- y  - The global vector: the input value of `y` is used as an initial guess

  Output Parameter:
. y - The least-squares solution

  Level: advanced

  Note:
  It is assumed that the sum of all the local vector sizes is greater than or equal to the global vector size, so the solution is
  a least-squares solution. It is also assumed that `DMLocalToGlobalBegin()`/`DMLocalToGlobalEnd()` with mode `ADD_VALUES` is the adjoint of the
  global-to-local map, so that the least-squares solution may be found by the normal equations.

  If the `DM` is of type `DMPLEX`, then `y` is the solution of $L^T D L y = L^T D x$, where $L$ is the global-to-local map applied by
  `DMGlobalToLocalBegin()`/`DMGlobalToLocalEnd()` and $D$ is a diagonal mask that is 1 for every point in the union of the closures of the
  local cells and 0 otherwise. This difference is only relevant if there are anchor points that are not in the
  closure of any local cell (see `DMPlexGetAnchors()`/`DMPlexSetAnchors()`).
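
  Example Usage:
  A minimal sketch, assuming `dm` already has a discretization set and `l` is a local vector for `dm`:
.vb
  Vec g;

  PetscCall(DMGetGlobalVector(dm, &g));
  PetscCall(VecSet(g, 0.0)); /* the input value of g is used as the initial guess */
  PetscCall(DMGlobalToLocalSolve(dm, l, g));
  PetscCall(DMRestoreGlobalVector(dm, &g));
.ve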

  Developer Note:
  If this solves for a global vector from a local vector, why is it not called `DMLocalToGlobalSolve()`?

.seealso: [](ch_dmbase), `DM`, `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMLocalToGlobalBegin()`, `DMLocalToGlobalEnd()`, `DMPlexGetAnchors()`, `DMPlexSetAnchors()`
@*/
PetscErrorCode DMGlobalToLocalSolve(DM dm, Vec x, Vec y)
{
  Mat                   CtC;
  PetscInt              n, N, cStart, cEnd, c;
  PetscBool             isPlex;
  KSP                   ksp;
  PC                    pc;
  Vec                   global, mask = NULL;
  projectConstraintsCtx ctx;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
  if (isPlex) {
    /* mark points in the closure */
    PetscCall(DMCreateLocalVector(dm, &mask));
    PetscCall(VecSet(mask, 0.0));
    PetscCall(DMPlexGetSimplexOrBoxCells(dm, 0, &cStart, &cEnd));
    if (cEnd > cStart) {
      PetscScalar *ones;
      PetscInt     numValues, i;

      PetscCall(DMPlexVecGetClosure(dm, NULL, mask, cStart, &numValues, NULL));
      PetscCall(PetscMalloc1(numValues, &ones));
      for (i = 0; i < numValues; i++) ones[i] = 1.;
      for (c = cStart; c < cEnd; c++) PetscCall(DMPlexVecSetClosure(dm, NULL, mask, c, ones, INSERT_VALUES));
      PetscCall(PetscFree(ones));
    }
  } else {
    PetscBool hasMask;

    PetscCall(DMHasNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &hasMask));
    if (!hasMask) {
      PetscErrorCode (**func)(PetscInt dim, PetscReal time, const PetscReal x[], PetscInt Nf, PetscScalar *u, void *ctx);
      void   **ctx;
      PetscInt Nf, f;

      PetscCall(DMGetNumFields(dm, &Nf));
      PetscCall(PetscMalloc2(Nf, &func, Nf, &ctx));
      for (f = 0; f < Nf; ++f) {
        func[f] = DMGlobalToLocalSolve_project1;
        ctx[f]  = NULL;
      }
      PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(DMProjectFunctionLocal(dm, 0.0, func, ctx, INSERT_ALL_VALUES, mask));
      PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
      PetscCall(PetscFree2(func, ctx));
    }
    PetscCall(DMGetNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }
  ctx.dm   = dm;
  ctx.mask = mask;
  PetscCall(VecGetSize(y, &N));
  PetscCall(VecGetLocalSize(y, &n));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), &CtC));
  PetscCall(MatSetSizes(CtC, n, n, N, N));
  PetscCall(MatSetType(CtC, MATSHELL));
  PetscCall(MatSetUp(CtC));
  PetscCall(MatShellSetContext(CtC, &ctx));
  PetscCall(MatShellSetOperation(CtC, MATOP_MULT, (void (*)(void))MatMult_GlobalToLocalNormal));
  PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm), &ksp));
  PetscCall(KSPSetOperators(ksp, CtC, CtC));
  PetscCall(KSPSetType(ksp, KSPCG));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCNONE));
  PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
  PetscCall(KSPSetUp(ksp));
  PetscCall(DMGetGlobalVector(dm, &global));
  PetscCall(VecSet(global, 0.));
  if (mask) PetscCall(VecPointwiseMult(x, mask, x));
  PetscCall(DMLocalToGlobalBegin(dm, x, ADD_VALUES, global));
  PetscCall(DMLocalToGlobalEnd(dm, x, ADD_VALUES, global));
  PetscCall(KSPSolve(ksp, global, y));
  PetscCall(DMRestoreGlobalVector(dm, &global));
  /* clean up */
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&CtC));
  if (isPlex) {
    PetscCall(VecDestroy(&mask));
  } else {
    PetscCall(DMRestoreNamedLocalVector(dm, "_DMGlobalToLocalSolve_mask", &mask));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMProjectField - This projects a given function of the input fields into the function space provided by a `DM`, putting the coefficients in a global vector.

  Collective

  Input Parameters:
+ dm    - The `DM`
. time  - The time
. U     - The input field vector
. funcs - The functions to evaluate, one per field
- mode  - The insertion mode for values

  Output Parameter:
. X - The output vector

  Calling sequence of `funcs`:
+ dim          - The spatial dimension
. Nf           - The number of input fields
. NfAux        - The number of input auxiliary fields
. uOff         - The offset of each field in `u`
. uOff_x       - The offset of each field in `u_x`
. u            - The field values at this point in space
. u_t          - The field time derivative at this point in space (or `NULL`)
. u_x          - The field derivatives at this point in space
. aOff         - The offset of each auxiliary field in `a`
. aOff_x       - The offset of each auxiliary field in `a_x`
. a            - The auxiliary field values at this point in space
. a_t          - The auxiliary field time derivative at this point in space (or `NULL`)
. a_x          - The auxiliary field derivatives at this point in space
. t            - The current time
. x            - The coordinates of this point
. numConstants - The number of constants
. constants    - The value of each constant
- f            - The value of the function at this point in space

  Level: advanced

  Note:
  There are three different `DM`s that potentially interact in this function. The output `dm` specifies the layout of the values calculated by the function.
  The input `DM`, attached to `U`, may be different. For example, you can input the solution over the full domain, but output over a piece of the boundary, or
  a subdomain. You can also output a different number of fields than the input, with different discretizations. Lastly, the auxiliary `DM`, attached to the
  auxiliary field vector, which is itself attached to `dm`, can also be different. It can have a different topology, number of fields, and discretizations.
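
  Example Usage:
  A sketch of a pointwise function that copies the first input field into the output, assuming the output discretization
  has the same number of components as that field:
.vb
  static void copy_field(PetscInt dim, PetscInt Nf, PetscInt NfAux,
                         const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[],
                         const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[],
                         PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[])
  {
    for (PetscInt c = uOff[0]; c < uOff[1]; ++c) f[c - uOff[0]] = u[c];
  }
.ve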

.seealso: [](ch_dmbase), `DM`, `DMProjectFieldLocal()`, `DMProjectFieldLabelLocal()`, `DMProjectFunction()`, `DMComputeL2Diff()`
@*/
PetscErrorCode DMProjectField(DM dm, PetscReal time, Vec U, void (**funcs)(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f[]), InsertMode mode, Vec X)
{
  Vec localX, localU;
  DM  dmIn;

  PetscFunctionBegin;
  PetscCall(DMGetLocalVector(dm, &localX));
  /* We currently check whether localU == localX to see if we need to apply BC */
  if (U != X) {
    PetscCall(VecGetDM(U, &dmIn));
    PetscCall(DMGetLocalVector(dmIn, &localU));
  } else {
    dmIn   = dm;
    localU = localX;
  }
  PetscCall(DMGlobalToLocalBegin(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMGlobalToLocalEnd(dmIn, U, INSERT_VALUES, localU));
  PetscCall(DMProjectFieldLocal(dm, time, localU, funcs, mode, localX));
  PetscCall(DMLocalToGlobalBegin(dm, localX, mode, X));
  PetscCall(DMLocalToGlobalEnd(dm, localX, mode, X));
  if (mode == INSERT_VALUES || mode == INSERT_ALL_VALUES || mode == INSERT_BC_VALUES) {
    Mat cMat;

    PetscCall(DMGetDefaultConstraints(dm, NULL, &cMat, NULL));
    if (cMat) PetscCall(DMGlobalToLocalSolve(dm, localX, X));
  }
  PetscCall(DMRestoreLocalVector(dm, &localX));
  if (U != X) PetscCall(DMRestoreLocalVector(dmIn, &localU));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/********************* Adaptive Interpolation **************************/

/* See the discussion of Adaptive Interpolation in manual/high_level_mg.rst */
PetscErrorCode DMAdaptInterpolator(DM dmc, DM dmf, Mat In, KSP smoother, Mat MF, Mat MC, Mat *InAdapt, void *user)
{
  Mat                globalA, AF;
  Vec                tmp;
  const PetscScalar *af, *ac;
  PetscScalar       *A, *b, *x, *workscalar;
  PetscReal         *w, *sing, *workreal, rcond = PETSC_SMALL;
  PetscBLASInt       M, N, one = 1, irank, lwrk, info;
  PetscInt           debug = 0, rStart, rEnd, r, maxcols = 0, k, Nc, ldac, ldaf;
  PetscBool          allocVc = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dm_interpolator_adapt_debug", &debug, NULL));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  PetscCall(MatDuplicate(In, MAT_SHARE_NONZERO_PATTERN, InAdapt));
  PetscCall(MatGetOwnershipRange(In, &rStart, &rEnd));
#if 0
  PetscCall(MatGetMaxRowLen(In, &maxcols));
#else
  for (r = rStart; r < rEnd; ++r) {
    PetscInt ncols;

    PetscCall(MatGetRow(In, r, &ncols, NULL, NULL));
    maxcols = PetscMax(maxcols, ncols);
    PetscCall(MatRestoreRow(In, r, &ncols, NULL, NULL));
  }
#endif
  if (Nc < maxcols) PetscCall(PetscPrintf(PETSC_COMM_SELF, "The number of input vectors %" PetscInt_FMT " < %" PetscInt_FMT ", the maximum number of column entries\n", Nc, maxcols));
  for (k = 0; k < Nc && debug; ++k) {
    char        name[PETSC_MAX_PATH_LEN];
    const char *prefix;
    Vec         vc, vf;

    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)smoother, &prefix));

    if (MC) {
      PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sCoarse Vector %" PetscInt_FMT, prefix ? prefix : "", k));
      PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
      PetscCall(PetscObjectSetName((PetscObject)vc, name));
      PetscCall(VecViewFromOptions(vc, NULL, "-dm_adapt_interp_view_coarse"));
      PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    }
    PetscCall(PetscSNPrintf(name, PETSC_MAX_PATH_LEN, "%sFine Vector %" PetscInt_FMT, prefix ? prefix : "", k));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(PetscObjectSetName((PetscObject)vf, name));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_adapt_interp_view_fine"));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(PetscBLASIntCast(3 * PetscMin(Nc, maxcols) + PetscMax(2 * PetscMin(Nc, maxcols), PetscMax(Nc, maxcols)), &lwrk));
  PetscCall(PetscMalloc7(Nc * maxcols, &A, PetscMax(Nc, maxcols), &b, Nc, &w, maxcols, &x, maxcols, &sing, lwrk, &workscalar, 5 * PetscMin(Nc, maxcols), &workreal));
  /* w_k = \frac{\HC{v_k} B_l v_k}{\HC{v_k} A_l v_k} or the inverse Rayleigh quotient, which we calculate using \frac{\HC{v_k} v_k}{\HC{v_k} B^{-1}_l A_l v_k} */
  PetscCall(KSPGetOperators(smoother, &globalA, NULL));

  PetscCall(MatMatMult(globalA, MF, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &AF));
  for (k = 0; k < Nc; ++k) {
    PetscScalar vnorm, vAnorm;
    Vec         vf;

    w[k] = 1.0;
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseGetColumnVecRead(AF, k, &tmp));
    PetscCall(VecDot(vf, vf, &vnorm));
#if 0
    PetscCall(DMGetGlobalVector(dmf, &tmp2));
    PetscCall(KSPSolve(smoother, tmp, tmp2));
    PetscCall(VecDot(vf, tmp2, &vAnorm));
    PetscCall(DMRestoreGlobalVector(dmf, &tmp2));
#else
    PetscCall(VecDot(vf, tmp, &vAnorm));
#endif
    w[k] = PetscRealPart(vnorm) / PetscRealPart(vAnorm);
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
    PetscCall(MatDenseRestoreColumnVecRead(AF, k, &tmp));
  }
  PetscCall(MatDestroy(&AF));
  if (!MC) {
    allocVc = PETSC_TRUE;
    PetscCall(MatTransposeMatMult(In, MF, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &MC));
  }
  /* Solve a LS system for each fine row
     MATT: Can we generalize to the case where Nc for the fine space
     is different from Nc for the coarse? */
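  /* For each fine row r with nonzero coarse columns c_0, ..., c_{n-1}, the loop below assembles the weighted
     least-squares problem
       min_z \sum_k w_k | f^{F,k}_r - \sum_c z_c f^{C,k}_c |^2
     (rows scaled by \sqrt{w_k}), solves it with LAPACK GELSS, and writes the minimizer z into row r of the
     adapted interpolator */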
  PetscCall(MatDenseGetArrayRead(MF, &af));
  PetscCall(MatDenseGetLDA(MF, &ldaf));
  PetscCall(MatDenseGetArrayRead(MC, &ac));
  PetscCall(MatDenseGetLDA(MC, &ldac));
  for (r = rStart; r < rEnd; ++r) {
    PetscInt           ncols, c;
    const PetscInt    *cols;
    const PetscScalar *vals;

    PetscCall(MatGetRow(In, r, &ncols, &cols, &vals));
    for (k = 0; k < Nc; ++k) {
      /* Need to fit lowest mode exactly */
      const PetscReal wk = ((ncols == 1) && (k > 0)) ? 0.0 : PetscSqrtReal(w[k]);

      /* b_k = \sqrt{w_k} f^{F,k}_r */
      b[k] = wk * af[r - rStart + k * ldaf];
      /* A_{kc} = \sqrt{w_k} f^{C,k}_c */
      /* TODO Must pull out VecScatter from In, scatter in vc[k] values up front, and access them indirectly just as in MatMult() */
      for (c = 0; c < ncols; ++c) {
        /* This is element (k, c) of A */
        A[c * Nc + k] = wk * ac[cols[c] - rStart + k * ldac];
      }
    }
    PetscCall(PetscBLASIntCast(Nc, &M));
    PetscCall(PetscBLASIntCast(ncols, &N));
    if (debug) {
#if defined(PETSC_USE_COMPLEX)
      PetscScalar *tmp;
      PetscInt     j;

      PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      for (j = 0; j < Nc; ++j) tmp[j] = w[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, tmp));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      for (j = 0; j < Nc; ++j) tmp[j] = b[j];
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, tmp));
      PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS weights", Nc, 1, w));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS matrix", Nc, ncols, A));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS rhs", Nc, 1, b));
#endif
    }
#if defined(PETSC_USE_COMPLEX)
    /* ZGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, RWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, workreal, &info));
#else
    /* DGELSS( M, N, NRHS, A, LDA, B, LDB, S, RCOND, RANK, WORK, LWORK, INFO) */
    PetscCallBLAS("LAPACKgelss", LAPACKgelss_(&M, &N, &one, A, &M, b, M > N ? &M : &N, sing, &rcond, &irank, workscalar, &lwrk, &info));
#endif
    PetscCheck(info >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Bad argument to GELSS");
    PetscCheck(info <= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "SVD failed to converge");
    if (debug) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "rank %" PetscBLASInt_FMT " rcond %g\n", irank, (double)rcond));
#if defined(PETSC_USE_COMPLEX)
      {
        PetscScalar *tmp;
        PetscInt     j;

        PetscCall(DMGetWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
        for (j = 0; j < PetscMin(Nc, ncols); ++j) tmp[j] = sing[j];
        PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, tmp));
        PetscCall(DMRestoreWorkArray(dmc, Nc, MPIU_SCALAR, (void *)&tmp));
      }
#else
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS singular values", PetscMin(Nc, ncols), 1, sing));
#endif
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS old P", ncols, 1, vals));
      PetscCall(DMPrintCellMatrix(r, "Interpolator Row LS sol", ncols, 1, b));
    }
    PetscCall(MatSetValues(*InAdapt, 1, &r, ncols, cols, b, INSERT_VALUES));
    PetscCall(MatRestoreRow(In, r, &ncols, &cols, &vals));
  }
  PetscCall(MatDenseRestoreArrayRead(MF, &af));
  PetscCall(MatDenseRestoreArrayRead(MC, &ac));
  PetscCall(PetscFree7(A, b, w, x, sing, workscalar, workreal));
  if (allocVc) PetscCall(MatDestroy(&MC));
  PetscCall(MatAssemblyBegin(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*InAdapt, MAT_FINAL_ASSEMBLY));
  PetscCall(PetscLogEventEnd(DM_AdaptInterpolator, dmc, dmf, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}
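
// Verify that the interpolator In maps each coarse vector (column of MC) to its fine counterpart (column of MF),
// failing if max_k ||vf_k - In vc_k||_2 exceeds tol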

PetscErrorCode DMCheckInterpolator(DM dmf, Mat In, Mat MC, Mat MF, PetscReal tol)
{
  Vec       tmp;
  PetscReal norminf, norm2, maxnorminf = 0.0, maxnorm2 = 0.0;
  PetscInt  k, Nc;

  PetscFunctionBegin;
  PetscCall(DMGetGlobalVector(dmf, &tmp));
  PetscCall(MatViewFromOptions(In, NULL, "-dm_interpolator_adapt_error"));
  PetscCall(MatGetSize(MF, NULL, &Nc));
  for (k = 0; k < Nc; ++k) {
    Vec vc, vf;

    PetscCall(MatDenseGetColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseGetColumnVecRead(MF, k, &vf));
    PetscCall(MatMult(In, vc, tmp));
    PetscCall(VecAXPY(tmp, -1.0, vf));
    PetscCall(VecViewFromOptions(vc, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(vf, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecViewFromOptions(tmp, NULL, "-dm_interpolator_adapt_error"));
    PetscCall(VecNorm(tmp, NORM_INFINITY, &norminf));
    PetscCall(VecNorm(tmp, NORM_2, &norm2));
    maxnorminf = PetscMax(maxnorminf, norminf);
    maxnorm2   = PetscMax(maxnorm2, norm2);
    PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dmf), "Coarse vec %" PetscInt_FMT " ||vf - P vc||_\\infty %g, ||vf - P vc||_2 %g\n", k, (double)norminf, (double)norm2));
    PetscCall(MatDenseRestoreColumnVecRead(MC, k, &vc));
    PetscCall(MatDenseRestoreColumnVecRead(MF, k, &vf));
  }
  PetscCall(DMRestoreGlobalVector(dmf, &tmp));
  PetscCheck(maxnorm2 <= tol, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_WRONG, "max_k ||vf_k - P vc_k||_2 %g > tol %g", (double)maxnorm2, (double)tol);
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Project particles to field
//   M_f u_f = M^T_p u_p
//   u_f = M^{-1}_f M^T_p u_p
static PetscErrorCode DMSwarmProjectField_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  Mat         M_f, M_p; // TODO Should cache these
  Vec         rhs;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMultTranspose(M_p, u_p, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPSetOperators(ksp, M_f, M_f));
  PetscCall(KSPSolve(ksp, rhs, u_f));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Project field to particles
//   M^T_p u_p = M_f u_f
//   u_p = (M^T_p)^+ M_f u_f
static PetscErrorCode DMSwarmProjectParticles_Conservative_PLEX(DM sw, DM dm, Vec u_p, Vec u_f)
{
  KSP         ksp;
  PC          pc;
  Mat         M_f, M_p, PM_p;
  Vec         rhs;
  PetscBool   isBjacobi;
  const char *prefix;

  PetscFunctionBegin;
  PetscCall(DMCreateMassMatrix(dm, dm, &M_f));
  PetscCall(DMCreateMassMatrix(sw, dm, &M_p));
  PetscCall(DMGetGlobalVector(dm, &rhs));
  PetscCall(MatMult(M_f, u_f, rhs));

  PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
  PetscCall(KSPSetOptionsPrefix(ksp, prefix));
  PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
  PetscCall(KSPSetFromOptions(ksp));

  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
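  // The particle mass matrix is rectangular in general, and block Jacobi needs a square matrix to extract
  // diagonal blocks from, so build a square particle mass matrix for the preconditioner in that case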
  if (isBjacobi) {
    PetscCall(DMSwarmCreateMassMatrixSquare(sw, dm, &PM_p));
  } else {
    PM_p = M_p;
    PetscCall(PetscObjectReference((PetscObject)PM_p));
  }
  PetscCall(KSPSetOperators(ksp, M_p, PM_p));
  PetscCall(KSPSolveTranspose(ksp, rhs, u_p));

  PetscCall(DMRestoreGlobalVector(dm, &rhs));
  PetscCall(KSPDestroy(&ksp));
  PetscCall(MatDestroy(&M_f));
  PetscCall(MatDestroy(&M_p));
  PetscCall(MatDestroy(&PM_p));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSwarmProjectFields_Plex_Internal(DM sw, DM dm, PetscInt Nf, const char *fieldnames[], Vec vec, ScatterMode mode)
{
  PetscDS  ds;
  Vec      u;
  PetscInt f = 0, bs, *Nc;

  PetscFunctionBegin;
  PetscCall(DMGetDS(dm, &ds));
  PetscCall(PetscDSGetComponents(ds, &Nc));
  PetscCall(PetscCitationsRegister(SwarmProjCitation, &SwarmProjcite));
  PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Currently supported only for a single field");
  PetscCall(DMSwarmVectorDefineFields(sw, Nf, fieldnames));
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fieldnames[f], &u));
  PetscCall(VecGetBlockSize(u, &bs));
  PetscCheck(Nc[f] == bs, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Field %" PetscInt_FMT " components %" PetscInt_FMT " != %" PetscInt_FMT " blocksize for swarm field %s", f, Nc[f], bs, fieldnames[f]);
  if (mode == SCATTER_FORWARD) {
    PetscCall(DMSwarmProjectField_Conservative_PLEX(sw, dm, u, vec));
  } else {
    PetscCall(DMSwarmProjectParticles_Conservative_PLEX(sw, dm, u, vec));
  }
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fieldnames[0], &u));
  PetscFunctionReturn(PETSC_SUCCESS);
}
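
/* Approximate projection onto a Q1 DMDA space by local averaging:
     phi_i = (sum_p N_i(x_p) phi_p) / (sum_p N_i(x_p))
   where N_i is the Q1 basis function at vertex i evaluated at particle location x_p */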

static PetscErrorCode DMSwarmProjectField_ApproxQ1_DA_2D(DM swarm, PetscReal *swarm_field, DM dm, Vec v_field)
{
  DMSwarmCellDM      celldm;
  Vec                v_field_l, denom_l, coor_l, denom;
  PetscScalar       *_field_l, *_denom_l;
  PetscInt           k, p, e, npoints, nel, npe, Nfc;
  PetscInt          *mpfield_cell;
  PetscReal         *mpfield_coor;
  const PetscInt    *element_list;
  const PetscInt    *element;
  PetscScalar        xi_p[2], Ni[4];
  const PetscScalar *_coor;
  const char       **coordFields, *cellid;

  PetscFunctionBegin;
  PetscCall(VecZeroEntries(v_field));

  PetscCall(DMGetLocalVector(dm, &v_field_l));
  PetscCall(DMGetGlobalVector(dm, &denom));
  PetscCall(DMGetLocalVector(dm, &denom_l));
  PetscCall(VecZeroEntries(v_field_l));
  PetscCall(VecZeroEntries(denom));
  PetscCall(VecZeroEntries(denom_l));

  PetscCall(VecGetArray(v_field_l, &_field_l));
  PetscCall(VecGetArray(denom_l, &_denom_l));

  PetscCall(DMGetCoordinatesLocal(dm, &coor_l));
  PetscCall(VecGetArrayRead(coor_l, &_coor));

  PetscCall(DMSwarmGetCellDMActive(swarm, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmCellDMGetCellID(celldm, &cellid));

  PetscCall(DMDAGetElements(dm, &nel, &npe, &element_list));
  PetscCall(DMSwarmGetLocalSize(swarm, &npoints));
  PetscCall(DMSwarmGetField(swarm, coordFields[0], NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMSwarmGetField(swarm, cellid, NULL, NULL, (void **)&mpfield_cell));

  for (p = 0; p < npoints; p++) {
    PetscReal         *coor_p;
    const PetscScalar *x0;
    const PetscScalar *x2;
    PetscScalar        dx[2];

    e       = mpfield_cell[p];
    coor_p  = &mpfield_coor[2 * p];
    element = &element_list[npe * e];

    /* compute local coordinates: (xp-x0)/dx = (xip+1)/2 */
    x0 = &_coor[2 * element[0]];
    x2 = &_coor[2 * element[2]];

    dx[0] = x2[0] - x0[0];
    dx[1] = x2[1] - x0[1];

    xi_p[0] = 2.0 * (coor_p[0] - x0[0]) / dx[0] - 1.0;
    xi_p[1] = 2.0 * (coor_p[1] - x0[1]) / dx[1] - 1.0;

    /* evaluate basis functions */
    Ni[0] = 0.25 * (1.0 - xi_p[0]) * (1.0 - xi_p[1]);
    Ni[1] = 0.25 * (1.0 + xi_p[0]) * (1.0 - xi_p[1]);
    Ni[2] = 0.25 * (1.0 + xi_p[0]) * (1.0 + xi_p[1]);
    Ni[3] = 0.25 * (1.0 - xi_p[0]) * (1.0 + xi_p[1]);

    for (k = 0; k < npe; k++) {
      _field_l[element[k]] += Ni[k] * swarm_field[p];
      _denom_l[element[k]] += Ni[k];
    }
  }

  PetscCall(DMSwarmRestoreField(swarm, cellid, NULL, NULL, (void **)&mpfield_cell));
  PetscCall(DMSwarmRestoreField(swarm, coordFields[0], NULL, NULL, (void **)&mpfield_coor));
  PetscCall(DMDARestoreElements(dm, &nel, &npe, &element_list));
  PetscCall(VecRestoreArrayRead(coor_l, &_coor));
  PetscCall(VecRestoreArray(v_field_l, &_field_l));
  PetscCall(VecRestoreArray(denom_l, &_denom_l));

  PetscCall(DMLocalToGlobalBegin(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalEnd(dm, v_field_l, ADD_VALUES, v_field));
  PetscCall(DMLocalToGlobalBegin(dm, denom_l, ADD_VALUES, denom));
  PetscCall(DMLocalToGlobalEnd(dm, denom_l, ADD_VALUES, denom));

  PetscCall(VecPointwiseDivide(v_field, v_field, denom));

  PetscCall(DMRestoreLocalVector(dm, &v_field_l));
  PetscCall(DMRestoreLocalVector(dm, &denom_l));
  PetscCall(DMRestoreGlobalVector(dm, &denom));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSwarmProjectFields_DA_Internal(DM swarm, DM celldm, PetscInt nfields, DMSwarmDataField dfield[], Vec vecs[], ScatterMode mode)
{
  PetscInt        f, dim;
  DMDAElementType etype;

  PetscFunctionBegin;
  PetscCall(DMDAGetElementType(celldm, &etype));
  PetscCheck(etype != DMDA_ELEMENT_P1, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Only Q1 DMDA supported");
  PetscCheck(mode == SCATTER_FORWARD, PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "Mapping the continuum to particles is not currently supported for DMDA");

  PetscCall(DMGetDimension(swarm, &dim));
  switch (dim) {
  case 2:
    for (f = 0; f < nfields; f++) {
      PetscReal *swarm_field;

      PetscCall(DMSwarmDataFieldGetEntries(dfield[f], (void **)&swarm_field));
      PetscCall(DMSwarmProjectField_ApproxQ1_DA_2D(swarm, swarm_field, celldm, vecs[f]));
    }
    break;
  case 3:
    SETERRQ(PetscObjectComm((PetscObject)swarm), PETSC_ERR_SUP, "No support for 3D");
  default:
    break;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMSwarmProjectFields - Project a set of swarm fields onto another `DM`

  Collective

  Input Parameters:
+ sw         - the `DMSWARM`
. dm         - the `DM`, or `NULL` to use the cell `DM`
. nfields    - the number of swarm fields to project
. fieldnames - the textual names of the swarm fields to project
. fields     - an array of `Vec`s of length `nfields`
- mode       - if `SCATTER_FORWARD` then map particles to the continuum, and if `SCATTER_REVERSE` map the continuum to particles

  Level: beginner

  Notes:
  Currently, there are two available projection methods. The first is conservative projection, used for a `DMPLEX` cell `DM`.
  The second is averaging, used for a `DMDA` cell `DM`:

  $$
  \phi_i = \sum_{p=0}^{np} N_i(x_p) \phi_p dJ / \sum_{p=0}^{np} N_i(x_p) dJ
  $$

  where $\phi_p$ is the swarm field at point $p$, $N_i()$ is the cell `DM` basis function at vertex $i$, $dJ$ is the determinant of the cell Jacobian and
  $\phi_i$ is the projected vertex value of the field $\phi$.

  The user is responsible for destroying both the array and the individual `Vec` objects.

  For the `DMPLEX` case, there is only a single vector, so the field layout in the `DMPLEX` must match the requested fields from the `DMSwarm`.

  For averaging projection, only swarm fields registered with data type `PETSC_REAL` can be projected onto the cell `DM`, and only swarm fields of block size 1 can currently be projected.
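
  Example Usage:
  A minimal sketch for the forward (particle-to-grid) direction, assuming the swarm `sw` carries a registered real
  field named "phi" and its cell `DM` has a matching single-field discretization:
.vb
  DM          dm;
  Vec         fields[1];
  const char *names[1] = {"phi"};

  PetscCall(DMSwarmGetCellDM(sw, &dm));
  PetscCall(DMCreateGlobalVector(dm, &fields[0]));
  PetscCall(DMSwarmProjectFields(sw, dm, 1, names, fields, SCATTER_FORWARD));
  PetscCall(VecDestroy(&fields[0]));
.ve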

.seealso: [](ch_dmbase), `DMSWARM`, `DMSwarmSetType()`, `DMSwarmSetCellDM()`, `DMSwarmType`
@*/
PetscErrorCode DMSwarmProjectFields(DM sw, DM dm, PetscInt nfields, const char *fieldnames[], Vec fields[], ScatterMode mode)
{
  DM_Swarm         *swarm = (DM_Swarm *)sw->data;
  DMSwarmDataField *gfield;
  PetscBool         isDA, isPlex;
  MPI_Comm          comm;

  PetscFunctionBegin;
  DMSWARMPICVALID(sw);
  PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));
  if (!dm) PetscCall(DMSwarmGetCellDM(sw, &dm));
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMDA, &isDA));
  PetscCall(PetscObjectTypeCompare((PetscObject)dm, DMPLEX, &isPlex));
  PetscCall(PetscMalloc1(nfields, &gfield));
  for (PetscInt f = 0; f < nfields; ++f) PetscCall(DMSwarmDataBucketGetDMSwarmDataFieldByName(swarm->db, fieldnames[f], &gfield[f]));

  if (isDA) {
    for (PetscInt f = 0; f < nfields; f++) {
      PetscCheck(gfield[f]->petsc_type == PETSC_REAL, comm, PETSC_ERR_SUP, "Projection only valid for fields using a data type = PETSC_REAL");
      PetscCheck(gfield[f]->bs == 1, comm, PETSC_ERR_SUP, "Projection only valid for fields with block size = 1");
    }
    PetscCall(DMSwarmProjectFields_DA_Internal(sw, dm, nfields, gfield, fields, mode));
  } else if (isPlex) {
    PetscInt Nf;

    PetscCall(DMGetNumFields(dm, &Nf));
    PetscCheck(Nf == nfields, comm, PETSC_ERR_ARG_WRONG, "Number of DM fields %" PetscInt_FMT " != %" PetscInt_FMT " number of requested Swarm fields", Nf, nfields);
    PetscCall(DMSwarmProjectFields_Plex_Internal(sw, dm, nfields, fieldnames, fields[0], mode));
  } else SETERRQ(PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "Only supported for cell DMs of type DMDA and DMPLEX");

  PetscCall(PetscFree(gfield));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  InitializeParticles_Regular - Initialize a regular grid of particles in each cell

  Input Parameters:
+ sw - The `DMSWARM`
- n  - The number of particles per dimension per species

  Notes:
  This function sets the species, cellid, and cell DM coordinates.

  It places n^d particles per species in each cell of the cell DM, where d is the phase-space dimension; for example,
  n = 2 in a d = 4 phase space gives 16 particles per species in each cell.
*/
static PetscErrorCode InitializeParticles_Regular(DM sw, PetscInt n)
{
  DM_Swarm     *swarm = (DM_Swarm *)sw->data;
  DM            dm;
  DMSwarmCellDM celldm;
  PetscInt      dim, Ns, Npc, Np, cStart, cEnd, debug;
  PetscBool     flg;
  MPI_Comm      comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)sw, &comm));

  PetscOptionsBegin(comm, "", "DMSwarm Options", "DMSWARM");
  PetscCall(DMSwarmGetNumSpecies(sw, &Ns));
  PetscCall(PetscOptionsInt("-dm_swarm_num_species", "The number of species", "DMSwarmSetNumSpecies", Ns, &Ns, &flg));
  if (flg) PetscCall(DMSwarmSetNumSpecies(sw, Ns));
  PetscCall(PetscOptionsBoundedInt("-dm_swarm_print_coords", "Debug output level for particle coordinate computations", "InitializeParticles", 0, &swarm->printCoords, NULL, 0));
  PetscCall(PetscOptionsBoundedInt("-dm_swarm_print_weights", "Debug output level for particle weight computations", "InitializeWeights", 0, &swarm->printWeights, NULL, 0));
  PetscOptionsEnd();
  debug = swarm->printCoords;

  // n^d particles per species in each cell of the grid
  PetscCall(DMSwarmGetCellDM(sw, &dm));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCheck(!(dim % 2), comm, PETSC_ERR_SUP, "We only support even dimension, not %" PetscInt_FMT, dim);
  PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
  Npc = Ns * PetscPowInt(n, dim);
  Np  = (cEnd - cStart) * Npc;
  PetscCall(DMSwarmSetLocalSizes(sw, Np, 0));
  if (debug) {
    PetscInt gNp;
    PetscCallMPI(MPIU_Allreduce(&Np, &gNp, 1, MPIU_INT, MPIU_SUM, comm));
    PetscCall(PetscPrintf(comm, "Global Np = %" PetscInt_FMT "\n", gNp));
  }
  PetscCall(PetscPrintf(comm, "Regular layout using %" PetscInt_FMT " particles per cell\n", Npc));

  // Set species and cellid
  {
    const char *cellidName;
    PetscInt   *species, *cellid;

    PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
    PetscCall(DMSwarmCellDMGetCellID(celldm, &cellidName));
    PetscCall(DMSwarmGetField(sw, "species", NULL, NULL, (void **)&species));
    PetscCall(DMSwarmGetField(sw, cellidName, NULL, NULL, (void **)&cellid));
    for (PetscInt c = 0, p = 0; c < cEnd - cStart; ++c) {
      for (PetscInt s = 0; s < Ns; ++s) {
        for (PetscInt q = 0; q < Npc / Ns; ++q, ++p) {
          species[p] = s;
          cellid[p]  = c;
        }
      }
    }
    PetscCall(DMSwarmRestoreField(sw, "species", NULL, NULL, (void **)&species));
    PetscCall(DMSwarmRestoreField(sw, cellidName, NULL, NULL, (void **)&cellid));
  }

  // Set particle coordinates
  {
    PetscReal     *x, *v;
    const char   **coordNames;
    PetscInt       Ncoord;
    const PetscInt xdim = dim / 2, vdim = dim / 2;

    PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Ncoord, &coordNames));
    PetscCheck(Ncoord == 2, comm, PETSC_ERR_SUP, "We only support regular layout for 2 coordinate fields, not %" PetscInt_FMT, Ncoord);
    PetscCall(DMSwarmGetField(sw, coordNames[0], NULL, NULL, (void **)&x));
    PetscCall(DMSwarmGetField(sw, coordNames[1], NULL, NULL, (void **)&v));
    PetscCall(DMSwarmSortGetAccess(sw));
    PetscCall(DMGetCoordinatesLocalSetUp(dm));
    for (PetscInt c = 0; c < cEnd - cStart; ++c) {
      const PetscInt     cell = c + cStart;
      const PetscScalar *a;
      PetscScalar       *coords;
      PetscReal          lower[6], upper[6];
      PetscBool          isDG;
      PetscInt          *pidx, npc, Nc;

      PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &npc, &pidx));
      PetscCheck(Npc == npc, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid number of points per cell %" PetscInt_FMT " != %" PetscInt_FMT, npc, Npc);
      PetscCall(DMPlexGetCellCoordinates(dm, cell, &isDG, &Nc, &a, &coords));
      for (PetscInt d = 0; d < dim; ++d) {
        lower[d] = PetscRealPart(coords[0 * dim + d]);
        upper[d] = PetscRealPart(coords[0 * dim + d]);
      }
      for (PetscInt i = 1; i < Nc / dim; ++i) {
        for (PetscInt d = 0; d < dim; ++d) {
          lower[d] = PetscMin(lower[d], PetscRealPart(coords[i * dim + d]));
          upper[d] = PetscMax(upper[d], PetscRealPart(coords[i * dim + d]));
        }
      }
      for (PetscInt s = 0; s < Ns; ++s) {
        for (PetscInt q = 0; q < Npc / Ns; ++q) {
          const PetscInt p = pidx[q * Ns + s];
          PetscInt       xi[3], vi[3];

          xi[0] = q % n;
          xi[1] = (q / n) % n;
          xi[2] = (q / PetscSqr(n)) % n;
          for (PetscInt d = 0; d < xdim; ++d) x[p * xdim + d] = lower[d] + (xi[d] + 0.5) * (upper[d] - lower[d]) / n;
          vi[0] = (q / PetscPowInt(n, xdim)) % n;
          vi[1] = (q / PetscPowInt(n, xdim + 1)) % n;
          vi[2] = (q / PetscPowInt(n, xdim + 2));
          for (PetscInt d = 0; d < vdim; ++d) v[p * vdim + d] = lower[xdim + d] + (vi[d] + 0.5) * (upper[xdim + d] - lower[xdim + d]) / n;
          if (debug > 1) {
            PetscCall(PetscPrintf(PETSC_COMM_SELF, "Particle %4" PetscInt_FMT " ", p));
            PetscCall(PetscPrintf(PETSC_COMM_SELF, "  x: ("));
            for (PetscInt d = 0; d < xdim; ++d) {
              if (d > 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", "));
              PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)PetscRealPart(x[p * xdim + d])));
            }
            PetscCall(PetscPrintf(PETSC_COMM_SELF, ") v:("));
            for (PetscInt d = 0; d < vdim; ++d) {
              if (d > 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, ", "));
              PetscCall(PetscPrintf(PETSC_COMM_SELF, "%g", (double)PetscRealPart(v[p * vdim + d])));
            }
            PetscCall(PetscPrintf(PETSC_COMM_SELF, ")\n"));
          }
        }
      }
      PetscCall(DMPlexRestoreCellCoordinates(dm, cell, &isDG, &Nc, &a, &coords));
      PetscCall(DMSwarmSortRestorePointsPerCell(sw, c, &Npc, &pidx));
    }
    PetscCall(DMSwarmSortRestoreAccess(sw));
    PetscCall(DMSwarmRestoreField(sw, coordNames[0], NULL, NULL, (void **)&x));
    PetscCall(DMSwarmRestoreField(sw, coordNames[1], NULL, NULL, (void **)&v));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
@article{MyersColellaVanStraalen2017,
   title   = {A 4th-order particle-in-cell method with phase-space remapping for the {Vlasov-Poisson} equation},
   author  = {Andrew Myers and Phillip Colella and Brian Van Straalen},
   journal = {SIAM Journal on Scientific Computing},
   volume  = {39},
   issue   = {3},
   pages   = {B467-B485},
   doi     = {10.1137/16M105962X},
   issn    = {10957197},
   year    = {2017},
}
*/
static PetscErrorCode W_3_Interpolation_Private(PetscReal x, PetscReal *w)
{
  const PetscReal ax = PetscAbsReal(x);

  PetscFunctionBegin;
  *w = 0.;
  // W_3(x) = 1 - 5/2 |x|^2 + 3/2 |x|^3   0 \le |x| \le 1
  if (ax <= 1.) *w = 1. - 2.5 * PetscSqr(ax) + 1.5 * PetscSqr(ax) * ax;
  //          1/2 (2 - |x|)^2 (1 - |x|)   1 \le |x| \le 2
  else if (ax <= 2.) *w = 0.5 * PetscSqr(2. - ax) * (1. - ax);
  //PetscCall(PetscPrintf(PETSC_COMM_SELF, "    W_3 %g --> %g\n", x, *w));
  PetscFunctionReturn(PETSC_SUCCESS);
}
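
// In the Colella remap below, the new weight of the regular-grid particle at phase-space cell center (X_i, V_j) is
//   rw_{ij} = \sum_p w_p W_3((X_i - x_p)/hx) W_3((V_j - v_p)/hv)
// accumulated over all original particles p within two grid spacings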

// Right now, we will assume that the spatial and velocity grids are regular, which will speed up point location immensely
static PetscErrorCode DMSwarmRemap_Colella_Internal(DM sw, DM *rsw)
{
  DM            xdm, vdm;
  DMSwarmCellDM celldm;
  PetscReal     xmin[3], xmax[3], vmin[3], vmax[3];
  PetscInt      xend[3], vend[3];
  PetscReal    *x, *v, *w, *rw;
  PetscReal     hx[3], hv[3];
  PetscInt      dim, xcdim, vcdim, xcStart, xcEnd, vcStart, vcEnd, Np, Nfc;
  PetscInt      debug = ((DM_Swarm *)sw->data)->printWeights;
  const char  **coordFields;

  PetscFunctionBegin;
  PetscCall(DMGetDimension(sw, &dim));
  PetscCall(DMSwarmGetCellDM(sw, &xdm));
  PetscCall(DMGetCoordinateDim(xdm, &xcdim));
  // Create a new centroid swarm without weights
  PetscCall(DMSwarmDuplicate(sw, rsw));
  PetscCall(DMSwarmGetCellDMActive(*rsw, &celldm));
  PetscCall(DMSwarmSetCellDMActive(*rsw, "remap"));
  PetscCall(InitializeParticles_Regular(*rsw, 1));
  PetscCall(DMSwarmSetCellDMActive(*rsw, ((PetscObject)celldm)->name));
  PetscCall(DMSwarmGetLocalSize(*rsw, &Np));
  // Assume quad mesh and calculate cell diameters (TODO this could be more robust)
  {
    const PetscScalar *array;
    PetscScalar       *coords;
    PetscBool          isDG;
    PetscInt           Nc;

    PetscCall(DMGetBoundingBox(xdm, xmin, xmax));
    PetscCall(DMPlexGetHeightStratum(xdm, 0, &xcStart, &xcEnd));
    PetscCall(DMPlexGetCellCoordinates(xdm, xcStart, &isDG, &Nc, &array, &coords));
    hx[0] = PetscRealPart(coords[1 * xcdim + 0] - coords[0 * xcdim + 0]);
    hx[1] = xcdim > 1 ? PetscRealPart(coords[2 * xcdim + 1] - coords[1 * xcdim + 1]) : 1.;
    PetscCall(DMPlexRestoreCellCoordinates(xdm, xcStart, &isDG, &Nc, &array, &coords));
    PetscCall(PetscObjectQuery((PetscObject)sw, "__vdm__", (PetscObject *)&vdm));
    PetscCall(DMGetCoordinateDim(vdm, &vcdim));
    PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
    PetscCall(DMPlexGetHeightStratum(vdm, 0, &vcStart, &vcEnd));
    PetscCall(DMPlexGetCellCoordinates(vdm, vcStart, &isDG, &Nc, &array, &coords));
    hv[0] = PetscRealPart(coords[1 * vcdim + 0] - coords[0 * vcdim + 0]);
    hv[1] = vcdim > 1 ? PetscRealPart(coords[2 * vcdim + 1] - coords[1 * vcdim + 1]) : 1.;
    PetscCall(DMPlexRestoreCellCoordinates(vdm, vcStart, &isDG, &Nc, &array, &coords));

    PetscCheck(dim == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "Only support 1D distributions at this time");
    xend[0] = xcEnd - xcStart;
    xend[1] = 1;
    vend[0] = vcEnd - vcStart;
    vend[1] = 1;
    if (debug > 1)
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "Phase Grid (%g, %g, %g, %g) (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", (double)PetscRealPart(hx[0]), (double)PetscRealPart(hx[1]), (double)PetscRealPart(hv[0]), (double)PetscRealPart(hv[1]), xend[0], xend[1], vend[0], vend[1]));
  }
  // Iterate over particles in the original swarm
  PetscCall(DMSwarmGetCellDMActive(sw, &celldm));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetField(sw, coordFields[0], NULL, NULL, (void **)&x));
  PetscCall(DMSwarmGetField(sw, "velocity", NULL, NULL, (void **)&v));
  PetscCall(DMSwarmGetField(sw, "w_q", NULL, NULL, (void **)&w));
  PetscCall(DMSwarmGetField(*rsw, "w_q", NULL, NULL, (void **)&rw));
  PetscCall(DMSwarmSortGetAccess(sw));
  PetscCall(DMSwarmSortGetAccess(*rsw));
  PetscCall(DMGetBoundingBox(vdm, vmin, vmax));
  PetscCall(DMGetCoordinatesLocalSetUp(xdm));
  for (PetscInt i = 0; i < Np; ++i) rw[i] = 0.;
  for (PetscInt c = 0; c < xcEnd - xcStart; ++c) {
    PetscInt *pidx, Npc;
    PetscInt *rpidx, rNpc;

    PetscCall(DMSwarmSortGetPointsPerCell(sw, c, &Npc, &pidx));
    for (PetscInt q = 0; q < Npc; ++q) {
      const PetscInt  p  = pidx[q];
      const PetscReal wp = w[p];
      PetscReal       Wx[3], Wv[3];
      PetscInt        xs[3], vs[3];

      // Determine the containing cell
      for (PetscInt d = 0; d < dim; ++d) {
        const PetscReal xp = x[p * dim + d];
        const PetscReal vp = v[p * dim + d];

        xs[d] = PetscFloorReal((xp - xmin[d]) / hx[d]);
        vs[d] = PetscFloorReal((vp - vmin[d]) / hv[d]);
      }
      // Loop over all grid points within 2 spacings of the particle
      if (debug > 2) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Interpolating particle %" PetscInt_FMT " wt %g (%g, %g, %g, %g) (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ")\n", p, (double)wp, (double)PetscRealPart(x[p * dim + 0]), xcdim > 1 ? (double)PetscRealPart(x[p * xcdim + 1]) : 0., (double)PetscRealPart(v[p * vcdim + 0]), vcdim > 1 ? (double)PetscRealPart(v[p * vcdim + 1]) : 0., xs[0], xs[1], vs[0], vs[1]));
      }
      for (PetscInt xi = xs[0] - 1; xi < xs[0] + 3; ++xi) {
        // Treat xi as periodic
        const PetscInt xip = xi < 0 ? xi + xend[0] : (xi >= xend[0] ? xi - xend[0] : xi);
        PetscCall(W_3_Interpolation_Private((xmin[0] + (xi + 0.5) * hx[0] - x[p * dim + 0]) / hx[0], &Wx[0]));
        for (PetscInt xj = PetscMax(xs[1] - 1, 0); xj < PetscMin(xs[1] + 3, xend[1]); ++xj) {
          if (xcdim > 1) PetscCall(W_3_Interpolation_Private((xmin[1] + (xj + 0.5) * hx[1] - x[p * dim + 1]) / hx[1], &Wx[1]));
          else Wx[1] = 1.;
          for (PetscInt vi = PetscMax(vs[0] - 1, 0); vi < PetscMin(vs[0] + 3, vend[0]); ++vi) {
            PetscCall(W_3_Interpolation_Private((vmin[0] + (vi + 0.5) * hv[0] - v[p * dim + 0]) / hv[0], &Wv[0]));
            for (PetscInt vj = PetscMax(vs[1] - 1, 0); vj < PetscMin(vs[1] + 3, vend[1]); ++vj) {
              const PetscInt rc = xip * xend[1] + xj;
              const PetscInt rv = vi * vend[1] + vj;

              PetscCall(DMSwarmSortGetPointsPerCell(*rsw, rc, &rNpc, &rpidx));
              if (vcdim > 1) PetscCall(W_3_Interpolation_Private((vmin[1] + (vj + 0.5) * hv[1] - v[p * dim + 1]) / hv[1], &Wv[1]));
              else Wv[1] = 1.;
              if (debug > 2)
                PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Depositing on particle (%" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ", %" PetscInt_FMT ") w = %g (%g, %g, %g, %g)\n", xi, xj, vi, vj, (double)(wp * Wx[0] * Wx[1] * Wv[0] * Wv[1]), (double)Wx[0], (double)Wx[1], (double)Wv[0], (double)Wv[1]));
              // Add weight to new particles from original particle using interpolation function
              PetscCheck(rNpc == vend[0] * vend[1], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid particle velocity binning");
              const PetscInt rp = rpidx[rv];
              PetscCheck(rp >= 0 && rp < Np, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Particle index %" PetscInt_FMT " not in [0, %" PetscInt_FMT ")", rp, Np);
              rw[rp] += wp * Wx[0] * Wx[1] * Wv[0] * Wv[1];
              if (debug > 2) PetscCall(PetscPrintf(PETSC_COMM_SELF, "  Adding weight %g (%g) to particle %" PetscInt_FMT "\n", (double)(wp * Wx[0] * Wx[1] * Wv[0] * Wv[1]), (double)PetscRealPart(rw[rp]), rp));
              PetscCall(DMSwarmSortRestorePointsPerCell(*rsw, rc, &rNpc, &rpidx));
            }
          }
        }
      }
    }
    PetscCall(DMSwarmSortRestorePointsPerCell(sw, c, &Npc, &pidx));
  }
  PetscCall(DMSwarmSortRestoreAccess(sw));
  PetscCall(DMSwarmSortRestoreAccess(*rsw));
  PetscCall(DMSwarmRestoreField(sw, coordFields[0], NULL, NULL, (void **)&x));
  PetscCall(DMSwarmRestoreField(sw, "velocity", NULL, NULL, (void **)&v));
  PetscCall(DMSwarmRestoreField(sw, "w_q", NULL, NULL, (void **)&w));
  PetscCall(DMSwarmRestoreField(*rsw, "w_q", NULL, NULL, (void **)&rw));

  if (debug) {
    Vec w;

    PetscCall(DMSwarmCreateGlobalVectorFromField(sw, coordFields[0], &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, coordFields[0], &w));
    PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "velocity", &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "velocity", &w));
    PetscCall(DMSwarmCreateGlobalVectorFromField(sw, "w_q", &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, "w_q", &w));
    PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, coordFields[0], &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, coordFields[0], &w));
    PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, "velocity", &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, "velocity", &w));
    PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, "w_q", &w));
    PetscCall(VecViewFromOptions(w, NULL, "-remap_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, "w_q", &w));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
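
// Objective used for the kinetic-energy moment on the remap mesh: integrates u * |v|^2, where the last dim/2
// coordinates of the remap (phase-space) mesh are velocities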

static void f0_v2(PetscInt dim, PetscInt Nf, PetscInt NfAux, const PetscInt uOff[], const PetscInt uOff_x[], const PetscScalar u[], const PetscScalar u_t[], const PetscScalar u_x[], const PetscInt aOff[], const PetscInt aOff_x[], const PetscScalar a[], const PetscScalar a_t[], const PetscScalar a_x[], PetscReal t, const PetscReal x[], PetscInt numConstants, const PetscScalar constants[], PetscScalar f0[])
{
  PetscInt d;

  f0[0] = 0.0;
  for (d = dim / 2; d < dim; ++d) f0[0] += PetscSqr(x[d]) * u[0];
}
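
// PFAK remap: deposit the particle weights onto the remap mesh as rhs = M_p^T w_p, then recover weights for a fresh
// regular particle layout by solving rM_p^T rw = rhs with a transposed KSP solve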

static PetscErrorCode DMSwarmRemap_PFAK_Internal(DM sw, DM *rsw)
{
  DM            xdm, vdm, rdm;
  DMSwarmCellDM rcelldm;
  Mat           M_p, rM_p, rPM_p;
  Vec           w, rw, rhs;
  PetscInt      Nf;
  const char  **fields;

  PetscFunctionBegin;
  // Create a new centroid swarm without weights
  PetscCall(DMSwarmGetCellDM(sw, &xdm));
  PetscCall(DMSwarmSetCellDMActive(sw, "velocity"));
  PetscCall(DMSwarmGetCellDMActive(sw, &rcelldm));
  PetscCall(DMSwarmCellDMGetDM(rcelldm, &vdm));
  PetscCall(DMSwarmDuplicate(sw, rsw));
  // Set remap cell DM
  PetscCall(DMSwarmSetCellDMActive(sw, "remap"));
  PetscCall(DMSwarmGetCellDMActive(sw, &rcelldm));
  PetscCall(DMSwarmCellDMGetFields(rcelldm, &Nf, &fields));
  PetscCheck(Nf == 1, PetscObjectComm((PetscObject)sw), PETSC_ERR_ARG_WRONG, "We only allow a single weight field, not %" PetscInt_FMT, Nf);
  PetscCall(DMSwarmGetCellDM(sw, &rdm));
  PetscCall(DMGetGlobalVector(rdm, &rhs));
  PetscCall(DMSwarmMigrate(sw, PETSC_FALSE)); // Bin particles in remap mesh
  // Compute rhs = M_p^T w_p
  PetscCall(DMCreateMassMatrix(sw, rdm, &M_p));
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, fields[0], &w));
  PetscCall(VecViewFromOptions(w, NULL, "-remap_w_view"));
  PetscCall(MatMultTranspose(M_p, w, rhs));
  PetscCall(VecViewFromOptions(rhs, NULL, "-remap_rhs_view"));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, fields[0], &w));
  PetscCall(MatDestroy(&M_p));
  {
    KSP         ksp;
    Mat         M_f;
    Vec         u_f;
    PetscReal   mom[4];
    PetscInt    cdim;
    const char *prefix;

    PetscCall(DMGetCoordinateDim(rdm, &cdim));
    PetscCall(DMCreateMassMatrix(rdm, rdm, &M_f));
    PetscCall(DMGetGlobalVector(rdm, &u_f));

    PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
    PetscCall(KSPSetOptionsPrefix(ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(ksp, "ptof_"));
    PetscCall(KSPSetFromOptions(ksp));

    PetscCall(KSPSetOperators(ksp, M_f, M_f));
    PetscCall(KSPSolve(ksp, rhs, u_f));
    PetscCall(KSPDestroy(&ksp));
    PetscCall(VecViewFromOptions(u_f, NULL, "-remap_uf_view"));

    PetscCall(DMPlexComputeMoments(rdm, u_f, mom));
    // Energy is not correct since it uses (x^2 + v^2)
    PetscDS     rds;
    PetscScalar rmom;
    void       *ctx;

    PetscCall(DMGetDS(rdm, &rds));
    PetscCall(DMGetApplicationContext(rdm, &ctx));
    PetscCall(PetscDSSetObjective(rds, 0, &f0_v2));
    PetscCall(DMPlexComputeIntegralFEM(rdm, u_f, &rmom, ctx));
    mom[1 + cdim] = PetscRealPart(rmom);

    PetscCall(DMRestoreGlobalVector(rdm, &u_f));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "========== PFAK u_f ==========\n"));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 0: %g\n", (double)mom[0]));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom x: %g\n", (double)mom[1 + 0]));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom v: %g\n", (double)mom[1 + 1]));
    PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 2: %g\n", (double)mom[1 + cdim]));
    PetscCall(MatDestroy(&M_f));
  }
  // Create the remap particle mass matrix rM_p
  PetscInt xcStart, xcEnd, vcStart, vcEnd, cStart, cEnd, r;

  PetscCall(DMSwarmSetCellDMActive(*rsw, "remap"));
  PetscCall(DMPlexGetHeightStratum(xdm, 0, &xcStart, &xcEnd));
  PetscCall(DMPlexGetHeightStratum(vdm, 0, &vcStart, &vcEnd));
  PetscCall(DMPlexGetHeightStratum(rdm, 0, &cStart, &cEnd));
  r = (PetscInt)PetscSqrtReal(((xcEnd - xcStart) * (vcEnd - vcStart)) / (cEnd - cStart));
  PetscCall(InitializeParticles_Regular(*rsw, r));
  PetscCall(DMSwarmMigrate(*rsw, PETSC_FALSE)); // Bin particles in remap mesh
  PetscCall(DMCreateMassMatrix(*rsw, rdm, &rM_p));
  PetscCall(MatViewFromOptions(rM_p, NULL, "-rM_p_view"));
  // Solve rM_p^T rw = rhs for the new particle weights
  {
    KSP         ksp;
    PC          pc;
    const char *prefix;
    PetscBool   isBjacobi;

    PetscCall(KSPCreate(PetscObjectComm((PetscObject)sw), &ksp));
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)sw, &prefix));
    PetscCall(KSPSetOptionsPrefix(ksp, prefix));
    PetscCall(KSPAppendOptionsPrefix(ksp, "ftop_"));
    PetscCall(KSPSetFromOptions(ksp));

    PetscCall(KSPGetPC(ksp, &pc));
    PetscCall(PetscObjectTypeCompare((PetscObject)pc, PCBJACOBI, &isBjacobi));
    if (isBjacobi) {
      PetscCall(DMSwarmCreateMassMatrixSquare(sw, rdm, &rPM_p));
    } else {
      rPM_p = rM_p;
      PetscCall(PetscObjectReference((PetscObject)rPM_p));
    }
    PetscCall(KSPSetOperators(ksp, rM_p, rPM_p));
    PetscCall(DMSwarmCreateGlobalVectorFromField(*rsw, fields[0], &rw));
    PetscCall(KSPSolveTranspose(ksp, rhs, rw));
    PetscCall(VecViewFromOptions(rw, NULL, "-remap_rw_view"));
    PetscCall(DMSwarmDestroyGlobalVectorFromField(*rsw, fields[0], &rw));
    PetscCall(KSPDestroy(&ksp));
    PetscCall(MatDestroy(&rPM_p));
    PetscCall(MatDestroy(&rM_p));
  }
  PetscCall(DMRestoreGlobalVector(rdm, &rhs));

  // Restore original cell DM
  PetscCall(DMSwarmSetCellDMActive(sw, "space"));
  PetscCall(DMSwarmSetCellDMActive(*rsw, "space"));
  PetscCall(DMSwarmMigrate(*rsw, PETSC_FALSE)); // Bin particles in spatial mesh
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSwarmRemapMonitor_Internal(DM sw, DM rsw)
{
  PetscReal mom[4], rmom[4];
  PetscInt  cdim;

  PetscFunctionBegin;
  PetscCall(DMGetCoordinateDim(sw, &cdim));
  PetscCall(DMSwarmComputeMoments(sw, "velocity", "w_q", mom));
  PetscCall(DMSwarmComputeMoments(rsw, "velocity", "w_q", rmom));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "========== Remapped ==========\n"));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 0: %g --> %g\n", (double)mom[0], (double)rmom[0]));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 1: %g --> %g\n", (double)mom[1], (double)rmom[1]));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "Mom 2: %g --> %g\n", (double)mom[1 + cdim], (double)rmom[1 + cdim]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMSwarmRemap - Project the swarm fields onto a new set of particles

  Collective

  Input Parameter:
. sw - The `DMSWARM` object

  Level: beginner

.seealso: [](ch_dmbase), `DMSWARM`, `DMSwarmMigrate()`, `DMSwarmCreate()`
@*/
PetscErrorCode DMSwarmRemap(DM sw)
{
  DM_Swarm *swarm = (DM_Swarm *)sw->data;
  DM        rsw;

  PetscFunctionBegin;
  switch (swarm->remap_type) {
  case DMSWARM_REMAP_NONE:
    PetscFunctionReturn(PETSC_SUCCESS);
  case DMSWARM_REMAP_COLELLA:
    PetscCall(DMSwarmRemap_Colella_Internal(sw, &rsw));
    break;
  case DMSWARM_REMAP_PFAK:
    PetscCall(DMSwarmRemap_PFAK_Internal(sw, &rsw));
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "No remap algorithm %s", DMSwarmRemapTypeNames[swarm->remap_type]);
  }
  PetscCall(DMSwarmRemapMonitor_Internal(sw, rsw));
  PetscCall(DMSwarmReplace(sw, &rsw));
  PetscFunctionReturn(PETSC_SUCCESS);
}