Actual source code: characteristic.c

  1: #include <petsc/private/characteristicimpl.h>
  2: #include <petscdmda.h>
  3: #include <petscviewer.h>

  5: PetscClassId  CHARACTERISTIC_CLASSID;
  6: PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
  7: PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
  8: PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
  9: /*
 10:    Contains the list of registered characteristic routines
 11: */
 12: PetscFunctionList CharacteristicList              = NULL;
 13: PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

 15: static PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
 16: static PetscMPIInt    DMDAGetNeighborRelative(DM, PetscReal, PetscReal);

 18: static PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
 19: static PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

 21: static PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
 22: {
 23:   PetscBool iascii;

 25:   PetscFunctionBegin;
 27:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
 29:   PetscCheckSameComm(c, 1, viewer, 2);

 31:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 32:   if (!iascii) PetscTryTypeMethod(c, view, viewer);
 33:   PetscFunctionReturn(PETSC_SUCCESS);
 34: }

 36: /*@
 37:   CharacteristicDestroy - Destroys a `Characteristic` context created with `CharacteristicCreate()`

 39:   Collective

 41:   Input Parameter:
 42: . c - the `Characteristic` context

 44:   Level: beginner

 46: .seealso: `Characteristic`, `CharacteristicCreate()`
 47: @*/
 48: PetscErrorCode CharacteristicDestroy(Characteristic *c)
 49: {
 50:   PetscFunctionBegin;
 51:   if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
 53:   if (--((PetscObject)*c)->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

 55:   PetscTryTypeMethod(*c, destroy);
 56:   PetscCallMPI(MPI_Type_free(&(*c)->itemType));
 57:   PetscCall(PetscFree((*c)->queue));
 58:   PetscCall(PetscFree((*c)->queueLocal));
 59:   PetscCall(PetscFree((*c)->queueRemote));
 60:   PetscCall(PetscFree((*c)->neighbors));
 61:   PetscCall(PetscFree((*c)->needCount));
 62:   PetscCall(PetscFree((*c)->localOffsets));
 63:   PetscCall(PetscFree((*c)->fillCount));
 64:   PetscCall(PetscFree((*c)->remoteOffsets));
 65:   PetscCall(PetscFree((*c)->request));
 66:   PetscCall(PetscFree((*c)->status));
 67:   PetscCall(PetscHeaderDestroy(c));
 68:   PetscFunctionReturn(PETSC_SUCCESS);
 69: }

 71: /*@
 72:   CharacteristicCreate - Creates a `Characteristic` context for use with the Method of Characteristics

 74:   Collective

 76:   Input Parameter:
 77: . comm - MPI communicator

 79:   Output Parameter:
 80: . c - the `Characteristic` context

 82:   Level: beginner
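
  Example Usage:
  A minimal sketch of the lifecycle (error checking with `PetscCall()`, all other setup omitted):
.vb
    Characteristic c;

    PetscCall(CharacteristicCreate(PETSC_COMM_WORLD, &c));
    PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));
    PetscCall(CharacteristicDestroy(&c));
.ve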

 84: .seealso: `Characteristic`, `CharacteristicDestroy()`
 85: @*/
 86: PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
 87: {
 88:   Characteristic newC;

 90:   PetscFunctionBegin;
 91:   PetscAssertPointer(c, 2);
 92:   *c = NULL;
 93:   PetscCall(CharacteristicInitializePackage());

 95:   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
 96:   *c = newC;

 98:   newC->structured          = PETSC_TRUE;
 99:   newC->numIds              = 0;
100:   newC->velocityDA          = NULL;
101:   newC->velocity            = NULL;
102:   newC->velocityOld         = NULL;
103:   newC->numVelocityComp     = 0;
104:   newC->velocityComp        = NULL;
105:   newC->velocityInterp      = NULL;
106:   newC->velocityInterpLocal = NULL;
107:   newC->velocityCtx         = NULL;
108:   newC->fieldDA             = NULL;
109:   newC->field               = NULL;
110:   newC->numFieldComp        = 0;
111:   newC->fieldComp           = NULL;
112:   newC->fieldInterp         = NULL;
113:   newC->fieldInterpLocal    = NULL;
114:   newC->fieldCtx            = NULL;
115:   newC->itemType            = 0;
116:   newC->queue               = NULL;
117:   newC->queueSize           = 0;
118:   newC->queueMax            = 0;
119:   newC->queueLocal          = NULL;
120:   newC->queueLocalSize      = 0;
121:   newC->queueLocalMax       = 0;
122:   newC->queueRemote         = NULL;
123:   newC->queueRemoteSize     = 0;
124:   newC->queueRemoteMax      = 0;
125:   newC->numNeighbors        = 0;
126:   newC->neighbors           = NULL;
127:   newC->needCount           = NULL;
128:   newC->localOffsets        = NULL;
129:   newC->fillCount           = NULL;
130:   newC->remoteOffsets       = NULL;
131:   newC->request             = NULL;
132:   newC->status              = NULL;
133:   PetscFunctionReturn(PETSC_SUCCESS);
134: }

136: /*@
137:   CharacteristicSetType - Builds the `Characteristic` for a particular solver.

139:   Logically Collective

141:   Input Parameters:
142: + c    - the method of characteristics context
143: - type - a known method

145:   Options Database Key:
146: . -characteristic_type <method> - Sets the method; use -help for a list
147:     of available methods

149:   Level: intermediate

151:   Note:
152:   See "include/petsccharacteristic.h" for available methods
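
  Example Usage:
  A minimal sketch, setting the DMDA-based type that `CharacteristicSetUp()` would otherwise choose by default:
.vb
    PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));
.ve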

154: .seealso: [](ch_ts), `CharacteristicType`
155: @*/
156: PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
157: {
158:   PetscBool match;
159:   PetscErrorCode (*r)(Characteristic);

161:   PetscFunctionBegin;
163:   PetscAssertPointer(type, 2);

165:   PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
166:   if (match) PetscFunctionReturn(PETSC_SUCCESS);

168:   if (c->data) {
169:     /* destroy the old private Characteristic context */
170:     PetscUseTypeMethod(c, destroy);
171:     c->ops->destroy = NULL;
172:     c->data         = NULL;
173:   }

175:   PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
176:   PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
177:   c->setupcalled = 0;
178:   PetscCall((*r)(c));
179:   PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
180:   PetscFunctionReturn(PETSC_SUCCESS);
181: }

183: /*@
184:   CharacteristicSetUp - Sets up the internal data structures for the
185:   later use of a `Characteristic`.

187:   Collective

189:   Input Parameter:
190: . c - context obtained from `CharacteristicCreate()`

192:   Level: developer

194: .seealso: [](ch_ts), `Characteristic`, `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
195: @*/
196: PetscErrorCode CharacteristicSetUp(Characteristic c)
197: {
198:   PetscFunctionBegin;

201:   if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

203:   if (c->setupcalled == 2) PetscFunctionReturn(PETSC_SUCCESS);

205:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
206:   if (!c->setupcalled) PetscUseTypeMethod(c, setup);
207:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
208:   c->setupcalled = 2;
209:   PetscFunctionReturn(PETSC_SUCCESS);
210: }

212: /*@C
213:   CharacteristicRegister - Adds an approach to the method of characteristics package.

215:   Not Collective, No Fortran Support

217:   Input Parameters:
218: + sname    - name of a new approach
219: - function - routine to create method context

221:   Level: advanced

223:   Example Usage:
224: .vb
225:     CharacteristicRegister("my_char", MyCharCreate);
226: .ve

228:   Then, your Characteristic type can be chosen with the procedural interface via
229: .vb
230:     CharacteristicCreate(comm, &c);
231:     CharacteristicSetType(c, "my_char");
232: .ve
233:   or at runtime via the option
234: .vb
235:     -characteristic_type my_char
236: .ve

238:   Note:
239:   `CharacteristicRegister()` may be called multiple times to add several approaches.

241: .seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
242: @*/
243: PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
244: {
245:   PetscFunctionBegin;
246:   PetscCall(CharacteristicInitializePackage());
247:   PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
248:   PetscFunctionReturn(PETSC_SUCCESS);
249: }
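
/*
  CharacteristicSetVelocityInterpolation - Records the velocity fields at the current and previous
  time levels, the components to interpolate, and a global interpolation routine (called with the
  global velocity Vec) together with its user context.
*/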

251: PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
252: {
253:   PetscFunctionBegin;
254:   c->velocityDA      = da;
255:   c->velocity        = v;
256:   c->velocityOld     = vOld;
257:   c->numVelocityComp = numComponents;
258:   c->velocityComp    = components;
259:   c->velocityInterp  = interp;
260:   c->velocityCtx     = ctx;
261:   PetscFunctionReturn(PETSC_SUCCESS);
262: }
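
/*
  CharacteristicSetVelocityInterpolationLocal - Same as CharacteristicSetVelocityInterpolation(),
  except that the interpolation routine is called with a ghosted local array (obtained with
  DMDAVecGetArray()) rather than the global Vec.
*/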

264: PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
265: {
266:   PetscFunctionBegin;
267:   c->velocityDA          = da;
268:   c->velocity            = v;
269:   c->velocityOld         = vOld;
270:   c->numVelocityComp     = numComponents;
271:   c->velocityComp        = components;
272:   c->velocityInterpLocal = interp;
273:   c->velocityCtx         = ctx;
274:   PetscFunctionReturn(PETSC_SUCCESS);
275: }
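
/*
  CharacteristicSetFieldInterpolation - Records the field to be advected, the components to
  interpolate, and a global interpolation routine (called with the global field Vec) together
  with its user context.
*/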

277: PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
278: {
279:   PetscFunctionBegin;
280: #if 0
281:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
282: #endif
283:   c->fieldDA      = da;
284:   c->field        = v;
285:   c->numFieldComp = numComponents;
286:   c->fieldComp    = components;
287:   c->fieldInterp  = interp;
288:   c->fieldCtx     = ctx;
289:   PetscFunctionReturn(PETSC_SUCCESS);
290: }
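
/*
  CharacteristicSetFieldInterpolationLocal - Same as CharacteristicSetFieldInterpolation(), except
  that the interpolation routine is called with a ghosted local array (obtained with
  DMDAVecGetArray()) rather than the global Vec.
*/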

292: PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
293: {
294:   PetscFunctionBegin;
295: #if 0
296:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
297: #endif
298:   c->fieldDA          = da;
299:   c->field            = v;
300:   c->numFieldComp     = numComponents;
301:   c->fieldComp        = components;
302:   c->fieldInterpLocal = interp;
303:   c->fieldCtx         = ctx;
304:   PetscFunctionReturn(PETSC_SUCCESS);
305: }

307: /*@
308:   CharacteristicSolve - Applies the Method of Characteristics solver

310:   Collective

312:   Input Parameters:
313: + c        - context obtained from `CharacteristicCreate()`
314: . dt       - the time-step
315: - solution - vector in which the advected field values are returned

317:   Level: developer
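
  Example Usage:
  A minimal sketch; the `DM`s, `Vec`s, timestep, and component arrays are assumed to be created by
  the caller, and `MyVelocityInterpLocal` and `MyFieldInterpLocal` stand for user-provided
  interpolation routines:
.vb
    Characteristic c;

    PetscCall(CharacteristicCreate(PETSC_COMM_WORLD, &c));
    PetscCall(CharacteristicSetVelocityInterpolationLocal(c, velocityDA, velocity, velocityOld, 2, velocityComp, MyVelocityInterpLocal, NULL));
    PetscCall(CharacteristicSetFieldInterpolationLocal(c, fieldDA, field, 1, fieldComp, MyFieldInterpLocal, NULL));
    PetscCall(CharacteristicSolve(c, dt, solution));
    PetscCall(CharacteristicDestroy(&c));
.ve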

319: .seealso: [](ch_ts), `Characteristic`, `CharacteristicCreate()`, `CharacteristicDestroy()`
320: @*/
321: PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
322: {
323:   CharacteristicPointDA2D Qi;
324:   DM                      da = c->velocityDA;
325:   Vec                     velocityLocal, velocityLocalOld;
326:   Vec                     fieldLocal;
327:   DMDALocalInfo           info;
328:   PetscScalar           **solArray;
329:   void                   *velocityArray;
330:   void                   *velocityArrayOld;
331:   void                   *fieldArray;
332:   PetscScalar            *interpIndices;
333:   PetscScalar            *velocityValues, *velocityValuesOld;
334:   PetscScalar            *fieldValues;
335:   PetscMPIInt             rank;
336:   PetscInt                dim;
337:   PetscMPIInt             neighbors[9];
338:   PetscInt                dof;
339:   PetscInt                gx, gy;
340:   PetscInt                n, is, ie, js, je, comp;

342:   PetscFunctionBegin;
343:   c->queueSize = 0;
344:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
345:   PetscCall(DMDAGetNeighborsRank(da, neighbors));
346:   PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
347:   PetscCall(CharacteristicSetUp(c));
348:   /* global and local grid info */
349:   PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
350:   PetscCall(DMDAGetLocalInfo(da, &info));
351:   is = info.xs;
352:   ie = info.xs + info.xm;
353:   js = info.ys;
354:   je = info.ys + info.ym;
355:   /* Allocation */
356:   PetscCall(PetscMalloc1(dim, &interpIndices));
357:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
358:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
359:   PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
360:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));
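
  /*
     This is a two-step semi-Lagrangian update: each grid point is first traced back a half step,
     the current and previous velocities are averaged at that midpoint position, and the averaged
     velocity is then used to trace back the full step and interpolate the field at the departure
     point.
  */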

362:   /*
363:      PART 1, AT t-dt/2
364:     */
365:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
366:   /* GET POSITION AT HALF TIME IN THE PAST */
367:   if (c->velocityInterpLocal) {
368:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
369:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
370:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
371:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
372:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
373:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
374:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
375:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
376:   }
377:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
378:   for (Qi.j = js; Qi.j < je; Qi.j++) {
379:     for (Qi.i = is; Qi.i < ie; Qi.i++) {
380:       interpIndices[0] = Qi.i;
381:       interpIndices[1] = Qi.j;
382:       if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
383:       else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
384:       Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
385:       Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

387:       /* Determine whether the position at t - dt/2 is local */
388:       Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

390:       /* Check for Periodic boundaries and move all periodic points back onto the domain */
391:       PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));
392:       PetscCall(CharacteristicAddPoint(c, &Qi));
393:     }
394:   }
395:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

397:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
398:   PetscCall(CharacteristicSendCoordinatesBegin(c));
399:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

401:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
402:   /* Calculate velocity at t_{n-1/2} (local values) */
403:   PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
404:   for (n = 0; n < c->queueSize; n++) {
405:     Qi = c->queue[n];
406:     if (c->neighbors[Qi.proc] == rank) {
407:       interpIndices[0] = Qi.x;
408:       interpIndices[1] = Qi.y;
409:       if (c->velocityInterpLocal) {
410:         PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
411:         PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
412:       } else {
413:         PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
414:         PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
415:       }
416:       Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
417:       Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
418:     }
419:     c->queue[n] = Qi;
420:   }
421:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

423:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
424:   PetscCall(CharacteristicSendCoordinatesEnd(c));
425:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

427:   /* Calculate velocity at t_{n-1/2} (fill remote requests) */
428:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
429:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
430:   for (n = 0; n < c->queueRemoteSize; n++) {
431:     Qi               = c->queueRemote[n];
432:     interpIndices[0] = Qi.x;
433:     interpIndices[1] = Qi.y;
434:     if (c->velocityInterpLocal) {
435:       PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
436:       PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
437:     } else {
438:       PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
439:       PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
440:     }
441:     Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
442:     Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
443:     c->queueRemote[n] = Qi;
444:   }
445:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
446:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
447:   PetscCall(CharacteristicGetValuesBegin(c));
448:   PetscCall(CharacteristicGetValuesEnd(c));
449:   if (c->velocityInterpLocal) {
450:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
451:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
452:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
453:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
454:   }
455:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

457:   /*
458:      PART 2, AT t-dt
459:   */

461:   /* GET POSITION AT t_n (local values) */
462:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
463:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
464:   for (n = 0; n < c->queueSize; n++) {
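    /* Qi.x and Qi.y hold the averaged half-step velocity computed in PART 1 */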
465:     Qi   = c->queue[n];
466:     Qi.x = Qi.i - Qi.x * dt;
467:     Qi.y = Qi.j - Qi.y * dt;

469:     /* Determine whether the position at t-dt is local */
470:     Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

472:     /* Check for Periodic boundaries and move all periodic points back onto the domain */
473:     PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));

475:     c->queue[n] = Qi;
476:   }
477:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

479:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
480:   PetscCall(CharacteristicSendCoordinatesBegin(c));
481:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

483:   /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
484:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
485:   if (c->fieldInterpLocal) {
486:     PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
487:     PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
488:     PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
489:     PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
490:   }
491:   PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
492:   for (n = 0; n < c->queueSize; n++) {
493:     if (c->neighbors[c->queue[n].proc] == rank) {
494:       interpIndices[0] = c->queue[n].x;
495:       interpIndices[1] = c->queue[n].y;
496:       if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
497:       else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
498:       for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
499:     }
500:   }
501:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

503:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
504:   PetscCall(CharacteristicSendCoordinatesEnd(c));
505:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

507:   /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
508:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
509:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
510:   for (n = 0; n < c->queueRemoteSize; n++) {
511:     interpIndices[0] = c->queueRemote[n].x;
512:     interpIndices[1] = c->queueRemote[n].y;

514:     /* for debugging purposes */
515:     if (1) { /* hacked bounds test...let's do better */
516:       PetscScalar im = interpIndices[0];
517:       PetscScalar jm = interpIndices[1];

519:       PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
520:     }

522:     if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
523:     else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
524:     for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
525:   }
526:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

528:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
529:   PetscCall(CharacteristicGetValuesBegin(c));
530:   PetscCall(CharacteristicGetValuesEnd(c));
531:   if (c->fieldInterpLocal) {
532:     PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
533:     PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
534:   }
535:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

537:   /* Deposit the field values interpolated at the departure points back onto the grid */
538:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
539:   PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
540:   PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
541:   for (n = 0; n < c->queueSize; n++) {
542:     Qi = c->queue[n];
543:     for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
544:   }
545:   PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
546:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
547:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

549:   /* Cleanup */
550:   PetscCall(PetscFree(interpIndices));
551:   PetscCall(PetscFree(velocityValues));
552:   PetscCall(PetscFree(velocityValuesOld));
553:   PetscCall(PetscFree(fieldValues));
554:   PetscFunctionReturn(PETSC_SUCCESS);
555: }
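
/*
  CharacteristicSetNeighbors - Stores a copy of the MPI ranks of the neighboring subdomains
  (self first, then the surrounding processes, see DMDAGetNeighborsRank()) used to route
  queued points to their owners.
*/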

557: PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
558: {
559:   PetscFunctionBegin;
560:   PetscCall(PetscMPIIntCast(numNeighbors, &c->numNeighbors));
561:   PetscCall(PetscFree(c->neighbors));
562:   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
563:   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
564:   PetscFunctionReturn(PETSC_SUCCESS);
565: }
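
/*
  CharacteristicAddPoint - Appends a point to the queue, erroring if the preallocated maximum
  queue size would be exceeded.
*/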

567: PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
568: {
569:   PetscFunctionBegin;
570:   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
571:   c->queue[c->queueSize++] = *point;
572:   PetscFunctionReturn(PETSC_SUCCESS);
573: }
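
/*
  CharacteristicSendCoordinatesBegin - Sorts the queue by owning neighbor, exchanges request
  counts with the neighbors, allocates the remote queue, and starts the exchange of point
  coordinates: receives are posted for the points other processes need interpolated here, and
  this process's nonlocal points are sent to their owners.
*/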

575: PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
576: {
577:   PetscMPIInt rank, tag = 121;
578:   PetscInt    i, n;

580:   PetscFunctionBegin;
581:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
582:   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
583:   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
584:   for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
585:   c->fillCount[0] = 0;
586:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&c->fillCount[n], 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
587:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&c->needCount[n], 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
588:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
589:   /* Initialize the remote queue */
590:   c->queueLocalMax = c->localOffsets[0] = 0;
591:   c->queueRemoteMax = c->remoteOffsets[0] = 0;
592:   for (n = 1; n < c->numNeighbors; n++) {
593:     c->remoteOffsets[n] = c->queueRemoteMax;
594:     c->queueRemoteMax += c->fillCount[n];
595:     c->localOffsets[n] = c->queueLocalMax;
596:     c->queueLocalMax += c->needCount[n];
597:   }
598:   /* HACK BEGIN: the queue starts with the needCount[0] points this process keeps for itself, so shift the send offsets past them and send nothing to self */
599:   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
600:   c->needCount[0] = 0;
601:   /* HACK END */
602:   if (c->queueRemoteMax) {
603:     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
604:   } else c->queueRemote = NULL;
605:   c->queueRemoteSize = c->queueRemoteMax;

607:   /* Send and receive requests for values, giving the coordinates for interpolation */
608:   for (n = 1; n < c->numNeighbors; n++) {
609:     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
610:     PetscCallMPI(MPIU_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
611:   }
612:   for (n = 1; n < c->numNeighbors; n++) {
613:     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
614:     PetscCallMPI(MPIU_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
615:   }
616:   PetscFunctionReturn(PETSC_SUCCESS);
617: }
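
/*
  CharacteristicSendCoordinatesEnd - Completes the receives of point coordinates posted in
  CharacteristicSendCoordinatesBegin().
*/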

619: PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
620: {
621: #if 0
622:   PetscMPIInt rank;
623:   PetscInt    n;
624: #endif

626:   PetscFunctionBegin;
627:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
628: #if 0
629:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
630:   for (n = 0; n < c->queueRemoteSize; n++) {
631:     PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
632:   }
633: #endif
634:   PetscFunctionReturn(PETSC_SUCCESS);
635: }

637: PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
638: {
639:   PetscMPIInt tag = 121;
640:   PetscInt    n;

642:   PetscFunctionBegin;
643:   /* SEND AND RECEIVE FILLED REQUESTS for interpolated values */
644:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
645:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
646:   PetscFunctionReturn(PETSC_SUCCESS);
647: }
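
/*
  CharacteristicGetValuesEnd - Completes the receives of the filled requests posted in
  CharacteristicGetValuesBegin(), then frees the remote queue.
*/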

649: PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
650: {
651:   PetscFunctionBegin;
652:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
653:   /* Free queue of requests from other procs */
654:   PetscCall(PetscFree(c->queueRemote));
655:   PetscFunctionReturn(PETSC_SUCCESS);
656: }
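
/* Sorts the queue in increasing order of the owning neighbor index (proc), so that points bound
   for the same neighbor are contiguous */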

658: /*
659:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
660: */
661: static PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
662: {
663:   CharacteristicPointDA2D temp;
664:   PetscInt                n;

666:   PetscFunctionBegin;
667:   if (0) { /* Check the order of the queue before sorting */
668:     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
669:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
670:   }

672:   /* SORTING PHASE */
673:   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
674:   for (n = size - 1; n >= 1; n--) {
675:     temp     = queue[0];
676:     queue[0] = queue[n];
677:     queue[n] = temp;
678:     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
679:   }
680:   if (0) { /* Check the order of the queue after sorting */
681:     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
682:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
683:   }
684:   PetscFunctionReturn(PETSC_SUCCESS);
685: }

687: /*
688:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
689: */
690: static PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
691: {
692:   PetscBool               done = PETSC_FALSE;
693:   PetscInt                maxChild;
694:   CharacteristicPointDA2D temp;

696:   PetscFunctionBegin;
697:   while ((root * 2 <= bottom) && (!done)) {
698:     if (root * 2 == bottom) maxChild = root * 2;
699:     else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
700:     else maxChild = root * 2 + 1;

702:     if (queue[root].proc < queue[maxChild].proc) {
703:       temp            = queue[root];
704:       queue[root]     = queue[maxChild];
705:       queue[maxChild] = temp;
706:       root            = maxChild;
707:     } else done = PETSC_TRUE;
708:   }
709:   PetscFunctionReturn(PETSC_SUCCESS);
710: }

712: /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
713: static PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
714: {
715:   DMBoundaryType bx, by;
716:   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
717:   MPI_Comm       comm;
718:   PetscMPIInt    rank;
719:   PetscMPIInt  **procs, pi, pj, pim, pip, pjm, pjp, PIi, PJi;
720:   PetscInt       PI, PJ;

722:   PetscFunctionBegin;
723:   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
724:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
725:   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));
726:   PetscCall(PetscMPIIntCast(PI, &PIi));
727:   PetscCall(PetscMPIIntCast(PJ, &PJi));
728:   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
729:   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

731:   neighbors[0] = rank;
732:   rank         = 0;
733:   PetscCall(PetscMalloc1(PJ, &procs));
734:   for (pj = 0; pj < PJ; pj++) {
735:     PetscCall(PetscMalloc1(PI, &procs[pj]));
736:     for (pi = 0; pi < PI; pi++) {
737:       procs[pj][pi] = rank;
738:       rank++;
739:     }
740:   }

742:   pi  = neighbors[0] % PI;
743:   pj  = neighbors[0] / PI;
744:   pim = pi - 1;
745:   if (pim < 0) pim = PIi - 1;
746:   pip = (pi + 1) % PIi;
747:   pjm = pj - 1;
748:   if (pjm < 0) pjm = PJi - 1;
749:   pjp = (pj + 1) % PJi;

751:   neighbors[1] = procs[pj][pim];
752:   neighbors[2] = procs[pjp][pim];
753:   neighbors[3] = procs[pjp][pi];
754:   neighbors[4] = procs[pjp][pip];
755:   neighbors[5] = procs[pj][pip];
756:   neighbors[6] = procs[pjm][pip];
757:   neighbors[7] = procs[pjm][pi];
758:   neighbors[8] = procs[pjm][pim];

760:   if (!IPeriodic) {
761:     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
762:     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
763:   }

765:   if (!JPeriodic) {
766:     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
767:     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
768:   }

770:   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
771:   PetscCall(PetscFree(procs));
772:   PetscFunctionReturn(PETSC_SUCCESS);
773: }

775: /*
776:   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
777:     2 | 3 | 4
778:     __|___|__
779:     1 | 0 | 5
780:     __|___|__
781:     8 | 7 | 6
782:       |   |
783: */
784: static PetscMPIInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
785: {
786:   DMDALocalInfo info;
787:   PetscReal     is, ie, js, je;

789:   PetscCallAbort(PETSC_COMM_SELF, DMDAGetLocalInfo(da, &info));
790:   is = (PetscReal)info.xs - 0.5;
791:   ie = (PetscReal)info.xs + info.xm - 0.5;
792:   js = (PetscReal)info.ys - 0.5;
793:   je = (PetscReal)info.ys + info.ym - 0.5;

795:   if (ir >= is && ir <= ie) { /* center column */
796:     if (jr >= js && jr <= je) return 0;
797:     else if (jr < js) return 7;
798:     else return 3;
799:   } else if (ir < is) { /* left column */
800:     if (jr >= js && jr <= je) return 1;
801:     else if (jr < js) return 8;
802:     else return 2;
803:   } else { /* right column */
804:     if (jr >= js && jr <= je) return 5;
805:     else if (jr < js) return 6;
806:     else return 4;
807:   }
808: }