Actual source code: characteristic.c

  1: #include <petsc/private/characteristicimpl.h>
  2: #include <petscdmda.h>
  3: #include <petscviewer.h>

  5: PetscClassId  CHARACTERISTIC_CLASSID;
  6: PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
  7: PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
  8: PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
  9: /*
 10:    Contains the list of registered characteristic routines
 11: */
 12: PetscFunctionList CharacteristicList              = NULL;
 13: PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

 15: static PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
 16: static PetscMPIInt    DMDAGetNeighborRelative(DM, PetscReal, PetscReal);

 18: static PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
 19: static PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

 21: static PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
 22: {
 23:   PetscBool iascii;

 25:   PetscFunctionBegin;
 27:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
 29:   PetscCheckSameComm(c, 1, viewer, 2);

 31:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 32:   if (!iascii) PetscTryTypeMethod(c, view, viewer);
 33:   PetscFunctionReturn(PETSC_SUCCESS);
 34: }

 36: PetscErrorCode CharacteristicDestroy(Characteristic *c)
 37: {
 38:   PetscFunctionBegin;
 39:   if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
 41:   if (--((PetscObject)*c)->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

 43:   PetscTryTypeMethod(*c, destroy);
 44:   PetscCallMPI(MPI_Type_free(&(*c)->itemType));
 45:   PetscCall(PetscFree((*c)->queue));
 46:   PetscCall(PetscFree((*c)->queueLocal));
 47:   PetscCall(PetscFree((*c)->queueRemote));
 48:   PetscCall(PetscFree((*c)->neighbors));
 49:   PetscCall(PetscFree((*c)->needCount));
 50:   PetscCall(PetscFree((*c)->localOffsets));
 51:   PetscCall(PetscFree((*c)->fillCount));
 52:   PetscCall(PetscFree((*c)->remoteOffsets));
 53:   PetscCall(PetscFree((*c)->request));
 54:   PetscCall(PetscFree((*c)->status));
 55:   PetscCall(PetscHeaderDestroy(c));
 56:   PetscFunctionReturn(PETSC_SUCCESS);
 57: }

 59: PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
 60: {
 61:   Characteristic newC;

 63:   PetscFunctionBegin;
 64:   PetscAssertPointer(c, 2);
 65:   *c = NULL;
 66:   PetscCall(CharacteristicInitializePackage());

 68:   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
 69:   *c = newC;

 71:   newC->structured          = PETSC_TRUE;
 72:   newC->numIds              = 0;
 73:   newC->velocityDA          = NULL;
 74:   newC->velocity            = NULL;
 75:   newC->velocityOld         = NULL;
 76:   newC->numVelocityComp     = 0;
 77:   newC->velocityComp        = NULL;
 78:   newC->velocityInterp      = NULL;
 79:   newC->velocityInterpLocal = NULL;
 80:   newC->velocityCtx         = NULL;
 81:   newC->fieldDA             = NULL;
 82:   newC->field               = NULL;
 83:   newC->numFieldComp        = 0;
 84:   newC->fieldComp           = NULL;
 85:   newC->fieldInterp         = NULL;
 86:   newC->fieldInterpLocal    = NULL;
 87:   newC->fieldCtx            = NULL;
 88:   newC->itemType            = 0;
 89:   newC->queue               = NULL;
 90:   newC->queueSize           = 0;
 91:   newC->queueMax            = 0;
 92:   newC->queueLocal          = NULL;
 93:   newC->queueLocalSize      = 0;
 94:   newC->queueLocalMax       = 0;
 95:   newC->queueRemote         = NULL;
 96:   newC->queueRemoteSize     = 0;
 97:   newC->queueRemoteMax      = 0;
 98:   newC->numNeighbors        = 0;
 99:   newC->neighbors           = NULL;
100:   newC->needCount           = NULL;
101:   newC->localOffsets        = NULL;
102:   newC->fillCount           = NULL;
103:   newC->remoteOffsets       = NULL;
104:   newC->request             = NULL;
105:   newC->status              = NULL;
106:   PetscFunctionReturn(PETSC_SUCCESS);
107: }
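
/*
   A minimal lifecycle sketch for the routines above. The helper name
   ExampleLifecycle is hypothetical; it assumes the DA-based implementation
   CHARACTERISTICDA declared in petsccharacteristic.h and a communicator owned
   by the caller.
*/
static PetscErrorCode ExampleLifecycle(MPI_Comm comm)
{
  Characteristic c;

  PetscFunctionBegin;
  PetscCall(CharacteristicCreate(comm, &c));
  PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));
  /* ... set velocity and field interpolation, then call CharacteristicSolve() ... */
  PetscCall(CharacteristicDestroy(&c));
  PetscFunctionReturn(PETSC_SUCCESS);
}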

109: /*@
110:   CharacteristicSetType - Builds Characteristic for a particular solver.

112:   Logically Collective

114:   Input Parameters:
115: + c    - the method of characteristics context
116: - type - a known method

118:   Options Database Key:
119: . -characteristic_type <method> - Sets the method; use -help for a list
120:     of available methods

122:   Level: intermediate

124:   Notes:
125:   See "include/petsccharacteristic.h" for available methods

127:   Normally, it is best to use CharacteristicSetFromOptions() and then
128:   set the Characteristic type from the options database rather than by using
129:   this routine.  Using the options database provides the user with
130:   maximum flexibility in evaluating the many different available methods.
131:   The CharacteristicSetType() routine is provided for those situations where it
132:   is necessary to set the solver independently of the command
133:   line or options database.  This might be the case, for example, when
134:   the choice of solver changes during the execution of the
135:   program, and the user's application is taking responsibility for
136:   choosing the appropriate method.  In other words, this routine is
137:   not for beginners.

139: .seealso: [](ch_ts), `CharacteristicType`
140: @*/
141: PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
142: {
143:   PetscBool match;
144:   PetscErrorCode (*r)(Characteristic);

146:   PetscFunctionBegin;
148:   PetscAssertPointer(type, 2);

150:   PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
151:   if (match) PetscFunctionReturn(PETSC_SUCCESS);

153:   if (c->data) {
154:     /* destroy the old private Characteristic context */
155:     PetscUseTypeMethod(c, destroy);
156:     c->ops->destroy = NULL;
157:     c->data         = NULL;
158:   }

160:   PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
161:   PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
162:   c->setupcalled = 0;
163:   PetscCall((*r)(c));
164:   PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
165:   PetscFunctionReturn(PETSC_SUCCESS);
166: }

168: /*@
169:   CharacteristicSetUp - Sets up the internal data structures for the
170:   later use of the method of characteristics solver.

172:   Collective

174:   Input Parameter:
175: . c - the method of characteristics context obtained from CharacteristicCreate()

177:   Level: developer

179: .seealso: [](ch_ts), `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
180: @*/
181: PetscErrorCode CharacteristicSetUp(Characteristic c)
182: {
183:   PetscFunctionBegin;

186:   if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

188:   if (c->setupcalled == 2) PetscFunctionReturn(PETSC_SUCCESS);

190:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
191:   if (!c->setupcalled) PetscUseTypeMethod(c, setup);
192:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
193:   c->setupcalled = 2;
194:   PetscFunctionReturn(PETSC_SUCCESS);
195: }

197: /*@C
198:   CharacteristicRegister -  Adds a solver to the method of characteristics package.

200:   Not Collective, No Fortran Support

202:   Input Parameters:
203: + sname    - name of a new user-defined solver
204: - function - routine to create method context

206:   Level: advanced

208:   Example Usage:
209: .vb
210:     CharacteristicRegister("my_char", MyCharCreate);
211: .ve

213:   Then, your Characteristic type can be chosen with the procedural interface via
214: .vb
215:     CharacteristicCreate(MPI_Comm comm, Characteristic *c);
216:     CharacteristicSetType(c, "my_char");
217: .ve
218:   or at runtime via the option
219: .vb
220:     -characteristic_type my_char
221: .ve

223:   Notes:
224:   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

226: .seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
227: @*/
228: PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
229: {
230:   PetscFunctionBegin;
231:   PetscCall(CharacteristicInitializePackage());
232:   PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
233:   PetscFunctionReturn(PETSC_SUCCESS);
234: }

236: PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
237: {
238:   PetscFunctionBegin;
239:   c->velocityDA      = da;
240:   c->velocity        = v;
241:   c->velocityOld     = vOld;
242:   c->numVelocityComp = numComponents;
243:   c->velocityComp    = components;
244:   c->velocityInterp  = interp;
245:   c->velocityCtx     = ctx;
246:   PetscFunctionReturn(PETSC_SUCCESS);
247: }

249: PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
250: {
251:   PetscFunctionBegin;
252:   c->velocityDA          = da;
253:   c->velocity            = v;
254:   c->velocityOld         = vOld;
255:   c->numVelocityComp     = numComponents;
256:   c->velocityComp        = components;
257:   c->velocityInterpLocal = interp;
258:   c->velocityCtx         = ctx;
259:   PetscFunctionReturn(PETSC_SUCCESS);
260: }
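
/*
   A sketch of a matching velocityInterpLocal callback: bilinear interpolation
   on the interleaved array produced by DMDAVecGetArray() for a 2D DMDA. The
   name BilinearInterpLocal and the assumption that ctx points at the number of
   interleaved components (dof) are illustrative; clamping or periodic wrapping
   of i+1 and j+1 at the ghosted patch boundary is omitted.
*/
static PetscErrorCode BilinearInterpLocal(void *f, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **a   = (PetscScalar **)f; /* a[j][i*dof + comp] layout from DMDAVecGetArray() */
  PetscInt      dof = *(PetscInt *)ctx;  /* assumption: ctx carries the interleaved dof */
  PetscInt      i   = (PetscInt)PetscFloorReal(ij[0]);
  PetscInt      j   = (PetscInt)PetscFloorReal(ij[1]);
  PetscReal     di  = ij[0] - i, dj = ij[1] - j;

  PetscFunctionBegin;
  for (PetscInt c = 0; c < numComp; c++) {
    PetscInt k = components[c];

    /* weight the four surrounding nodes by their bilinear hat functions */
    values[c] = (1. - di) * (1. - dj) * a[j][i * dof + k] + di * (1. - dj) * a[j][(i + 1) * dof + k] + (1. - di) * dj * a[j + 1][i * dof + k] + di * dj * a[j + 1][(i + 1) * dof + k];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}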

262: PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
263: {
264:   PetscFunctionBegin;
265: #if 0
266:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
267: #endif
268:   c->fieldDA      = da;
269:   c->field        = v;
270:   c->numFieldComp = numComponents;
271:   c->fieldComp    = components;
272:   c->fieldInterp  = interp;
273:   c->fieldCtx     = ctx;
274:   PetscFunctionReturn(PETSC_SUCCESS);
275: }

277: PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
278: {
279:   PetscFunctionBegin;
280: #if 0
281:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
282: #endif
283:   c->fieldDA          = da;
284:   c->field            = v;
285:   c->numFieldComp     = numComponents;
286:   c->fieldComp        = components;
287:   c->fieldInterpLocal = interp;
288:   c->fieldCtx         = ctx;
289:   PetscFunctionReturn(PETSC_SUCCESS);
290: }
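
/*
   How the setters above are typically wired before a solve. This is a sketch
   under illustrative assumptions: a single DMDA with dof = 3, components 0,1
   holding the velocity and component 2 the advected field, plus the
   BilinearInterpLocal example callback from above; other layouts (separate
   velocity and field DMDAs) work the same way.
*/
static PetscErrorCode ExampleSolveStep(Characteristic c, DM da, Vec sol, Vec solOld, Vec solNew, PetscReal dt)
{
  PetscInt velComp[2] = {0, 1}, fieldComp[1] = {2}; /* dof 0,1 = velocity, dof 2 = advected field */
  PetscInt dof = 3;

  PetscFunctionBegin;
  PetscCall(CharacteristicSetVelocityInterpolationLocal(c, da, sol, solOld, 2, velComp, BilinearInterpLocal, &dof));
  PetscCall(CharacteristicSetFieldInterpolationLocal(c, da, solOld, 1, fieldComp, BilinearInterpLocal, &dof));
  PetscCall(CharacteristicSolve(c, dt, solNew));
  PetscFunctionReturn(PETSC_SUCCESS);
}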

292: PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
293: {
294:   CharacteristicPointDA2D Qi;
295:   DM                      da = c->velocityDA;
296:   Vec                     velocityLocal, velocityLocalOld;
297:   Vec                     fieldLocal;
298:   DMDALocalInfo           info;
299:   PetscScalar           **solArray;
300:   void                   *velocityArray;
301:   void                   *velocityArrayOld;
302:   void                   *fieldArray;
303:   PetscScalar            *interpIndices;
304:   PetscScalar            *velocityValues, *velocityValuesOld;
305:   PetscScalar            *fieldValues;
306:   PetscMPIInt             rank;
307:   PetscInt                dim;
308:   PetscMPIInt             neighbors[9];
309:   PetscInt                dof;
310:   PetscInt                gx, gy;
311:   PetscInt                n, is, ie, js, je, comp;

313:   PetscFunctionBegin;
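  /*
     In outline, this is a two-step semi-Lagrangian update: for each grid point
     x_i the characteristic is traced backwards,

       x_{n-1/2}  = x_i - (dt/2) v_n(x_i)                          (PART 1)
       v_{n-1/2}  = ( v_n(x_{n-1/2}) + v_{n-1}(x_{n-1/2}) ) / 2
       x_{n-1}    = x_i - dt v_{n-1/2}                             (PART 2)
       phi_n(x_i) = phi_{n-1}(x_{n-1})

     and departure points that leave the local subdomain are queued and filled
     through the neighbor exchange routines below.
  */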
314:   c->queueSize = 0;
315:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
316:   PetscCall(DMDAGetNeighborsRank(da, neighbors));
317:   PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
318:   PetscCall(CharacteristicSetUp(c));
319:   /* global and local grid info */
320:   PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
321:   PetscCall(DMDAGetLocalInfo(da, &info));
322:   is = info.xs;
323:   ie = info.xs + info.xm;
324:   js = info.ys;
325:   je = info.ys + info.ym;
326:   /* Allocation */
327:   PetscCall(PetscMalloc1(dim, &interpIndices));
328:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
329:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
330:   PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
331:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

333:   /*
334:      PART 1, AT t-dt/2
335:   */
336:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
337:   /* GET POSITION AT HALF TIME IN THE PAST */
338:   if (c->velocityInterpLocal) {
339:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
340:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
341:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
342:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
343:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
344:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
345:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
346:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
347:   }
348:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
349:   for (Qi.j = js; Qi.j < je; Qi.j++) {
350:     for (Qi.i = is; Qi.i < ie; Qi.i++) {
351:       interpIndices[0] = Qi.i;
352:       interpIndices[1] = Qi.j;
353:       if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
354:       else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
355:       Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
356:       Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

358:       /* Determine whether the position at t - dt/2 is local */
359:       Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

361:       /* Check for Periodic boundaries and move all periodic points back onto the domain */
362:       PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));
363:       PetscCall(CharacteristicAddPoint(c, &Qi));
364:     }
365:   }
366:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

368:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
369:   PetscCall(CharacteristicSendCoordinatesBegin(c));
370:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

372:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
373:   /* Calculate velocity at t_{n-1/2} (local values) */
374:   PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
375:   for (n = 0; n < c->queueSize; n++) {
376:     Qi = c->queue[n];
377:     if (c->neighbors[Qi.proc] == rank) {
378:       interpIndices[0] = Qi.x;
379:       interpIndices[1] = Qi.y;
380:       if (c->velocityInterpLocal) {
381:         PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
382:         PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
383:       } else {
384:         PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
385:         PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
386:       }
387:       Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
388:       Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
389:     }
390:     c->queue[n] = Qi;
391:   }
392:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

394:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
395:   PetscCall(CharacteristicSendCoordinatesEnd(c));
396:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

398:   /* Calculate velocity at t_{n-1/2} (fill remote requests) */
399:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
400:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
401:   for (n = 0; n < c->queueRemoteSize; n++) {
402:     Qi               = c->queueRemote[n];
403:     interpIndices[0] = Qi.x;
404:     interpIndices[1] = Qi.y;
405:     if (c->velocityInterpLocal) {
406:       PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
407:       PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
408:     } else {
409:       PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
410:       PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
411:     }
412:     Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
413:     Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
414:     c->queueRemote[n] = Qi;
415:   }
416:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
417:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
418:   PetscCall(CharacteristicGetValuesBegin(c));
419:   PetscCall(CharacteristicGetValuesEnd(c));
420:   if (c->velocityInterpLocal) {
421:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
422:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
423:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
424:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
425:   }
426:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

428:   /*
429:      PART 2, AT t-dt
430:   */

432:   /* GET POSITION AT t_{n - 1} (local values) */
433:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
434:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1}\n"));
435:   for (n = 0; n < c->queueSize; n++) {
436:     Qi   = c->queue[n];
437:     Qi.x = Qi.i - Qi.x * dt;
438:     Qi.y = Qi.j - Qi.y * dt;

440:     /* Determine whether the position at t-dt is local */
441:     Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

443:     /* Check for Periodic boundaries and move all periodic points back onto the domain */
444:     PetscCall(DMDAMapCoordsToPeriodicDomain(da, &Qi.x, &Qi.y));

446:     c->queue[n] = Qi;
447:   }
448:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

450:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
451:   PetscCall(CharacteristicSendCoordinatesBegin(c));
452:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

454:   /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
455:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
456:   if (c->fieldInterpLocal) {
457:     PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
458:     PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
459:     PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
460:     PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
461:   }
462:   PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
463:   for (n = 0; n < c->queueSize; n++) {
464:     if (c->neighbors[c->queue[n].proc] == rank) {
465:       interpIndices[0] = c->queue[n].x;
466:       interpIndices[1] = c->queue[n].y;
467:       if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
468:       else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
469:       for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
470:     }
471:   }
472:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

474:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
475:   PetscCall(CharacteristicSendCoordinatesEnd(c));
476:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

478:   /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
479:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
480:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
481:   for (n = 0; n < c->queueRemoteSize; n++) {
482:     interpIndices[0] = c->queueRemote[n].x;
483:     interpIndices[1] = c->queueRemote[n].y;

485:     /* for debugging purposes */
486:     if (1) { /* hacked bounds test...let's do better */
487:       PetscScalar im = interpIndices[0];
488:       PetscScalar jm = interpIndices[1];

490:       PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
491:     }

493:     if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
494:     else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
495:     for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
496:   }
497:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

499:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
500:   PetscCall(CharacteristicGetValuesBegin(c));
501:   PetscCall(CharacteristicGetValuesEnd(c));
502:   if (c->fieldInterpLocal) {
503:     PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
504:     PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
505:   }
506:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

508:   /* Return field of characteristics at t_{n-1} */
509:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
510:   PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
511:   PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
512:   for (n = 0; n < c->queueSize; n++) {
513:     Qi = c->queue[n];
514:     for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
515:   }
516:   PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
517:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
518:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

520:   /* Cleanup */
521:   PetscCall(PetscFree(interpIndices));
522:   PetscCall(PetscFree(velocityValues));
523:   PetscCall(PetscFree(velocityValuesOld));
524:   PetscCall(PetscFree(fieldValues));
525:   PetscFunctionReturn(PETSC_SUCCESS);
526: }

528: PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
529: {
530:   PetscFunctionBegin;
531:   c->numNeighbors = (PetscMPIInt)numNeighbors;
532:   PetscCall(PetscFree(c->neighbors));
533:   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
534:   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
535:   PetscFunctionReturn(PETSC_SUCCESS);
536: }

538: PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
539: {
540:   PetscFunctionBegin;
541:   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
542:   c->queue[c->queueSize++] = *point;
543:   PetscFunctionReturn(PETSC_SUCCESS);
544: }

546: PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
547: {
548:   PetscMPIInt rank, tag = 121;
549:   PetscInt    i, n;

551:   PetscFunctionBegin;
552:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
553:   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
554:   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
555:   for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
556:   c->fillCount[0] = 0;
557:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&c->fillCount[n], 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
558:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&c->needCount[n], 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
559:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
560:   /* Initialize the remote queue */
561:   c->queueLocalMax = c->localOffsets[0] = 0;
562:   c->queueRemoteMax = c->remoteOffsets[0] = 0;
563:   for (n = 1; n < c->numNeighbors; n++) {
564:     c->remoteOffsets[n] = c->queueRemoteMax;
565:     c->queueRemoteMax += c->fillCount[n];
566:     c->localOffsets[n] = c->queueLocalMax;
567:     c->queueLocalMax += c->needCount[n];
568:   }
569:   /* HACK BEGIN */
570:   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
571:   c->needCount[0] = 0;
572:   /* HACK END */
573:   if (c->queueRemoteMax) {
574:     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
575:   } else c->queueRemote = NULL;
576:   c->queueRemoteSize = c->queueRemoteMax;

578:   /* Send and receive requests for remote values, giving the coordinates for interpolation */
579:   for (n = 1; n < c->numNeighbors; n++) {
580:     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
581:     PetscCallMPI(MPIU_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
582:   }
583:   for (n = 1; n < c->numNeighbors; n++) {
584:     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
585:     PetscCallMPI(MPIU_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
586:   }
587:   PetscFunctionReturn(PETSC_SUCCESS);
588: }
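
/*
   The routine above uses a two-phase pattern: neighbors first trade counts,
   then trade payloads sized by those counts. A stripped-down sketch of the
   same pattern for a single neighbor (the helper name, the plain
   MPI_INT/MPI_DOUBLE types, and the fixed tag are illustrative):
*/
static PetscErrorCode ExampleTwoPhaseExchange(MPI_Comm comm, PetscMPIInt nbr, int nSend, const double *sendBuf)
{
  PetscMPIInt tag   = 121;
  int         nRecv = 0;
  double     *recvBuf;
  MPI_Request req;

  PetscFunctionBegin;
  /* Phase 1: exchange counts so each side can size its receive buffer */
  PetscCallMPI(MPI_Irecv(&nRecv, 1, MPI_INT, nbr, tag, comm, &req));
  PetscCallMPI(MPI_Send(&nSend, 1, MPI_INT, nbr, tag, comm));
  PetscCallMPI(MPI_Wait(&req, MPI_STATUS_IGNORE));
  /* Phase 2: exchange the payloads themselves */
  PetscCall(PetscMalloc1(nRecv, &recvBuf));
  PetscCallMPI(MPI_Irecv(recvBuf, nRecv, MPI_DOUBLE, nbr, tag, comm, &req));
  PetscCallMPI(MPI_Send(sendBuf, nSend, MPI_DOUBLE, nbr, tag, comm));
  PetscCallMPI(MPI_Wait(&req, MPI_STATUS_IGNORE));
  /* ... consume recvBuf, e.g. interpolate and send the results back ... */
  PetscCall(PetscFree(recvBuf));
  PetscFunctionReturn(PETSC_SUCCESS);
}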

590: PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
591: {
592: #if 0
593:   PetscMPIInt rank;
594:   PetscInt    n;
595: #endif

597:   PetscFunctionBegin;
598:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
599: #if 0
600:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
601:   for (n = 0; n < c->queueRemoteSize; n++) {
602:     PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
603:   }
604: #endif
605:   PetscFunctionReturn(PETSC_SUCCESS);
606: }

608: PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
609: {
610:   PetscMPIInt tag = 121;
611:   PetscInt    n;

613:   PetscFunctionBegin;
614:   /* SEND AND RECEIVE FILLED REQUESTS for the interpolated values (velocities at t_{n-1/2} or field values at t_{n-1}) */
615:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &c->request[n - 1]));
616:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPIU_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
617:   PetscFunctionReturn(PETSC_SUCCESS);
618: }

620: PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
621: {
622:   PetscFunctionBegin;
623:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
624:   /* Free queue of requests from other procs */
625:   PetscCall(PetscFree(c->queueRemote));
626:   PetscFunctionReturn(PETSC_SUCCESS);
627: }

629: /*
630:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
631: */
632: static PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
633: {
634:   CharacteristicPointDA2D temp;
635:   PetscInt                n;

637:   PetscFunctionBegin;
638:   if (0) { /* Check the order of the queue before sorting */
639:     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
640:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
641:   }

643:   /* SORTING PHASE */
644:   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
645:   for (n = size - 1; n >= 1; n--) {
646:     temp     = queue[0];
647:     queue[0] = queue[n];
648:     queue[n] = temp;
649:     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
650:   }
651:   if (0) { /* Check the order of the queue after sorting */
652:     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
653:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
654:   }
655:   PetscFunctionReturn(PETSC_SUCCESS);
656: }

658: /*
659:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
660: */
661: static PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
662: {
663:   PetscBool               done = PETSC_FALSE;
664:   PetscInt                maxChild;
665:   CharacteristicPointDA2D temp;

667:   PetscFunctionBegin;
668:   while ((root * 2 <= bottom) && (!done)) {
669:     if (root * 2 == bottom) maxChild = root * 2;
670:     else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
671:     else maxChild = root * 2 + 1;

673:     if (queue[root].proc < queue[maxChild].proc) {
674:       temp            = queue[root];
675:       queue[root]     = queue[maxChild];
676:       queue[maxChild] = temp;
677:       root            = maxChild;
678:     } else done = PETSC_TRUE;
679:   }
680:   PetscFunctionReturn(PETSC_SUCCESS);
681: }
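
/*
   A worked example of the two routines above: if four queued points carry
   target procs [3, 0, 2, 1], building the max-heap and repeatedly swapping the
   root to the back sorts the queue into proc order [0, 1, 2, 3]. Points bound
   for the same neighbor then sit contiguously, which is what lets
   CharacteristicSendCoordinatesBegin() address each neighbor's block as
   needCount[n] items starting at localOffsets[n].
*/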

683: /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
684: static PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
685: {
686:   DMBoundaryType bx, by;
687:   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
688:   MPI_Comm       comm;
689:   PetscMPIInt    rank;
690:   PetscMPIInt  **procs, pi, pj, pim, pip, pjm, pjp;
691:   PetscInt       PI, PJ;

693:   PetscFunctionBegin;
694:   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
695:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
696:   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));

698:   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
699:   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

701:   neighbors[0] = rank;
702:   rank         = 0;
703:   PetscCall(PetscMalloc1(PJ, &procs));
704:   for (pj = 0; pj < PJ; pj++) {
705:     PetscCall(PetscMalloc1(PI, &procs[pj]));
706:     for (pi = 0; pi < PI; pi++) {
707:       procs[pj][pi] = rank;
708:       rank++;
709:     }
710:   }

712:   pi  = neighbors[0] % PI;
713:   pj  = neighbors[0] / PI;
714:   pim = pi - 1;
715:   if (pim < 0) pim = (PetscMPIInt)PI - 1;
716:   pip = (pi + 1) % (PetscMPIInt)PI;
717:   pjm = pj - 1;
718:   if (pjm < 0) pjm = (PetscMPIInt)PJ - 1;
719:   pjp = (pj + 1) % (PetscMPIInt)PJ;

721:   neighbors[1] = procs[pj][pim];
722:   neighbors[2] = procs[pjp][pim];
723:   neighbors[3] = procs[pjp][pi];
724:   neighbors[4] = procs[pjp][pip];
725:   neighbors[5] = procs[pj][pip];
726:   neighbors[6] = procs[pjm][pip];
727:   neighbors[7] = procs[pjm][pi];
728:   neighbors[8] = procs[pjm][pim];

730:   if (!IPeriodic) {
731:     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
732:     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
733:   }

735:   if (!JPeriodic) {
736:     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
737:     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
738:   }

740:   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
741:   PetscCall(PetscFree(procs));
742:   PetscFunctionReturn(PETSC_SUCCESS);
743: }
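
/*
   A worked example for the routine above: on a non-periodic 3 x 2 process grid
   (PI = 3, PJ = 2) the ranks are laid out row-major,

     3 4 5
     0 1 2

   and rank 4 obtains neighbors [4, 3, 4, 4, 4, 5, 2, 1, 0]: left is 3, right
   is 5, the row below is 0, 1, 2, and the three entries above the grid fall
   back to the rank itself.
*/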

745: /*
746:   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
747:     2 | 3 | 4
748:     __|___|__
749:     1 | 0 | 5
750:     __|___|__
751:     8 | 7 | 6
752:       |   |
753: */
754: static PetscMPIInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
755: {
756:   DMDALocalInfo info;
757:   PetscReal     is, ie, js, je;

759:   PetscCallAbort(PETSC_COMM_SELF, DMDAGetLocalInfo(da, &info));
760:   is = (PetscReal)info.xs - 0.5;
761:   ie = (PetscReal)info.xs + info.xm - 0.5;
762:   js = (PetscReal)info.ys - 0.5;
763:   je = (PetscReal)info.ys + info.ym - 0.5;

765:   if (ir >= is && ir <= ie) { /* center column */
766:     if (jr >= js && jr <= je) return 0;
767:     else if (jr < js) return 7;
768:     else return 3;
769:   } else if (ir < is) { /* left column */
770:     if (jr >= js && jr <= je) return 1;
771:     else if (jr < js) return 8;
772:     else return 2;
773:   } else { /* right column */
774:     if (jr >= js && jr <= je) return 5;
775:     else if (jr < js) return 6;
776:     else return 4;
777:   }
778: }
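
/*
   A worked example: with a local patch xs = 0, xm = 10, ys = 0, ym = 10 the
   owned interval is [-0.5, 9.5] in each direction, so the point
   (ir, jr) = (12.0, -1.0) falls in the right column and below the patch, and
   the routine returns 6, the bottom-right entry of the map above.
*/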