Actual source code: characteristic.c


  2: #include <petsc/private/characteristicimpl.h>
  3: #include <petscdmda.h>
  4: #include <petscviewer.h>

  6: PetscClassId  CHARACTERISTIC_CLASSID;
  7: PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
  8: PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
  9: PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
 10: /*
 11:    Contains the list of registered characteristic routines
 12: */
 13: PetscFunctionList CharacteristicList              = NULL;
 14: PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

 16: PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
 17: PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
 18: PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar[]);

 20: PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
 21: PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);

 23: PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
 24: {
 25:   PetscBool iascii;

 27:   PetscFunctionBegin;
 28:   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
 29:   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
 30:   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
 31:   PetscCheckSameComm(c, 1, viewer, 2);

 33:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
 34:   if (!iascii) PetscTryTypeMethod(c, view, viewer);
 35:   PetscFunctionReturn(PETSC_SUCCESS);
 36: }

 38: PetscErrorCode CharacteristicDestroy(Characteristic *c)
 39: {
 40:   PetscFunctionBegin;
 41:   if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
 42:   PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
 43:   if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

 45:   if ((*c)->ops->destroy) PetscCall((*(*c)->ops->destroy)((*c)));
 46:   PetscCallMPI(MPI_Type_free(&(*c)->itemType));
 47:   PetscCall(PetscFree((*c)->queue));
 48:   PetscCall(PetscFree((*c)->queueLocal));
 49:   PetscCall(PetscFree((*c)->queueRemote));
 50:   PetscCall(PetscFree((*c)->neighbors));
 51:   PetscCall(PetscFree((*c)->needCount));
 52:   PetscCall(PetscFree((*c)->localOffsets));
 53:   PetscCall(PetscFree((*c)->fillCount));
 54:   PetscCall(PetscFree((*c)->remoteOffsets));
 55:   PetscCall(PetscFree((*c)->request));
 56:   PetscCall(PetscFree((*c)->status));
 57:   PetscCall(PetscHeaderDestroy(c));
 58:   PetscFunctionReturn(PETSC_SUCCESS);
 59: }

 61: PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
 62: {
 63:   Characteristic newC;

 65:   PetscFunctionBegin;
 66:   PetscAssertPointer(c, 2);
 67:   *c = NULL;
 68:   PetscCall(CharacteristicInitializePackage());

 70:   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
 71:   *c = newC;

 73:   newC->structured          = PETSC_TRUE;
 74:   newC->numIds              = 0;
 75:   newC->velocityDA          = NULL;
 76:   newC->velocity            = NULL;
 77:   newC->velocityOld         = NULL;
 78:   newC->numVelocityComp     = 0;
 79:   newC->velocityComp        = NULL;
 80:   newC->velocityInterp      = NULL;
 81:   newC->velocityInterpLocal = NULL;
 82:   newC->velocityCtx         = NULL;
 83:   newC->fieldDA             = NULL;
 84:   newC->field               = NULL;
 85:   newC->numFieldComp        = 0;
 86:   newC->fieldComp           = NULL;
 87:   newC->fieldInterp         = NULL;
 88:   newC->fieldInterpLocal    = NULL;
 89:   newC->fieldCtx            = NULL;
 90:   newC->itemType            = 0;
 91:   newC->queue               = NULL;
 92:   newC->queueSize           = 0;
 93:   newC->queueMax            = 0;
 94:   newC->queueLocal          = NULL;
 95:   newC->queueLocalSize      = 0;
 96:   newC->queueLocalMax       = 0;
 97:   newC->queueRemote         = NULL;
 98:   newC->queueRemoteSize     = 0;
 99:   newC->queueRemoteMax      = 0;
100:   newC->numNeighbors        = 0;
101:   newC->neighbors           = NULL;
102:   newC->needCount           = NULL;
103:   newC->localOffsets        = NULL;
104:   newC->fillCount           = NULL;
105:   newC->remoteOffsets       = NULL;
106:   newC->request             = NULL;
107:   newC->status              = NULL;
108:   PetscFunctionReturn(PETSC_SUCCESS);
109: }

111: /*@C
112:    CharacteristicSetType - Builds Characteristic for a particular solver.

114:    Logically Collective

116:    Input Parameters:
117: +  c    - the method of characteristics context
118: -  type - a known method

120:    Options Database Key:
121: .  -characteristic_type <method> - Sets the method; use -help for a list
122:     of available methods

124:   Level: intermediate

126:    Notes:
127:    See "include/petsccharacteristic.h" for available methods

129:   Normally, it is best to use the CharacteristicSetFromOptions() command and
130:   then set the Characteristic type from the options database rather than by using
131:   this routine.  Using the options database provides the user with
132:   maximum flexibility in evaluating the many different available methods.
133:   The CharacteristicSetType() routine is provided for those situations where it
134:   is necessary to set the solver type independently of the command
135:   line or options database.  This might be the case, for example, when
136:   the choice of solver changes during the execution of the
137:   program, and the user's application is taking responsibility for
138:   choosing the appropriate method.  In other words, this routine is
139:   not for beginners.

141: .seealso: [](ch_ts), `CharacteristicType`
142: @*/
143: PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
144: {
145:   PetscBool match;
146:   PetscErrorCode (*r)(Characteristic);

148:   PetscFunctionBegin;
149:   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
150:   PetscAssertPointer(type, 2);

152:   PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
153:   if (match) PetscFunctionReturn(PETSC_SUCCESS);

155:   if (c->data) {
156:     /* destroy the old private Characteristic context */
157:     PetscUseTypeMethod(c, destroy);
158:     c->ops->destroy = NULL;
159:     c->data         = NULL;
160:   }

162:   PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
163:   PetscCheck(r, PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
164:   c->setupcalled = 0;
165:   PetscCall((*r)(c));
166:   PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
167:   PetscFunctionReturn(PETSC_SUCCESS);
168: }
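/* Added sketch (not part of characteristic.c): a minimal end-to-end use of the
   API in this file. The DMs, Vecs, time step, and the two interpolation
   callbacks are assumed to be supplied by the caller; only the Characteristic
   calls themselves come from this file. */
static PetscErrorCode CharacteristicUsageSketch(DM velDA, Vec vel, Vec velOld, DM fieldDA, Vec field, Vec solution, PetscReal dt, PetscErrorCode (*velInterp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), PetscErrorCode (*fieldInterp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *))
{
  Characteristic ch;
  PetscInt       velComp[2]   = {0, 1}; /* advect with velocity components 0 and 1 */
  PetscInt       fieldComp[1] = {0};    /* update field component 0 */

  PetscFunctionBegin;
  PetscCall(CharacteristicCreate(PetscObjectComm((PetscObject)velDA), &ch));
  PetscCall(CharacteristicSetType(ch, CHARACTERISTICDA));
  PetscCall(CharacteristicSetVelocityInterpolationLocal(ch, velDA, vel, velOld, 2, velComp, velInterp, NULL));
  PetscCall(CharacteristicSetFieldInterpolationLocal(ch, fieldDA, field, 1, fieldComp, fieldInterp, NULL));
  PetscCall(CharacteristicSolve(ch, dt, solution)); /* solution receives the field advected along the characteristics */
  PetscCall(CharacteristicDestroy(&ch));
  PetscFunctionReturn(PETSC_SUCCESS);
}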

170: /*@
171:    CharacteristicSetUp - Sets up the internal data structures for the
172:    later use of the method of characteristics solver.

174:    Collective

176:    Input Parameter:
177: .  c - the method of characteristics context obtained from CharacteristicCreate()

179:    Level: developer

181: .seealso: [](ch_ts), `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
182: @*/
183: PetscErrorCode CharacteristicSetUp(Characteristic c)
184: {
185:   PetscFunctionBegin;
186:   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);

188:   if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

190:   if (c->setupcalled == 2) PetscFunctionReturn(PETSC_SUCCESS);

192:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
193:   if (!c->setupcalled) PetscUseTypeMethod(c, setup);
194:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
195:   c->setupcalled = 2;
196:   PetscFunctionReturn(PETSC_SUCCESS);
197: }

199: /*@C
200:   CharacteristicRegister -  Adds a solver to the method of characteristics package.

202:    Not Collective

204:    Input Parameters:
205: +  sname - name of a new user-defined solver
206: -  function - routine to create method context

208:   Level: advanced

210:   Sample usage:
211: .vb
212:     CharacteristicRegister("my_char", MyCharCreate);
213: .ve

215:   Then, your Characteristic type can be chosen with the procedural interface via
216: .vb
217:     CharacteristicCreate(MPI_Comm comm, Characteristic *c);
218:     CharacteristicSetType(c, "my_char");
219: .ve
220:    or at runtime via the option
221: .vb
222:     -characteristic_type my_char
223: .ve

225:    Notes:
226:    CharacteristicRegister() may be called multiple times to add several user-defined solvers.

228: .seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
229: @*/
230: PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
231: {
232:   PetscFunctionBegin;
233:   PetscCall(CharacteristicInitializePackage());
234:   PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
235:   PetscFunctionReturn(PETSC_SUCCESS);
236: }
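/* Added sketch (hypothetical, expanding the sample usage above): a registered
   creation routine only has to install the type's methods and private data.
   The members set here (data, ops->setup, ops->destroy) are exactly the ones
   this file later exercises via CharacteristicSetType() and
   PetscUseTypeMethod(). */
static PetscErrorCode CharacteristicSetUp_MyChar(Characteristic c)
{
  PetscFunctionBegin;
  /* allocate queues, MPI datatypes, etc. for this implementation */
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MyCharCreate(Characteristic c)
{
  PetscFunctionBegin;
  c->data         = NULL; /* a private context would be PetscNew()'d here */
  c->ops->setup   = CharacteristicSetUp_MyChar;
  c->ops->destroy = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}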

238: PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
239: {
240:   PetscFunctionBegin;
241:   c->velocityDA      = da;
242:   c->velocity        = v;
243:   c->velocityOld     = vOld;
244:   c->numVelocityComp = numComponents;
245:   c->velocityComp    = components;
246:   c->velocityInterp  = interp;
247:   c->velocityCtx     = ctx;
248:   PetscFunctionReturn(PETSC_SUCCESS);
249: }

251: PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
252: {
253:   PetscFunctionBegin;
254:   c->velocityDA          = da;
255:   c->velocity            = v;
256:   c->velocityOld         = vOld;
257:   c->numVelocityComp     = numComponents;
258:   c->velocityComp        = components;
259:   c->velocityInterpLocal = interp;
260:   c->velocityCtx         = ctx;
261:   PetscFunctionReturn(PETSC_SUCCESS);
262: }
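/* Added sketch: one possible shape for the local interpolation callback
   registered above (the name and the nearest-node rule are illustrative, not
   from this file). CharacteristicSolve() hands the callback the pointer it
   gets from DMDAVecGetArray() on the ghosted local vector, so the
   [j][i * dof + comp] indexing below mirrors the solArray access in
   CharacteristicSolve(); passing the DMDA block size through ctx is an
   assumption about the caller, since this file never inspects ctx. A real
   interpolant would use bilinear weights instead of rounding. */
static PetscErrorCode VelocityInterpNearest2D(void *f, PetscReal coords[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **a   = (PetscScalar **)f;
  PetscInt      dof = *(PetscInt *)ctx; /* block size of the velocity DMDA, supplied by the user */
  PetscInt      i   = (PetscInt)(coords[0] + 0.5), j = (PetscInt)(coords[1] + 0.5), comp;

  PetscFunctionBegin;
  for (comp = 0; comp < numComp; comp++) values[comp] = a[j][i * dof + components[comp]];
  PetscFunctionReturn(PETSC_SUCCESS);
}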

264: PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
265: {
266:   PetscFunctionBegin;
267: #if 0
268:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
269: #endif
270:   c->fieldDA      = da;
271:   c->field        = v;
272:   c->numFieldComp = numComponents;
273:   c->fieldComp    = components;
274:   c->fieldInterp  = interp;
275:   c->fieldCtx     = ctx;
276:   PetscFunctionReturn(PETSC_SUCCESS);
277: }

279: PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
280: {
281:   PetscFunctionBegin;
282: #if 0
283:   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
284: #endif
285:   c->fieldDA          = da;
286:   c->field            = v;
287:   c->numFieldComp     = numComponents;
288:   c->fieldComp        = components;
289:   c->fieldInterpLocal = interp;
290:   c->fieldCtx         = ctx;
291:   PetscFunctionReturn(PETSC_SUCCESS);
292: }

294: PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
295: {
296:   CharacteristicPointDA2D Qi;
297:   DM                      da = c->velocityDA;
298:   Vec                     velocityLocal, velocityLocalOld;
299:   Vec                     fieldLocal;
300:   DMDALocalInfo           info;
301:   PetscScalar           **solArray;
302:   void                   *velocityArray;
303:   void                   *velocityArrayOld;
304:   void                   *fieldArray;
305:   PetscScalar            *interpIndices;
306:   PetscScalar            *velocityValues, *velocityValuesOld;
307:   PetscScalar            *fieldValues;
308:   PetscMPIInt             rank;
309:   PetscInt                dim;
310:   PetscMPIInt             neighbors[9];
311:   PetscInt                dof;
312:   PetscInt                gx, gy;
313:   PetscInt                n, is, ie, js, je, comp;

315:   PetscFunctionBegin;
316:   c->queueSize = 0;
317:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
318:   PetscCall(DMDAGetNeighborsRank(da, neighbors));
319:   PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
320:   PetscCall(CharacteristicSetUp(c));
321:   /* global and local grid info */
322:   PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
323:   PetscCall(DMDAGetLocalInfo(da, &info));
324:   is = info.xs;
325:   ie = info.xs + info.xm;
326:   js = info.ys;
327:   je = info.ys + info.ym;
328:   /* Allocation */
329:   PetscCall(PetscMalloc1(dim, &interpIndices));
330:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
331:   PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
332:   PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
333:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));
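  /* Added commentary: this is a two-stage semi-Lagrangian update. Part 1
     traces each grid point back half a time step and evaluates the
     time-averaged velocity there; Part 2 uses that midpoint velocity to
     locate the foot of the characteristic a full step back and interpolates
     the field there. Each part does its local work first, ships points that
     landed on other processes to their owners, and gathers the filled values
     back. */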

335:   /* -----------------------------------------------------------------------
336:      PART 1, AT t-dt/2
337:      -----------------------------------------------------------------------*/
338:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
339:   /* GET POSITION AT HALF TIME IN THE PAST */
340:   if (c->velocityInterpLocal) {
341:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
342:     PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
343:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
344:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
345:     PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
346:     PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
347:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
348:     PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
349:   }
350:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
351:   for (Qi.j = js; Qi.j < je; Qi.j++) {
352:     for (Qi.i = is; Qi.i < ie; Qi.i++) {
353:       interpIndices[0] = Qi.i;
354:       interpIndices[1] = Qi.j;
355:       if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
356:       else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
357:       Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
358:       Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

360:       /* Determine whether the position at t - dt/2 is local */
361:       Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

363:       /* Check for Periodic boundaries and move all periodic points back onto the domain */
364:       PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));
365:       PetscCall(CharacteristicAddPoint(c, &Qi));
366:     }
367:   }
368:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

370:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
371:   PetscCall(CharacteristicSendCoordinatesBegin(c));
372:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

374:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
375:   /* Calculate velocity at t_{n+1/2} (local values) */
376:   PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
377:   for (n = 0; n < c->queueSize; n++) {
378:     Qi = c->queue[n];
379:     if (c->neighbors[Qi.proc] == rank) {
380:       interpIndices[0] = Qi.x;
381:       interpIndices[1] = Qi.y;
382:       if (c->velocityInterpLocal) {
383:         PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
384:         PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
385:       } else {
386:         PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
387:         PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
388:       }
389:       Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
390:       Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
391:     }
392:     c->queue[n] = Qi;
393:   }
394:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

396:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
397:   PetscCall(CharacteristicSendCoordinatesEnd(c));
398:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

400:   /* Calculate velocity at t_{n+1/2} (fill remote requests) */
401:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
402:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
403:   for (n = 0; n < c->queueRemoteSize; n++) {
404:     Qi               = c->queueRemote[n];
405:     interpIndices[0] = Qi.x;
406:     interpIndices[1] = Qi.y;
407:     if (c->velocityInterpLocal) {
408:       PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
409:       PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
410:     } else {
411:       PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
412:       PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
413:     }
414:     Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
415:     Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
416:     c->queueRemote[n] = Qi;
417:   }
418:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
419:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
420:   PetscCall(CharacteristicGetValuesBegin(c));
421:   PetscCall(CharacteristicGetValuesEnd(c));
422:   if (c->velocityInterpLocal) {
423:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
424:     PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
425:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
426:     PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
427:   }
428:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

430:   /* -----------------------------------------------------------------------
431:      PART 2, AT t-dt
432:      -----------------------------------------------------------------------*/

434:   /* GET POSITION AT t_n (local values) */
435:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
436:   PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
437:   for (n = 0; n < c->queueSize; n++) {
438:     Qi   = c->queue[n];
439:     Qi.x = Qi.i - Qi.x * dt;
440:     Qi.y = Qi.j - Qi.y * dt;

442:     /* Determine whether the position at t-dt is local */
443:     Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

445:     /* Check for Periodic boundaries and move all periodic points back onto the domain */
446:     PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));

448:     c->queue[n] = Qi;
449:   }
450:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

452:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
453:   PetscCall(CharacteristicSendCoordinatesBegin(c));
454:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

456:   /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
457:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
458:   if (c->fieldInterpLocal) {
459:     PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
460:     PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
461:     PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
462:     PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
463:   }
464:   PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
465:   for (n = 0; n < c->queueSize; n++) {
466:     if (c->neighbors[c->queue[n].proc] == rank) {
467:       interpIndices[0] = c->queue[n].x;
468:       interpIndices[1] = c->queue[n].y;
469:       if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
470:       else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
471:       for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
472:     }
473:   }
474:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

476:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
477:   PetscCall(CharacteristicSendCoordinatesEnd(c));
478:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

480:   /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
481:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
482:   PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
483:   for (n = 0; n < c->queueRemoteSize; n++) {
484:     interpIndices[0] = c->queueRemote[n].x;
485:     interpIndices[1] = c->queueRemote[n].y;

487:     /* for debugging purposes */
488:     if (1) { /* hacked bounds test...let's do better */
489:       PetscScalar im = interpIndices[0];
490:       PetscScalar jm = interpIndices[1];

492:       PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
493:     }

495:     if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
496:     else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
497:     for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
498:   }
499:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

501:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
502:   PetscCall(CharacteristicGetValuesBegin(c));
503:   PetscCall(CharacteristicGetValuesEnd(c));
504:   if (c->fieldInterpLocal) {
505:     PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
506:     PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
507:   }
508:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

510:   /* Return field of characteristics at t_n */
511:   PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
512:   PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
513:   PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
514:   for (n = 0; n < c->queueSize; n++) {
515:     Qi = c->queue[n];
516:     for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
517:   }
518:   PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
519:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
520:   PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

522:   /* Cleanup */
523:   PetscCall(PetscFree(interpIndices));
524:   PetscCall(PetscFree(velocityValues));
525:   PetscCall(PetscFree(velocityValuesOld));
526:   PetscCall(PetscFree(fieldValues));
527:   PetscFunctionReturn(PETSC_SUCCESS);
528: }

530: PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
531: {
532:   PetscFunctionBegin;
533:   c->numNeighbors = numNeighbors;
534:   PetscCall(PetscFree(c->neighbors));
535:   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
536:   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
537:   PetscFunctionReturn(PETSC_SUCCESS);
538: }

540: PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
541: {
542:   PetscFunctionBegin;
543:   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
544:   c->queue[c->queueSize++] = *point;
545:   PetscFunctionReturn(PETSC_SUCCESS);
546: }

548: PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
549: {
550:   PetscMPIInt rank, tag = 121;
551:   PetscInt    i, n;

553:   PetscFunctionBegin;
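  /* Added commentary: the exchange is a two-phase handshake. The queue is
     first sorted by owning neighbor so each neighbor's points form one
     contiguous block; the point counts are then exchanged (needCount out,
     fillCount in), offsets are built from the counts, and finally the
     coordinate payloads themselves are sent. Index 0 is this process, so the
     loops start at 1. */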
554:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
555:   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
556:   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
557:   for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
558:   c->fillCount[0] = 0;
559:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
560:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
561:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
562:   /* Initialize the remote queue */
563:   c->queueLocalMax = c->localOffsets[0] = 0;
564:   c->queueRemoteMax = c->remoteOffsets[0] = 0;
565:   for (n = 1; n < c->numNeighbors; n++) {
566:     c->remoteOffsets[n] = c->queueRemoteMax;
567:     c->queueRemoteMax += c->fillCount[n];
568:     c->localOffsets[n] = c->queueLocalMax;
569:     c->queueLocalMax += c->needCount[n];
570:   }
571:   /* HACK BEGIN */
572:   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
573:   c->needCount[0] = 0;
574:   /* HACK END */
575:   if (c->queueRemoteMax) {
576:     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
577:   } else c->queueRemote = NULL;
578:   c->queueRemoteSize = c->queueRemoteMax;

580:   /* Send and receive requests for interpolated values (velocities at t_{n+1/2}, fields at t_n), giving the coordinates for interpolation */
581:   for (n = 1; n < c->numNeighbors; n++) {
582:     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
583:     PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
584:   }
585:   for (n = 1; n < c->numNeighbors; n++) {
586:     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
587:     PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
588:   }
589:   PetscFunctionReturn(PETSC_SUCCESS);
590: }

592: PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
593: {
594: #if 0
595:   PetscMPIInt rank;
596:   PetscInt    n;
597: #endif

599:   PetscFunctionBegin;
600:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
601: #if 0
602:   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
603:   for (n = 0; n < c->queueRemoteSize; n++) {
604:     PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
605:   }
606: #endif
607:   PetscFunctionReturn(PETSC_SUCCESS);
608: }

610: PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
611: {
612:   PetscMPIInt tag = 121;
613:   PetscInt    n;

615:   PetscFunctionBegin;
616:   /* SEND AND RECEIVE FILLED REQUESTS (velocities at t_{n+1/2}, fields at t_n) */
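  /* Added commentary: this reverses CharacteristicSendCoordinatesBegin(): the
     filled items travel back along the same counts and offsets, leaving from
     queueRemote and landing in the matching slots of the local queue. */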
617:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
618:   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
619:   PetscFunctionReturn(PETSC_SUCCESS);
620: }

622: PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
623: {
624:   PetscFunctionBegin;
625:   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
626:   /* Free queue of requests from other procs */
627:   PetscCall(PetscFree(c->queueRemote));
628:   PetscFunctionReturn(PETSC_SUCCESS);
629: }

631: /*---------------------------------------------------------------------*/
632: /*
633:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
634: */
635: PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
636: /*---------------------------------------------------------------------*/
637: {
638:   CharacteristicPointDA2D temp;
639:   PetscInt                n;

641:   PetscFunctionBegin;
642:   if (0) { /* Check the order of the queue before sorting */
643:     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
644:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
645:   }

647:   /* SORTING PHASE */
648:   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
649:   for (n = size - 1; n >= 1; n--) {
650:     temp     = queue[0];
651:     queue[0] = queue[n];
652:     queue[n] = temp;
653:     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
654:   }
655:   if (0) { /* Check the order of the queue after sorting */
656:     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
657:     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
658:   }
659:   PetscFunctionReturn(PETSC_SUCCESS);
660: }

662: /*---------------------------------------------------------------------*/
663: /*
664:   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
665: */
666: PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
667: /*---------------------------------------------------------------------*/
668: {
669:   PetscBool               done = PETSC_FALSE;
670:   PetscInt                maxChild;
671:   CharacteristicPointDA2D temp;

673:   PetscFunctionBegin;
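  /* Added commentary: a standard max-heap sift-down keyed on the owning
     process id (.proc), so CharacteristicHeapSort() leaves the queue grouped
     by neighbor, which CharacteristicSendCoordinatesBegin() relies on when it
     computes contiguous per-neighbor offsets. */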
674:   while ((root * 2 <= bottom) && (!done)) {
675:     if (root * 2 == bottom) maxChild = root * 2;
676:     else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
677:     else maxChild = root * 2 + 1;

679:     if (queue[root].proc < queue[maxChild].proc) {
680:       temp            = queue[root];
681:       queue[root]     = queue[maxChild];
682:       queue[maxChild] = temp;
683:       root            = maxChild;
684:     } else done = PETSC_TRUE;
685:   }
686:   PetscFunctionReturn(PETSC_SUCCESS);
687: }

689: /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
690: PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
691: {
692:   DMBoundaryType bx, by;
693:   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
694:   MPI_Comm       comm;
695:   PetscMPIInt    rank;
696:   PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

698:   PetscFunctionBegin;
699:   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
700:   PetscCallMPI(MPI_Comm_rank(comm, &rank));
701:   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));

703:   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
704:   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

706:   neighbors[0] = rank;
707:   rank         = 0;
708:   PetscCall(PetscMalloc1(PJ, &procs));
709:   for (pj = 0; pj < PJ; pj++) {
710:     PetscCall(PetscMalloc1(PI, &(procs[pj])));
711:     for (pi = 0; pi < PI; pi++) {
712:       procs[pj][pi] = rank;
713:       rank++;
714:     }
715:   }

717:   pi  = neighbors[0] % PI;
718:   pj  = neighbors[0] / PI;
719:   pim = pi - 1;
720:   if (pim < 0) pim = PI - 1;
721:   pip = (pi + 1) % PI;
722:   pjm = pj - 1;
723:   if (pjm < 0) pjm = PJ - 1;
724:   pjp = (pj + 1) % PJ;

726:   neighbors[1] = procs[pj][pim];
727:   neighbors[2] = procs[pjp][pim];
728:   neighbors[3] = procs[pjp][pi];
729:   neighbors[4] = procs[pjp][pip];
730:   neighbors[5] = procs[pj][pip];
731:   neighbors[6] = procs[pjm][pip];
732:   neighbors[7] = procs[pjm][pi];
733:   neighbors[8] = procs[pjm][pim];

735:   if (!IPeriodic) {
736:     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
737:     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
738:   }

740:   if (!JPeriodic) {
741:     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
742:     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
743:   }

745:   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
746:   PetscCall(PetscFree(procs));
747:   PetscFunctionReturn(PETSC_SUCCESS);
748: }

750: /*
751:   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
752:     2 | 3 | 4
753:     __|___|__
754:     1 | 0 | 5
755:     __|___|__
756:     8 | 7 | 6
757:       |   |
758: */
759: PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
760: {
761:   DMDALocalInfo info;
762:   PetscReal     is, ie, js, je;

764:   PetscCall(DMDAGetLocalInfo(da, &info));
765:   is = (PetscReal)info.xs - 0.5;
766:   ie = (PetscReal)info.xs + info.xm - 0.5;
767:   js = (PetscReal)info.ys - 0.5;
768:   je = (PetscReal)info.ys + info.ym - 0.5;

770:   if (ir >= is && ir <= ie) { /* center column */
771:     if (jr >= js && jr <= je) return 0;
772:     else if (jr < js) return 7;
773:     else return 3;
774:   } else if (ir < is) { /* left column */
775:     if (jr >= js && jr <= je) return 1;
776:     else if (jr < js) return 8;
777:     else return 2;
778:   } else { /* right column */
779:     if (jr >= js && jr <= je) return 5;
780:     else if (jr < js) return 6;
781:     else return 4;
782:   }
783: }