Actual source code: characteristic.c
#include <petsc/private/characteristicimpl.h>
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;

/*
  Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapCoordsToPeriodicDomain(DM, PetscScalar *, PetscScalar *);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);
  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (!iascii) PetscTryTypeMethod(c, view, viewer);
  return 0;
}
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return 0;
  if (--((PetscObject)(*c))->refct > 0) return 0;

  if ((*c)->ops->destroy) (*(*c)->ops->destroy)((*c));
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return 0;
}
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return 0;
}
/*@C
  CharacteristicSetType - Builds Characteristic for a particular solver.

  Logically Collective on Characteristic

  Input Parameters:
+ c    - the method of characteristics context
- type - a known method

  Options Database Key:
. -characteristic_type <method> - Sets the method; use -help for a list
  of available methods

  Notes:
  See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine. Using the options database provides the user with
  maximum flexibility in evaluating the available methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database. This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application is taking responsibility for
  choosing the appropriate method. In other words, this routine is
  not for beginners.
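  For example, the DMDA-based method can be selected procedurally,
.vb
  CharacteristicSetType(c, CHARACTERISTICDA);
.ve
  or at runtime via the option -characteristic_type <method>.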
  Level: intermediate

.seealso: `CharacteristicType`
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscObjectTypeCompare((PetscObject)c, type, &match);
  if (match) return 0;

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscUseTypeMethod(c, destroy);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject)c, type);
  return 0;
}
/*@
  CharacteristicSetUp - Sets up the internal data structures for the
  later use of the method of characteristics solver.

  Collective on Characteristic

  Input Parameter:
. c - the method of characteristics context obtained from CharacteristicCreate()

  Level: developer

.seealso: `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) CharacteristicSetType(c, CHARACTERISTICDA);

  if (c->setupcalled == 2) return 0;

  PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  if (!c->setupcalled) PetscUseTypeMethod(c, setup);
  PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  c->setupcalled = 2;
  return 0;
}
/*@C
  CharacteristicRegister - Adds a solver to the method of characteristics package.

  Not Collective

  Input Parameters:
+ name_solver    - name of a new user-defined solver
- routine_create - routine to create method context

  Sample usage:
.vb
  CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
  CharacteristicCreate(comm, &ch);
  CharacteristicSetType(ch, "my_char");
.ve
  or at runtime via the option
.vb
  -characteristic_type my_char
.ve

  Notes:
  CharacteristicRegister() may be called multiple times to add several user-defined solvers.

  Level: advanced

.seealso: `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  CharacteristicInitializePackage();
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return 0;
}
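/*
  Editor's sketch (hypothetical): the shape of a create routine passed to
  CharacteristicRegister() above. A real implementation would allocate its private
  context in c->data and fill in the type's methods; MyCharSetUp and MyCharDestroy
  are placeholders, not part of this package.

    static PetscErrorCode MyCharCreate(Characteristic c)
    {
      c->data         = NULL;          // private context, if any
      c->ops->setup   = MyCharSetUp;   // called by CharacteristicSetUp()
      c->ops->destroy = MyCharDestroy; // called by CharacteristicDestroy()
      c->ops->view    = NULL;          // optional
      return 0;
    }
*/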
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return 0;
}
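/*
  Editor's sketch (hypothetical): a bilinear interpolation callback with the signature
  expected by CharacteristicSetVelocityInterpolationLocal() and
  CharacteristicSetFieldInterpolationLocal(). It assumes the void* argument is the
  PetscScalar** obtained from DMDAVecGetArray() on a ghosted local vector, and that
  the user context carries the dof (number of interlaced components) of that vector.

    static PetscErrorCode InterpolateBilinear(void *f, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
    {
      PetscScalar **a   = (PetscScalar **)f;
      PetscInt      dof = *(PetscInt *)ctx;
      PetscInt      i   = (PetscInt)PetscFloorReal(ij[0]), j = (PetscInt)PetscFloorReal(ij[1]);
      PetscReal     s   = ij[0] - i, t = ij[1] - j; // fractional offsets in [0,1)

      for (PetscInt n = 0; n < numComp; n++) {
        PetscInt comp = components[n];
        values[n] = (1. - s) * (1. - t) * a[j][i * dof + comp] + s * (1. - t) * a[j][(i + 1) * dof + comp] + (1. - s) * t * a[j + 1][i * dof + comp] + s * t * a[j + 1][(i + 1) * dof + comp];
      }
      return 0;
    }
*/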
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscMalloc1(dim, &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp, &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);
  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
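  /*
     Editor's note: this routine implements a two-stage semi-Lagrangian update.
     For each grid point x_i it first locates the departure point at the half step,
       x_{n-1/2} = x_i - (dt/2) u(x_i, t_n),
     then evaluates the time-centered velocity there,
       u* = (u(x_{n-1/2}, t_n) + u(x_{n-1/2}, t_{n-1})) / 2,
     traces the full characteristic, x_{n-1} = x_i - dt u*, and finally interpolates
     the transported field at x_{n-1} to obtain the solution at x_i. Departure points
     that leave the local subdomain are exchanged with the 8 neighboring ranks.
  */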
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      else c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);
  /* Calculate velocity at t_{n-1/2} (local values); the time-averaged velocity is stored back into Qi.x, Qi.y */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  /* Calculate velocity at t_{n-1/2} (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_{n-1} (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating position at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    /* Qi.x, Qi.y hold the time-averaged velocity from Part 1 */
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n-1}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n-1}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  /* Return field of characteristics at t_{n-1} */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return 0;
}
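/*
  Editor's sketch (hypothetical driver): one advection step using the method of
  characteristics. The DMs, vectors, and the InterpolateBilinear callback sketched
  earlier are assumptions, not part of this file: velDA is a 2D DMDA holding two
  interlaced velocity components, fieldDA a 2D DMDA with one field component.

    Characteristic ch;
    PetscInt       velComp[2]   = {0, 1}, velDof   = 2;
    PetscInt       fieldComp[1] = {0},    fieldDof = 1;

    CharacteristicCreate(PETSC_COMM_WORLD, &ch);
    CharacteristicSetVelocityInterpolationLocal(ch, velDA, vel, velOld, 2, velComp, InterpolateBilinear, &velDof);
    CharacteristicSetFieldInterpolationLocal(ch, fieldDA, phi, 1, fieldComp, InterpolateBilinear, &fieldDof);
    CharacteristicSolve(ch, dt, phiNew);
    CharacteristicDestroy(&ch);
*/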
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscArraycpy(c->neighbors, neighbors, numNeighbors);
  return 0;
}
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
  c->queue[c->queueSize++] = *point;
  return 0;
}
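/*
  Sorts the local queue by owning neighbor, exchanges request counts with the
  neighboring ranks, and posts the sends/receives that ship each departure point
  to the rank that owns it. Completed by CharacteristicSendCoordinatesEnd().
*/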
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscArrayzero(c->needCount, c->numNeighbors);
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0] = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax += c->fillCount[n];
    c->localOffsets[n] = c->queueLocalMax;
    c->queueLocalMax += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc1(c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values at t_{n-1/2}, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return 0;
}
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  return 0;
}
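/*
  Returns the filled requests: each rank sends back the values it interpolated on
  behalf of its neighbors (queueRemote) and receives values for its own off-process
  points into the matching slots of the local queue.
*/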
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS for the values at the exchanged coordinates */
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  return 0;
}
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return 0;
}
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
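/*
  Sorts the queue in increasing order of the owning neighbor index (.proc) so that
  points destined for the same neighbor are contiguous and can be sent using the
  offsets computed in CharacteristicSendCoordinatesBegin().
*/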
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc);
  }

  /* SORTING PHASE */
  for (n = (size / 2) - 1; n >= 0; n--) { CharacteristicSiftDown(c, queue, n, size - 1); /* Rich had size-1 here, Matt had size */ }
  for (n = size - 1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n - 1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc);
  }
  return 0;
}
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  /* For the 0-based queue, the children of node n are nodes 2n+1 and 2n+2 */
  while ((root * 2 + 1 <= bottom) && (!done)) {
    if (root * 2 + 1 == bottom) maxChild = root * 2 + 1;
    else if (queue[root * 2 + 1].proc > queue[root * 2 + 2].proc) maxChild = root * 2 + 1;
    else maxChild = root * 2 + 2;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return 0;
}
/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank         = 0;
  PetscMalloc1(PJ, &procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI, &(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi - 1;
  if (pim < 0) pim = PI - 1;
  pip = (pi + 1) % PI;
  pjm = pj - 1;
  if (pjm < 0) pjm = PJ - 1;
  pjp = (pj + 1) % PJ;

  neighbors[1] = procs[pj][pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj][pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) PetscFree(procs[pj]);
  PetscFree(procs);
  return 0;
}
/*
  SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
    2 | 3 | 4
   __|___|__
    1 | 0 | 5
   __|___|__
    8 | 7 | 6
      |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal)info.xs - 0.5;
  ie = (PetscReal)info.xs + info.xm - 0.5;
  js = (PetscReal)info.ys - 0.5;
  je = (PetscReal)info.ys + info.ym - 0.5;

  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js) return 7;
    else return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js) return 8;
    else return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js) return 6;
    else return 4;
  }
}