Actual source code: characteristic.c
#include <petsc/private/characteristicimpl.h>
#include <petscdmda.h>
#include <petscviewer.h>

PetscClassId  CHARACTERISTIC_CLASSID;
PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar[]);

PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer);

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  /* There is no default ASCII output; defer to the type-specific view routine for other viewers */
  if (!iascii) {
    if (c->ops->view) (*c->ops->view)(c, viewer);
  }
  return 0;
}
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  if (!*c) return 0;
  if (--((PetscObject)(*c))->refct > 0) {
    *c = NULL; /* other references remain; just zero the caller's handle */
    return 0;
  }

  if ((*c)->ops->destroy) (*(*c)->ops->destroy)(*c);
  MPI_Type_free(&(*c)->itemType);
  PetscFree((*c)->queue);
  PetscFree((*c)->queueLocal);
  PetscFree((*c)->queueRemote);
  PetscFree((*c)->neighbors);
  PetscFree((*c)->needCount);
  PetscFree((*c)->localOffsets);
  PetscFree((*c)->fillCount);
  PetscFree((*c)->remoteOffsets);
  PetscFree((*c)->request);
  PetscFree((*c)->status);
  PetscHeaderDestroy(c);
  return 0;
}
PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
{
  Characteristic newC;

  *c = NULL;
  CharacteristicInitializePackage();

  PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView);
  *c = newC;

  newC->structured          = PETSC_TRUE;
  newC->numIds              = 0;
  newC->velocityDA          = NULL;
  newC->velocity            = NULL;
  newC->velocityOld         = NULL;
  newC->numVelocityComp     = 0;
  newC->velocityComp        = NULL;
  newC->velocityInterp      = NULL;
  newC->velocityInterpLocal = NULL;
  newC->velocityCtx         = NULL;
  newC->fieldDA             = NULL;
  newC->field               = NULL;
  newC->numFieldComp        = 0;
  newC->fieldComp           = NULL;
  newC->fieldInterp         = NULL;
  newC->fieldInterpLocal    = NULL;
  newC->fieldCtx            = NULL;
  newC->itemType            = 0;
  newC->queue               = NULL;
  newC->queueSize           = 0;
  newC->queueMax            = 0;
  newC->queueLocal          = NULL;
  newC->queueLocalSize      = 0;
  newC->queueLocalMax       = 0;
  newC->queueRemote         = NULL;
  newC->queueRemoteSize     = 0;
  newC->queueRemoteMax      = 0;
  newC->numNeighbors        = 0;
  newC->neighbors           = NULL;
  newC->needCount           = NULL;
  newC->localOffsets        = NULL;
  newC->fillCount           = NULL;
  newC->remoteOffsets       = NULL;
  newC->request             = NULL;
  newC->status              = NULL;
  return 0;
}
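
/* A minimal sketch of the intended call sequence, for orientation only. The
   DMs, Vecs, time step, and the two interpolation callbacks
   (MyVelocityInterpLocal, MyFieldInterpLocal) are hypothetical
   caller-supplied objects, not defined in this file. */
#if 0
{
  Characteristic c;
  PetscInt       velComp[2]   = {0, 1};
  PetscInt       fieldComp[1] = {0};

  CharacteristicCreate(PETSC_COMM_WORLD, &c);
  CharacteristicSetVelocityInterpolationLocal(c, velocityDA, velocity, velocityOld, 2, velComp, MyVelocityInterpLocal, NULL);
  CharacteristicSetFieldInterpolationLocal(c, fieldDA, field, 1, fieldComp, MyFieldInterpLocal, NULL);
  CharacteristicSolve(c, dt, solution); /* one semi-Lagrangian step of length dt */
  CharacteristicDestroy(&c);
}
#endif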
/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
   of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

   Normally, it is best to use the CharacteristicSetFromOptions() command and
   then set the Characteristic type from the options database rather than by using
   this routine. Using the options database provides the user with
   maximum flexibility in evaluating the many different available methods.
   The CharacteristicSetType() routine is provided for those situations where it
   is necessary to set the solver independently of the command
   line or options database. This might be the case, for example, when
   the choice of solver changes during the execution of the
   program, and the user's application is taking responsibility for
   choosing the appropriate method. In other words, this routine is
   not for beginners.
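
   For example, to select the DMDA-based method explicitly:
.vb
   CharacteristicSetType(c, CHARACTERISTICDA);
.ve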

   Level: intermediate

.seealso: CharacteristicType
@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscObjectTypeCompare((PetscObject)c, type, &match);
  if (match) return 0;

  if (c->data) {
    /* destroy the old private Characteristic context */
    (*c->ops->destroy)(c);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  PetscFunctionListFind(CharacteristicList, type, &r);
  if (!r) SETERRQ(PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  c->setupcalled = 0;
  (*r)(c);
  PetscObjectChangeTypeName((PetscObject)c, type);
  return 0;
}
/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the method of characteristics.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  if (!((PetscObject)c)->type_name) CharacteristicSetType(c, CHARACTERISTICDA);

  if (c->setupcalled == 2) return 0;

  PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  if (!c->setupcalled) (*c->ops->setup)(c);
  PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL);
  c->setupcalled = 2;
  return 0;
}
/*@C
   CharacteristicRegister - Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

   Sample usage:
.vb
   CharacteristicRegister("my_char", MyCharCreate);
.ve

   Then, your Characteristic type can be chosen with the procedural interface via
.vb
   CharacteristicCreate(comm, &c);
   CharacteristicSetType(c, "my_char");
.ve
   or at runtime via the option
.vb
   -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.
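
   A skeleton creation routine might look like the following; MyCharSetUp and
   MyCharDestroy stand in for the user's own type-specific routines:
.vb
   PetscErrorCode MyCharCreate(Characteristic c)
   {
     c->data         = NULL;          /* allocate and attach any private context here */
     c->ops->setup   = MyCharSetUp;
     c->ops->destroy = MyCharDestroy;
     return 0;
   }
.ve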

   Level: advanced

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()
@*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  CharacteristicInitializePackage();
  PetscFunctionListAdd(&CharacteristicList, sname, function);
  return 0;
}
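
/*
   The four Set*Interpolation routines below only record the caller's DM, Vecs,
   component list, and interpolation callback; nothing is copied and nothing is
   validated until CharacteristicSetUp()/CharacteristicSolve(). The *Local
   variants receive a raw ghosted array (as obtained from DMDAVecGetArray())
   instead of a Vec, and take precedence in CharacteristicSolve() when both are set.
*/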
PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA      = da;
  c->velocity        = v;
  c->velocityOld     = vOld;
  c->numVelocityComp = numComponents;
  c->velocityComp    = components;
  c->velocityInterp  = interp;
  c->velocityCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->velocityDA          = da;
  c->velocity            = v;
  c->velocityOld         = vOld;
  c->numVelocityComp     = numComponents;
  c->velocityComp        = components;
  c->velocityInterpLocal = interp;
  c->velocityCtx         = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->fieldDA      = da;
  c->field        = v;
  c->numFieldComp = numComponents;
  c->fieldComp    = components;
  c->fieldInterp  = interp;
  c->fieldCtx     = ctx;
  return 0;
}
PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
{
  c->fieldDA          = da;
  c->field            = v;
  c->numFieldComp     = numComponents;
  c->fieldComp        = components;
  c->fieldInterpLocal = interp;
  c->fieldCtx         = ctx;
  return 0;
}
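
/*
   CharacteristicSolve - Take one semi-Lagrangian step of length dt.

   The routine makes two passes over every locally owned grid point:

   Part 1 (half time in the past): each point is traced back dt/2 with the
   current velocity, and the velocity at that departure point is taken as the
   average 0.5*(v + vOld) there.

   Part 2 (full time in the past): using the half-time velocity, each point is
   traced back a full dt, the transported field is interpolated at the foot of
   the characteristic, and the result is written into the solution vector.

   Points whose departure location lies on a neighboring process are exchanged
   with the 9-point process neighborhood via CharacteristicSendCoordinates*()
   and their values are returned via CharacteristicGetValues*().
*/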
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  c->queueSize = 0;
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  DMDAGetNeighborsRank(da, neighbors);
  CharacteristicSetNeighbors(c, 9, neighbors);
  CharacteristicSetUp(c);
  /* global and local grid info */
  DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  DMDAGetLocalInfo(da, &info);
  is = info.xs; ie = info.xs + info.xm;
  js = info.ys; je = info.ys + info.ym;
  /* Allocation */
  PetscMalloc1(dim, &interpIndices);
  PetscMalloc1(c->numVelocityComp, &velocityValues);
  PetscMalloc1(c->numVelocityComp, &velocityValuesOld);
  PetscMalloc1(c->numFieldComp, &fieldValues);
  PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);
  /* -----------------------------------------------------------------------
     PART 1, AT t - dt/2
     ----------------------------------------------------------------------- */
  PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    DMGetLocalVector(c->velocityDA, &velocityLocal);
    DMGetLocalVector(c->velocityDA, &velocityLocalOld);
    DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
    DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
    DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
  }
  PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n");
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      else c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for periodic boundaries and move all periodic points back onto the domain */
      DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));
      CharacteristicAddPoint(c, &Qi);
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);
  /* Calculate velocity at half time in the past (local values) */
  PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      } else {
        c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
        c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
      }
      /* Qi.x and Qi.y now carry the time-averaged velocity at the departure point, not coordinates */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  /* Calculate velocity at half time in the past (fill remote requests) */
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    } else {
      c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
      c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
    }
    Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL);
  PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->velocityInterpLocal) {
    DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
    DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
    DMRestoreLocalVector(c->velocityDA, &velocityLocal);
    DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
  }
  PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL);
  /* -----------------------------------------------------------------------
     PART 2, AT t - dt
     ----------------------------------------------------------------------- */

  /* GET POSITION AT FULL TIME IN THE PAST (local values) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating position at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x * dt; /* Qi.x/Qi.y hold the half-time velocity from Part 1 */
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t - dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for periodic boundaries and move all periodic points back onto the domain */
    DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y));

    c->queue[n] = Qi;
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesBegin(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);
  if (c->fieldInterpLocal) {
    DMGetLocalVector(c->fieldDA, &fieldLocal);
    DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
    DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
  }
  PetscInfo(NULL, "Calculating local field at t_{n}\n");
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicSendCoordinatesEnd(c);
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);
  PetscInfo(NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize);
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      /* the remote request must fall within our ghosted patch */
      if (PetscRealPart(im) < is - 1 || PetscRealPart(im) > ie || PetscRealPart(jm) < js - 1 || PetscRealPart(jm) > je)
        SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g, %g)", (double)PetscRealPart(im), (double)PetscRealPart(jm));
    }

    if (c->fieldInterpLocal) c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    else c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL);

  PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  CharacteristicGetValuesBegin(c);
  CharacteristicGetValuesEnd(c);
  if (c->fieldInterpLocal) {
    DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
    DMRestoreLocalVector(c->fieldDA, &fieldLocal);
  }
  PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL);
  /* Return field of characteristics at t_n-1 */
  PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL);
  DMDAVecGetArray(c->fieldDA, solution, &solArray);
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
  PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL);
  PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL);

  /* Cleanup */
  PetscFree(interpIndices);
  PetscFree(velocityValues);
  PetscFree(velocityValuesOld);
  PetscFree(fieldValues);
  return 0;
}
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  c->numNeighbors = numNeighbors;
  PetscFree(c->neighbors);
  PetscMalloc1(numNeighbors, &c->neighbors);
  PetscArraycpy(c->neighbors, neighbors, numNeighbors);
  return 0;
}
PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
{
  if (c->queueSize >= c->queueMax) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
  c->queue[c->queueSize++] = *point;
  return 0;
}
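
/*
   CharacteristicSendCoordinatesBegin - Ship each queued departure point to the
   neighbor that owns it. The queue is heap-sorted by owning neighbor so that
   each neighbor's points are contiguous, the per-neighbor counts are exchanged
   (needCount out, fillCount in), the remote queue is sized, and the point
   exchange itself is posted with nonblocking receives that are completed in
   CharacteristicSendCoordinatesEnd().
*/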
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  CharacteristicHeapSort(c, c->queue, c->queueSize);
  PetscArrayzero(c->needCount, c->numNeighbors);
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Initialize the remote queue */
  c->queueLocalMax  = c->localOffsets[0]  = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax  += c->fillCount[n];
    c->localOffsets[n]  = c->queueLocalMax;
    c->queueLocalMax   += c->needCount[n];
  }
  /* HACK BEGIN */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscMalloc1(c->queueRemoteMax, &c->queueRemote);
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and receive requests for values at the departure points, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
    MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscInfo(NULL, "Sending %d requests for values to proc %d\n", c->needCount[n], c->neighbors[n]);
    MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  }
  return 0;
}
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
#if 0
  MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank);
  for (n = 0; n < c->queueRemoteSize; n++) {
  }
#endif
  return 0;
}
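
/*
   CharacteristicGetValuesBegin/End - The reverse of the coordinate exchange:
   each process sends back the values it computed for its neighbors' points
   (queueRemote) and receives the values for the points it farmed out, placing
   them in the same queue slots from which the requests were sent.
*/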
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  /* SEND AND RECEIVE FILLED REQUESTS for the queued points */
  for (n = 1; n < c->numNeighbors; n++) MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1]));
  for (n = 1; n < c->numNeighbors; n++) MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c));
  return 0;
}
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  MPI_Waitall(c->numNeighbors - 1, c->request, c->status);
  /* Free queue of requests from other procs */
  PetscFree(c->queueRemote);
  return 0;
}
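
/*
   CharacteristicHeapSort sorts the queue in increasing order of the owning
   neighbor index (proc) so that all points bound for the same process are
   contiguous and can be shipped with a single MPI_Send per neighbor.
*/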
/*---------------------------------------------------------------------*/
/*
   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
/*---------------------------------------------------------------------*/
{
  CharacteristicPointDA2D temp;
  PetscInt                n;

  if (0) { /* Check the order of the queue before sorting */
    PetscInfo(NULL, "Before Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%d %d\n", n, queue[n].proc);
  }

  /* SORTING PHASE */
  for (n = (size / 2) - 1; n >= 0; n--) CharacteristicSiftDown(c, queue, n, size - 1); /* Rich had size-1 here, Matt had size */
  for (n = size - 1; n >= 1; n--) {
    temp     = queue[0];
    queue[0] = queue[n];
    queue[n] = temp;
    CharacteristicSiftDown(c, queue, 0, n - 1);
  }
  if (0) { /* Check the order of the queue after sorting */
    PetscInfo(NULL, "After Heap sort\n");
    for (n = 0; n < size; n++) PetscInfo(NULL, "%d %d\n", n, queue[n].proc);
  }
  return 0;
}
/*---------------------------------------------------------------------*/
/*
   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  while ((root * 2 <= bottom) && (!done)) {
    if (root * 2 == bottom) maxChild = root * 2;
    else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  return 0;
}
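
/*
   DMDAGetNeighborsRank builds the nine-entry process neighborhood of this rank
   on the DMDA process grid, assuming the default row-major rank ordering
   (rank = pj*PI + pi). Periodic directions wrap around; on non-periodic
   boundaries the missing neighbors are mapped back to this rank itself, so
   points that leave the global domain are treated as local.
*/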
/* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
{
  DMBoundaryType bx, by;
  PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
  MPI_Comm       comm;
  PetscMPIInt    rank;
  PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;

  PetscObjectGetComm((PetscObject)da, &comm);
  MPI_Comm_rank(comm, &rank);
  DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL);

  if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
  if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;

  neighbors[0] = rank;
  rank = 0;
  PetscMalloc1(PJ, &procs);
  for (pj = 0; pj < PJ; pj++) {
    PetscMalloc1(PI, &(procs[pj]));
    for (pi = 0; pi < PI; pi++) {
      procs[pj][pi] = rank;
      rank++;
    }
  }

  pi  = neighbors[0] % PI;
  pj  = neighbors[0] / PI;
  pim = pi - 1; if (pim < 0) pim = PI - 1;
  pip = (pi + 1) % PI;
  pjm = pj - 1; if (pjm < 0) pjm = PJ - 1;
  pjp = (pj + 1) % PJ;

  neighbors[1] = procs[pj][pim];
  neighbors[2] = procs[pjp][pim];
  neighbors[3] = procs[pjp][pi];
  neighbors[4] = procs[pjp][pip];
  neighbors[5] = procs[pj][pip];
  neighbors[6] = procs[pjm][pip];
  neighbors[7] = procs[pjm][pi];
  neighbors[8] = procs[pjm][pim];

  if (!IPeriodic) {
    if (pi == 0)      neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
    if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
  }

  if (!JPeriodic) {
    if (pj == 0)      neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
    if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
  }

  for (pj = 0; pj < PJ; pj++) PetscFree(procs[pj]);
  PetscFree(procs);
  return 0;
}
/*
   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
      2 | 3 | 4
     __|___|__
      1 | 0 | 5
     __|___|__
      8 | 7 | 6
        |   |
*/
PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
{
  DMDALocalInfo info;
  PetscReal     is, ie, js, je;

  DMDAGetLocalInfo(da, &info);
  is = (PetscReal)info.xs - 0.5; ie = (PetscReal)info.xs + info.xm - 0.5;
  js = (PetscReal)info.ys - 0.5; je = (PetscReal)info.ys + info.ym - 0.5;
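
  /* The local patch is the cell-centered region [xs-1/2, xs+xm-1/2] x [ys-1/2, ys+ym-1/2],
     so points within half a grid spacing of the owned nodes still count as local */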
  if (ir >= is && ir <= ie) { /* center column */
    if (jr >= js && jr <= je) return 0;
    else if (jr < js) return 7;
    else return 3;
  } else if (ir < is) { /* left column */
    if (jr >= js && jr <= je) return 1;
    else if (jr < js) return 8;
    else return 2;
  } else { /* right column */
    if (jr >= js && jr <= je) return 5;
    else if (jr < js) return 6;
    else return 4;
  }
}