Actual source code: characteristic.c
2: #include <private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
4: PetscClassId CHARACTERISTIC_CLASSID;
5: PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
6: PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
7: PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
8: PetscBool CharacteristicRegisterAllCalled = PETSC_FALSE;
9: /*
10: Contains the list of registered characteristic routines
11: */
12: PetscFList CharacteristicList = PETSC_NULL;
14: PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
15: PetscInt DMDAGetNeighborRelative(DM, PassiveReal, PassiveReal);
16: PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar [] );
18: PetscErrorCode HeapSort(Characteristic, Queue, PetscInt);
19: PetscErrorCode SiftDown(Characteristic, Queue, PetscInt, PetscInt);
23: PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
24: {
25: PetscBool iascii;
30: if (!viewer) {
31: PetscViewerASCIIGetStdout(((PetscObject)c)->comm,&viewer);
32: }
36: PetscTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
37: if (!iascii) {
38: if (c->ops->view) {
39: (*c->ops->view)(c, viewer);
40: }
41: }
42: return(0);
43: }
47: PetscErrorCode CharacteristicDestroy(Characteristic *c)
48: {
52: if (!*c) return(0);
54: if (--((PetscObject)(*c))->refct > 0) return(0);
56: if ((*c)->ops->destroy) {
57: (*(*c)->ops->destroy)((*c));
58: }
59: MPI_Type_free(&(*c)->itemType);
60: PetscFree((*c)->queue);
61: PetscFree((*c)->queueLocal);
62: PetscFree((*c)->queueRemote);
63: PetscFree((*c)->neighbors);
64: PetscFree((*c)->needCount);
65: PetscFree((*c)->localOffsets);
66: PetscFree((*c)->fillCount);
67: PetscFree((*c)->remoteOffsets);
68: PetscFree((*c)->request);
69: PetscFree((*c)->status);
70: PetscHeaderDestroy(c);
71: return(0);
72: }
76: PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
77: {
78: Characteristic newC;
83: *c = PETSC_NULL;
84: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
85: CharacteristicInitializePackage(PETSC_NULL);
86: #endif
88: PetscHeaderCreate(newC, _p_Characteristic, struct _CharacteristicOps, CHARACTERISTIC_CLASSID, -1, "Characteristic", "Characteristic", "SemiLagrange", comm, CharacteristicDestroy, CharacteristicView);
89: PetscLogObjectCreate(newC);
90: *c = newC;
92: newC->structured = PETSC_TRUE;
93: newC->numIds = 0;
94: newC->velocityDA = PETSC_NULL;
95: newC->velocity = PETSC_NULL;
96: newC->velocityOld = PETSC_NULL;
97: newC->numVelocityComp = 0;
98: newC->velocityComp = PETSC_NULL;
99: newC->velocityInterp = PETSC_NULL;
100: newC->velocityInterpLocal = PETSC_NULL;
101: newC->velocityCtx = PETSC_NULL;
102: newC->fieldDA = PETSC_NULL;
103: newC->field = PETSC_NULL;
104: newC->numFieldComp = 0;
105: newC->fieldComp = PETSC_NULL;
106: newC->fieldInterp = PETSC_NULL;
107: newC->fieldInterpLocal = PETSC_NULL;
108: newC->fieldCtx = PETSC_NULL;
109: newC->itemType = PETSC_NULL;
110: newC->queue = PETSC_NULL;
111: newC->queueSize = 0;
112: newC->queueMax = 0;
113: newC->queueLocal = PETSC_NULL;
114: newC->queueLocalSize = 0;
115: newC->queueLocalMax = 0;
116: newC->queueRemote = PETSC_NULL;
117: newC->queueRemoteSize = 0;
118: newC->queueRemoteMax = 0;
119: newC->numNeighbors = 0;
120: newC->neighbors = PETSC_NULL;
121: newC->needCount = PETSC_NULL;
122: newC->localOffsets = PETSC_NULL;
123: newC->fillCount = PETSC_NULL;
124: newC->remoteOffsets = PETSC_NULL;
125: newC->request = PETSC_NULL;
126: newC->status = PETSC_NULL;
127: return(0);
128: }
132: /*@C
133: CharacteristicSetType - Builds Characteristic for a particular solver.
135: Logically Collective on Characteristic
137: Input Parameters:
138: + c - the method of characteristics context
139: - type - a known method
141: Options Database Key:
142: . -characteristic_type <method> - Sets the method; use -help for a list
143: of available methods
145: Notes:
146: See "include/petsccharacteristic.h" for available methods
148: Normally, it is best to use the CharacteristicSetFromOptions() command and
149: then set the Characteristic type from the options database rather than by using
150: this routine. Using the options database provides the user with
151: maximum flexibility in evaluating the different available methods.
152: The CharacteristicSetType() routine is provided for those situations where it
153: is necessary to set the method independently of the command
154: line or options database. This might be the case, for example, when
155: the choice of method changes during the execution of the
156: program, and the user's application is taking responsibility for
157: choosing the appropriate method. In other words, this routine is
158: not for beginners.
160: Level: intermediate
162: .keywords: Characteristic, set, method
164: .seealso: CharacteristicType
166: @*/
167: PetscErrorCode CharacteristicSetType(Characteristic c, const CharacteristicType type)
168: {
169: PetscErrorCode ierr, (*r)(Characteristic);
170: PetscBool match;
176: PetscTypeCompare((PetscObject) c, type, &match);
177: if (match) return(0);
179: if (c->data) {
180: /* destroy the old private Characteristic context */
181: (*c->ops->destroy)(c);
182: c->data = 0;
183: }
185: PetscFListFind(CharacteristicList, ((PetscObject)c)->comm,type,PETSC_TRUE, (void (**)(void)) &r);
186: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
187: c->setupcalled = 0;
188: (*r)(c);
189: PetscObjectChangeTypeName((PetscObject) c, type);
190: return(0);
191: }
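/*
   A minimal, self-contained usage sketch for the routines above (illustrative only, guarded out
   of the build): it creates a context, selects the DA-based type explicitly, and tears it down.
   In practice the type is more commonly chosen via -characteristic_type from the options
   database, as described in the manual page above. The name CharacteristicTypeExample is
   illustrative.
*/
#if 0
static PetscErrorCode CharacteristicTypeExample(void)
{
  Characteristic c;
  PetscErrorCode ierr;

  ierr = CharacteristicCreate(PETSC_COMM_WORLD, &c);CHKERRQ(ierr);
  ierr = CharacteristicSetType(c, CHARACTERISTICDA);CHKERRQ(ierr); /* or rely on -characteristic_type */
  ierr = CharacteristicSetUp(c);CHKERRQ(ierr);                     /* optional; CharacteristicSolve() also calls it */
  ierr = CharacteristicDestroy(&c);CHKERRQ(ierr);
  return(0);
}
#endif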
195: /*@
196: CharacteristicSetUp - Sets up the internal data structures for the
197: later use of the method of characteristics solver.
199: Collective on Characteristic
201: Input Parameter:
202: . c - the method of characteristics context obtained from CharacteristicCreate()
204: Level: developer
206: .keywords: Characteristic, setup
208: .seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
209: @*/
210: PetscErrorCode CharacteristicSetUp(Characteristic c)
211: {
217: if (!((PetscObject)c)->type_name){
218: CharacteristicSetType(c, CHARACTERISTICDA);
219: }
221: if (c->setupcalled == 2) return(0);
223: PetscLogEventBegin(CHARACTERISTIC_SetUp,c,0,0,0);
224: if (!c->setupcalled) {
225: (*c->ops->setup)(c);
226: }
227: PetscLogEventEnd(CHARACTERISTIC_SetUp,c,0,0,0);
228: c->setupcalled = 2;
229: return(0);
230: }
234: /*@C
235: CharacteristicRegister - See CharacteristicRegisterDynamic()
237: Level: advanced
238: @*/
239: PetscErrorCode CharacteristicRegister(const char sname[],const char path[],const char name[],PetscErrorCode (*function)(Characteristic))
240: {
242: char fullname[PETSC_MAX_PATH_LEN];
245: PetscFListConcat(path,name,fullname);
246: PetscFListAdd(&CharacteristicList,sname,fullname,(void (*)(void))function);
247: return(0);
248: }
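/*
   A minimal registration sketch (illustrative only, guarded out of the build). The type name
   "custom", the library path, and the constructor CharacteristicCreate_Custom are hypothetical
   placeholders; only CharacteristicRegister() itself is defined in this file.
*/
#if 0
extern PetscErrorCode CharacteristicCreate_Custom(Characteristic); /* hypothetical constructor */

static PetscErrorCode RegisterCustomCharacteristic(void)
{
  PetscErrorCode ierr;

  ierr = CharacteristicRegister("custom", "/path/to/libcustom.so", "CharacteristicCreate_Custom", CharacteristicCreate_Custom);CHKERRQ(ierr);
  return(0);
}
#endif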
252: PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *), void *ctx)
253: {
255: c->velocityDA = da;
256: c->velocity = v;
257: c->velocityOld = vOld;
258: c->numVelocityComp = numComponents;
259: c->velocityComp = components;
260: c->velocityInterp = interp;
261: c->velocityCtx = ctx;
262: return(0);
263: }
267: PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *), void *ctx)
268: {
270: c->velocityDA = da;
271: c->velocity = v;
272: c->velocityOld = vOld;
273: c->numVelocityComp = numComponents;
274: c->velocityComp = components;
275: c->velocityInterpLocal = interp;
276: c->velocityCtx = ctx;
277: return(0);
278: }
282: PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *), void *ctx)
283: {
285: #if 0
286: if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
287: #endif
288: c->fieldDA = da;
289: c->field = v;
290: c->numFieldComp = numComponents;
291: c->fieldComp = components;
292: c->fieldInterp = interp;
293: c->fieldCtx = ctx;
294: return(0);
295: }
299: PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *), void *ctx)
300: {
302: #if 0
303: if (numComponents > 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
304: #endif
305: c->fieldDA = da;
306: c->field = v;
307: c->numFieldComp = numComponents;
308: c->fieldComp = components;
309: c->fieldInterpLocal = interp;
310: c->fieldCtx = ctx;
311: return(0);
312: }
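/*
   A minimal sketch of a callback matching the local-interpolation signature taken by the two
   *InterpolationLocal() setters above. It assumes a 2D, single-dof DMDA array as returned by
   DMDAVecGetArray() and simply samples the nearest grid node; a real implementation would use
   (bi)linear interpolation and honor the requested components. The name NearestNodeInterpLocal
   and the rounding choice are illustrative only.
*/
#if 0
static PetscErrorCode NearestNodeInterpLocal(void *f, PetscReal ij[], PetscInt numComp, PetscInt components[], PetscScalar values[], void *ctx)
{
  PetscScalar **field = (PetscScalar **) f;      /* layout produced by DMDAVecGetArray() for dof = 1 */
  PetscInt      i     = (PetscInt)(ij[0] + 0.5); /* nearest node in the x index direction */
  PetscInt      j     = (PetscInt)(ij[1] + 0.5); /* nearest node in the y index direction */
  PetscInt      comp;

  for (comp = 0; comp < numComp; comp++) {
    values[comp] = field[j][i];                  /* dof = 1; multi-dof data would use i*dof+components[comp] */
  }
  return(0);
}
#endif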
316: PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
317: {
318: CharacteristicPointDA2D Qi;
319: DM da = c->velocityDA;
320: Vec velocityLocal, velocityLocalOld;
321: Vec fieldLocal;
322: DMDALocalInfo info;
323: PetscScalar **solArray;
324: void *velocityArray;
325: void *velocityArrayOld;
326: void *fieldArray;
327: PassiveScalar *interpIndices;
328: PassiveScalar *velocityValues, *velocityValuesOld;
329: PassiveScalar *fieldValues;
330: PetscMPIInt rank;
331: PetscInt dim;
332: PetscMPIInt neighbors[9];
333: PetscInt dof;
334: PetscInt gx, gy;
335: PetscInt n, is, ie, js, je, comp;
336: PetscErrorCode ierr;
339: c->queueSize = 0;
340: MPI_Comm_rank(((PetscObject)c)->comm, &rank);
341: DMDAGetNeighborsRank(da, neighbors);
342: CharacteristicSetNeighbors(c, 9, neighbors);
343: CharacteristicSetUp(c);
344: /* global and local grid info */
345: DMDAGetInfo(da, &dim, &gx, &gy, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
346: DMDAGetLocalInfo(da, &info);
347: is = info.xs; ie = info.xs+info.xm;
348: js = info.ys; je = info.ys+info.ym;
349: /* Allocation */
350: PetscMalloc(dim*sizeof(PetscScalar), &interpIndices);
351: PetscMalloc(c->numVelocityComp*sizeof(PetscScalar), &velocityValues);
352: PetscMalloc(c->numVelocityComp*sizeof(PetscScalar), &velocityValuesOld);
353: PetscMalloc(c->numFieldComp*sizeof(PetscScalar), &fieldValues);
354: PetscLogEventBegin(CHARACTERISTIC_Solve,0,0,0,0);
356: /* -----------------------------------------------------------------------
357: PART 1, AT t-dt/2
358: -----------------------------------------------------------------------*/
359: PetscLogEventBegin(CHARACTERISTIC_QueueSetup,0,0,0,0);
360: /* GET POSITION AT HALF TIME IN THE PAST */
361: if (c->velocityInterpLocal) {
362: DMGetLocalVector(c->velocityDA, &velocityLocal);
363: DMGetLocalVector(c->velocityDA, &velocityLocalOld);
364: DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
365: DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal);
366: DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
367: DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld);
368: DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray);
369: DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
370: }
371: PetscInfo(PETSC_NULL, "Calculating position at t_{n - 1/2}\n");
372: for(Qi.j = js; Qi.j < je; Qi.j++) {
373: for(Qi.i = is; Qi.i < ie; Qi.i++) {
374: interpIndices[0] = Qi.i;
375: interpIndices[1] = Qi.j;
376: if (c->velocityInterpLocal) {
377: c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
378: } else {
379: c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
380: }
381: Qi.x = Qi.i - velocityValues[0]*dt/2.0;
382: Qi.y = Qi.j - velocityValues[1]*dt/2.0;
384: /* Determine whether the position at t - dt/2 is local */
385: Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);
387: /* Check for Periodic boundaries and move all periodic points back onto the domain */
388: DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));
389: CharacteristicAddPoint(c, &Qi);
390: }
391: }
392: PetscLogEventEnd(CHARACTERISTIC_QueueSetup,0,0,0,0);
394: PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
395: CharacteristicSendCoordinatesBegin(c);
396: PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
398: PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
399: /* Calculate velocity at t_{n - 1/2} (local values) */
400: PetscInfo(PETSC_NULL, "Calculating local velocities at t_{n - 1/2}\n");
401: for(n = 0; n < c->queueSize; n++) {
402: Qi = c->queue[n];
403: if (c->neighbors[Qi.proc] == rank) {
404: interpIndices[0] = Qi.x;
405: interpIndices[1] = Qi.y;
406: if (c->velocityInterpLocal) {
407: c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
408: c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
409: } else {
410: c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
411: c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
412: }
413: Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
414: Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
415: }
416: c->queue[n] = Qi;
417: }
418: PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,0,0,0,0);
420: PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
421: CharacteristicSendCoordinatesEnd(c);
422: PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
425: /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
426: PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
427: PetscInfo1(PETSC_NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize);
428: for(n = 0; n < c->queueRemoteSize; n++) {
429: Qi = c->queueRemote[n];
430: interpIndices[0] = Qi.x;
431: interpIndices[1] = Qi.y;
432: if (c->velocityInterpLocal) {
433: c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
434: c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
435: } else {
436: c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx);
437: c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx);
438: }
439: Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
440: Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
441: c->queueRemote[n] = Qi;
442: }
443: PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,0,0,0,0);
444: PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
445: CharacteristicGetValuesBegin(c);
446: CharacteristicGetValuesEnd(c);
447: if (c->velocityInterpLocal) {
448: DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray);
449: DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld);
450: DMRestoreLocalVector(c->velocityDA, &velocityLocal);
451: DMRestoreLocalVector(c->velocityDA, &velocityLocalOld);
452: }
453: PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,0,0,0,0);
455: /* -----------------------------------------------------------------------
456: PART 2, AT t-dt
457: -----------------------------------------------------------------------*/
459: /* GET POSITION AT t_n (local values) */
460: PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
461: PetscInfo(PETSC_NULL, "Calculating position at t_{n}\n");
462: for(n = 0; n < c->queueSize; n++) {
463: Qi = c->queue[n];
464: Qi.x = Qi.i - Qi.x*dt;
465: Qi.y = Qi.j - Qi.y*dt;
467: /* Determine whether the position at t-dt is local */
468: Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);
470: /* Check for Periodic boundaries and move all periodic points back onto the domain */
471: DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y));
473: c->queue[n] = Qi;
474: }
475: PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
477: PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
478: CharacteristicSendCoordinatesBegin(c);
479: PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
481: /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
482: PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
483: if (c->fieldInterpLocal) {
484: DMGetLocalVector(c->fieldDA, &fieldLocal);
485: DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
486: DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal);
487: DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray);
488: }
489: PetscInfo(PETSC_NULL, "Calculating local field at t_{n}\n");
490: for(n = 0; n < c->queueSize; n++) {
491: if (c->neighbors[c->queue[n].proc] == rank) {
492: interpIndices[0] = c->queue[n].x;
493: interpIndices[1] = c->queue[n].y;
494: if (c->fieldInterpLocal) {
495: c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
496: } else {
497: c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
498: }
499: for(comp = 0; comp < c->numFieldComp; comp++) {
500: c->queue[n].field[comp] = fieldValues[comp];
501: }
502: }
503: }
504: PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,0,0,0,0);
506: PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
507: CharacteristicSendCoordinatesEnd(c);
508: PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
510: /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
511: PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
512: PetscInfo1(PETSC_NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize);
513: for(n = 0; n < c->queueRemoteSize; n++) {
514: interpIndices[0] = c->queueRemote[n].x;
515: interpIndices[1] = c->queueRemote[n].y;
517: /* for debugging purposes */
518: if (1) { /* crude bounds test; this should be replaced with a proper check */
519: PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];
521: if (( im < (PetscScalar) is - 1.) || (im > (PetscScalar) ie) || (jm < (PetscScalar) js - 1.) || (jm > (PetscScalar) je)) {
522: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
523: }
524: }
526: if (c->fieldInterpLocal) {
527: c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
528: } else {
529: c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx);
530: }
531: for(comp = 0; comp < c->numFieldComp; comp++) {
532: c->queueRemote[n].field[comp] = fieldValues[comp];
533: }
534: }
535: PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,0,0,0,0);
537: PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
538: CharacteristicGetValuesBegin(c);
539: CharacteristicGetValuesEnd(c);
540: if (c->fieldInterpLocal) {
541: DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray);
542: DMRestoreLocalVector(c->fieldDA, &fieldLocal);
543: }
544: PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,0,0,0,0);
546: /* Return field of characteristics at t_n-1 */
547: PetscLogEventBegin(CHARACTERISTIC_DAUpdate,0,0,0,0);
548: DMDAGetInfo(c->fieldDA,0,0,0,0,0,0,0,&dof,0,0,0,0,0);
549: DMDAVecGetArray(c->fieldDA, solution, &solArray);
550: for(n = 0; n < c->queueSize; n++) {
551: Qi = c->queue[n];
552: for(comp = 0; comp < c->numFieldComp; comp++) {
553: solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
554: }
555: }
556: DMDAVecRestoreArray(c->fieldDA, solution, &solArray);
557: PetscLogEventEnd(CHARACTERISTIC_DAUpdate,0,0,0,0);
558: PetscLogEventEnd(CHARACTERISTIC_Solve,0,0,0,0);
560: /* Cleanup */
561: PetscFree(interpIndices);
562: PetscFree(velocityValues);
563: PetscFree(velocityValuesOld);
564: PetscFree(fieldValues);
565: return(0);
566: }
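/*
   A minimal driver sketch for the solve above (illustrative only, guarded out of the build).
   It assumes the caller already owns a 2D velocity DMDA and a field DMDA with matching global
   vectors, plus two local interpolation callbacks; velDA, fieldDA, vel, velOld, phi, phiNew,
   dt, VelInterpLocal and FieldInterpLocal are all hypothetical names supplied by the caller.
*/
#if 0
extern PetscErrorCode VelInterpLocal(void *, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *);   /* hypothetical */
extern PetscErrorCode FieldInterpLocal(void *, PetscReal [], PetscInt, PetscInt [], PetscScalar [], void *); /* hypothetical */

static PetscErrorCode AdvectFieldExample(DM velDA, DM fieldDA, Vec vel, Vec velOld, Vec phi, Vec phiNew, PetscReal dt)
{
  Characteristic c;
  PetscInt       velComp[2]   = {0, 1}; /* x- and y-velocity components */
  PetscInt       fieldComp[1] = {0};    /* single advected field component */
  PetscErrorCode ierr;

  ierr = CharacteristicCreate(PETSC_COMM_WORLD, &c);CHKERRQ(ierr);
  ierr = CharacteristicSetVelocityInterpolationLocal(c, velDA, vel, velOld, 2, velComp, VelInterpLocal, PETSC_NULL);CHKERRQ(ierr);
  ierr = CharacteristicSetFieldInterpolationLocal(c, fieldDA, phi, 1, fieldComp, FieldInterpLocal, PETSC_NULL);CHKERRQ(ierr);
  ierr = CharacteristicSolve(c, dt, phiNew);CHKERRQ(ierr); /* setup and neighbor exchange happen inside */
  ierr = CharacteristicDestroy(&c);CHKERRQ(ierr);
  return(0);
}
#endif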
570: PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
571: {
575: c->numNeighbors = numNeighbors;
576: PetscFree(c->neighbors);
577: PetscMalloc(numNeighbors * sizeof(PetscMPIInt), &c->neighbors);
578: PetscMemcpy(c->neighbors, neighbors, numNeighbors * sizeof(PetscMPIInt));
579: return(0);
580: }
584: PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
585: {
587: if (c->queueSize >= c->queueMax) {
588: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
589: }
590: c->queue[c->queueSize++] = *point;
591: return(0);
592: }
596: PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
597: {
598: PetscMPIInt rank, tag = 121;
599: PetscInt i, n;
603: MPI_Comm_rank(((PetscObject)c)->comm, &rank);
604: HeapSort(c, c->queue, c->queueSize);
605: PetscMemzero(c->needCount, c->numNeighbors * sizeof(PetscInt));
606: for(i = 0; i < c->queueSize; i++) {
607: c->needCount[c->queue[i].proc]++;
608: }
609: c->fillCount[0] = 0;
610: for(n = 1; n < c->numNeighbors; n++) {
611: MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, ((PetscObject)c)->comm, &(c->request[n-1]));
612: }
613: for(n = 1; n < c->numNeighbors; n++) {
614: MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, ((PetscObject)c)->comm);
615: }
616: MPI_Waitall(c->numNeighbors-1, c->request, c->status);
617: /* Initialize the remote queue */
618: c->queueLocalMax = c->localOffsets[0] = 0;
619: c->queueRemoteMax = c->remoteOffsets[0] = 0;
620: for(n = 1; n < c->numNeighbors; n++) {
621: c->remoteOffsets[n] = c->queueRemoteMax;
622: c->queueRemoteMax += c->fillCount[n];
623: c->localOffsets[n] = c->queueLocalMax;
624: c->queueLocalMax += c->needCount[n];
625: }
626: /* HACK BEGIN */
627: for(n = 1; n < c->numNeighbors; n++) {
628: c->localOffsets[n] += c->needCount[0];
629: }
630: c->needCount[0] = 0;
631: /* HACK END */
632: if (c->queueRemoteMax) {
633: PetscMalloc(sizeof(CharacteristicPointDA2D) * c->queueRemoteMax, &c->queueRemote);
634: } else {
635: c->queueRemote = PETSC_NULL;
636: }
637: c->queueRemoteSize = c->queueRemoteMax;
639: /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
640: for(n = 1; n < c->numNeighbors; n++) {
641: PetscInfo2(PETSC_NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]);
642: MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, ((PetscObject)c)->comm, &(c->request[n-1]));
643: }
644: for(n = 1; n < c->numNeighbors; n++) {
645: PetscInfo2(PETSC_NULL, "Sending %d requests for values from proc %d\n", c->needCount[n], c->neighbors[n]);
646: MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, ((PetscObject)c)->comm);
647: }
648: return(0);
649: }
653: PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
654: {
655: #if 0
656: PetscMPIInt rank;
657: PetscInt n;
658: #endif
662: MPI_Waitall(c->numNeighbors-1, c->request, c->status);
663: #if 0
664: MPI_Comm_rank(((PetscObject)c)->comm, &rank);
665: for(n = 0; n < c->queueRemoteSize; n++) {
666: if (c->neighbors[c->queueRemote[n].proc] == rank) {
667: SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB, "Remote queue entry maps back to this process, n = %d proc = %d", n, c->queueRemote[n].proc);
668: }
669: }
670: #endif
671: return(0);
672: }
676: PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
677: {
678: PetscMPIInt tag = 121;
679: PetscInt n;
683: /* SEND AND RECEIVE FILLED REQUESTS for the interpolated values */
684: for(n = 1; n < c->numNeighbors; n++) {
685: MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, ((PetscObject)c)->comm, &(c->request[n-1]));
686: }
687: for(n = 1; n < c->numNeighbors; n++) {
688: MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, ((PetscObject)c)->comm);
689: }
690: return(0);
691: }
695: PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
696: {
700: MPI_Waitall(c->numNeighbors-1, c->request, c->status);
701: /* Free queue of requests from other procs */
702: PetscFree(c->queueRemote);
703: return(0);
704: }
706: /*---------------------------------------------------------------------*/
709: /*
710: Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
711: */
712: PetscErrorCode HeapSort(Characteristic c, Queue queue, PetscInt size)
713: /*---------------------------------------------------------------------*/
714: {
715: CharacteristicPointDA2D temp;
716: PetscInt n;
717:
718: if (0) { /* Check the order of the queue before sorting */
719: PetscInfo(PETSC_NULL, "Before Heap sort\n");
720: for (n=0; n<size; n++) {
721: PetscInfo2(PETSC_NULL,"%d %d\n",n,queue[n].proc);
722: }
723: }
725: /* SORTING PHASE */
726: for (n = (size / 2)-1; n >= 0; n--)
727: SiftDown(c, queue, n, size-1); /* Rich had size-1 here, Matt had size*/
728: for (n = size-1; n >= 1; n--) {
729: temp = queue[0];
730: queue[0] = queue[n];
731: queue[n] = temp;
732: SiftDown(c, queue, 0, n-1);
733: }
734: if (0) { /* Check the order of the queue after sorting */
735: PetscInfo(PETSC_NULL, "After Heap sort\n");
736: for (n=0; n<size; n++) {
737: PetscInfo2(PETSC_NULL,"%d %d\n",n,queue[n].proc);
738: }
739: }
740: return 0;
741: }
743: /*---------------------------------------------------------------------*/
746: /*
747: Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
748: */
749: PetscErrorCode SiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
750: /*---------------------------------------------------------------------*/
751: {
752: PetscBool done = PETSC_FALSE;
753: PetscInt maxChild;
754: CharacteristicPointDA2D temp;
757: while ((root*2 <= bottom) && (!done)) {
758: if (root*2 == bottom) maxChild = root * 2;
759: else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
760: else maxChild = root * 2 + 1;
762: if (queue[root].proc < queue[maxChild].proc) {
763: temp = queue[root];
764: queue[root] = queue[maxChild];
765: queue[maxChild] = temp;
766: root = maxChild;
767: } else
768: done = PETSC_TRUE;
769: }
770: return(0);
771: }
775: /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
776: PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
777: {
778: DMDABoundaryType bx, by;
779: PetscBool IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
780: MPI_Comm comm;
781: PetscMPIInt rank;
782: PetscInt **procs,pi,pj,pim,pip,pjm,pjp,PI,PJ;
786: PetscObjectGetComm((PetscObject) da, &comm);
787: MPI_Comm_rank(comm, &rank);
788: DMDAGetInfo(da, 0, 0, 0, 0, &PI,&PJ, 0, 0, 0, &bx, &by,0, 0);
790: if (bx == DMDA_BOUNDARY_PERIODIC) {
791: IPeriodic = PETSC_TRUE;
792: }
793: if (by == DMDA_BOUNDARY_PERIODIC) {
794: JPeriodic = PETSC_TRUE;
795: }
797: neighbors[0] = rank;
798: rank = 0;
799: PetscMalloc(sizeof(PetscInt*)*PJ,&procs);
800: for (pj=0;pj<PJ;pj++) {
801: PetscMalloc(sizeof(PetscInt)*PI,&(procs[pj]));
802: for (pi=0;pi<PI;pi++) {
803: procs[pj][pi] = rank;
804: rank++;
805: }
806: }
807:
808: pi = neighbors[0] % PI;
809: pj = neighbors[0] / PI;
810: pim = pi-1; if (pim<0) pim=PI-1;
811: pip = (pi+1)%PI;
812: pjm = pj-1; if (pjm<0) pjm=PJ-1;
813: pjp = (pj+1)%PJ;
815: neighbors[1] = procs[pj] [pim];
816: neighbors[2] = procs[pjp][pim];
817: neighbors[3] = procs[pjp][pi];
818: neighbors[4] = procs[pjp][pip];
819: neighbors[5] = procs[pj] [pip];
820: neighbors[6] = procs[pjm][pip];
821: neighbors[7] = procs[pjm][pi];
822: neighbors[8] = procs[pjm][pim];
824: if (!IPeriodic) {
825: if (pi==0) neighbors[1]=neighbors[2]=neighbors[8]=neighbors[0];
826: if (pi==PI-1) neighbors[4]=neighbors[5]=neighbors[6]=neighbors[0];
827: }
829: if (!JPeriodic) {
830: if (pj==0) neighbors[6]=neighbors[7]=neighbors[8]=neighbors[0];
831: if (pj==PJ-1) neighbors[2]=neighbors[3]=neighbors[4]=neighbors[0];
832: }
834: for(pj = 0; pj < PJ; pj++) {
835: PetscFree(procs[pj]);
836: }
837: PetscFree(procs);
838: return(0);
839: }
843: /*
844: SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
845: 2 | 3 | 4
846: __|___|__
847: 1 | 0 | 5
848: __|___|__
849: 8 | 7 | 6
850: | |
851: */
852: PetscInt DMDAGetNeighborRelative(DM da, PassiveReal ir, PassiveReal jr)
853: {
854: DMDALocalInfo info;
855: PassiveReal is,ie,js,je;
857:
858: DMDAGetLocalInfo(da, &info);
859: is = (PassiveReal) info.xs - 0.5; ie = (PassiveReal) info.xs + info.xm - 0.5;
860: js = (PassiveReal) info.ys - 0.5; je = (PassiveReal) info.ys + info.ym - 0.5;
861:
862: if (ir >= is && ir <= ie) { /* center column */
863: if (jr >= js && jr <= je) {
864: return 0;
865: } else if (jr < js) {
866: return 7;
867: } else {
868: return 3;
869: }
870: } else if (ir < is) { /* left column */
871: if (jr >= js && jr <= je) {
872: return 1;
873: } else if (jr < js) {
874: return 8;
875: } else {
876: return 2;
877: }
878: } else { /* right column */
879: if (jr >= js && jr <= je) {
880: return 5;
881: } else if (jr < js) {
882: return 6;
883: } else {
884: return 4;
885: }
886: }
887: }