Actual source code: dacorn.c
2: /*
3: Code for manipulating distributed regular arrays in parallel.
4: */
6: #include <private/daimpl.h> /*I "petscdmda.h" I*/
10: /*@
11: DMDASetCoordinates - Sets into the DMDA a vector that indicates the
12: coordinates of the local nodes (NOT including ghost nodes).
14: Collective on DMDA
16:    Input Parameters:
17: + da - the distributed array
18: - c - coordinate vector
20: Note:
21:     The coordinates should NOT include those for ghost points
23: Level: intermediate
25: .keywords: distributed array, get, corners, nodes, local indices, coordinates
27: .seealso: DMDASetGhostedCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
28: @*/
29: PetscErrorCode DMDASetCoordinates(DM da,Vec c)
30: {
32: DM_DA *dd = (DM_DA*)da->data;
37: PetscObjectReference((PetscObject)c);
38: VecDestroy(&dd->coordinates);
39: dd->coordinates = c;
40: VecSetBlockSize(c,dd->dim);
41: VecDestroy(&dd->ghosted_coordinates);
42: return(0);
43: }
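/*
   Usage sketch (illustrative only, not part of this file): build a coordinate vector for a
   1d DMDA and hand it to DMDASetCoordinates(). The function name ExampleSetLinearCoordinates1d
   and the uniform spacing h are assumptions made for the example; since DMDASetCoordinates()
   keeps its own reference, the caller drops its reference afterwards.
*/
static PetscErrorCode ExampleSetLinearCoordinates1d(DM da,PetscReal h)
{
  DM          cda;
  Vec         c;
  PetscInt    i,xs,xm;
  PetscScalar *x;

  DMDAGetCoordinateDA(da,&cda);                /* DMDA describing the coordinate layout */
  DMCreateGlobalVector(cda,&c);                /* one coordinate per locally owned node */
  DMDAVecGetArray(cda,c,&x);
  DMDAGetCorners(da,&xs,0,0,&xm,0,0);
  for (i=xs; i<xs+xm; i++) x[i] = i*h;         /* fill only the locally owned portion */
  DMDAVecRestoreArray(cda,c,&x);
  DMDASetCoordinates(da,c);                    /* the DMDA keeps its own reference */
  VecDestroy(&c);
  return(0);
}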
47: /*@
48: DMDASetGhostedCoordinates - Sets into the DMDA a vector that indicates the
49: coordinates of the local nodes, including ghost nodes.
51: Collective on DMDA
53:    Input Parameters:
54: + da - the distributed array
55: - c - coordinate vector
57: Note:
58: The coordinates of interior ghost points can be set using DMDASetCoordinates()
59: followed by DMDAGetGhostedCoordinates(). This is intended to enable the setting
60: of ghost coordinates outside of the domain.
62: Non-ghosted coordinates, if set, are assumed still valid.
64: Level: intermediate
66: .keywords: distributed array, get, corners, nodes, local indices, coordinates
68: .seealso: DMDASetCoordinates(), DMDAGetGhostCorners(), DMDAGetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
69: @*/
70: PetscErrorCode DMDASetGhostedCoordinates(DM da,Vec c)
71: {
73: DM_DA *dd = (DM_DA*)da->data;
78: PetscObjectReference((PetscObject)c);
79: VecDestroy(&dd->ghosted_coordinates);
80: dd->ghosted_coordinates = c;
81: VecSetBlockSize(c,dd->dim);
82: return(0);
83: }
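/*
   Usage sketch (illustrative only): supply ghosted coordinates explicitly for a 1d DMDA that
   was created with DMDA_BOUNDARY_GHOSTED, so that ghost nodes lying outside the physical
   domain also carry coordinates. The name ExampleSetGhostedCoordinates1d and the uniform
   spacing h are assumptions made for this sketch.
*/
static PetscErrorCode ExampleSetGhostedCoordinates1d(DM da,PetscReal h)
{
  DM          cda;
  Vec         gc;
  PetscInt    i,gxs,gxm;
  PetscScalar *x;

  DMDAGetCoordinateDA(da,&cda);
  DMCreateLocalVector(cda,&gc);                /* local vector: owned plus ghost nodes */
  DMDAVecGetArray(cda,gc,&x);
  DMDAGetGhostCorners(da,&gxs,0,0,&gxm,0,0);
  for (i=gxs; i<gxs+gxm; i++) x[i] = i*h;      /* ghost indices may lie outside [0,M) */
  DMDAVecRestoreArray(cda,gc,&x);
  DMDASetGhostedCoordinates(da,gc);            /* the DMDA keeps its own reference */
  VecDestroy(&gc);
  return(0);
}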
87: /*@
88: DMDAGetCoordinates - Gets the node coordinates associated with a DMDA.
90: Not Collective
92: Input Parameter:
93: . da - the distributed array
95: Output Parameter:
96: . c - coordinate vector
98: Note:
99: Each process has only the coordinates for its local nodes (does NOT have the
100: coordinates for the ghost nodes).
102:     For two and three dimensions, the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
103:     and (x_0,y_0,z_0,x_1,y_1,z_1,...)
105: Level: intermediate
107: .keywords: distributed array, get, corners, nodes, local indices, coordinates
109: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetGhostedCoordinates(), DMDAGetCoordinateDA()
110: @*/
111: PetscErrorCode DMDAGetCoordinates(DM da,Vec *c)
112: {
113: DM_DA *dd = (DM_DA*)da->data;
117: *c = dd->coordinates;
118: return(0);
119: }
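/*
   Usage sketch (illustrative only): read the interlaced local coordinates of a 2d DMDA
   directly from the vector returned by DMDAGetCoordinates(); the vector is borrowed, so it
   must not be destroyed. The function name is an assumption made for the example.
*/
static PetscErrorCode ExamplePrintLocalCoordinates2d(DM da)
{
  Vec               c;
  PetscInt          i,n;
  const PetscScalar *a;

  DMDAGetCoordinates(da,&c);
  VecGetLocalSize(c,&n);                       /* n = 2 * (number of locally owned nodes) */
  VecGetArrayRead(c,&a);
  for (i=0; i<n; i+=2) {
    PetscPrintf(PETSC_COMM_SELF,"(%G,%G)\n",PetscRealPart(a[i]),PetscRealPart(a[i+1]));
  }
  VecRestoreArrayRead(c,&a);
  return(0);
}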
123: /*@
124: DMDAGetCoordinateDA - Gets the DMDA that scatters between global and local DMDA coordinates
126: Collective on DMDA
128: Input Parameter:
129: . da - the distributed array
131: Output Parameter:
132: .  cda - coordinate DMDA
134: Level: intermediate
136: .keywords: distributed array, get, corners, nodes, local indices, coordinates
138: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
139: @*/
140: PetscErrorCode DMDAGetCoordinateDA(DM da,DM *cda)
141: {
142: PetscMPIInt size;
144: DM_DA *dd = (DM_DA*)da->data;
147: if (!dd->da_coordinates) {
148: MPI_Comm_size(((PetscObject)da)->comm,&size);
149: if (dd->dim == 1) {
150: PetscInt s,m,*lc,l;
151: DMDABoundaryType bx;
152: DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);
153: DMDAGetCorners(da,0,0,0,&l,0,0);
154: PetscMalloc(size*sizeof(PetscInt),&lc);
155: MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
156: DMDACreate1d(((PetscObject)da)->comm,bx,m,1,s,lc,&dd->da_coordinates);
157: PetscFree(lc);
158: } else if (dd->dim == 2) {
159: PetscInt i,s,m,*lc,*ld,l,k,n,M,N;
160: DMDABoundaryType bx,by;
161: DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);
162: DMDAGetCorners(da,0,0,0,&l,&k,0);
163: PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);
164: /* only first M values in lc matter */
165: MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
166: /* every Mth value in ld matters */
167: MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
168: for ( i=0; i<N; i++) {
169: ld[i] = ld[M*i];
170: }
171: DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&dd->da_coordinates);
172: PetscFree2(lc,ld);
173: } else if (dd->dim == 3) {
174: PetscInt i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
175: DMDABoundaryType bx,by,bz;
176: DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);
177: DMDAGetCorners(da,0,0,0,&l,&k,&q);
178: PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);
179: /* only first M values in lc matter */
180: MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
181: /* every Mth value in ld matters */
182: MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
183: for ( i=0; i<N; i++) {
184: ld[i] = ld[M*i];
185: }
186: MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);
187: for ( i=0; i<P; i++) {
188: le[i] = le[M*N*i];
189: }
190: DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&dd->da_coordinates);
191: PetscFree3(lc,ld,le);
192: }
193: }
194: *cda = dd->da_coordinates;
195: return(0);
196: }
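/*
   Usage sketch (illustrative only): use the coordinate DMDA to view the coordinates of a
   2d DMDA as a structured array of DMDACoor2d, indexed by global (j,i); the function name
   is an assumption made for the example.
*/
static PetscErrorCode ExampleViewCoordinates2d(DM da)
{
  DM         cda;
  Vec        c;
  DMDACoor2d **coors;
  PetscInt   i,j,xs,ys,xm,ym;

  DMDAGetCoordinateDA(da,&cda);
  DMDAGetCoordinates(da,&c);
  DMDAVecGetArray(cda,c,&coors);
  DMDAGetCorners(cda,&xs,&ys,0,&xm,&ym,0);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      PetscPrintf(PETSC_COMM_SELF,"(%G,%G)\n",PetscRealPart(coors[j][i].x),PetscRealPart(coors[j][i].y));
    }
  }
  DMDAVecRestoreArray(cda,c,&coors);
  return(0);
}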
201: /*@
202:    DMDAGetGhostedCoordinates - Gets the node coordinates associated with a DMDA, including those for ghost nodes.
204: Collective on DMDA
206: Input Parameter:
207: . da - the distributed array
209: Output Parameter:
210: . c - coordinate vector
212: Note:
213: Each process has only the coordinates for its local AND ghost nodes
215:     For two and three dimensions, the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
216:     and (x_0,y_0,z_0,x_1,y_1,z_1,...)
218: Level: intermediate
220: .keywords: distributed array, get, corners, nodes, local indices, coordinates
222: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetCoordinateDA()
223: @*/
224: PetscErrorCode DMDAGetGhostedCoordinates(DM da,Vec *c)
225: {
227: DM_DA *dd = (DM_DA*)da->data;
232: if (!dd->coordinates) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"You must call DMDASetCoordinates() before this call");
233: if (!dd->ghosted_coordinates) {
234: DM dac;
235: DMDAGetCoordinateDA(da,&dac);
236: DMCreateLocalVector(dac,&dd->ghosted_coordinates);
237: DMGlobalToLocalBegin(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);
238: DMGlobalToLocalEnd(dac,dd->coordinates,INSERT_VALUES,dd->ghosted_coordinates);
239: }
240: *c = dd->ghosted_coordinates;
241: return(0);
242: }
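/*
   Usage sketch (illustrative only): use the ghosted coordinates of a 2d DMDA to compute a
   minimum mesh spacing in x, including spacings that straddle process boundaries. Requires
   that DMDASetCoordinates() (or DMDASetUniformCoordinates()) was called first; the function
   name and the choice of quantity computed are assumptions made for the example.
*/
static PetscErrorCode ExampleMinGhostedSpacingX2d(DM da,PetscReal *hmin)
{
  DM         cda;
  Vec        gc;
  DMDACoor2d **coors;
  PetscInt   i,j,gxs,gys,gxm,gym;
  PetscReal  h = PETSC_MAX_REAL;

  DMDAGetCoordinateDA(da,&cda);
  DMDAGetGhostedCoordinates(da,&gc);           /* borrowed reference; do not destroy */
  DMDAVecGetArray(cda,gc,&coors);
  DMDAGetGhostCorners(da,&gxs,&gys,0,&gxm,&gym,0);
  for (j=gys; j<gys+gym; j++) {
    for (i=gxs; i<gxs+gxm-1; i++) {
      h = PetscMin(h,PetscRealPart(coors[j][i+1].x - coors[j][i].x));
    }
  }
  DMDAVecRestoreArray(cda,gc,&coors);
  *hmin = h;
  return(0);
}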
246: /*@C
247: DMDASetFieldName - Sets the names of individual field components in multicomponent
248: vectors associated with a DMDA.
250: Not Collective
252: Input Parameters:
253: + da - the distributed array
254: . nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
255: number of degrees of freedom per node within the DMDA
256: -  name - the name of the field (component)
258: Level: intermediate
260: .keywords: distributed array, get, component name
262: .seealso: DMDAGetFieldName()
263: @*/
264: PetscErrorCode DMDASetFieldName(DM da,PetscInt nf,const char name[])
265: {
267: DM_DA *dd = (DM_DA*)da->data;
271: if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
272: PetscFree(dd->fieldname[nf]);
273: PetscStrallocpy(name,&dd->fieldname[nf]);
274: return(0);
275: }
279: /*@C
280: DMDAGetFieldName - Gets the names of individual field components in multicomponent
281: vectors associated with a DMDA.
283: Not Collective
285:    Input Parameters:
286: + da - the distributed array
287: - nf - field number for the DMDA (0, 1, ... dof-1), where dof indicates the
288: number of degrees of freedom per node within the DMDA
290: Output Parameter:
291: .  name - the name of the field (component)
293: Level: intermediate
295: .keywords: distributed array, get, component name
297: .seealso: DMDASetFieldName()
298: @*/
299: PetscErrorCode DMDAGetFieldName(DM da,PetscInt nf,const char **name)
300: {
301: DM_DA *dd = (DM_DA*)da->data;
306: if (nf < 0 || nf >= dd->w) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
307: *name = dd->fieldname[nf];
308: return(0);
309: }
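/*
   Usage sketch (illustrative only): name the two components of a 2-dof DMDA and read one
   name back; the field names "pressure" and "temperature" are assumptions made for the
   example.
*/
static PetscErrorCode ExampleNameFields(DM da)
{
  const char *name;

  DMDASetFieldName(da,0,"pressure");
  DMDASetFieldName(da,1,"temperature");
  DMDAGetFieldName(da,1,&name);                /* name now points at "temperature" */
  PetscPrintf(PETSC_COMM_SELF,"field 1: %s\n",name);
  return(0);
}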
313: /*@
314: DMDAGetCorners - Returns the global (x,y,z) indices of the lower left
315:    corner and the size of the local region, excluding ghost points.
317: Not Collective
319: Input Parameter:
320: . da - the distributed array
322: Output Parameters:
323: + x,y,z - the corner indices (where y and z are optional; these are used
324: for 2D and 3D problems)
325: - m,n,p - widths in the corresponding directions (where n and p are optional;
326: these are used for 2D and 3D problems)
328: Note:
329: The corner information is independent of the number of degrees of
330: freedom per node set with the DMDACreateXX() routine. Thus the x, y, z, and
331: m, n, p can be thought of as coordinates on a logical grid, where each
332: grid point has (potentially) several degrees of freedom.
333: Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.
335: Level: beginner
337: .keywords: distributed array, get, corners, nodes, local indices
339: .seealso: DMDAGetGhostCorners(), DMDAGetOwnershipRanges()
340: @*/
341: PetscErrorCode DMDAGetCorners(DM da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
342: {
343: PetscInt w;
344: DM_DA *dd = (DM_DA*)da->data;
348: /* since the xs, xe ... have all been multiplied by the number of degrees
349:      of freedom per node, w = dd->w, we divide that out before returning.*/
350: w = dd->w;
351:   if (x) *x = dd->xs/w; if (m) *m = (dd->xe - dd->xs)/w;
352: /* the y and z have NOT been multiplied by w */
353: if (y) *y = dd->ys; if (n) *n = (dd->ye - dd->ys);
354: if (z) *z = dd->zs; if (p) *p = (dd->ze - dd->zs);
355: return(0);
356: }
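/*
   Usage sketch (illustrative only): the typical loop over the locally owned nodes of a
   2d DMDA with dof = 1 using the corner information; the function name and the value
   stored at each node are assumptions made for the example.
*/
static PetscErrorCode ExampleFillGlobalVec2d(DM da,Vec g)
{
  PetscInt    i,j,xs,ys,xm,ym;
  PetscScalar **u;

  DMDAGetCorners(da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
  DMDAVecGetArray(da,g,&u);
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      u[j][i] = (PetscScalar)(i + j);          /* global indices; no ghost points visited */
    }
  }
  DMDAVecRestoreArray(da,g,&u);
  return(0);
}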
360: /*@
361: DMDAGetLocalBoundingBox - Returns the local bounding box for the DMDA.
363: Not Collective
365: Input Parameter:
366: . da - the distributed array
368: Output Parameters:
369: + lmin - local minimum coordinates (length dim, optional)
370: -  lmax - local maximum coordinates (length dim, optional)
372: Level: beginner
374: .keywords: distributed array, get, coordinates
376: .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetBoundingBox()
377: @*/
378: PetscErrorCode DMDAGetLocalBoundingBox(DM da,PetscReal lmin[],PetscReal lmax[])
379: {
380: PetscErrorCode ierr;
381: Vec coords = PETSC_NULL;
382: PetscInt dim,i,j;
383: const PetscScalar *local_coords;
384: PetscReal min[3]={PETSC_MAX_REAL,PETSC_MAX_REAL,PETSC_MAX_REAL},max[3]={PETSC_MIN_REAL,PETSC_MIN_REAL,PETSC_MIN_REAL};
385: PetscInt N,Ni;
386: DM_DA *dd = (DM_DA*)da->data;
390: dim = dd->dim;
391: DMDAGetCoordinates(da,&coords);
392: VecGetArrayRead(coords,&local_coords);
393: VecGetLocalSize(coords,&N);
394: Ni = N/dim;
395: for (i=0; i<Ni; i++) {
396: for (j=0; j<dim; j++) {
397: min[j] = PetscMin(min[j],PetscRealPart(local_coords[i*dim+j]));
398:       max[j] = PetscMax(max[j],PetscRealPart(local_coords[i*dim+j]));
399: }
400: }
401: VecRestoreArrayRead(coords,&local_coords);
402: if (lmin) {PetscMemcpy(lmin,min,dim*sizeof(PetscReal));}
403: if (lmax) {PetscMemcpy(lmax,max,dim*sizeof(PetscReal));}
404: return(0);
405: }
409: /*@
410: DMDAGetBoundingBox - Returns the global bounding box for the DMDA.
412: Collective on DMDA
414: Input Parameter:
415: . da - the distributed array
417: Output Parameters:
418: + gmin - global minimum coordinates (length dim, optional)
419: -  gmax - global maximum coordinates (length dim, optional)
421: Level: beginner
423: .keywords: distributed array, get, coordinates
425: .seealso: DMDAGetCoordinateDA(), DMDAGetCoordinates(), DMDAGetLocalBoundingBox()
426: @*/
427: PetscErrorCode DMDAGetBoundingBox(DM da,PetscReal gmin[],PetscReal gmax[])
428: {
430: PetscMPIInt count;
431: PetscReal lmin[3],lmax[3];
432: DM_DA *dd = (DM_DA*)da->data;
436: count = PetscMPIIntCast(dd->dim);
437: DMDAGetLocalBoundingBox(da,lmin,lmax);
438: if (gmin) {MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPIU_MIN,((PetscObject)da)->comm);}
439: if (gmax) {MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPIU_MAX,((PetscObject)da)->comm);}
440: return(0);
441: }
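/*
   Usage sketch (illustrative only): query the local and global bounding boxes of a 2d DMDA
   after its coordinates have been set (for instance with DMDASetUniformCoordinates()); the
   function name is an assumption made for the example.
*/
static PetscErrorCode ExamplePrintBoundingBoxes2d(DM da)
{
  PetscReal lmin[2],lmax[2],gmin[2],gmax[2];

  DMDAGetLocalBoundingBox(da,lmin,lmax);
  DMDAGetBoundingBox(da,gmin,gmax);
  PetscSynchronizedPrintf(((PetscObject)da)->comm,"local box:  [%G,%G] x [%G,%G]\n",lmin[0],lmax[0],lmin[1],lmax[1]);
  PetscSynchronizedFlush(((PetscObject)da)->comm);
  PetscPrintf(((PetscObject)da)->comm,"global box: [%G,%G] x [%G,%G]\n",gmin[0],gmax[0],gmin[1],gmax[1]);
  return(0);
}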
445: /*@
446:    DMDAGetReducedDA - Gets a DMDA with the same layout but with fewer or more fields
448: Collective on DMDA
450:    Input Parameters:
451: + da - the distributed array
452: -  nfields - number of fields in the new DMDA
454: Output Parameter:
455: . nda - the new DMDA
457: Level: intermediate
459: .keywords: distributed array, get, corners, nodes, local indices, coordinates
461: .seealso: DMDAGetGhostCorners(), DMDASetCoordinates(), DMDASetUniformCoordinates(), DMDAGetCoordinates(), DMDAGetGhostedCoordinates()
462: @*/
463: PetscErrorCode DMDAGetReducedDA(DM da,PetscInt nfields,DM *nda)
464: {
466: DM_DA *dd = (DM_DA*)da->data;
469: if (dd->dim == 1) {
470: PetscInt s,m,l;
471: DMDABoundaryType bx;
472: DMDAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&bx,0,0,0);
473: DMDAGetCorners(da,0,0,0,&l,0,0);
474: DMDACreate1d(((PetscObject)da)->comm,bx,m,nfields,s,dd->lx,nda);
475: } else if (dd->dim == 2) {
476: PetscInt s,m,l,k,n,M,N;
477: DMDABoundaryType bx,by;
478: DMDAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&bx,&by,0,0);
479: DMDAGetCorners(da,0,0,0,&l,&k,0);
480: DMDACreate2d(((PetscObject)da)->comm,bx,by,DMDA_STENCIL_BOX,m,n,M,N,nfields,s,dd->lx,dd->ly,nda);
481: } else if (dd->dim == 3) {
482: PetscInt s,m,l,k,q,n,M,N,P,p;
483: DMDABoundaryType bx,by,bz;
484: DMDAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&bx,&by,&bz,0);
485: DMDAGetCorners(da,0,0,0,&l,&k,&q);
486:     DMDACreate3d(((PetscObject)da)->comm,bx,by,bz,DMDA_STENCIL_BOX,m,n,p,M,N,P,nfields,s,dd->lx,dd->ly,dd->lz,nda);
487: }
488: if (dd->coordinates) {
489: DM_DA *ndd = (DM_DA*)(*nda)->data;
490: PetscObjectReference((PetscObject)dd->coordinates);
491: ndd->coordinates = dd->coordinates;
492: }
493: return(0);
494: }
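/*
   Usage sketch (illustrative only): create a single-field DMDA with the same parallel layout
   as a multi-field DMDA, e.g. to hold one component in a work vector; the function name is
   an assumption made for the example. The DMDA created by DMDAGetReducedDA() is not cached
   in da, so the sketch destroys it when done.
*/
static PetscErrorCode ExampleSingleFieldWork(DM da)
{
  DM  rda;
  Vec work;

  DMDAGetReducedDA(da,1,&rda);                 /* same grid and ownership ranges, dof = 1 */
  DMCreateGlobalVector(rda,&work);
  /* ... operate on the single-component vector ... */
  VecDestroy(&work);
  DMDestroy(&rda);
  return(0);
}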