Actual source code: da1.c

  2: /* 
  3:    Code for manipulating distributed regular 1d arrays in parallel.
  4:    This file was created by Peter Mell   6/30/95    
  5: */

  7: #include <private/daimpl.h>     /*I  "petscdmda.h"   I*/

  9: const char *DMDABoundaryTypes[] = {"BOUNDARY_NONE","BOUNDARY_GHOSTED","BOUNDARY_PERIODIC","DMDA_",0};

 13: PetscErrorCode DMView_DA_1d(DM da,PetscViewer viewer)
 14: {
 16:   PetscMPIInt    rank;
 17:   PetscBool      iascii,isdraw,isbinary;
 18:   DM_DA          *dd = (DM_DA*)da->data;
 19: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 20:   PetscBool      ismatlab;
 21: #endif

 24:   MPI_Comm_rank(((PetscObject)da)->comm,&rank);

 26:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 27:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
 28:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
 29: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 30:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
 31: #endif
 32:   if (iascii) {
 33:     PetscViewerFormat format;

 35:     PetscViewerGetFormat(viewer, &format);
 36:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
 37:       DMDALocalInfo info;
 38:       DMDAGetLocalInfo(da,&info);
 39:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 40:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,dd->M,dd->m,dd->w,dd->s);
 41:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
 42:       PetscViewerFlush(viewer);
 43:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 44:     } else {
 45:       DMView_DA_VTK(da, viewer);
 46:     }
 47:   } else if (isdraw) {
 48:     PetscDraw  draw;
 49:     double     ymin = -1,ymax = 1,xmin = -1,xmax = dd->M,x;
 50:     PetscInt   base;
 51:     char       node[10];
 52:     PetscBool  isnull;

 54:     PetscViewerDrawGetDraw(viewer,0,&draw);
 55:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

 57:     PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
 58:     PetscDrawSynchronizedClear(draw);

 60:     /* first processor draws all node lines */
 61:     if (!rank) {
 62:       PetscInt xmin_tmp;
 63:       ymin = 0.0; ymax = 0.3;
 64: 
 65:       /* ADIC doesn't like doubles in a for loop */
 66:       for (xmin_tmp =0; xmin_tmp < dd->M; xmin_tmp++) {
 67:          PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
 68:       }

 70:       xmin = 0.0; xmax = dd->M - 1;
 71:       PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
 72:       PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
 73:     }

 75:     PetscDrawSynchronizedFlush(draw);
 76:     PetscDrawPause(draw);

 78:     /* draw my box */
 79:     ymin = 0; ymax = 0.3; xmin = dd->xs / dd->w; xmax = (dd->xe / dd->w)  - 1;
 80:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
 81:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
 82:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
 83:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

 85:     /* Put in index numbers */
 86:     base = dd->base / dd->w;
 87:     for (x=xmin; x<=xmax; x++) {
 88:       sprintf(node,"%d",(int)base++);
 89:       PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
 90:     }

 92:     PetscDrawSynchronizedFlush(draw);
 93:     PetscDrawPause(draw);
 94:   } else if (isbinary){
 95:     DMView_DA_Binary(da,viewer);
 96: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 97:   } else if (ismatlab) {
 98:     DMView_DA_Matlab(da,viewer);
 99: #endif
100:   } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
101:   return(0);
102: }
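/*
   Illustrative usage sketch (not part of the original da1.c): how a caller reaches the
   ASCII and draw branches of DMView_DA_1d() above through the public DMView() interface;
   error checking is omitted to match the listing style.

     DM da;    // assumed to have been created with DMDACreate1d()
     DMView(da,PETSC_VIEWER_STDOUT_WORLD);               // ASCII branch: per-rank ownership ranges
     DMView(da,PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD));    // draw branch: node lines plus each rank's box
*/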

106: /*
107:     Processes command line options to determine if/how a DMDA
108:     is to be viewed. Called by DMDACreateXX()
109: */
110: PetscErrorCode DMView_DA_Private(DM da)
111: {
113:   PetscBool      flg1 = PETSC_FALSE;
114:   PetscViewer    view;

117:   PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DMDA viewing options","DMDA");
118:     PetscOptionsBool("-da_view","Print information about the DMDA's distribution","DMView",PETSC_FALSE,&flg1,PETSC_NULL);
119:     if (flg1) {
120:       PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);
121:       DMView(da,view);
122:     }
123:     flg1 = PETSC_FALSE;
124:     PetscOptionsBool("-da_view_draw","Draw how the DMDA is distributed","DMView",PETSC_FALSE,&flg1,PETSC_NULL);
125:     if (flg1) {DMView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));}
126:   PetscOptionsEnd();
127:   return(0);
128: }
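/*
   Illustrative run-time usage (not part of the original da1.c): the options processed
   above are given on the application's command line; the executable name is hypothetical.

     ./app -da_view           (print the distribution with the ASCII viewer)
     ./app -da_view_draw      (draw the distribution in an X window)
*/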

132: PetscErrorCode  DMSetUp_DA_1D(DM da)
133: {
134:   DM_DA                  *dd = (DM_DA*)da->data;
135:   const PetscInt         M     = dd->M;
136:   const PetscInt         dof   = dd->w;
137:   const PetscInt         s     = dd->s;
138:   const PetscInt         sDist = s*dof;  /* absolute stencil distance */
139:   const PetscInt         *lx    = dd->lx;
140:   const DMDABoundaryType bx  = dd->bx;
141:   MPI_Comm               comm;
142:   Vec                    local, global;
143:   VecScatter             ltog, gtol;
144:   IS                     to, from;
145:   PetscBool              flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
146:   PetscMPIInt            rank, size;
147:   PetscInt               i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m,IXs,IXe;
148:   PetscErrorCode         ierr;

151:   if (dof < 1) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
152:   if (s < 0) SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

154:   dd->dim = 1;
155:   PetscMalloc(dof*sizeof(char*),&dd->fieldname);
156:   PetscMemzero(dd->fieldname,dof*sizeof(char*));
157:   PetscObjectGetComm((PetscObject) da, &comm);
158:   MPI_Comm_size(comm,&size);
159:   MPI_Comm_rank(comm,&rank);

161:   dd->m = size;
162:   m     = dd->m;

164:   if (s > 0) {
165:     /* if not communicating data then it is ok to have nothing on some processes */
166:     if (M < m)     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"More processes than data points! %D %D",m,M);
167:     if ((M-1) < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
168:   }

170:   /* 
171:      Determine locally owned region 
172:      xs is the first local node number, x is the number of local nodes 
173:   */
174:   if (!lx) {
175:     PetscMalloc(m*sizeof(PetscInt), &dd->lx);
176:     PetscOptionsGetBool(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);
177:     PetscOptionsGetBool(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);
178:     if (flg1) {      /* Block Comm type Distribution */
179:       xs = rank*M/m;
180:       x  = (rank + 1)*M/m - xs;
181:     } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
182:       x = (M + rank)/m;
183:       if (M/m == x) { xs = rank*x; }
184:       else          { xs = rank*(x-1) + (M+rank)%(x*m); }
185:     } else { /* The leftover nodes are evenly distributed across the first M % m processes */
186:       /* Regular PETSc Distribution */
187:       x = M/m + ((M % m) > rank);
188:       if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
189:       else                 {xs = rank * (PetscInt)(M/m) + rank;}
190:     }
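    /* Worked example of the default distribution above (illustrative comment, not part of
       the original source): with M = 10 nodes and m = 3 processes, M % m = 1, so rank 0
       gets x = 4 starting at xs = 0, while ranks 1 and 2 get x = 3 starting at xs = 4
       and xs = 7 respectively. */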
191:     MPI_Allgather(&xs,1,MPIU_INT,dd->lx,1,MPIU_INT,comm);
192:     for (i=0; i<m-1; i++) dd->lx[i] = dd->lx[i+1] - dd->lx[i];
193:     dd->lx[m-1] = M - dd->lx[m-1];
194:   } else {
195:     x  = lx[rank];
196:     xs = 0;
197:     for (i=0; i<rank; i++) {
198:       xs += lx[i];
199:     }
200:     /* verify that data user provided is consistent */
201:     left = xs;
202:     for (i=rank; i<size; i++) {
203:       left += lx[i];
204:     }
205:     if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
206:   }

208:   /*
209:    check if the scatter requires more than one process neighbor or wraps around
210:    the domain more than once
211:   */
212:   if ((x < s) && ((M > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) {
213:     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
214:   }

216:   /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
217:   x  *= dof;
218:   xs *= dof;
219:   xe  = xs + x;

221:   /* determine ghost region */
222:   if (bx) {
223:     Xs = xs - sDist;
224:     Xe = xe + sDist;
225:   } else {
226:     if ((xs-sDist) >= 0)     Xs = xs-sDist;  else Xs = 0;
227:     if ((xe+sDist) <= M*dof) Xe = xe+sDist;  else Xe = M*dof;
228:   }

230:   if (bx == DMDA_BOUNDARY_PERIODIC) {
231:     IXs = xs - sDist;
232:     IXe = xe + sDist;
233:   } else {
234:     if ((xs-sDist) >= 0)     IXs = xs-sDist;  else IXs = 0;
235:     if ((xe+sDist) <= M*dof) IXe = xe+sDist;  else IXe = M*dof;
236:   }
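  /* Worked example of the two ghost ranges above (illustrative comment, not part of the
     original source): with dof = 1, M = 10, s = 2 and rank 0 owning xs = 0, xe = 4, a
     ghosted or periodic boundary stores Xs = -2, Xe = 6, while DMDA_BOUNDARY_NONE clips
     to Xs = 0, Xe = 6.  The communicated range IXs, IXe is clipped to the physical
     domain unless the boundary is periodic, so here IXs = 0, IXe = 6 except in the
     periodic case, where IXs = -2. */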

238:   /* allocate the base parallel and sequential vectors */
239:   dd->Nlocal = x;
240:   VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);
241:   VecSetBlockSize(global,dof);
242:   dd->nlocal = (Xe-Xs);
243:   VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);
244:   VecSetBlockSize(local,dof);

246:   /* Create Local to Global Vector Scatter Context */
247:   /* local to global inserts non-ghost point region into global */
248:   VecGetOwnershipRange(global,&start,&end);
249:   ISCreateStride(comm,x,start,1,&to);
250:   ISCreateStride(comm,x,xs-Xs,1,&from);
251:   VecScatterCreate(local,from,global,to,&ltog);
252:   PetscLogObjectParent(da,ltog);
253:   ISDestroy(&from);
254:   ISDestroy(&to);
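  /* For example (illustrative comment, not part of the original source): with x = 4,
     xs = 4 and Xs = 2, this scatter copies local entries 2..5 (the non-ghost part of the
     local array) into global entries start..start+3 owned by this process. */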

256:   /* Create Global to Local Vector Scatter Context */
257:   /* global to local must retrieve ghost points */
258:   ISCreateStride(comm,(IXe-IXs),IXs-Xs,1,&to);

260:   PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);
261:   PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));

263:   for (i=0; i<IXs-Xs; i++) {idx[i] = -1; } /* prepend with -1s if needed for ghosted case*/

265:   nn = IXs-Xs;
266:   if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle all cases with wrap first */
267:     for (i=0; i<sDist; i++) {  /* Left ghost points */
268:       if ((xs-sDist+i)>=0) { idx[nn++] = xs-sDist+i;}
269:       else                 { idx[nn++] = M*dof+(xs-sDist+i);}
270:     }

272:     for (i=0; i<x; i++) { idx [nn++] = xs + i;}  /* Non-ghost points */

274:     for (i=0; i<sDist; i++) { /* Right ghost points */
275:       if ((xe+i)<M*dof) { idx [nn++] =  xe+i; }
276:       else              { idx [nn++] = (xe+i) - M*dof;}
277:     }
278:   } else {      /* Now do all cases with no wrapping */
279:     if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
280:     else             {for (i=0; i<xs;    i++) {idx[nn++] = i;}}

282:     for (i=0; i<x; i++) { idx [nn++] = xs + i;}

284:     if ((xe+sDist)<=M*dof) {for (i=0;  i<sDist;   i++) {idx[nn++]=xe+i;}}
285:     else                   {for (i=xe; i<(M*dof); i++) {idx[nn++]=i;}}
286:   }
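  /* Worked example of the periodic wrap above (illustrative comment, not part of the
     original source): with dof = 1, M = 10, sDist = 2 and rank 0 owning xs = 0, xe = 4,
     the left ghosts wrap to global indices 8, 9, the non-ghost entries are 0..3 and the
     right ghosts are 4, 5, so idx = {8, 9, 0, 1, 2, 3, 4, 5} (here IXs = Xs, so there
     are no leading -1 entries). */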

288:   ISCreateGeneral(comm,nn-IXs+Xs,&idx[IXs-Xs],PETSC_COPY_VALUES,&from);
289:   VecScatterCreate(global,from,local,to,&gtol);
290:   PetscLogObjectParent(da,to);
291:   PetscLogObjectParent(da,from);
292:   PetscLogObjectParent(da,gtol);
293:   ISDestroy(&to);
294:   ISDestroy(&from);
295:   VecDestroy(&local);
296:   VecDestroy(&global);

298:   dd->xs = xs; dd->xe = xe; dd->ys = 0; dd->ye = 1; dd->zs = 0; dd->ze = 1;
299:   dd->Xs = Xs; dd->Xe = Xe; dd->Ys = 0; dd->Ye = 1; dd->Zs = 0; dd->Ze = 1;

301:   dd->gtol      = gtol;
302:   dd->ltog      = ltog;
303:   dd->base      = xs;
304:   da->ops->view = DMView_DA_1d;

306:   /* 
307:      Set the local to global ordering in the global vector; this allows use
308:      of VecSetValuesLocal().
309:   */
310:   for (i=0; i<Xe-IXe; i++) {idx[nn++] = -1; } /* pad with -1s if needed for ghosted case*/

312:   ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_COPY_VALUES,&da->ltogmap);
313:   ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);
314:   PetscLogObjectParent(da,da->ltogmap);
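  /* Illustrative sketch (not part of the original source) of what this mapping enables:
     given a global vector g obtained from DMCreateGlobalVector(), a caller can address it
     with local (ghosted) indices, e.g.

       PetscInt    lidx = 0;    // a local index into the ghosted array
       PetscScalar val  = 1.0;
       VecSetValuesLocal(g,1,&lidx,&val,INSERT_VALUES);
       VecAssemblyBegin(g); VecAssemblyEnd(g);
  */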

316:   dd->idx = idx;
317:   dd->Nl  = nn;

319:   return(0);
320: }


325: /*@C
326:    DMDACreate1d - Creates an object that will manage the communication of one-dimensional
327:    regular array data that is distributed across some processors.

329:    Collective on MPI_Comm

331:    Input Parameters:
332: +  comm - MPI communicator
333: .  bx - type of ghost cells at the boundary the array should have, if any. Use 
334:           DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, or DMDA_BOUNDARY_PERIODIC.
335: .  M - global dimension of the array (use -M to indicate that it may be set to a different value 
336:             from the command line with -da_grid_x <M>)
337: .  dof - number of degrees of freedom per node
338: .  s - stencil width
339: -  lx - array containing the number of nodes in the X direction on each processor,
340:         or PETSC_NULL. If non-null, its length must equal the number of processes in the MPI_Comm.

342:    Output Parameter:
343: .  da - the resulting distributed array object

345:    Options Database Keys:
346: +  -da_view - Calls DMView() at the conclusion of DMDACreate1d()
347: .  -da_grid_x <nx> - number of grid points in the x direction; can be set only if M < 0
348: .  -da_refine_x <rx> - refinement factor
349: -  -da_refine <n> - refine the DMDA n times before creating it, if M < 0

351:    Level: beginner

353:    Notes:
354:    The array data itself is NOT stored in the DMDA; it is stored in Vec objects.
355:    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
356:    and DMCreateLocalVector(), with calls to VecDuplicate() if more are needed.


359: .keywords: distributed array, create, one-dimensional

361: .seealso: DMDestroy(), DMView(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDASetRefinementFactor(),
362:           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDAGetRefinementFactor(),
363:           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

365: @*/
366: PetscErrorCode  DMDACreate1d(MPI_Comm comm, DMDABoundaryType bx, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DM *da)
367: {
369:   PetscMPIInt    size;

372:   DMDACreate(comm, da);
373:   DMDASetDim(*da, 1);
374:   DMDASetSizes(*da, M, 1, 1);
375:   MPI_Comm_size(comm, &size);
376:   DMDASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
377:   DMDASetBoundaryType(*da, bx, DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE);
378:   DMDASetDof(*da, dof);
379:   DMDASetStencilWidth(*da, s);
380:   DMDASetOwnershipRanges(*da, lx, PETSC_NULL, PETSC_NULL);
381:   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
382:   DMSetFromOptions(*da);
383:   DMSetUp(*da);
384:   DMView_DA_Private(*da);
385:   return(0);
386: }
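
/*
   Illustrative usage sketch (not part of the original da1.c): creating a periodic 1d DMDA
   with 64 grid points, one degree of freedom and stencil width 1, then updating ghost
   values; error checking is omitted to match the listing style and the sizes are arbitrary.

     DM  da;
     Vec g,l;

     DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_PERIODIC,64,1,1,PETSC_NULL,&da);
     DMCreateGlobalVector(da,&g);
     DMCreateLocalVector(da,&l);
     ... fill g, e.g. with VecSet() or DMDAVecGetArray() ...
     DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);
     DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);
     VecDestroy(&l);
     VecDestroy(&g);
     DMDestroy(&da);
*/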