Actual source code: da2.c

  2: #include <private/daimpl.h>    /*I   "petscdmda.h"   I*/

  6: PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer)
  7: {
  9:   PetscMPIInt    rank;
 10:   PetscBool      iascii,isdraw,isbinary;
 11:   DM_DA          *dd = (DM_DA*)da->data;
 12: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 13:   PetscBool      ismatlab;
 14: #endif

 17:   MPI_Comm_rank(((PetscObject)da)->comm,&rank);

 19:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 20:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
 21:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
 22: #if defined(PETSC_HAVE_MATLAB_ENGINE)
 23:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
 24: #endif
 25:   if (iascii) {
 26:     PetscViewerFormat format;

 28:     PetscViewerGetFormat(viewer, &format);
 29:     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
 30:       DMDALocalInfo info;
 31:       DMDAGetLocalInfo(da,&info);
 32:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 33:       PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);
 34:       PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);
 35:       PetscViewerFlush(viewer);
 36:       PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
 37:     } else {
 38:       DMView_DA_VTK(da,viewer);
 39:     }
 40:   } else if (isdraw) {
 41:     PetscDraw  draw;
 42:     double     ymin = -1*dd->s-1,ymax = dd->N+dd->s;
 43:     double     xmin = -1*dd->s-1,xmax = dd->M+dd->s;
 44:     double     x,y;
 45:     PetscInt   base,*idx;
 46:     char       node[10];
 47:     PetscBool  isnull;
 48: 
 49:     PetscViewerDrawGetDraw(viewer,0,&draw);
 50:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
 51:     if (!dd->coordinates) {
 52:       PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
 53:     }
 54:     PetscDrawSynchronizedClear(draw);

 56:     /* first processor draws all node lines */
 57:     if (!rank) {
 58:       ymin = 0.0; ymax = dd->N - 1;
 59:       for (xmin=0; xmin<dd->M; xmin++) {
 60:         PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);
 61:       }
 62:       xmin = 0.0; xmax = dd->M - 1;
 63:       for (ymin=0; ymin<dd->N; ymin++) {
 64:         PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
 65:       }
 66:     }
 67:     PetscDrawSynchronizedFlush(draw);
 68:     PetscDrawPause(draw);

 70:     /* draw my box */
 71:     ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w;
 72:     xmax =(dd->xe-1)/dd->w;
 73:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
 74:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
 75:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
 76:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

 78:     /* put in numbers */
 79:     base = (dd->base)/dd->w;
 80:     for (y=ymin; y<=ymax; y++) {
 81:       for (x=xmin; x<=xmax; x++) {
 82:         sprintf(node,"%d",(int)base++);
 83:         PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);
 84:       }
 85:     }

 87:     PetscDrawSynchronizedFlush(draw);
 88:     PetscDrawPause(draw);
 89:     /* overlay ghost numbers, useful for error checking */
 90:     /* put in numbers */

 92:     base = 0; idx = dd->idx;
 93:     ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe;
 94:     for (y=ymin; y<ymax; y++) {
 95:       for (x=xmin; x<xmax; x++) {
 96:         if ((base % dd->w) == 0) {
 97:           sprintf(node,"%d",(int)(idx[base]/dd->w));
 98:           PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);
 99:         }
100:         base++;
101:       }
102:     }
103:     PetscDrawSynchronizedFlush(draw);
104:     PetscDrawPause(draw);
105:   } else if (isbinary){
106:     DMView_DA_Binary(da,viewer);
107: #if defined(PETSC_HAVE_MATLAB_ENGINE)
108:   } else if (ismatlab) {
109:     DMView_DA_Matlab(da,viewer);
110: #endif
111: } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 2d",((PetscObject)viewer)->type_name);
112:   return(0);
113: }
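/* Usage sketch (not part of da2.c): this viewer is reached through the
   generic DMView(), e.g.

       DMView(da,PETSC_VIEWER_STDOUT_WORLD);   -- per-process ASCII summary
       DMView(da,PETSC_VIEWER_DRAW_WORLD);     -- X-window sketch of the grid

   or from the command line with -da_view. */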

115: /*
116:       M is the global number of grid points
117:       m is the number of processors

119: */
122: PetscErrorCode  DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
123: {
125:   PetscInt       m,n = 0,x = 0,y = 0;
126:   PetscMPIInt    size,csize,rank;

129:   MPI_Comm_size(comm,&size);
130:   MPI_Comm_rank(comm,&rank);

132:   csize = 4*size;
133:   do {
134:     if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
135:     csize   = csize/4;
136: 
137:     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
138:     if (!m) m = 1;
139:     while (m > 0) {
140:       n = csize/m;
141:       if (m*n == csize) break;
142:       m--;
143:     }
144:     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}

146:     x = M/m + ((M % m) > ((csize-1) % m));
147:     y = (N + (csize-1)/m)/n;
148:   } while ((x < 4 || y < 4) && csize > 1);
149:   if (size != csize) {
150:     MPI_Group    entire_group,sub_group;
151:     PetscMPIInt  i,*groupies;

153:     MPI_Comm_group(comm,&entire_group);
154:     PetscMalloc(csize*sizeof(PetscMPIInt),&groupies);
155:     for (i=0; i<csize; i++) {
156:       groupies[i] = (rank/csize)*csize + i;
157:     }
158:     MPI_Group_incl(entire_group,csize,groupies,&sub_group);
159:     PetscFree(groupies);
160:     MPI_Comm_create(comm,sub_group,outcomm);
161:     MPI_Group_free(&entire_group);
162:     MPI_Group_free(&sub_group);
163:     PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);
164:   } else {
165:     *outcomm = comm;
166:   }
167:   return(0);
168: }
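/* Usage sketch (hypothetical caller, not part of da2.c): obtain a
   subcommunicator sized so that a 64 x 64 coarse problem keeps at least 4
   points per process in each direction; outcomm may simply alias the input
   communicator when no split is required. */

    MPI_Comm coarse_comm;
    DMDASplitComm2d(PETSC_COMM_WORLD,64,64,1,&coarse_comm);
    /* ... build a redundant coarse DMDA on coarse_comm ... */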

172: static PetscErrorCode DMDAFunction(DM dm,Vec x,Vec F)
173: {
175:   Vec            localX;
176: 
178:   DMGetLocalVector(dm,&localX);
179:   DMGlobalToLocalBegin(dm,x,INSERT_VALUES,localX);
180:   DMGlobalToLocalEnd(dm,x,INSERT_VALUES,localX);
181:   DMDAFormFunction1(dm,localX,F,dm->ctx);
182:   DMRestoreLocalVector(dm,&localX);
183:   return(0);
184: }

188: /*@C
189:        DMDASetLocalFunction - Caches in a DM a local function. 

191:    Logically Collective on DMDA

193:    Input Parameter:
194: +  da - initial distributed array
195: -  lf - the local function

197:    Level: intermediate

199:    Notes: The routine SNESDAFormFunction() uses the cached function to evaluate the user-provided function.

201: .keywords:  distributed array, refine

203: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
204: @*/
205: PetscErrorCode  DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
206: {
208:   DM_DA          *dd = (DM_DA*)da->data;

212:   DMSetFunction(da,DMDAFunction);
213:   dd->lf       = lf;
214:   return(0);
215: }
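/* Usage sketch (hypothetical user code, not part of da2.c): a local residual
   for a scalar 2d problem, cached so that DMDAFormFunction1() and
   SNESDAFormFunction() can call it. The cast matches how this version stores
   the pointer. */

    PetscErrorCode MyLocalFunction(DMDALocalInfo *info,PetscScalar **x,PetscScalar **f,void *ctx)
    {
      PetscInt i,j;
      for (j=info->ys; j<info->ys+info->ym; j++) {
        for (i=info->xs; i<info->xs+info->xm; i++) {
          f[j][i] = x[j][i];   /* placeholder; a real residual uses neighbors of x[j][i] */
        }
      }
      return 0;
    }

    /* after DMDACreate2d(): */
    DMDASetLocalFunction(da,(DMDALocalFunction1)MyLocalFunction);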

219: /*@C
220:        DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component

222:    Logically Collective on DMDA

224:    Input Parameter:
225: +  da - initial distributed array
226: -  lfi - the local function

228:    Level: intermediate

230: .keywords:  distributed array, refine

232: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
233: @*/
234: PetscErrorCode  DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
235: {
236:   DM_DA          *dd = (DM_DA*)da->data;
239:   dd->lfi = lfi;
240:   return(0);
241: }

245: /*@C
246:        DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component

248:    Logically Collective on DMDA

250:    Input Parameter:
251: +  da - initial distributed array
252: -  lfi - the local function

254:    Level: intermediate

256: .keywords:  distributed array, refine

258: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
259: @*/
260: PetscErrorCode  DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
261: {
262:   DM_DA          *dd = (DM_DA*)da->data;
265:   dd->lfib = lfi;
266:   return(0);
267: }

271: PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
272: {
273:   DM_DA          *dd = (DM_DA*)da->data;
276:   dd->adic_lf = ad_lf;
277:   return(0);
278: }

280: /*MC
281:        DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

283:    Synopsis:
284:    PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
285:    
286:    Logically Collective on DMDA

288:    Input Parameter:
289: +  da - initial distributed array
290: -  ad_lfi - the local function as computed by ADIC/ADIFOR

292:    Level: intermediate

294: .keywords:  distributed array, refine

296: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
297:           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
298: M*/

302: PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
303: {
304:   DM_DA          *dd = (DM_DA*)da->data;
307:   dd->adic_lfi = ad_lfi;
308:   return(0);
309: }

311: /*MC
312:        DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR

314:    Synopsis:
315:    PetscErrorCode DMDASetLocalAdicMFFunctioni(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
316:    
317:    Logically Collective on DMDA

319:    Input Parameter:
320: +  da - initial distributed array
321: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

323:    Level: intermediate

325: .keywords:  distributed array, refine

327: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
328:           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
329: M*/

333: PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
334: {
335:   DM_DA          *dd = (DM_DA*)da->data;
338:   dd->adicmf_lfi = admf_lfi;
339:   return(0);
340: }

342: /*MC
343:        DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

345:    Synopsis:
346:    PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
347:    
348:    Logically Collective on DMDA

350:    Input Parameter:
351: +  da - initial distributed array
352: -  ad_lfi - the local function as computed by ADIC/ADIFOR

354:    Level: intermediate

356: .keywords:  distributed array, refine

358: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
359:           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
360: M*/

364: PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
365: {
366:   DM_DA          *dd = (DM_DA*)da->data;
369:   dd->adic_lfib = ad_lfi;
370:   return(0);
371: }

373: /*MC
374:        DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR

376:    Synopsis:
377:    PetscErrorCode DMDASetLocalAdicMFFunctionib(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))

379:    Logically Collective on DMDA

381:    Input Parameter:
382: +  da - initial distributed array
383: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

385:    Level: intermediate

387: .keywords:  distributed array, refine

389: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
390:           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
391: M*/

395: PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
396: {
397:   DM_DA          *dd = (DM_DA*)da->data;
400:   dd->adicmf_lfib = admf_lfi;
401:   return(0);
402: }

404: /*MC
405:        DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR

407:    Synopsis:
408:    PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)

410:    Logically Collective on DMDA

412:    Input Parameter:
413: +  da - initial distributed array
414: -  ad_lf - the local function as computed by ADIC/ADIFOR

416:    Level: intermediate

418: .keywords:  distributed array, refine

420: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
421:           DMDASetLocalJacobian()
422: M*/

426: PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
427: {
428:   DM_DA          *dd = (DM_DA*)da->data;
431:   dd->adicmf_lf = ad_lf;
432:   return(0);
433: }

437: PetscErrorCode DMDAJacobianLocal(DM dm,Vec x,Mat A,Mat B, MatStructure *str)
438: {
440:   Vec            localX;
441: 
443:   DMGetLocalVector(dm,&localX);
444:   DMGlobalToLocalBegin(dm,x,INSERT_VALUES,localX);
445:   DMGlobalToLocalEnd(dm,x,INSERT_VALUES,localX);
446:   MatFDColoringApply(B,dm->fd,localX,str,dm);
447:   DMRestoreLocalVector(dm,&localX);
448:   /* Assemble the true Jacobian, if it is different from the preconditioner matrix */
449:   if (A != B) {
450:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
451:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
452:   }
453:   MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
454:   *str = SAME_NONZERO_PATTERN;
455:   return(0);
456: }


461: static PetscErrorCode DMDAJacobian(DM dm,Vec x,Mat A,Mat B, MatStructure *str)
462: {
464:   Vec            localX;
465: 
467:   DMGetLocalVector(dm,&localX);
468:   DMGlobalToLocalBegin(dm,x,INSERT_VALUES,localX);
469:   DMGlobalToLocalEnd(dm,x,INSERT_VALUES,localX);
470:   DMDAComputeJacobian1(dm,localX,B,dm->ctx);
471:   DMRestoreLocalVector(dm,&localX);
472:   /* Assemble the true Jacobian, if it is different from the preconditioner matrix */
473:   if (A != B) {
474:     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
475:     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
476:   }
477:   MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
478:   *str = SAME_NONZERO_PATTERN;
479:   return(0);
480: }

482: /*@C
483:        DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function

485:    Logically Collective on DMDA

487:    
488:    Input Parameter:
489: +  da - initial distributed array
490: -  lj - the local Jacobian

492:    Level: intermediate

494:    Notes: The routine SNESDAFormFunction() uses the cached function to evaluate the user-provided function.

496: .keywords:  distributed array, refine

498: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
499: @*/
502: PetscErrorCode  DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
503: {
505:   DM_DA          *dd = (DM_DA*)da->data;

509:   DMSetJacobian(da,DMDAJacobian);
510:   dd->lj    = lj;
511:   return(0);
512: }
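/* Usage sketch (hypothetical user code, not part of da2.c): the cached
   Jacobian routine receives the ghosted local array and assembles the matrix
   with MatSetValuesStencil(). */

    PetscErrorCode MyLocalJacobian(DMDALocalInfo *info,PetscScalar **x,Mat B,void *ctx)
    {
      MatStencil  row;
      PetscScalar v = 1.0;
      PetscInt    i,j;
      for (j=info->ys; j<info->ys+info->ym; j++) {
        for (i=info->xs; i<info->xs+info->xm; i++) {
          row.i = i; row.j = j;
          MatSetValuesStencil(B,1,&row,1,&row,&v,INSERT_VALUES);  /* identity placeholder */
        }
      }
      MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
      return 0;
    }

    DMDASetLocalJacobian(da,(DMDALocalFunction1)MyLocalJacobian);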

516: /*@C
517:        DMDAGetLocalFunction - Gets from a DM the local function cached with DMDASetLocalFunction()

519:    Not Collective

521:    Input Parameter:
522: .  da - initial distributed array

524:    Output Parameter:
525: .  lf - the local function

527:    Level: intermediate

529: .keywords:  distributed array, refine

531: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
532: @*/
533: PetscErrorCode  DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
534: {
535:   DM_DA *dd = (DM_DA*)da->data;
538:   if (lf) *lf = dd->lf;
539:   return(0);
540: }

544: /*@C
545:        DMDAGetLocalJacobian - Gets from a DM a local Jacobian

547:    Not Collective

549:    Input Parameter:
550: .  da - initial distributed array

552:    Output Parameter:
553: .  lj - the local Jacobian

555:    Level: intermediate

557: .keywords:  distributed array, refine

559: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
560: @*/
561: PetscErrorCode  DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
562: {
563:   DM_DA *dd = (DM_DA*)da->data;
566:   if (lj) *lj = dd->lj;
567:   return(0);
568: }

572: /*@
573:     DMDAFormFunction - Evaluates a user-provided function on each processor that
574:         shares a DMDA

576:    Input Parameters:
577: +    da - the DM that defines the grid
578: .    vu - input vector
579: .    vfu - output vector 
580: -    w - any user data

582:     Notes: Does NOT do ghost updates on vu upon entry

584:            This should eventually replace DMDAFormFunction1

586:     Level: advanced

588: .seealso: DMDAComputeJacobian1WithAdic()

590: @*/
591: PetscErrorCode  DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
592: {
594:   void           *u,*fu;
595:   DMDALocalInfo  info;
596:   PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;
597: 
599:   DMDAGetLocalInfo(da,&info);
600:   DMDAVecGetArray(da,vu,&u);
601:   DMDAVecGetArray(da,vfu,&fu);

603:   (*f)(&info,u,fu,w);

605:   DMDAVecRestoreArray(da,vu,&u);
606:   DMDAVecRestoreArray(da,vfu,&fu);
607:   return(0);
608: }

612: /*@C 
613:    DMDAFormFunctionLocal - This is a universal function evaluation routine for
614:    a local DM function.

616:    Collective on DMDA

618:    Input Parameters:
619: +  da - the DM context
620: .  func - The local function
621: .  X - input vector
622: .  F - function vector
623: -  ctx - A user context

625:    Level: intermediate

627: .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
628:           SNESSetFunction(), SNESSetJacobian()

630: @*/
631: PetscErrorCode  DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
632: {
633:   Vec            localX;
634:   DMDALocalInfo  info;
635:   void           *u;
636:   void           *fu;

640:   DMGetLocalVector(da,&localX);
641:   /*
642:      Scatter ghost points to local vector, using the 2-step process
643:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
644:   */
645:   DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
646:   DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
647:   DMDAGetLocalInfo(da,&info);
648:   DMDAVecGetArray(da,localX,&u);
649:   DMDAVecGetArray(da,F,&fu);
650:   (*func)(&info,u,fu,ctx);
651:   DMDAVecRestoreArray(da,localX,&u);
652:   DMDAVecRestoreArray(da,F,&fu);
653:   DMRestoreLocalVector(da,&localX);
654:   return(0);
655: }

659: /*@C 
660:    DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
661:    a local DM function, but the ghost values of the output are communicated and added.

663:    Collective on DMDA

665:    Input Parameters:
666: +  da - the DM context
667: .  func - The local function
668: .  X - input vector
669: .  F - function vector
670: -  ctx - A user context

672:    Level: intermediate

674: .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
675:           SNESSetFunction(), SNESSetJacobian()

677: @*/
678: PetscErrorCode  DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
679: {
680:   Vec            localX, localF;
681:   DMDALocalInfo  info;
682:   void           *u;
683:   void           *fu;

687:   DMGetLocalVector(da,&localX);
688:   DMGetLocalVector(da,&localF);
689:   /*
690:      Scatter ghost points to local vector, using the 2-step process
691:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
692:   */
693:   DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
694:   DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
695:   VecSet(F, 0.0);
696:   VecSet(localF, 0.0);
697:   DMDAGetLocalInfo(da,&info);
698:   DMDAVecGetArray(da,localX,&u);
699:   DMDAVecGetArray(da,localF,&fu);
700:   (*func)(&info,u,fu,ctx);
701:   DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);
702:   DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);
703:   DMDAVecRestoreArray(da,localX,&u);
704:   DMDAVecRestoreArray(da,localF,&fu);
705:   DMRestoreLocalVector(da,&localX);
706:   DMRestoreLocalVector(da,&localF);
707:   return(0);
708: }

712: /*@
713:     DMDAFormFunction1 - Evaluates a user-provided function on each processor that
714:         shares a DMDA

716:    Input Parameters:
717: +    da - the DM that defines the grid
718: .    vu - input vector
719: .    vfu - output vector 
720: -    w - any user data

722:     Notes: Does NOT do ghost updates on vu upon entry

724:     Level: advanced

726: .seealso: DMDAComputeJacobian1WithAdic()

728: @*/
729: PetscErrorCode  DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
730: {
732:   void           *u,*fu;
733:   DMDALocalInfo  info;
734:   DM_DA          *dd = (DM_DA*)da->data;
735: 
737:   DMDAGetLocalInfo(da,&info);
738:   DMDAVecGetArray(da,vu,&u);
739:   DMDAVecGetArray(da,vfu,&fu);

741:   CHKMEMQ;
742:   (*dd->lf)(&info,u,fu,w);
743:   CHKMEMQ;

745:   DMDAVecRestoreArray(da,vu,&u);
746:   DMDAVecRestoreArray(da,vfu,&fu);
747:   return(0);
748: }

752: PetscErrorCode  DMDAFormFunctioniTest1(DM da,void *w)
753: {
754:   Vec            vu,fu,fui;
756:   PetscInt       i,n;
757:   PetscScalar    *ui;
758:   PetscRandom    rnd;
759:   PetscReal      norm;

762:   DMGetLocalVector(da,&vu);
763:   PetscRandomCreate(PETSC_COMM_SELF,&rnd);
764:   PetscRandomSetFromOptions(rnd);
765:   VecSetRandom(vu,rnd);
766:   PetscRandomDestroy(&rnd);

768:   DMGetGlobalVector(da,&fu);
769:   DMGetGlobalVector(da,&fui);
770: 
771:   DMDAFormFunction1(da,vu,fu,w);

773:   VecGetArray(fui,&ui);
774:   VecGetLocalSize(fui,&n);
775:   for (i=0; i<n; i++) {
776:     DMDAFormFunctioni1(da,i,vu,ui+i,w);
777:   }
778:   VecRestoreArray(fui,&ui);

780:   VecAXPY(fui,-1.0,fu);
781:   VecNorm(fui,NORM_2,&norm);
782:   PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);
783:   VecView(fu,0);
784:   VecView(fui,0);

786:   DMRestoreLocalVector(da,&vu);
787:   DMRestoreGlobalVector(da,&fu);
788:   DMRestoreGlobalVector(da,&fui);
789:   return(0);
790: }

794: /*@
795:     DMDAFormFunctioni1 - Evaluates a user provided point-wise function

797:    Input Parameters:
798: +    da - the DM that defines the grid
799: .    i - the component of the function we wish to compute (must be local)
800: .    vu - input vector
801: .    vfu - output value
802: -    w - any user data

804:     Notes: Does NOT do ghost updates on vu upon entry

806:     Level: advanced

808: .seealso: DMDAComputeJacobian1WithAdic()

810: @*/
811: PetscErrorCode  DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
812: {
814:   void           *u;
815:   DMDALocalInfo  info;
816:   MatStencil     stencil;
817:   DM_DA          *dd = (DM_DA*)da->data;
818: 

821:   DMDAGetLocalInfo(da,&info);
822:   DMDAVecGetArray(da,vu,&u);

824:   /* figure out stencil value from i */
825:   stencil.c = i % info.dof;
826:   stencil.i = (i % (info.xm*info.dof))/info.dof;
827:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
828:   stencil.k = i/(info.xm*info.ym*info.dof);

830:   (*dd->lfi)(&info,&stencil,u,vfu,w);

832:   DMDAVecRestoreArray(da,vu,&u);
833:   return(0);
834: }
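/* Worked example of the index decoding above (values assumed for
   illustration): with info.xm = 4, info.ym = 3, info.dof = 2 the local index
   i = 17 decodes as
       stencil.c = 17 % 2                 = 1
       stencil.i = (17 % (4*2)) / 2       = 0    (x offset within the patch)
       stencil.j = (17 % (4*3*2)) / (4*2) = 2    (y offset)
       stencil.k = 17 / (4*3*2)           = 0    (always 0 in 2d)
   i.e. component 1 of the grid point at patch coordinates (0,2). */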

838: /*@
839:     DMDAFormFunctionib1 - Evaluates a user provided point-block function

841:    Input Parameters:
842: +    da - the DM that defines the grid
843: .    i - the component of the function we wish to compute (must be local)
844: .    vu - input vector
845: .    vfu - output value
846: -    w - any user data

848:     Notes: Does NOT do ghost updates on vu upon entry

850:     Level: advanced

852: .seealso: DMDAComputeJacobian1WithAdic()

854: @*/
855: PetscErrorCode  DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
856: {
858:   void           *u;
859:   DMDALocalInfo  info;
860:   MatStencil     stencil;
861:   DM_DA          *dd = (DM_DA*)da->data;
862: 
864:   DMDAGetLocalInfo(da,&info);
865:   DMDAVecGetArray(da,vu,&u);

867:   /* figure out stencil value from i */
868:   stencil.c = i % info.dof;
869:   if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
870:   stencil.i = (i % (info.xm*info.dof))/info.dof;
871:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
872:   stencil.k = i/(info.xm*info.ym*info.dof);

874:   (*dd->lfib)(&info,&stencil,u,vfu,w);

876:   DMDAVecRestoreArray(da,vu,&u);
877:   return(0);
878: }

880: #if defined(new)
883: /*
884:   DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
885:     function lives on a DMDA

887:         y ~= (F(u + ha) - F(u))/h, 
888:   where F = nonlinear function, as set by SNESSetFunction()
889:         u = current iterate
890:         h = difference interval
891: */
892: PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a)
893: {
894:   PetscScalar    h,*aa,*ww,v;
895:   PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
897:   PetscInt       gI,nI,N;
898:   MatStencil     stencil;
899:   DMDALocalInfo  info;
900: 
902:   (*ctx->func)(0,U,a,ctx->funcctx);
903:   (*ctx->funcisetbase)(U,ctx->funcctx);

905:   VecGetArray(U,&ww);
906:   VecGetArray(a,&aa);
907:   VecGetLocalSize(a,&N);
908:   nI = 0; for (gI=0; gI<N; gI++) {   /* loop header restored; the bound N (local size) is an assumption -- this block is compiled out under "#if defined(new)" */
909:     h  = ww[gI];
910:     if (h == 0.0) h = 1.0;
911: #if !defined(PETSC_USE_COMPLEX)
912:     if (h < umin && h >= 0.0)      h = umin;
913:     else if (h < 0.0 && h > -umin) h = -umin;
914: #else
915:     if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
916:     else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
917: #endif
918:     h     *= epsilon;
919: 
920:     ww[gI] += h;
921:     (*ctx->funci)(gI,U,&v,ctx->funcctx);   /* arguments gI and U assumed; the original had undeclared i and w */
922:     aa[nI]  = (v - aa[nI])/h;
923:     ww[gI] -= h;
924:     nI++;
925:   }
926:   VecRestoreArray(U,&ww);
927:   VecRestoreArray(a,&aa);
928:   return(0);
929: }
930: #endif

932: #if defined(PETSC_HAVE_ADIC)
934: #include <adic/ad_utils.h>

939: /*@C
940:     DMDAComputeJacobian1WithAdic - Evaluates an ADIC-provided Jacobian function on each processor that
941:         shares a DMDA

943:    Input Parameters:
944: +    da - the DM that defines the grid
945: .    vu - input vector (ghosted)
946: .    J - output matrix
947: -    w - any user data

949:    Level: advanced

951:     Notes: Does NOT do ghost updates on vu upon entry

953: .seealso: DMDAFormFunction1()

955: @*/
956: PetscErrorCode  DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w)
957: {
959:   PetscInt       gtdof,tdof;
960:   PetscScalar    *ustart;
961:   DMDALocalInfo  info;
962:   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
963:   ISColoring     iscoloring;
964:   DM_DA          *dd = (DM_DA*)da->data;   /* needed for dd->adic_lf below; missing from the original declarations */

966:   DMDAGetLocalInfo(da,&info);

968:   PetscADResetIndep();

970:   /* get space for derivative objects.  */
971:   DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);
972:   DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);
973:   VecGetArray(vu,&ustart);
974:   DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);

976:   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);

978:   VecRestoreArray(vu,&ustart);
979:   PetscADIncrementTotalGradSize(iscoloring->n);   /* read iscoloring->n before destroying the coloring */
980:   ISColoringDestroy(&iscoloring);
981:   PetscADSetIndepDone();

983:   PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);
984:   (*dd->adic_lf)(&info,ad_u,ad_f,w);
985:   PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);

987:   /* stick the values into the matrix */
988:   MatSetValuesAdic(J,(PetscScalar**)ad_fstart);

990:   /* return space for derivative objects.  */
991:   DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);
992:   DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);
993:   return(0);
994: }

998: /*@C
999:     DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 
1000:     each processor that shares a DMDA.

1002:     Input Parameters:
1003: +   da - the DM that defines the grid
1004: .   vu - Jacobian is computed at this point (ghosted)
1005: .   v - product is done on this vector (ghosted)
1006: .   f - output vector = J(vu)*v (not ghosted)
1007: -   w - any user data

1009:     Notes: 
1010:     This routine does NOT do ghost updates on vu upon entry.

1012:    Level: advanced

1014: .seealso: DMDAFormFunction1()

1016: @*/
1017: PetscErrorCode  DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w)
1018: {
1020:   PetscInt       i,gtdof,tdof;
1021:   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
1022:   DMDALocalInfo  info;
1023:   void           *ad_vu,*ad_f;
1024:   DM_DA          *dd = (DM_DA*)da->data;   /* needed for dd->adicmf_lf below; missing from the original declarations */

1026:   DMDAGetLocalInfo(da,&info);

1028:   /* get space for derivative objects.  */
1029:   DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);
1030:   DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);

1032:   /* copy input vector into derivative object */
1033:   VecGetArray(vu,&avu);
1034:   VecGetArray(v,&av);
1035:   for (i=0; i<gtdof; i++) {
1036:     ad_vustart[2*i]   = avu[i];
1037:     ad_vustart[2*i+1] = av[i];
1038:   }
1039:   VecRestoreArray(vu,&avu);
1040:   VecRestoreArray(v,&av);

1042:   PetscADResetIndep();
1043:   PetscADIncrementTotalGradSize(1);
1044:   PetscADSetIndepDone();

1046:   (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);

1048:   /* stick the values into the vector */
1049:   VecGetArray(f,&af);
1050:   for (i=0; i<tdof; i++) {
1051:     af[i] = ad_fstart[2*i+1];
1052:   }
1053:   VecRestoreArray(f,&af);

1055:   /* return space for derivative objects.  */
1056:   DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);
1057:   DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);
1058:   return(0);
1059: }
1060: #endif

1064: /*@
1065:     DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that
1066:         shares a DMDA

1068:    Input Parameters:
1069: +    da - the DM that defines the grid
1070: .    vu - input vector (ghosted)
1071: .    J - output matrix
1072: -    w - any user data

1074:     Notes: Does NOT do ghost updates on vu upon entry

1076:     Level: advanced

1078: .seealso: DMDAFormFunction1()

1080: @*/
1081: PetscErrorCode  DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w)
1082: {
1084:   void           *u;
1085:   DMDALocalInfo  info;
1086:   DM_DA          *dd = (DM_DA*)da->data;

1089:   DMDAGetLocalInfo(da,&info);
1090:   DMDAVecGetArray(da,vu,&u);
1091:   (*dd->lj)(&info,u,J,w);
1092:   DMDAVecRestoreArray(da,vu,&u);
1093:   return(0);
1094: }


1099: /*
1100:     DMDAComputeJacobian1WithAdifor - Evaluates an ADIFOR-provided local Jacobian function on each processor that
1101:         shares a DMDA

1103:    Input Parameters:
1104: +    da - the DM that defines the grid
1105: .    vu - input vector (ghosted)
1106: .    J - output matrix
1107: -    w - any user data

1109:     Notes: Does NOT do ghost updates on vu upon entry

1111: .seealso: DMDAFormFunction1()

1113: */
1114: PetscErrorCode  DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w)
1115: {
1116:   PetscErrorCode  ierr;
1117:   PetscInt        i,Nc,N;
1118:   ISColoringValue *color;
1119:   DMDALocalInfo   info;
1120:   PetscScalar     *u,*g_u,*g_f,*f = 0,*p_u;
1121:   ISColoring      iscoloring;
1122:   DM_DA          *dd = (DM_DA*)da->data;
1123:   void            (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
1124:                   (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf;

1127:   DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);
1128:   Nc   = iscoloring->n;
1129:   DMDAGetLocalInfo(da,&info);
1130:   N    = info.gxm*info.gym*info.gzm*info.dof;

1132:   /* get space for derivative objects.  */
1133:   PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);
1134:   PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));
1135:   p_u   = g_u;
1136:   color = iscoloring->colors;
1137:   for (i=0; i<N; i++) {
1138:     p_u[*color++] = 1.0;
1139:     p_u          += Nc;
1140:   }
1141:   ISColoringDestroy(&iscoloring);
1142:   PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);

1144:   /* Seed the input array g_u with coloring information */
1145: 
1146:   VecGetArray(vu,&u);
1147:   (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);
1148:   VecRestoreArray(vu,&u);

1150:   /* stick the values into the matrix */
1151:   /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
1152:   MatSetValuesAdifor(J,Nc,g_f);

1154:   /* return space for derivative objects.  */
1155:   PetscFree(g_u);
1156:   PetscFree2(g_f,f);
1157:   return(0);
1158: }

1162: /*@C 
1163:    DMDAFormJacobianLocal - This is a universal Jacobian evaluation routine for
1164:    a local DM function.

1166:    Collective on DMDA

1168:    Input Parameters:
1169: +  da - the DM context
1170: .  func - The local function
1171: .  X - input vector
1172: .  J - Jacobian matrix
1173: -  ctx - A user context

1175:    Level: intermediate

1177: .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
1178:           SNESSetFunction(), SNESSetJacobian()

1180: @*/
1181: PetscErrorCode  DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx)
1182: {
1183:   Vec            localX;
1184:   DMDALocalInfo  info;
1185:   void           *u;

1189:   DMGetLocalVector(da,&localX);
1190:   /*
1191:      Scatter ghost points to local vector, using the 2-step process
1192:         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
1193:   */
1194:   DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1195:   DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1196:   DMDAGetLocalInfo(da,&info);
1197:   DMDAVecGetArray(da,localX,&u);
1198:   (*func)(&info,u,J,ctx);
1199:   DMDAVecRestoreArray(da,localX,&u);
1200:   DMRestoreLocalVector(da,&localX);
1201:   return(0);
1202: }

1206: /*@C
1207:     DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
1208:     to a vector on each processor that shares a DMDA.

1210:    Input Parameters:
1211: +    da - the DM that defines the grid
1212: .    u - Jacobian is computed at this point (ghosted)
1213: .    v - product is done on this vector (ghosted)
1214: .    f - output vector = J(u)*v (not ghosted)
1215: -    w - any user data

1217:     Notes: 
1218:     This routine does NOT do ghost updates on vu and v upon entry.
1219:            
1220:     Automatically calls DMDAMultiplyByJacobian1WithAdic() or DMDAMultiplyByJacobian1WithAdifor(),
1221:     depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called.

1223:    Level: advanced

1225: .seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic()

1227: @*/
1228: PetscErrorCode  DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w)
1229: {
1231:   DM_DA          *dd = (DM_DA*)da->data;

1234:   if (dd->adicmf_lf) {
1235: #if defined(PETSC_HAVE_ADIC)
1236:     DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);
1237: #else
1238:     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
1239: #endif
1240:   } else if (dd->adiformf_lf) {
1241:     DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);
1242:   } else {
1243:     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using");
1244:   }
1245:   return(0);
1246: }


1251: /*@C
1252:     DMDAMultiplyByJacobian1WithAdifor - Applies an ADIFOR-provided Jacobian function, on each processor that
1253:         shares a DM, to a vector

1255:    Input Parameters:
1256: +    da - the DM that defines the grid
1257: .    u - Jacobian is computed at this point (ghosted)
1258: .    v - product is done on this vector (ghosted)
1259: .    f - output vector = J(u)*v (not ghosted)
1260: -    w - any user data

1262:     Notes: Does NOT do ghost updates on vu and v upon entry

1264:    Level: advanced

1266: .seealso: DMDAFormFunction1()

1268: @*/
1269: PetscErrorCode  DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w)
1270: {
1271:   PetscErrorCode ierr;                  /* needed for the &ierr argument passed to lf below; missing from the original declarations */
1272:   PetscScalar    *au,*av,*af,*awork;
1273:   Vec            work;
1274:   DMDALocalInfo  info;
1275:   DM_DA          *dd = (DM_DA*)da->data;
1276:   void           (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
1277:                  (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf;

1280:   DMDAGetLocalInfo(da,&info);

1282:   DMGetGlobalVector(da,&work);
1283:   VecGetArray(u,&au);
1284:   VecGetArray(v,&av);
1285:   VecGetArray(f,&af);
1286:   VecGetArray(work,&awork);
1287:   (lf)(&info,au,av,awork,af,w,&ierr);
1288:   VecRestoreArray(u,&au);
1289:   VecRestoreArray(v,&av);
1290:   VecRestoreArray(f,&af);
1291:   VecRestoreArray(work,&awork);
1292:   DMRestoreGlobalVector(da,&work);

1294:   return(0);
1295: }

1299: PetscErrorCode  DMSetUp_DA_2D(DM da)
1300: {
1301:   DM_DA                  *dd = (DM_DA*)da->data;
1302:   const PetscInt         M            = dd->M;
1303:   const PetscInt         N            = dd->N;
1304:   PetscInt               m            = dd->m;
1305:   PetscInt               n            = dd->n;
1306:   const PetscInt         dof          = dd->w;
1307:   const PetscInt         s            = dd->s;
1308:   const DMDABoundaryType bx         = dd->bx;
1309:   const DMDABoundaryType by         = dd->by;
1310:   const DMDAStencilType  stencil_type = dd->stencil_type;
1311:   PetscInt               *lx           = dd->lx;
1312:   PetscInt               *ly           = dd->ly;
1313:   MPI_Comm               comm;
1314:   PetscMPIInt            rank,size;
1315:   PetscInt               xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end,IXs,IXe,IYs,IYe;
1316:   PetscInt               up,down,left,right,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn,*idx_cpy;
1317:   const PetscInt         *idx_full;
1318:   PetscInt               xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
1319:   PetscInt               s_x,s_y; /* s proportionalized to w */
1320:   PetscInt               sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0;
1321:   Vec                    local,global;
1322:   VecScatter             ltog,gtol;
1323:   IS                     to,from,ltogis;
1324:   PetscErrorCode         ierr;

1327:   PetscObjectGetComm((PetscObject)da,&comm);

1329:   if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
1330:   if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

1332:   MPI_Comm_size(comm,&size);
1333:   MPI_Comm_rank(comm,&rank);

1335:   dd->dim         = 2;
1336:   PetscMalloc(dof*sizeof(char*),&dd->fieldname);
1337:   PetscMemzero(dd->fieldname,dof*sizeof(char*));

1339:   if (m != PETSC_DECIDE) {
1340:     if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);
1341:     else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);
1342:   }
1343:   if (n != PETSC_DECIDE) {
1344:     if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);
1345:     else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);
1346:   }

1348:   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
1349:     if (n != PETSC_DECIDE) {
1350:       m = size/n;
1351:     } else if (m != PETSC_DECIDE) {
1352:       n = size/m;
1353:     } else {
1354:       /* try for squarish distribution */
1355:       m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
1356:       if (!m) m = 1;
1357:       while (m > 0) {
1358:         n = size/m;
1359:         if (m*n == size) break;
1360:         m--;
1361:       }
1362:       if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
1363:     }
1364:     if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
1365:   } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");

1367:   if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
1368:   if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);

1370:   /* 
1371:      Determine locally owned region 
1372:      xs is the first local node number, x is the number of local nodes 
1373:   */
1374:   if (!lx) {
1375:     PetscMalloc(m*sizeof(PetscInt), &dd->lx);
1376:     lx = dd->lx;
1377:     for (i=0; i<m; i++) {
1378:       lx[i] = M/m + ((M % m) > i);
1379:     }
1380:   }
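  /* Worked example of the default split above (values assumed): M = 10 points
     over m = 3 processes gives
         lx[0] = 10/3 + (10%3 > 0) = 4
         lx[1] = 10/3 + (10%3 > 1) = 3
         lx[2] = 10/3 + (10%3 > 2) = 3
     so the remainder points go to the lowest ranks and the entries sum to M. */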
1381:   x  = lx[rank % m];
1382:   xs = 0;
1383:   for (i=0; i<(rank % m); i++) {
1384:     xs += lx[i];
1385:   }
1386: #if defined(PETSC_USE_DEBUG)
1387:   left = xs;
1388:   for (i=(rank % m); i<m; i++) {
1389:     left += lx[i];
1390:   }
1391:   if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
1392: #endif

1394:   /* 
1395:      Determine locally owned region 
1396:      ys is the first local node number, y is the number of local nodes 
1397:   */
1398:   if (!ly) {
1399:     PetscMalloc(n*sizeof(PetscInt), &dd->ly);
1400:     ly = dd->ly;
1401:     for (i=0; i<n; i++) {
1402:       ly[i] = N/n + ((N % n) > i);
1403:     }
1404:   }
1405:   y  = ly[rank/m];
1406:   ys = 0;
1407:   for (i=0; i<(rank/m); i++) {
1408:     ys += ly[i];
1409:   }
1410: #if defined(PETSC_USE_DEBUG)
1411:   left = ys;
1412:   for (i=(rank/m); i<n; i++) {
1413:     left += ly[i];
1414:   }
1415:   if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
1416: #endif

1418:   /*
1419:    check if the scatter requires more than one process neighbor or wraps around
1420:    the domain more than once
1421:   */
1422:   if ((x < s) && ((m > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) {
1423:     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
1424:   }
1425:   if ((y < s) && ((n > 1) || (by == DMDA_BOUNDARY_PERIODIC))) {
1426:     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
1427:   }
1428:   xe = xs + x;
1429:   ye = ys + y;

1431:   /* determine ghost region (Xs) and region scattered into (IXs)  */
1432:   /* Assume No Periodicity */
1433:   if (xs-s > 0) { Xs = xs - s; IXs = xs - s; } else { Xs = 0; IXs = 0; }
1434:   if (xe+s <= M) { Xe = xe + s; IXe = xe + s; } else { Xe = M; IXe = M; }
1435:   if (ys-s > 0) { Ys = ys - s; IYs = ys - s; } else { Ys = 0; IYs = 0; }
1436:   if (ye+s <= N) { Ye = ye + s; IYe = ye + s; } else { Ye = N; IYe = N; }

1438:   /* fix for periodicity/ghosted */
1439:   if (bx) { Xs = xs - s; Xe = xe + s; }
1440:   if (bx == DMDA_BOUNDARY_PERIODIC) { IXs = xs - s; IXe = xe + s; }
1441:   if (by) { Ys = ys - s; Ye = ye + s; }
1442:   if (by == DMDA_BOUNDARY_PERIODIC) { IYs = ys - s; IYe = ye + s; }

1444:   /* Resize all X parameters to reflect w */
1445:   s_x = s;
1446:   s_y = s;

1448:   /* determine starting point of each processor */
1449:   nn    = x*y;
1450:   PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);
1451:   MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);
1452:   bases[0] = 0;
1453:   for (i=1; i<=size; i++) {
1454:     bases[i] = ldims[i-1];
1455:   }
1456:   for (i=1; i<=size; i++) {
1457:     bases[i] += bases[i-1];
1458:   }
1459:   base = bases[rank]*dof;

1461:   /* allocate the base parallel and sequential vectors */
1462:   dd->Nlocal = x*y*dof;
1463:   VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);
1464:   VecSetBlockSize(global,dof);
1465:   dd->nlocal = (Xe-Xs)*(Ye-Ys)*dof;
1466:   VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);
1467:   VecSetBlockSize(local,dof);

1469:   /* generate appropriate vector scatters */
1470:   /* local to global inserts non-ghost point region into global */
1471:   VecGetOwnershipRange(global,&start,&end);
1472:   ISCreateStride(comm,x*y*dof,start,1,&to);

1474:   count = x*y;
1475:   PetscMalloc(x*y*sizeof(PetscInt),&idx);
1476:   left = xs - Xs; right = left + x;
1477:   down = ys - Ys; up = down + y;
1478:   count = 0;
1479:   for (i=down; i<up; i++) {
1480:     for (j=left; j<right; j++) {
1481:       idx[count++] = i*(Xe-Xs) + j;
1482:     }
1483:   }

1485:   ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);
1486:   VecScatterCreate(local,from,global,to,&ltog);
1487:   PetscLogObjectParent(da,ltog);
1488:   ISDestroy(&from);
1489:   ISDestroy(&to);

1491:   /* global to local must include ghost points within the domain,
1492:      but not ghost points outside the domain that aren't periodic */
1493:   if (stencil_type == DMDA_STENCIL_BOX) {
1494:     count = (IXe-IXs)*(IYe-IYs);
1495:     PetscMalloc(count*sizeof(PetscInt),&idx);

1497:     left = IXs - Xs; right = left + (IXe-IXs);
1498:     down = IYs - Ys; up = down + (IYe-IYs);
1499:     count = 0;
1500:     for (i=down; i<up; i++) {
1501:       for (j=left; j<right; j++) {
1502:         idx[count++] = j + i*(Xe-Xs);
1503:       }
1504:     }
1505:     ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);

1507:   } else {
1508:     /* must drop into cross shape region */
1509:     /*       ---------|
1510:             |  top    |
1511:          |---         ---| up
1512:          |   middle      |
1513:          |               |
1514:          ----         ---- down
1515:             | bottom  |
1516:             -----------
1517:          Xs xs        xe Xe */
1518:     count = (ys-IYs)*x + y*(IXe-IXs) + (IYe-ye)*x;
1519:     PetscMalloc(count*sizeof(PetscInt),&idx);

1521:     left = xs - Xs; right = left + x;
1522:     down = ys - Ys; up = down + y;
1523:     count = 0;
1524:     /* bottom */
1525:     for (i=(IYs-Ys); i<down; i++) {
1526:       for (j=left; j<right; j++) {
1527:         idx[count++] = j + i*(Xe-Xs);
1528:       }
1529:     }
1530:     /* middle */
1531:     for (i=down; i<up; i++) {
1532:       for (j=(IXs-Xs); j<(IXe-Xs); j++) {
1533:         idx[count++] = j + i*(Xe-Xs);
1534:       }
1535:     }
1536:     /* top */
1537:     for (i=up; i<up+IYe-ye; i++) {
1538:       for (j=left; j<right; j++) {
1539:         idx[count++] = j + i*(Xe-Xs);
1540:       }
1541:     }
1542:     ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);
1543:   }


1546:   /* determine who lies on each side of us stored in    n6 n7 n8
1547:                                                         n3    n5
1548:                                                         n0 n1 n2
1549:   */

1551:   /* Assume the Non-Periodic Case */
1552:   n1 = rank - m;
1553:   if (rank % m) {
1554:     n0 = n1 - 1;
1555:   } else {
1556:     n0 = -1;
1557:   }
1558:   if ((rank+1) % m) {
1559:     n2 = n1 + 1;
1560:     n5 = rank + 1;
1561:     n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
1562:   } else {
1563:     n2 = -1; n5 = -1; n8 = -1;
1564:   }
1565:   if (rank % m) {
1566:     n3 = rank - 1;
1567:     n6 = n3 + m; if (n6 >= m*n) n6 = -1;
1568:   } else {
1569:     n3 = -1; n6 = -1;
1570:   }
1571:   n7 = rank + m; if (n7 >= m*n) n7 = -1;

1573:   if (bx == DMDA_BOUNDARY_PERIODIC && by == DMDA_BOUNDARY_PERIODIC) {
1574:   /* Modify for Periodic Cases */
1575:     /* Handle all four corners */
1576:     if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
1577:     if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
1578:     if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
1579:     if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;

1581:     /* Handle Top and Bottom Sides */
1582:     if (n1 < 0) n1 = rank + m * (n-1);
1583:     if (n7 < 0) n7 = rank - m * (n-1);
1584:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1585:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1586:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1587:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;

1589:     /* Handle Left and Right Sides */
1590:     if (n3 < 0) n3 = rank + (m-1);
1591:     if (n5 < 0) n5 = rank - (m-1);
1592:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1593:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1594:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1595:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1596:   } else if (by == DMDA_BOUNDARY_PERIODIC) {  /* Handle Top and Bottom Sides */
1597:     if (n1 < 0) n1 = rank + m * (n-1);
1598:     if (n7 < 0) n7 = rank - m * (n-1);
1599:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1600:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1601:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1602:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
1603:   } else if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle Left and Right Sides */
1604:     if (n3 < 0) n3 = rank + (m-1);
1605:     if (n5 < 0) n5 = rank - (m-1);
1606:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1607:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1608:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1609:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1610:   }

1612:   PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);
1613:   dd->neighbors[0] = n0;
1614:   dd->neighbors[1] = n1;
1615:   dd->neighbors[2] = n2;
1616:   dd->neighbors[3] = n3;
1617:   dd->neighbors[4] = rank;
1618:   dd->neighbors[5] = n5;
1619:   dd->neighbors[6] = n6;
1620:   dd->neighbors[7] = n7;
1621:   dd->neighbors[8] = n8;

1623:   if (stencil_type == DMDA_STENCIL_STAR) {
1624:     /* save corner processor numbers */
1625:     sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
1626:     n0 = n2 = n6 = n8 = -1;
1627:   }

1629:   PetscMalloc((Xe-Xs)*(Ye-Ys)*sizeof(PetscInt),&idx);
1630:   PetscLogObjectMemory(da,(Xe-Xs)*(Ye-Ys)*sizeof(PetscInt));

1632:   nn = 0;
1633:   xbase = bases[rank];
1634:   for (i=1; i<=s_y; i++) {
1635:     if (n0 >= 0) { /* left below */
1636:       x_t = lx[n0 % m];
1637:       y_t = ly[(n0/m)];
1638:       s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1639:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1640:     }
1641:     if (n1 >= 0) { /* directly below */
1642:       x_t = x;
1643:       y_t = ly[(n1/m)];
1644:       s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1645:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1646:     }
1647:     if (n2 >= 0) { /* right below */
1648:       x_t = lx[n2 % m];
1649:       y_t = ly[(n2/m)];
1650:       s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1651:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1652:     }
1653:   }

1655:   for (i=0; i<y; i++) {
1656:     if (n3 >= 0) { /* directly left */
1657:       x_t = lx[n3 % m];
1658:       /* y_t = y; */
1659:       s_t = bases[n3] + (i+1)*x_t - s_x;
1660:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1661:     }

1663:     for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

1665:     if (n5 >= 0) { /* directly right */
1666:       x_t = lx[n5 % m];
1667:       /* y_t = y; */
1668:       s_t = bases[n5] + (i)*x_t;
1669:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1670:     }
1671:   }

1673:   for (i=1; i<=s_y; i++) {
1674:     if (n6 >= 0) { /* left above */
1675:       x_t = lx[n6 % m];
1676:       /* y_t = ly[(n6/m)]; */
1677:       s_t = bases[n6] + (i)*x_t - s_x;
1678:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1679:     }
1680:     if (n7 >= 0) { /* directly above */
1681:       x_t = x;
1682:       /* y_t = ly[(n7/m)]; */
1683:       s_t = bases[n7] + (i-1)*x_t;
1684:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1685:     }
1686:     if (n8 >= 0) { /* right above */
1687:       x_t = lx[n8 % m];
1688:       /* y_t = ly[(n8/m)]; */
1689:       s_t = bases[n8] + (i-1)*x_t;
1690:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1691:     }
1692:   }

1694:   ISCreateBlock(comm,dof,nn,idx,PETSC_COPY_VALUES,&from);
1695:   VecScatterCreate(global,from,local,to,&gtol);
1696:   PetscLogObjectParent(da,gtol);
1697:   ISDestroy(&to);
1698:   ISDestroy(&from);

1700:   if (stencil_type == DMDA_STENCIL_STAR) {
1701:     n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
1702:   }

1704:   if ((stencil_type == DMDA_STENCIL_STAR) ||
1705:       (bx && bx != DMDA_BOUNDARY_PERIODIC) ||
1706:       (by && by != DMDA_BOUNDARY_PERIODIC)) {
1707:     /*
1708:         Recompute the local to global mappings, this time keeping the 
1709:       information about the cross corner processor numbers and any ghosted
1710:       but not periodic indices.
1711:     */
1712:     nn = 0;
1713:     xbase = bases[rank];
1714:     for (i=1; i<=s_y; i++) {
1715:       if (n0 >= 0) { /* left below */
1716:         x_t = lx[n0 % m];
1717:         y_t = ly[(n0/m)];
1718:         s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1719:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1720:       } else if (xs-Xs > 0 && ys-Ys > 0) {
1721:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1722:       }
1723:       if (n1 >= 0) { /* directly below */
1724:         x_t = x;
1725:         y_t = ly[(n1/m)];
1726:         s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1727:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1728:       } else if (ys-Ys > 0) {
1729:         for (j=0; j<x; j++) { idx[nn++] = -1;}
1730:       }
1731:       if (n2 >= 0) { /* right below */
1732:         x_t = lx[n2 % m];
1733:         y_t = ly[(n2/m)];
1734:         s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1735:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1736:       } else if (Xe-xe> 0 && ys-Ys > 0) {
1737:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1738:       }
1739:     }

1741:     for (i=0; i<y; i++) {
1742:       if (n3 >= 0) { /* directly left */
1743:         x_t = lx[n3 % m];
1744:         /* y_t = y; */
1745:         s_t = bases[n3] + (i+1)*x_t - s_x;
1746:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1747:       } else if (xs-Xs > 0) {
1748:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1749:       }

1751:       for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

1753:       if (n5 >= 0) { /* directly right */
1754:         x_t = lx[n5 % m];
1755:         /* y_t = y; */
1756:         s_t = bases[n5] + (i)*x_t;
1757:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1758:       } else if (Xe-xe > 0) {
1759:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1760:       }
1761:     }

1763:     for (i=1; i<=s_y; i++) {
1764:       if (n6 >= 0) { /* left above */
1765:         x_t = lx[n6 % m];
1766:         /* y_t = ly[(n6/m)]; */
1767:         s_t = bases[n6] + (i)*x_t - s_x;
1768:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1769:       } else if (xs-Xs > 0 && Ye-ye > 0) {
1770:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1771:       }
1772:       if (n7 >= 0) { /* directly above */
1773:         x_t = x;
1774:         /* y_t = ly[(n7/m)]; */
1775:         s_t = bases[n7] + (i-1)*x_t;
1776:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1777:       } else if (Ye-ye > 0) {
1778:         for (j=0; j<x; j++) { idx[nn++] = -1;}
1779:       }
1780:       if (n8 >= 0) { /* right above */
1781:         x_t = lx[n8 % m];
1782:         /* y_t = ly[(n8/m)]; */
1783:         s_t = bases[n8] + (i-1)*x_t;
1784:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1785:       } else if (Xe-xe > 0 && Ye-ye > 0) {
1786:         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1787:       }
1788:     }
1789:   }
1790:   /*
1791:      Set the local to global ordering in the global vector, this allows use
1792:      of VecSetValuesLocal().
1793:   */
1794:   ISCreateBlock(comm,dof,nn,idx,PETSC_OWN_POINTER,&ltogis);
1795:   PetscMalloc(nn*dof*sizeof(PetscInt),&idx_cpy);
1796:   PetscLogObjectMemory(da,nn*dof*sizeof(PetscInt));
1797:   ISGetIndices(ltogis, &idx_full);
1798:   PetscMemcpy(idx_cpy,idx_full,nn*dof*sizeof(PetscInt));
1799:   ISRestoreIndices(ltogis, &idx_full);
1800:   ISLocalToGlobalMappingCreateIS(ltogis,&da->ltogmap);
1801:   PetscLogObjectParent(da,da->ltogmap);
1802:   ISDestroy(&ltogis);
1803:   ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);
1804:   PetscLogObjectParent(da,da->ltogmapb);

1806:   PetscFree2(bases,ldims);
1807:   dd->m  = m;  dd->n  = n;
1808:   /* note petsc expects xs/xe/Xs/Xe to be multiplied by #dofs in many places */
1809:   dd->xs = xs*dof; dd->xe = xe*dof; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1;
1810:   dd->Xs = Xs*dof; dd->Xe = Xe*dof; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1;

1812:   VecDestroy(&local);
1813:   VecDestroy(&global);

1815:   dd->gtol      = gtol;
1816:   dd->ltog      = ltog;
1817:   dd->idx       = idx_cpy;
1818:   dd->Nl        = nn*dof;
1819:   dd->base      = base;
1820:   da->ops->view = DMView_DA_2d;
1821:   dd->ltol = PETSC_NULL;
1822:   dd->ao   = PETSC_NULL;

1824:   return(0);
1825: }

1829: /*@C
1830:    DMDACreate2d - Creates an object that will manage the communication of two-dimensional
1831:    regular array data that is distributed across some processors.

1833:    Collective on MPI_Comm

1835:    Input Parameters:
1836: +  comm - MPI communicator
1837: .  bx,by - type of ghost nodes the array has.
1838:          Use one of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_PERIODIC.
1839: .  stencil_type - stencil type.  Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
1840: .  M,N - global dimension in each direction of the array (use -M and or -N to indicate that it may be set to a different value 
1841:             from the command line with -da_grid_x <M> -da_grid_y <N>)
1842: .  m,n - corresponding number of processors in each dimension 
1843:          (or PETSC_DECIDE to have calculated)
1844: .  dof - number of degrees of freedom per node
1845: .  s - stencil width
1846: -  lx, ly - arrays containing the number of nodes owned by each process along
1847:            the x and y directions, or PETSC_NULL. If non-null, these
1848:            must be of length m and n respectively, and the corresponding
1849:            m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
1850:            must be M, and the sum of the ly[] entries must be N.

1852:    Output Parameter:
1853: .  da - the resulting distributed array object

1855:    Options Database Key:
1856: +  -da_view - Calls DMView() at the conclusion of DMDACreate2d()
1857: .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
1858: .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
1859: .  -da_processors_x <nx> - number of processors in x direction
1860: .  -da_processors_y <ny> - number of processors in y direction
1861: .  -da_refine_x <rx> - refinement ratio in x direction
1862: .  -da_refine_y <ry> - refinement ratio in y direction
1863: -  -da_refine <n> - refine the DMDA n times before creating, if M or N < 0


1866:    Level: beginner

1868:    Notes:
1869:    The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the 
1870:    standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes
1871:    the standard 9-pt stencil.

1873:    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
1874:    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
1875:    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.

1877: .keywords: distributed array, create, two-dimensional

1879: .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(),
1880:           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(),
1881:           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMLoad(), DMDAGetOwnershipRanges()

1883: @*/

1885: PetscErrorCode  DMDACreate2d(MPI_Comm comm,DMDABoundaryType bx,DMDABoundaryType by,DMDAStencilType stencil_type,
1886:                           PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da)
1887: {

1891:   DMDACreate(comm, da);
1892:   DMDASetDim(*da, 2);
1893:   DMDASetSizes(*da, M, N, 1);
1894:   DMDASetNumProcs(*da, m, n, PETSC_DECIDE);
1895:   DMDASetBoundaryType(*da, bx, by, DMDA_BOUNDARY_NONE);
1896:   DMDASetDof(*da, dof);
1897:   DMDASetStencilType(*da, stencil_type);
1898:   DMDASetStencilWidth(*da, s);
1899:   DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);
1900:   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
1901:   DMSetFromOptions(*da);
1902:   DMSetUp(*da);
1903:   DMView_DA_Private(*da);
1904:   return(0);
1905: }
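/* Usage sketch (not part of da2.c): a scalar 2d DA with a 5-point stencil;
   the negative M and N let -da_grid_x/-da_grid_y override the sizes. */

    #include <petscdmda.h>

    int main(int argc,char **argv)
    {
      DM  da;
      Vec g,l;

      PetscInitialize(&argc,&argv,0,0);
      DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,
                   DMDA_STENCIL_STAR,-8,-8,PETSC_DECIDE,PETSC_DECIDE,
                   1,1,PETSC_NULL,PETSC_NULL,&da);
      DMCreateGlobalVector(da,&g);     /* the grid data lives in Vecs, not in the DMDA */
      DMCreateLocalVector(da,&l);
      DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);
      DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);
      VecDestroy(&l);
      VecDestroy(&g);
      DMDestroy(&da);
      PetscFinalize();
      return 0;
    }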