Actual source code: pdvec.c

  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

  9: PetscErrorCode VecDestroy_MPI(Vec v)
 10: {
 11:   Vec_MPI        *x = (Vec_MPI*)v->data;

 15: #if defined(PETSC_USE_LOG)
 16:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 17: #endif
 18:   if (!x) return(0);
 19:   PetscFree(x->array_allocated);

 21:   /* Destroy local representation of vector if it exists */
 22:   if (x->localrep) {
 23:     VecDestroy(&x->localrep);
 24:     VecScatterDestroy(&x->localupdate);
 25:   }
 26:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 27:   VecStashDestroy_Private(&v->bstash);
 28:   VecStashDestroy_Private(&v->stash);
 29:   PetscFree(v->data);
 30:   return(0);
 31: }
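
/* ---- Editorial usage sketch, not part of pdvec.c (names are illustrative;
   error checking is omitted to match this listing).  VecDestroy() reaches
   VecDestroy_MPI() above through the vector's function table. ---- */
static PetscErrorCode Sketch_CreateDestroy(void)
{
  Vec v;
  VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);
  VecDestroy(&v);   /* dispatches to VecDestroy_MPI() for a VECMPI vector */
  return(0);
}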

 35: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 36: {
 37:   PetscErrorCode    ierr;
 38:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 39:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 40:   MPI_Status        status;
 41:   PetscScalar       *values;
 42:   const PetscScalar *xarray;
 43:   const char        *name;
 44:   PetscViewerFormat format;

 47:   VecGetArrayRead(xin,&xarray);
 48:   /* determine the maximum message length that will arrive */
 49:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
 50:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
 51:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

 53:   if (!rank) {
 54:     PetscMalloc(len*sizeof(PetscScalar),&values);
 55:     PetscViewerGetFormat(viewer,&format);
 56:     /*
 57:         MATLAB format and ASCII format are very similar except
 58:         MATLAB uses %18.16e format while ASCII uses %g
 59:     */
 60:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 61:       PetscObjectGetName((PetscObject)xin,&name);
 62:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 63:       for (i=0; i<xin->map->n; i++) {
 64: #if defined(PETSC_USE_COMPLEX)
 65:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 66:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 67:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 68:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 69:         } else {
 70:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 71:         }
 72: #else
 73:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 74: #endif
 75:       }
 76:       /* receive and print messages */
 77:       for (j=1; j<size; j++) {
 78:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
 79:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 80:         for (i=0; i<n; i++) {
 81: #if defined(PETSC_USE_COMPLEX)
 82:           if (PetscImaginaryPart(values[i]) > 0.0) {
 83:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 84:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 85:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 86:           } else {
 87:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 88:           }
 89: #else
 90:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 91: #endif
 92:         }
 93:       }
 94:       PetscViewerASCIIPrintf(viewer,"];\n");

 96:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
 97:       for (i=0; i<xin->map->n; i++) {
 98: #if defined(PETSC_USE_COMPLEX)
 99:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
100: #else
101:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
102: #endif
103:       }
104:       /* receive and print messages */
105:       for (j=1; j<size; j++) {
106:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
107:         MPI_Get_count(&status,MPIU_SCALAR,&n);
108:         for (i=0; i<n; i++) {
109: #if defined(PETSC_USE_COMPLEX)
110:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
111: #else
112:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
113: #endif
114:         }
115:       }
116:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
117:       /*
118:         state 0: No header has been output
119:         state 1: Only POINT_DATA has been output
120:         state 2: Only CELL_DATA has been output
121:         state 3: Output both, POINT_DATA last
122:         state 4: Output both, CELL_DATA last
123:       */
124:       static PetscInt stateId = -1;
125:       int outputState = 0;
126:       PetscBool  hasState;
127:       int doOutput = 0;
128:       PetscInt bs, b;

130:       if (stateId < 0) {
131:         PetscObjectComposedDataRegister(&stateId);
132:       }
133:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
134:       if (!hasState) {
135:         outputState = 0;
136:       }
137:       PetscObjectGetName((PetscObject) xin, &name);
138:       VecGetLocalSize(xin, &nLen);
139:       n    = PetscMPIIntCast(nLen);
140:       VecGetBlockSize(xin, &bs);
141:       if (format == PETSC_VIEWER_ASCII_VTK) {
142:         if (outputState == 0) {
143:           outputState = 1;
144:           doOutput = 1;
145:         } else if (outputState == 1) {
146:           doOutput = 0;
147:         } else if (outputState == 2) {
148:           outputState = 3;
149:           doOutput = 1;
150:         } else if (outputState == 3) {
151:           doOutput = 0;
152:         } else if (outputState == 4) {
153:           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
154:         }
155:         if (doOutput) {
156:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
157:         }
158:       } else {
159:         if (outputState == 0) {
160:           outputState = 2;
161:           doOutput = 1;
162:         } else if (outputState == 1) {
163:           outputState = 4;
164:           doOutput = 1;
165:         } else if (outputState == 2) {
166:           doOutput = 0;
167:         } else if (outputState == 3) {
168:           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
169:         } else if (outputState == 4) {
170:           doOutput = 0;
171:         }
172:         if (doOutput) {
173:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
174:         }
175:       }
176:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
177:       if (name) {
178:         if (bs == 3) {
179:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
180:         } else {
181:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
182:         }
183:       } else {
184:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
185:       }
186:       if (bs != 3) {
187:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
188:       }
189:       for (i=0; i<n/bs; i++) {
190:         for (b=0; b<bs; b++) {
191:           if (b > 0) {
192:             PetscViewerASCIIPrintf(viewer," ");
193:           }
194:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
195:         }
196:         PetscViewerASCIIPrintf(viewer,"\n");
197:       }
198:       for (j=1; j<size; j++) {
199:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
200:         MPI_Get_count(&status,MPIU_SCALAR,&n);
201:         for (i=0; i<n/bs; i++) {
202:           for (b=0; b<bs; b++) {
203:             if (b > 0) {
204:               PetscViewerASCIIPrintf(viewer," ");
205:             }
206:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
207:           }
208:           PetscViewerASCIIPrintf(viewer,"\n");
209:         }
210:       }
211:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
212:       PetscInt bs, b;

214:       VecGetLocalSize(xin, &nLen);
215:       n    = PetscMPIIntCast(nLen);
216:       VecGetBlockSize(xin, &bs);
217:       if ((bs < 1) || (bs > 3)) {
218:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle up to 3D objects, but vector dimension is %d", bs);
219:       }
220:       for (i=0; i<n/bs; i++) {
221:         for (b=0; b<bs; b++) {
222:           if (b > 0) {
223:             PetscViewerASCIIPrintf(viewer," ");
224:           }
225:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
226:         }
227:         for (b=bs; b<3; b++) {
228:           PetscViewerASCIIPrintf(viewer," 0.0");
229:         }
230:         PetscViewerASCIIPrintf(viewer,"\n");
231:       }
232:       for (j=1; j<size; j++) {
233:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
234:         MPI_Get_count(&status,MPIU_SCALAR,&n);
235:         for (i=0; i<n/bs; i++) {
236:           for (b=0; b<bs; b++) {
237:             if (b > 0) {
238:               PetscViewerASCIIPrintf(viewer," ");
239:             }
240:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
241:           }
242:           for (b=bs; b<3; b++) {
243:             PetscViewerASCIIPrintf(viewer," 0.0");
244:           }
245:           PetscViewerASCIIPrintf(viewer,"\n");
246:         }
247:       }
248:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
249:       PetscInt bs, b, vertexCount = 1;

251:       VecGetLocalSize(xin, &nLen);
252:       n    = PetscMPIIntCast(nLen);
253:       VecGetBlockSize(xin, &bs);
254:       if ((bs < 1) || (bs > 3)) {
255:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
256:       }
257:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
258:       for (i=0; i<n/bs; i++) {
259:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
260:         for (b=0; b<bs; b++) {
261:           if (b > 0) {
262:             PetscViewerASCIIPrintf(viewer," ");
263:           }
264: #if !defined(PETSC_USE_COMPLEX)
265:           PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
266: #endif
267:         }
268:         PetscViewerASCIIPrintf(viewer,"\n");
269:       }
270:       for (j=1; j<size; j++) {
271:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
272:         MPI_Get_count(&status,MPIU_SCALAR,&n);
273:         for (i=0; i<n/bs; i++) {
274:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
275:           for (b=0; b<bs; b++) {
276:             if (b > 0) {
277:               PetscViewerASCIIPrintf(viewer," ");
278:             }
279: #if !defined(PETSC_USE_COMPLEX)
280:             PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
281: #endif
282:           }
283:           PetscViewerASCIIPrintf(viewer,"\n");
284:         }
285:       }
286:     } else {
287:       PetscObjectPrintClassNamePrefixType((PetscObject)xin,viewer,"Vector Object");
288:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
289:       cnt = 0;
290:       for (i=0; i<xin->map->n; i++) {
291:         if (format == PETSC_VIEWER_ASCII_INDEX) {
292:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
293:         }
294: #if defined(PETSC_USE_COMPLEX)
295:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
296:           PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
297:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
298:           PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
299:         } else {
300:           PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
301:         }
302: #else
303:         PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
304: #endif
305:       }
306:       /* receive and print messages */
307:       for (j=1; j<size; j++) {
308:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
309:         MPI_Get_count(&status,MPIU_SCALAR,&n);
310:         if (format != PETSC_VIEWER_ASCII_COMMON) {
311:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
312:         }
313:         for (i=0; i<n; i++) {
314:           if (format == PETSC_VIEWER_ASCII_INDEX) {
315:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
316:           }
317: #if defined(PETSC_USE_COMPLEX)
318:           if (PetscImaginaryPart(values[i]) > 0.0) {
319:             PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
320:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
321:             PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
322:           } else {
323:             PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
324:           }
325: #else
326:           PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
327: #endif
328:         }
329:       }
330:     }
331:     PetscFree(values);
332:   } else {
333:     /* send values */
334:     MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
335:   }
336:   PetscViewerFlush(viewer);
337:   VecRestoreArrayRead(xin,&xarray);
338:   return(0);
339: }
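
/* ---- Editorial usage sketch, not part of pdvec.c: driving the MATLAB branch
   of VecView_MPI_ASCII() above.  PetscViewerSetFormat() and the "x.m" file
   name are assumptions of this sketch; error checking is omitted. ---- */
static PetscErrorCode Sketch_ViewASCIIMatlab(Vec x)
{
  PetscViewer viewer;
  PetscObjectSetName((PetscObject)x,"x");     /* printed as "x = [ ... ];" */
  PetscViewerASCIIOpen(((PetscObject)x)->comm,"x.m",&viewer);
  PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
  VecView(x,viewer);                          /* rank 0 gathers and prints */
  PetscViewerDestroy(&viewer);
  return(0);
}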

343: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
344: {
345:   PetscErrorCode    ierr;
346:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
347:   PetscInt          len,n = xin->map->n,j,tr[2];
348:   int               fdes;
349:   MPI_Status        status;
350:   PetscScalar       *values;
351:   const PetscScalar *xarray;
352:   FILE              *file;
353: #if defined(PETSC_HAVE_MPIIO)
354:   PetscBool         isMPIIO;
355: #endif
356:   PetscBool         skipHeader;
357:   PetscInt          message_count,flowcontrolcount;

360:   VecGetArrayRead(xin,&xarray);
361:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
362:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

364:   /* determine the maximum message length that will arrive */
365:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
366:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

368:   if (!skipHeader) {
369:     tr[0] = VEC_FILE_CLASSID;
370:     tr[1] = xin->map->N;
371:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
372:   }

374: #if defined(PETSC_HAVE_MPIIO)
375:   PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
376:   if (!isMPIIO) {
377: #endif
378:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
379:     if (!rank) {
380:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

382:       len = 0;
383:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
384:       PetscMalloc(len*sizeof(PetscScalar),&values);
385:       mesgsize = PetscMPIIntCast(len);
386:       /* receive and save messages */
387:       for (j=1; j<size; j++) {
388:         PetscViewerFlowControlStepMaster(viewer,j,message_count,flowcontrolcount);
389:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
390:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
391:         n = (PetscInt)mesglen;
392:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
393:       }
394:       PetscViewerFlowControlEndMaster(viewer,message_count);
395:       PetscFree(values);
396:     } else {
397:       PetscViewerFlowControlStepWorker(viewer,rank,message_count);
398:       mesgsize = PetscMPIIntCast(xin->map->n);
399:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
400:       PetscViewerFlowControlEndWorker(viewer,message_count);
401:     }
402: #if defined(PETSC_HAVE_MPIIO)
403:   } else {
404:     MPI_Offset   off;
405:     MPI_File     mfdes;
406:     PetscMPIInt  gsizes[1],lsizes[1],lstarts[1];
407:     MPI_Datatype view;

409:     gsizes[0]  = PetscMPIIntCast(xin->map->N);
410:     lsizes[0]  = PetscMPIIntCast(n);
411:     lstarts[0] = PetscMPIIntCast(xin->map->rstart);
412:     MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
413:     MPI_Type_commit(&view);

415:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
416:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
417:     MPIU_File_write_all(mfdes,(void*)xarray,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
418:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
419:     MPI_Type_free(&view);
420:   }
421: #endif

423:   VecRestoreArrayRead(xin,&xarray);
424:   if (!rank) {
425:     PetscViewerBinaryGetInfoPointer(viewer,&file);
426:     if (file) {
427:       if (((PetscObject)xin)->prefix) {
428:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map->bs);
429:       } else {
430:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map->bs);
431:       }
432:     }
433:   }
434:   return(0);
435: }
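
/* ---- Editorial usage sketch, not part of pdvec.c: a binary write followed
   by a read-back.  VecLoad(Vec,PetscViewer) is assumed to be the loader that
   pairs with the VEC_FILE_CLASSID header written above. ---- */
static PetscErrorCode Sketch_BinaryRoundTrip(Vec x,Vec *y)
{
  PetscViewer viewer;
  PetscViewerBinaryOpen(((PetscObject)x)->comm,"x.dat",FILE_MODE_WRITE,&viewer);
  VecView(x,viewer);                          /* calls VecView_MPI_Binary() */
  PetscViewerDestroy(&viewer);
  VecCreate(((PetscObject)x)->comm,y);
  PetscViewerBinaryOpen(((PetscObject)x)->comm,"x.dat",FILE_MODE_READ,&viewer);
  VecLoad(*y,viewer);                         /* reads the header, then data */
  PetscViewerDestroy(&viewer);
  return(0);
}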

439: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
440: {
441:   PetscDraw         draw;
442:   PetscBool         isnull;
443:   PetscErrorCode    ierr;

445: #if defined(PETSC_USE_64BIT_INDICES)
447:   PetscViewerDrawGetDraw(viewer,0,&draw);
448:   PetscDrawIsNull(draw,&isnull);
449:   if (isnull) return(0);
450:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported with 64 bit integers");
451: #else
452:   PetscMPIInt       size,rank;
453:   int               i,N = xin->map->N,*lens;
454:   PetscReal         *xx,*yy;
455:   PetscDrawLG       lg;
456:   const PetscScalar *xarray;

459:   PetscViewerDrawGetDraw(viewer,0,&draw);
460:   PetscDrawIsNull(draw,&isnull);
461:   if (isnull) return(0);

463:   VecGetArrayRead(xin,&xarray);
464:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
465:   PetscDrawCheckResizedWindow(draw);
466:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
467:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
468:   if (!rank) {
469:     PetscDrawLGReset(lg);
470:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
471:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
472:     yy   = xx + N;
473:     PetscMalloc(size*sizeof(PetscInt),&lens);
474:     for (i=0; i<size; i++) {
475:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
476:     }
477: #if !defined(PETSC_USE_COMPLEX)
478:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
479: #else
480:     {
481:       PetscReal *xr;
482:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
483:       for (i=0; i<xin->map->n; i++) {
484:         xr[i] = PetscRealPart(xarray[i]);
485:       }
486:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
487:       PetscFree(xr);
488:     }
489: #endif
490:     PetscFree(lens);
491:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
492:     PetscFree(xx);
493:   } else {
494: #if !defined(PETSC_USE_COMPLEX)
495:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
496: #else
497:     {
498:       PetscReal *xr;
499:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
500:       for (i=0; i<xin->map->n; i++) {
501:         xr[i] = PetscRealPart(xarray[i]);
502:       }
503:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
504:       PetscFree(xr);
505:     }
506: #endif
507:   }
508:   PetscDrawLGDraw(lg);
509:   PetscDrawSynchronizedFlush(draw);
510:   VecRestoreArrayRead(xin,&xarray);
511: #endif
512:   return(0);
513: }

517: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
518: {
519:   PetscErrorCode    ierr;
520:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
521:   PetscInt          i,start,end;
522:   MPI_Status        status;
523:   PetscReal         coors[4],ymin,ymax,xmin,xmax,tmp;
524:   PetscDraw         draw;
525:   PetscBool         isnull;
526:   PetscDrawAxis     axis;
527:   const PetscScalar *xarray;

530:   PetscViewerDrawGetDraw(viewer,0,&draw);
531:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

533:   VecGetArrayRead(xin,&xarray);
534:   PetscDrawCheckResizedWindow(draw);
535:   xmin = 1.e20; xmax = -1.e20;
536:   for (i=0; i<xin->map->n; i++) {
537: #if defined(PETSC_USE_COMPLEX)
538:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
539:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
540: #else
541:     if (xarray[i] < xmin) xmin = xarray[i];
542:     if (xarray[i] > xmax) xmax = xarray[i];
543: #endif
544:   }
545:   if (xmin + 1.e-10 > xmax) {
546:     xmin -= 1.e-5;
547:     xmax += 1.e-5;
548:   }
549:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPIU_MIN,0,((PetscObject)xin)->comm);
550:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPIU_MAX,0,((PetscObject)xin)->comm);
551:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
552:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
553:   PetscDrawAxisCreate(draw,&axis);
554:   PetscLogObjectParent(draw,axis);
555:   if (!rank) {
556:     PetscDrawClear(draw);
557:     PetscDrawFlush(draw);
558:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
559:     PetscDrawAxisDraw(axis);
560:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
561:   }
562:   PetscDrawAxisDestroy(&axis);
563:   MPI_Bcast(coors,4,MPIU_REAL,0,((PetscObject)xin)->comm);
564:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
565:   /* draw local part of vector */
566:   VecGetOwnershipRange(xin,&start,&end);
567:   if (rank < size-1) { /* send value to right */
568:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,((PetscObject)xin)->comm);
569:   }
570:   for (i=1; i<xin->map->n; i++) {
571: #if !defined(PETSC_USE_COMPLEX)
572:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
573:                    xarray[i],PETSC_DRAW_RED);
574: #else
575:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
576:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
577: #endif
578:   }
579:   if (rank) { /* receive value from left */
580:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,((PetscObject)xin)->comm,&status);
581: #if !defined(PETSC_USE_COMPLEX)
582:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
583: #else
584:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
585: #endif
586:   }
587:   PetscDrawSynchronizedFlush(draw);
588:   PetscDrawPause(draw);
589:   VecRestoreArrayRead(xin,&xarray);
590:   return(0);
591: }
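
/* ---- Editorial usage sketch, not part of pdvec.c: the draw viewer plots the
   vector in an X window; setting the LG format instead routes to
   VecView_MPI_Draw_LG() above.  Error checking is omitted. ---- */
static PetscErrorCode Sketch_DrawView(Vec x)
{
  PetscViewer viewer = PETSC_VIEWER_DRAW_(((PetscObject)x)->comm);
  VecView(x,viewer);                          /* VecView_MPI_Draw() */
  PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);
  VecView(x,viewer);                          /* VecView_MPI_Draw_LG() */
  return(0);
}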

593: #if defined(PETSC_HAVE_MATLAB_ENGINE)
596: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
597: {
598:   PetscErrorCode    ierr;
599:   PetscMPIInt       rank,size,*lens;
600:   PetscInt          i,N = xin->map->N;
601:   const PetscScalar *xarray;
602:   PetscScalar       *xx;

605:   VecGetArrayRead(xin,&xarray);
606:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
607:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
608:   if (!rank) {
609:     PetscMalloc(N*sizeof(PetscScalar),&xx);
610:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
611:     for (i=0; i<size; i++) {
612:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
613:     }
614:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,((PetscObject)xin)->comm);
615:     PetscFree(lens);

617:     PetscObjectName((PetscObject)xin);
618:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

620:     PetscFree(xx);
621:   } else {
622:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,((PetscObject)xin)->comm);
623:   }
624:   VecRestoreArrayRead(xin,&xarray);
625:   return(0);
626: }
627: #endif

629: #if defined(PETSC_HAVE_HDF5)
632: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
633: {
634:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
635:   hid_t             filespace; /* file dataspace identifier */
636:   hid_t             chunkspace; /* chunk dataset property identifier */
637:   hid_t             plist_id;  /* property list identifier */
638:   hid_t             dset_id;   /* dataset identifier */
639:   hid_t             memspace;  /* memory dataspace identifier */
640:   hid_t             file_id;
641:   hid_t             group;
642:   herr_t            status;
643:   PetscInt          bs = xin->map->bs > 0 ? xin->map->bs : 1;
644:   hsize_t           dim;
645:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
646:   PetscInt          timestep;
647:   PetscInt          low;
648:   const PetscScalar *x;
649:   const char        *vecname;
650:   PetscErrorCode    ierr;

653:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
654:   PetscViewerHDF5GetTimestep(viewer, &timestep);

656:   /* Create the dataspace for the dataset.
657:    *
658:    * dims - holds the current dimensions of the dataset
659:    *
660:    * maxDims - holds the maximum dimensions of the dataset (unlimited
661:    * in the time-step dimension, fixed at the current sizes in the
662:    * other dimensions, so only additional time steps can be added).
663:    *
664:    * chunkDims - holds the size of a single time step (required to
665:    * permit extending the dataset).
666:    */
667:   dim  = 0;
668:   if (timestep >= 0) {
669:     dims[dim]    = timestep+1;
670:     maxDims[dim] = H5S_UNLIMITED;
671:     chunkDims[dim] = 1;
672:     ++dim;
673:   }
674:   dims[dim]    = PetscHDF5IntCast(xin->map->N)/bs;
675:   maxDims[dim] = dims[dim];
676:   chunkDims[dim] = dims[dim];
677:   ++dim;
678:   if (bs >= 1) {
679:     dims[dim]    = bs;
680:     maxDims[dim] = dims[dim];
681:     chunkDims[dim] = dims[dim];
682:     ++dim;
683:   }
684: #if defined(PETSC_USE_COMPLEX)
685:   dims[dim]    = 2;
686:   maxDims[dim] = dims[dim];
687:   chunkDims[dim] = dims[dim];
688:   ++dim;
689: #endif
691:   filespace = H5Screate_simple(dim, dims, maxDims);
692:   if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

694:   /* Create the dataset with default properties and close filespace */
695:   PetscObjectGetName((PetscObject) xin, &vecname);
696:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
697:     /* Create chunk */
698:     chunkspace = H5Pcreate(H5P_DATASET_CREATE);
699:     if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
700:     status = H5Pset_chunk(chunkspace, dim, chunkDims); CHKERRQ(status);

702: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
703:     dset_id = H5Dcreate2(group, vecname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
704: #else
705:     dset_id = H5Dcreate(group, vecname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT);
706: #endif
707:     if (dset_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dcreate2()");
708:     status = H5Pclose(chunkspace);CHKERRQ(status);
709:   } else {
710:     dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
711:     status = H5Dset_extent(dset_id, dims);CHKERRQ(status);
712:   }
713:   status = H5Sclose(filespace);CHKERRQ(status);

715:   /* Each process defines a dataset and writes it to the hyperslab in the file */
716:   dim = 0;
717:   if (timestep >= 0) {
718:     count[dim] = 1;
719:     ++dim;
720:   }
721:   count[dim] = PetscHDF5IntCast(xin->map->n)/bs;
722:   ++dim;
723:   if (bs >= 1) {
724:     count[dim] = bs;
725:     ++dim;
726:   }
727: #if defined(PETSC_USE_COMPLEX)
728:   count[dim] = 2;
729:   ++dim;
730: #endif
731:   if (xin->map->n > 0) {
732:     memspace = H5Screate_simple(dim, count, NULL);
733:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");
734:   } else {
735:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
736:     memspace = H5Screate(H5S_NULL);
737:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate()");
738:   }

740:   /* Select hyperslab in the file */
741:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
742:   dim = 0;
743:   if (timestep >= 0) {
744:     offset[dim] = timestep;
745:     ++dim;
746:   }
747:   offset[dim] = PetscHDF5IntCast(low/bs);
748:   ++dim;
749:   if (bs >= 1) {
750:     offset[dim] = 0;
751:     ++dim;
752:   }
753: #if defined(PETSC_USE_COMPLEX)
754:   offset[dim] = 0;
755:   ++dim;
756: #endif
757:   if (xin->map->n > 0) {
758:     filespace = H5Dget_space(dset_id);
759:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dget_space()");
760:     status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
761:   } else {
762:     /* Create null filespace to match null memspace. */
763:     filespace = H5Screate(H5S_NULL);
764:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate(H5S_NULL)");
765:   }

767:   /* Create property list for collective dataset write */
768:   plist_id = H5Pcreate(H5P_DATASET_XFER);
769:   if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
770: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
771:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
772: #endif
773:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

775:   VecGetArrayRead(xin, &x);
776:   status = H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
777:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
778:   VecRestoreArrayRead(xin, &x);

780:   /* Close/release resources */
781:   if (group != file_id) {
782:     status = H5Gclose(group);CHKERRQ(status);
783:   }
784:   status = H5Pclose(plist_id);CHKERRQ(status);
785:   status = H5Sclose(filespace);CHKERRQ(status);
786:   status = H5Sclose(memspace);CHKERRQ(status);
787:   status = H5Dclose(dset_id);CHKERRQ(status);
788:   return(0);
789: }
790: #endif
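
/* ---- Editorial usage sketch, not part of pdvec.c (requires a PETSc built
   with HDF5): the vector's object name becomes the dataset name, and each
   process writes its hyperslab collectively as above. ---- */
#if defined(PETSC_HAVE_HDF5)
static PetscErrorCode Sketch_HDF5View(Vec x)
{
  PetscViewer viewer;
  PetscObjectSetName((PetscObject)x,"solution"); /* dataset name in the file */
  PetscViewerHDF5Open(((PetscObject)x)->comm,"x.h5",FILE_MODE_WRITE,&viewer);
  VecView(x,viewer);                          /* calls VecView_MPI_HDF5() */
  PetscViewerDestroy(&viewer);
  return(0);
}
#endif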

794: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
795: {
797:   PetscBool      iascii,isbinary,isdraw;
798: #if defined(PETSC_HAVE_MATHEMATICA)
799:   PetscBool      ismathematica;
800: #endif
801: #if defined(PETSC_HAVE_HDF5)
802:   PetscBool      ishdf5;
803: #endif
804: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
805:   PetscBool      ismatlab;
806: #endif

809:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
810:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
811:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
812: #if defined(PETSC_HAVE_MATHEMATICA)
813:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
814: #endif
815: #if defined(PETSC_HAVE_HDF5)
816:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
817: #endif
818: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
819:   PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
820: #endif
821:   if (iascii){
822:     VecView_MPI_ASCII(xin,viewer);
823:   } else if (isbinary) {
824:     VecView_MPI_Binary(xin,viewer);
825:   } else if (isdraw) {
826:     PetscViewerFormat format;

828:     PetscViewerGetFormat(viewer,&format);
829:     if (format == PETSC_VIEWER_DRAW_LG) {
830:       VecView_MPI_Draw_LG(xin,viewer);
831:     } else {
832:       VecView_MPI_Draw(xin,viewer);
833:     }
834: #if defined(PETSC_HAVE_MATHEMATICA)
835:   } else if (ismathematica) {
836:     PetscViewerMathematicaPutVector(viewer,xin);
837: #endif
838: #if defined(PETSC_HAVE_HDF5)
839:   } else if (ishdf5) {
840:     VecView_MPI_HDF5(xin,viewer);
841: #endif
842: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
843:   } else if (ismatlab) {
844:     VecView_MPI_Matlab(xin,viewer);
845: #endif
846:   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
847:   return(0);
848: }
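
/* ---- Editorial sketch, not part of pdvec.c: the dispatch above keys off the
   viewer's type string; user code can make the same test. ---- */
static PetscErrorCode Sketch_ViewIfASCII(Vec x,PetscViewer viewer)
{
  PetscBool isascii;
  PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
  if (isascii) {
    VecView(x,viewer);                        /* takes the ASCII branch */
  }
  return(0);
}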

852: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
853: {
855:   *N = xin->map->N;
856:   return(0);
857: }

861: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
862: {
863:   const PetscScalar *xx;
864:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
865:   PetscErrorCode    ierr;

868:   VecGetArrayRead(xin,&xx);
869:   for (i=0; i<ni; i++) {
870:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
871:     tmp = ix[i] - start;
872: #if defined(PETSC_USE_DEBUG)
873:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
874: #endif
875:     y[i] = xx[tmp];
876:   }
877:   VecRestoreArrayRead(xin,&xx);
878:   return(0);
879: }
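
/* ---- Editorial usage sketch, not part of pdvec.c: VecGetValues() on an MPI
   vector can only fetch locally owned entries, as the debug check above
   enforces (assumes every process owns at least one entry). ---- */
static PetscErrorCode Sketch_GetLocalValues(Vec x)
{
  PetscInt    lo,hi,ix[2];
  PetscScalar vals[2];
  VecGetOwnershipRange(x,&lo,&hi);
  ix[0] = lo; ix[1] = hi-1;                   /* both owned by this process */
  VecGetValues(x,2,ix,vals);
  return(0);
}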

883: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
884: {
886:   PetscMPIInt    rank = xin->stash.rank;
887:   PetscInt       *owners = xin->map->range,start = owners[rank];
888:   PetscInt       end = owners[rank+1],i,row;
889:   PetscScalar    *xx;

892: #if defined(PETSC_USE_DEBUG)
893:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
894:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
895: #endif
896:   VecGetArray(xin,&xx);
897:   xin->stash.insertmode = addv;

899:   if (addv == INSERT_VALUES) {
900:     for (i=0; i<ni; i++) {
901:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
902: #if defined(PETSC_USE_DEBUG)
903:       if (ix[i] < 0) {
904:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
905:       }
906: #endif
907:       if ((row = ix[i]) >= start && row < end) {
908:         xx[row-start] = y[i];
909:       } else if (!xin->stash.donotstash) {
910: #if defined(PETSC_USE_DEBUG)
911:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
912: #endif
913:         VecStashValue_Private(&xin->stash,row,y[i]);
914:       }
915:     }
916:   } else {
917:     for (i=0; i<ni; i++) {
918:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
919: #if defined(PETSC_USE_DEBUG)
920:       if (ix[i] < 0) {
921:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
922:       }
923: #endif
924:       if ((row = ix[i]) >= start && row < end) {
925:         xx[row-start] += y[i];
926:       } else if (!xin->stash.donotstash) {
927: #if defined(PETSC_USE_DEBUG)
928:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
929: #endif
930:         VecStashValue_Private(&xin->stash,row,y[i]);
931:       }
932:     }
933:   }
934:   VecRestoreArray(xin,&xx);
935:   return(0);
936: }
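
/* ---- Editorial usage sketch, not part of pdvec.c: any process may
   contribute to any entry; off-process values land in the stash above and
   are delivered to their owners during assembly. ---- */
static PetscErrorCode Sketch_AddAnywhere(Vec x)
{
  PetscInt    i = 0;                          /* owned by rank 0 */
  PetscScalar one = 1.0;
  VecSetValues(x,1,&i,&one,ADD_VALUES);       /* stashed if i is off-process */
  VecAssemblyBegin(x);                        /* ships stashed values */
  VecAssemblyEnd(x);
  return(0);
}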

940: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
941: {
942:   PetscMPIInt    rank = xin->stash.rank;
943:   PetscInt       *owners = xin->map->range,start = owners[rank];
945:   PetscInt       end = owners[rank+1],i,row,bs = xin->map->bs,j;
946:   PetscScalar    *xx,*y = (PetscScalar*)yin;

949:   VecGetArray(xin,&xx);
950: #if defined(PETSC_USE_DEBUG)
951:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
952:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
953: #endif
954:   xin->stash.insertmode = addv;

956:   if (addv == INSERT_VALUES) {
957:     for (i=0; i<ni; i++) {
958:       if ((row = bs*ix[i]) >= start && row < end) {
959:         for (j=0; j<bs; j++) {
960:           xx[row-start+j] = y[j];
961:         }
962:       } else if (!xin->stash.donotstash) {
963:         if (ix[i] < 0) continue;
964: #if defined(PETSC_USE_DEBUG)
965:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
966: #endif
967:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
968:       }
969:       y += bs;
970:     }
971:   } else {
972:     for (i=0; i<ni; i++) {
973:       if ((row = bs*ix[i]) >= start && row < end) {
974:         for (j=0; j<bs; j++) {
975:           xx[row-start+j] += y[j];
976:         }
977:       } else if (!xin->stash.donotstash) {
978:         if (ix[i] < 0) continue;
979: #if defined(PETSC_USE_DEBUG)
980:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
981: #endif
982:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
983:       }
984:       y += bs;
985:     }
986:   }
987:   VecRestoreArray(xin,&xx);
988:   return(0);
989: }
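
/* ---- Editorial usage sketch, not part of pdvec.c: one block index addresses
   bs consecutive entries.  The block size 3 here is an assumption and must
   divide the local sizes. ---- */
static PetscErrorCode Sketch_SetBlocked(Vec x)
{
  PetscInt    bi = 0;                         /* block 0 = entries 0,1,2 */
  PetscScalar vals[3] = {1.0,2.0,3.0};
  VecSetBlockSize(x,3);                       /* set before assembling */
  VecSetValuesBlocked(x,1,&bi,vals,INSERT_VALUES);
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);
  return(0);
}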

991: /*
992:    Since nsends or nreceives may be zero, we add 1 in certain mallocs
993:    to make sure we never request a zero-byte allocation.
994: */
997: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
998: {
1000:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1001:   PetscMPIInt    size;
1002:   InsertMode     addv;
1003:   MPI_Comm       comm = ((PetscObject)xin)->comm;

1006:   if (xin->stash.donotstash) {
1007:     return(0);
1008:   }

1010:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
1011:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1012:   xin->stash.insertmode = addv; /* in case this processor had no cache */

1014:   bs = xin->map->bs;
1015:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
1016:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1017:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1018:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1019:     xin->bstash.bowners = bowners;
1020:   } else {
1021:     bowners = xin->bstash.bowners;
1022:   }
1023:   VecStashScatterBegin_Private(&xin->stash,owners);
1024:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1025:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1026:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1027:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1028:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

1030:   return(0);
1031: }

1035: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1036: {
1038:   PetscInt       base,i,j,*row,flg,bs;
1039:   PetscMPIInt    n;
1040:   PetscScalar    *val,*vv,*array,*xarray;

1043:   if (!vec->stash.donotstash) {
1044:     VecGetArray(vec,&xarray);
1045:     base = vec->map->range[vec->stash.rank];
1046:     bs   = vec->map->bs;

1048:     /* Process the stash */
1049:     while (1) {
1050:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1051:       if (!flg) break;
1052:       if (vec->stash.insertmode == ADD_VALUES) {
1053:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1054:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1055:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1056:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1057:     }
1058:     VecStashScatterEnd_Private(&vec->stash);

1060:     /* now process the block-stash */
1061:     while (1) {
1062:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1063:       if (!flg) break;
1064:       for (i=0; i<n; i++) {
1065:         array = xarray+row[i]*bs-base;
1066:         vv    = val+i*bs;
1067:         if (vec->stash.insertmode == ADD_VALUES) {
1068:           for (j=0; j<bs; j++) { array[j] += vv[j];}
1069:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1070:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
1071:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1072:       }
1073:     }
1074:     VecStashScatterEnd_Private(&vec->bstash);
1075:     VecRestoreArray(vec,&xarray);
1076:   }
1077:   vec->stash.insertmode = NOT_SET_VALUES;
1078:   return(0);
1079: }
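
/* ---- Editorial usage sketch, not part of pdvec.c: assembly is collective
   because VecAssemblyBegin_MPI() above starts with an Allreduce, so every
   process must call it even if it set no values. ---- */
static PetscErrorCode Sketch_CollectiveAssembly(Vec x)
{
  PetscMPIInt rank;
  PetscInt    i = 0;
  PetscScalar v = 1.0;
  MPI_Comm_rank(((PetscObject)x)->comm,&rank);
  if (!rank) {
    VecSetValues(x,1,&i,&v,INSERT_VALUES);    /* only rank 0 inserts */
  }
  VecAssemblyBegin(x);                        /* still called on every rank */
  VecAssemblyEnd(x);
  return(0);
}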