Actual source code: vecio.c

/*
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSCVIEWERBINARY).
 */
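/*
   Usage sketch (illustrative only, not part of this file): a typical round trip that
   writes a vector with the binary viewer and reads it back with VecLoad().  The file
   name "ex.vec" and the vectors u and v are placeholders.

     PetscViewer viewer;
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"ex.vec",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
     ierr = VecView(u,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"ex.vec",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = VecLoad(v,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/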

#include <petscsys.h>
#include <petscvec.h>         /*I  "petscvec.h"  I*/
#include <private/vecimpl.h>
#include <petscmat.h>         /* so that MAT_FILE_CLASSID is defined */

static PetscErrorCode PetscViewerBinaryReadVecHeader_Private(PetscViewer viewer,PetscInt *rows)
{
  MPI_Comm       comm;
  PetscInt       tr[2],type;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  /* Read vector header */
  PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
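  /* tr[0] holds the class id written by VecView(); tr[1] holds the global number of rows */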
  type = tr[0];
  if (type != VEC_FILE_CLASSID) {
    PetscLogEventEnd(VEC_Load,viewer,0,0,0);
    if (type == MAT_FILE_CLASSID) {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix is next in file, not a vector as you requested");
    } else {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a vector next in file");
    }
  }
  *rows = tr[1];
  return(0);
}

#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode VecLoad_Binary_MPIIO(Vec vec, PetscViewer viewer)
{
  PetscMPIInt    gsizes[1],lsizes[1],lstarts[1];
  PetscScalar    *avec;
  MPI_Datatype   view;
  MPI_File       mfdes;
  MPI_Aint       ub,ul;
  MPI_Offset     off;

  VecGetArray(vec,&avec);
  gsizes[0]  = PetscMPIIntCast(vec->map->N);
  lsizes[0]  = PetscMPIIntCast(vec->map->n);
  lstarts[0] = PetscMPIIntCast(vec->map->rstart);
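  /* each process views the file as one global 1-D array and selects only its own contiguous block of lsizes[0] entries starting at lstarts[0] */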
  MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
  MPI_Type_commit(&view);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char *)"native",MPI_INFO_NULL);
  MPIU_File_read_all(mfdes,avec,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
  MPI_Type_get_extent(view,&ul,&ub);
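  /* the extent of the subarray type spans the entire global vector, so the viewer offset advances past all of it */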
  PetscViewerBinaryAddMPIIOOffset(viewer,ub);
  MPI_Type_free(&view);

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
#endif
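/*
   Note (assumption): the MPI-IO path above is used only when the binary viewer has
   MPI-IO enabled (typically via the -viewer_binary_mpiio option); otherwise
   VecLoad_Binary() below reads the file on process 0 and distributes the pieces.
*/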
PetscErrorCode VecLoad_Binary(Vec vec, PetscViewer viewer)
{
  PetscMPIInt    size,rank,tag;
  int            fd;
  PetscInt       i,rows = 0,n,*range,N,bs;
  PetscBool      flag;
  PetscScalar    *avec,*avecwork;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool      useMPIIO;
#endif

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscViewerBinaryReadVecHeader_Private(viewer,&rows);
  /* Set Vec sizes, blocksize, and type if not already set */
  if (vec->map->n < 0 && vec->map->N < 0) {
    VecSetSizes(vec,PETSC_DECIDE,rows);
  }
  PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);
  if (flag) {
    VecSetBlockSize(vec, bs);
  }

  /* If sizes and type already set, check that the vector global size is correct */
  VecGetSize(vec, &N);
  if (N != rows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%d) than input vector (%d)", rows, N);

#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetMPIIO(viewer,&useMPIIO);
  if (useMPIIO) {
    VecLoad_Binary_MPIIO(vec, viewer);
    return(0);
  }
#endif

  VecGetLocalSize(vec,&n);
  PetscObjectGetNewTag((PetscObject)viewer,&tag);
  VecGetArray(vec,&avec);
  if (!rank) {
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by other */
      range = vec->map->range;
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avecwork);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
        MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avecwork);
    }
  } else {
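    /* processes other than 0 receive their locally owned portion directly into the vector array */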
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
  }

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode PetscViewerHDF5OpenGroup(PetscViewer viewer, hid_t *fileId, hid_t *groupId) {
  hid_t          file_id, group;
  const char    *groupName = PETSC_NULL;

  PetscViewerHDF5GetFileId(viewer, &file_id);
  PetscViewerHDF5GetGroup(viewer, &groupName);
  /* Open group */
  if (groupName) {
    PetscBool root;

    PetscStrcmp(groupName, "/", &root);
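    /* create the group on first use; the root group "/" always exists */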
    if (!root && !H5Lexists(file_id, groupName, H5P_DEFAULT)) {
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
      group = H5Gcreate2(file_id, groupName, 0, H5P_DEFAULT, H5P_DEFAULT);
#else /* deprecated HDF5 1.6 API */
      group = H5Gcreate(file_id, groupName, 0);
#endif
      if (group < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Could not create group %s", groupName);
      H5Gclose(group);
    }
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    group = H5Gopen2(file_id, groupName, H5P_DEFAULT);
#else
    group = H5Gopen(file_id, groupName);
#endif
    if (group < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Could not open group %s", groupName);
  } else {
    group = file_id;
  }
  *fileId  = file_id;
  *groupId = group;
  return(0);
}

/*
   This should properly handle the cases where PetscInt and hsize_t are each either 32- or 64-bit.
   This means casting, with checks, back and forth between the two types of variables.
*/
PetscErrorCode VecLoad_HDF5(Vec xin, PetscViewer viewer)
{
  hid_t          file_id, group, dset_id, filespace, memspace, plist_id;
  hsize_t        rdim, dim;
  hsize_t        dims[4], count[4], offset[4];
  herr_t         status;
  PetscInt       n, N, bs = 1, bsInd, lenInd, low, timestep;
  PetscScalar   *x;
  PetscBool      flag;
  const char    *vecname;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);

  /* Open the dataset (named after the Vec) with default properties */
  PetscObjectGetName((PetscObject)xin,&vecname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
#else
  dset_id = H5Dopen(group, vecname);
#endif
  if (dset_id == -1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Dopen() with Vec named %s",vecname);
  /* Retrieve the dataspace for the dataset */
  filespace = H5Dget_space(dset_id);
  if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Dget_space()");
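  /* The dataset dimensions, outermost first, are: [timestep (if timestepping)] [global length / bs] [bs (if bs > 1)] [2 (if complex scalars)] */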
  dim = 0;
  if (timestep >= 0) {
    ++dim;
  }
  ++dim;
  if (bs > 1) {
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  rdim = H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;
  if (rdim != dim) {
    if (rdim == dim+1 && bs == 1) {
      bs = dims[bsInd];
      if (flag) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Block size 1 specified for vector does not match blocksize in file %d",bs);
    } else {
      SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected",rdim,dim);
    }
  } else if (bs > 1 && bs != (PetscInt) dims[bsInd]) {
    SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Block size %d specified for vector does not match blocksize in file %d",bs,dims[bsInd]);
  }

  /* Set Vec sizes, blocksize, and type if not already set */
  if ((xin)->map->n < 0 && (xin)->map->N < 0) {
    VecSetSizes(xin, PETSC_DECIDE, dims[lenInd]);
  }
  if (bs > 1 || flag) {
    VecSetBlockSize(xin, bs);
  }

  /* If sizes and type already set, check that the vector global size is correct */
  VecGetSize(xin, &N);
  if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%d) than input vector (%d)", (PetscInt) dims[lenInd], N/bs);

  /* Each process defines a dataset and reads it from the hyperslab in the file */
  VecGetLocalSize(xin, &n);
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  count[dim] = PetscHDF5IntCast(n)/bs;
  ++dim;
  if (bs > 1) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  memspace = H5Screate_simple(dim, count, NULL);
  if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Screate_simple()");

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, PETSC_NULL);
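  /* this process reads the block rows [low/bs, low/bs + n/bs) at the selected timestep, mirroring the memory space layout above */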
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  offset[dim] = PetscHDF5IntCast(low/bs);
  ++dim;
  if (bs > 1) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

  /* Create property list for collective dataset read */
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Pcreate()");
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    status = H5Gclose(group);CHKERRQ(status);
  }
  status = H5Pclose(plist_id);CHKERRQ(status);
  status = H5Sclose(filespace);CHKERRQ(status);
  status = H5Sclose(memspace);CHKERRQ(status);
  status = H5Dclose(dset_id);CHKERRQ(status);

  VecAssemblyBegin(xin);
  VecAssemblyEnd(xin);
  return(0);
}
#endif


PetscErrorCode  VecLoad_Default(Vec newvec, PetscViewer viewer)
{
  PetscBool      isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool      ishdf5;
#endif

  PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_HDF5)
  PetscTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif

#if defined(PETSC_HAVE_HDF5)
  if (ishdf5) {
    if (!((PetscObject)newvec)->name) {
      PetscLogEventEnd(VEC_Load,viewer,0,0,0);
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Since the HDF5 format stores each object under an ASCII name, you must set the name of the Vec with PetscObjectSetName() before calling VecLoad()");
    }
    VecLoad_HDF5(newvec, viewer);
  } else
#endif
  {
    VecLoad_Binary(newvec, viewer);
  }
  return(0);
}
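/*
   Usage sketch (illustrative only, not part of this file): loading a vector from an HDF5
   file.  Requires a PETSc build configured with HDF5; the file name "ex.h5" and the object
   name "pressure" are placeholders and must match what was written.

     Vec         v;
     PetscViewer viewer;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = PetscObjectSetName((PetscObject)v,"pressure");CHKERRQ(ierr);
     ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"ex.h5",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = VecLoad(v,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/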