Actual source code: vecio.c
petsc-3.6.4 2016-04-12
/*
   This file contains simple binary input routines for vectors. The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSCVIEWERBINARY).
*/

#include <petscsys.h>
#include <petscvec.h>              /*I  "petscvec.h"  I*/
#include <petsc/private/vecimpl.h>
#include <petscmat.h>              /* so that MAT_FILE_CLASSID is defined */
#include <petscviewerhdf5.h>
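
/*
   Sketch (not part of the original source): the typical calling sequence that
   reaches the load routines in this file; the file name is illustrative.

     Vec         x;
     PetscViewer viewer;

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
     VecCreate(PETSC_COMM_WORLD,&x);
     VecLoad(x,viewer);               /* ends up in VecLoad_Default()/VecLoad_Binary() below */
     PetscViewerDestroy(&viewer);
*/
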
static PetscErrorCode PetscViewerBinaryReadVecHeader_Private(PetscViewer viewer,PetscInt *rows)
{
  MPI_Comm comm;
  PetscInt tr[2],type;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  /* Read vector header */
  PetscViewerBinaryRead(viewer,tr,2,NULL,PETSC_INT);
  type = tr[0];
  if (type != VEC_FILE_CLASSID) {
    PetscLogEventEnd(VEC_Load,viewer,0,0,0);
    if (type == MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix is next in file, not a vector as you requested");
    else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a vector next in file");
  }
  *rows = tr[1];
  return(0);
}
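
/*
   Sketch (not part of the original source): the on-disk header layout assumed by
   PetscViewerBinaryReadVecHeader_Private() above. A PETSc binary vector file
   starts with two PetscInt values followed by the scalar entries; the names are
   illustrative only.

     PetscInt    classid;         VEC_FILE_CLASSID
     PetscInt    rows;            global vector length
     PetscScalar values[rows];    vector entries, stored contiguously
*/
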
#if defined(PETSC_HAVE_MPIIO)
static PetscErrorCode VecLoad_Binary_MPIIO(Vec vec, PetscViewer viewer)
{
  PetscMPIInt lsize;
  PetscScalar *avec;
  MPI_File    mfdes;
  MPI_Offset  off;

  VecGetArray(vec,&avec);
  PetscMPIIntCast(vec->map->n,&lsize);

  PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
  PetscViewerBinaryGetMPIIOOffset(viewer,&off);
  off += vec->map->rstart*sizeof(PetscScalar);
  MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
  MPIU_File_read_all(mfdes,avec,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
  PetscViewerBinaryAddMPIIOOffset(viewer,vec->map->N*sizeof(PetscScalar));

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
#endif
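
/*
   Sketch (not in the original source): the MPI-IO path above is taken only when
   the binary viewer has MPI-IO enabled; with an options-driven setup this is
   usually requested from the command line, e.g.

     ./app -viewer_binary_mpiio

   A programmatic setter (PetscViewerBinarySetUseMPIIO()) is available in some
   PETSc releases; check the version you are using.
*/
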
PetscErrorCode VecLoad_Binary(Vec vec, PetscViewer viewer)
{
  PetscMPIInt size,rank,tag;
  int         fd;
  PetscInt    i,rows = 0,n,*range,N,bs;
  PetscBool   flag,skipheader;
  PetscScalar *avec,*avecwork;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool   useMPIIO;
#endif

  /* force binary viewer to load .info file if it has not yet done so */
  PetscViewerSetUp(viewer);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscViewerBinaryGetSkipHeader(viewer,&skipheader);
  if (!skipheader) {
    PetscViewerBinaryReadVecHeader_Private(viewer,&rows);
  } else {
    VecType vtype;
    VecGetType(vec,&vtype);
    if (!vtype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Vector binary file header was skipped, thus the user must specify the type and length of the input vector");
    VecGetSize(vec,&N);
    if (!N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Vector binary file header was skipped, thus the user must specify the length of the input vector");
    rows = N;
  }
  /* Set Vec sizes, blocksize, and type if not already set. Block size first so that local sizes will be compatible. */
  PetscOptionsGetInt(((PetscObject)vec)->prefix,"-vecload_block_size",&bs,&flag);
  if (flag) {
    VecSetBlockSize(vec,bs);
  }
  if (vec->map->n < 0 && vec->map->N < 0) {
    VecSetSizes(vec,PETSC_DECIDE,rows);
  }

  /* If sizes and type are already set, check that the vector global size is correct */
  VecGetSize(vec,&N);
  if (N != rows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length (%D) than input vector (%D)",rows,N);

#if defined(PETSC_HAVE_MPIIO)
  PetscViewerBinaryGetUseMPIIO(viewer,&useMPIIO);
  if (useMPIIO) {
    VecLoad_Binary_MPIIO(vec,viewer);
    return(0);
  }
#endif

  VecGetLocalSize(vec,&n);
  PetscObjectGetNewTag((PetscObject)viewer,&tag);
  VecGetArray(vec,&avec);
  if (!rank) {
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);

    if (size > 1) {
      /* read in the remaining chunks and send them to the other processes */
      /* determine the maximum chunk owned by another process */
      range = vec->map->range;
      n     = 1;
      for (i=1; i<size; i++) n = PetscMax(n,range[i+1] - range[i]);

      PetscMalloc1(n,&avecwork);
      for (i=1; i<size; i++) {
        n = range[i+1] - range[i];
        PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
        MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avecwork);
    }
  } else {
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
  }

  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  return(0);
}
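
/*
   Sketch (not in the original source): when the header is skipped the caller must
   fully specify the vector before VecLoad(), matching the checks at the top of
   VecLoad_Binary(); the file name and length below are illustrative.

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.raw",FILE_MODE_READ,&viewer);
     PetscViewerBinarySetSkipHeader(viewer,PETSC_TRUE);
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);
     VecSetType(x,VECMPI);
     VecLoad(x,viewer);
*/
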
#if defined(PETSC_HAVE_HDF5)
PetscErrorCode PetscViewerHDF5OpenGroup(PetscViewer viewer, hid_t *fileId, hid_t *groupId)
{
  hid_t      file_id, group;
  htri_t     found;
  const char *groupName = NULL;

  PetscViewerHDF5GetFileId(viewer, &file_id);
  PetscViewerHDF5GetGroup(viewer, &groupName);
  /* Open group */
  if (groupName) {
    PetscBool root;

    PetscStrcmp(groupName, "/", &root);
    PetscStackCall("H5Lexists",found = H5Lexists(file_id, groupName, H5P_DEFAULT));
    if (!root && (found <= 0)) {
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
      PetscStackCallHDF5Return(group,H5Gcreate2,(file_id, groupName, 0, H5P_DEFAULT, H5P_DEFAULT));
#else /* deprecated HDF5 1.6 API */
      PetscStackCallHDF5Return(group,H5Gcreate,(file_id, groupName, 0));
#endif
      PetscStackCallHDF5(H5Gclose,(group));
    }
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    PetscStackCallHDF5Return(group,H5Gopen2,(file_id, groupName, H5P_DEFAULT));
#else
    PetscStackCallHDF5Return(group,H5Gopen,(file_id, groupName));
#endif
  } else group = file_id;

  *fileId  = file_id;
  *groupId = group;
  return(0);
}
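
/*
   Sketch (not in the original source): the group consulted by
   PetscViewerHDF5OpenGroup() is normally set by the caller before loading; the
   group name is illustrative.

     PetscViewerHDF5PushGroup(viewer,"/fields");
     VecLoad(x,viewer);
     PetscViewerHDF5PopGroup(viewer);
*/
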
PetscErrorCode PetscViewerHDF5ReadSizes(PetscViewer viewer, const char name[], PetscInt *bs, PetscInt *N)
{
  hid_t    file_id, group, dset_id, filespace;
  int      rdim, dim;
  hsize_t  dims[4];
  PetscInt bsInd, lenInd, timestep;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, name, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, name));
#endif
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim; /* length in blocks */
  ++dim; /* block size */
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;
  if (rdim != dim) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected", rdim, dim);
  /* Close/release resources */
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Dclose,(dset_id));
  if (group != file_id) PetscStackCallHDF5(H5Gclose,(group));
  if (bs) *bs = (PetscInt) dims[bsInd];
  if (N)  *N  = (PetscInt) dims[lenInd]*dims[bsInd];
  return(0);
}
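
/*
   Note (not in the original source): the dataset dimension layout implied by the
   index arithmetic above, from slowest to fastest varying:

     [timestep]   only when a timestep has been set on the viewer
     [blocks]     global length in blocks (N/bs)
     [bs]         block size; PetscViewerHDF5ReadSizes() always expects this
                  dimension, while VecLoad_HDF5() below also accepts its absence
                  when bs == 1
     [2]          real/imaginary parts, only for complex builds
*/
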
/*
   This should properly handle the cases where PetscInt is 32 or 64 bits and hsize_t is 32 or 64 bits.
   That means properly casting, with checks, back and forth between the two types of variables.
*/
PetscErrorCode VecLoad_HDF5(Vec xin, PetscViewer viewer)
{
  hid_t       file_id, group, dset_id, filespace, memspace, plist_id;
  int         rdim, dim;
  hsize_t     dims[4], count[4], offset[4];
  PetscInt    n, N, bs = 1, bsInd, lenInd, low, timestep;
  PetscScalar *x;
  hid_t       scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  const char  *vecname;
  PetscBool   dim2 = PETSC_FALSE;

#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 input with 128 bit floats not supported."
#else
  scalartype = H5T_NATIVE_DOUBLE;
#endif

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  VecGetBlockSize(xin,&bs);
  /* Open the dataset with default properties */
  PetscObjectGetName((PetscObject)xin,&vecname);
#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
  PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
#else
  PetscStackCallHDF5Return(dset_id,H5Dopen,(group, vecname));
#endif
  /* Retrieve the dataspace for the dataset */
  PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
  dim = 0;
  if (timestep >= 0) ++dim;
  ++dim;
  if (bs > 1) ++dim;
#if defined(PETSC_USE_COMPLEX)
  ++dim;
#endif
  PetscStackCallHDF5Return(rdim,H5Sget_simple_extent_dims,(filespace, dims, NULL));
#if defined(PETSC_USE_COMPLEX)
  bsInd = rdim-2;
#else
  bsInd = rdim-1;
#endif
  lenInd = timestep >= 0 ? 1 : 0;

  if (rdim != dim) {
    /* In this case the input dataset has one extra, unexpected dimension. */
    if (rdim == dim+1) {
      /* In this case the block size is unset */
      if (bs == -1) {
        VecSetBlockSize(xin, dims[bsInd]);
        bs = dims[bsInd];
      }
      /* In this case the block size is unity */
      else if (bs == 1 && dims[bsInd] == 1) dim2 = PETSC_TRUE;
      /* Special error message for the case where bs does not match the input file */
      else if (bs != (PetscInt) dims[bsInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Block size of array in file is %D, not %D as expected",(PetscInt)dims[bsInd],bs);
      /* All other cases are errors */
      else SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with bs = %D",rdim,dim,bs);
    } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected",rdim,dim);
  } else if (bs > 1 && bs != (PetscInt) dims[bsInd]) {
    VecSetBlockSize(xin, dims[bsInd]);
    bs = dims[bsInd];
  }

  /* Set Vec sizes, blocksize, and type if not already set */
  if ((xin)->map->n < 0 && (xin)->map->N < 0) {
    VecSetSizes(xin, PETSC_DECIDE, dims[lenInd]*bs);
  }
  /* If sizes and type are already set, check that the vector global size is correct */
  VecGetSize(xin, &N);
  if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%D) than input vector (%D)", (PetscInt) dims[lenInd], N/bs);

  /* Each process defines a dataset and reads it from the hyperslab in the file */
  VecGetLocalSize(xin, &n);
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(n/bs,count + dim);
  ++dim;
  if (bs > 1 || dim2) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs > 1 || dim2) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  /* Create property list for collective dataset read */
  PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
#endif
  /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  VecGetArray(xin, &x);
  PetscStackCallHDF5(H5Dread,(dset_id, scalartype, memspace, filespace, plist_id, x));
  VecRestoreArray(xin, &x);

  /* Close/release resources */
  if (group != file_id) {
    PetscStackCallHDF5(H5Gclose,(group));
  }
  PetscStackCallHDF5(H5Pclose,(plist_id));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));

  VecAssemblyBegin(xin);
  VecAssemblyEnd(xin);
  return(0);
}
#endif
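
/*
   Sketch (not in the original source): typical driver code for the HDF5 load
   path; the file name and object name are illustrative.

     PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",FILE_MODE_READ,&viewer);
     VecCreate(PETSC_COMM_WORLD,&x);
     PetscObjectSetName((PetscObject)x,"pressure");  /* must match the dataset name in the file */
     VecLoad(x,viewer);
     PetscViewerDestroy(&viewer);
*/
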
PetscErrorCode VecLoad_Default(Vec newvec, PetscViewer viewer)
{
  PetscBool isbinary;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif

#if defined(PETSC_HAVE_HDF5)
  if (ishdf5) {
    if (!((PetscObject)newvec)->name) {
      PetscLogEventEnd(VEC_Load,viewer,0,0,0);
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Since the HDF5 format gives an ASCII name to each object in the file, you must set the name of the Vec with PetscObjectSetName() before calling VecLoad()");
    }
    VecLoad_HDF5(newvec, viewer);
  } else
#endif
  {
    VecLoad_Binary(newvec, viewer);
  }
  return(0);
}

/*@
  VecChop - Set all values in the vector with an absolute value less than the tolerance to zero

  Input Parameters:
+ v   - The vector
- tol - The zero tolerance

  Output Parameter:
. v - The chopped vector

  Level: intermediate

.seealso: VecCreate(), VecSet()
@*/
PetscErrorCode VecChop(Vec v, PetscReal tol)
{
  PetscScalar *a;
  PetscInt    n, i;

  VecGetLocalSize(v, &n);
  VecGetArray(v, &a);
  for (i = 0; i < n; ++i) {
    if (PetscAbsScalar(a[i]) < tol) a[i] = 0.0;
  }
  VecRestoreArray(v, &a);
  return(0);
}
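
/*
   Sketch (not in the original source): typical use of VecChop() to remove
   round-off noise; the tolerance is illustrative.

     VecChop(v,1.e-12);
*/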