Actual source code: vecio.c

/*$Id: vecio.c,v 1.74 2001/08/07 03:02:17 balay Exp $*/

/*
   This file contains simple binary input routines for vectors. The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSC_VIEWER_BINARY).
 */
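/*
   The write side referred to above looks like this from user code (a
   minimal sketch; the file name is illustrative, error checking is
   omitted, and the open-mode constant follows the PETSc 2.1-era API):

       PetscViewer viewer;
       PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",PETSC_BINARY_CREATE,&viewer);
       VecView(v,viewer);              writes some existing Vec v
       PetscViewerDestroy(viewer);

   The routines below read such a file back in.
*/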
#include "petsc.h"
#include "petscsys.h"
#include "petscvec.h"
#include "vecimpl.h"
#if defined(PETSC_HAVE_PNETCDF)
EXTERN_C_BEGIN
#include "pnetcdf.h"
EXTERN_C_END
#endif

int VecLoad_Binary(PetscViewer,const VecType,Vec*);
int VecLoad_Netcdf(PetscViewer,Vec*);
int VecLoadIntoVector_Binary(PetscViewer,Vec);
int VecLoadIntoVector_Netcdf(PetscViewer,Vec);
/*@C
  VecLoad - Loads a vector that has been stored in binary format
  with VecView().

  Collective on PetscViewer

  Input Parameters:
+ viewer - binary file viewer, obtained from PetscViewerBinaryOpen(), or
           NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
- outtype - the type of vector (VECSEQ, VECMPI, or PETSC_NULL to have
            the type chosen from the communicator size)

  Output Parameter:
. newvec - the newly loaded vector

  Level: intermediate

  Notes:
  The input file must contain the full global vector, as
  written by the routine VecView().
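
  Example of usage (a minimal sketch; the file name is illustrative and
  error checking is omitted):
.vb
  PetscViewer viewer;
  Vec         v;
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",PETSC_BINARY_RDONLY,&viewer);
  VecLoad(viewer,PETSC_NULL,&v);
  PetscViewerDestroy(viewer);
.ve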
  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since VecLoad() and VecView() completely hide these details.
  But for anyone who's interested, the standard binary vector storage
  format is
.vb
  int         VEC_FILE_COOKIE
  int         number of rows
  PetscScalar values of all entries
.ve
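
  For example, a sequential program could read such a file directly with
  the low-level binary primitives (a sketch; error checking is omitted,
  and PetscBinaryOpen()/PetscBinaryClose() are assumed to follow the
  same-era API used elsewhere in this file):
.vb
  int         fd,cookie,rows;
  PetscScalar *values;
  PetscBinaryOpen("vec.dat",PETSC_BINARY_RDONLY,&fd);
  PetscBinaryRead(fd,&cookie,1,PETSC_INT);        cookie must equal VEC_FILE_COOKIE
  PetscBinaryRead(fd,&rows,1,PETSC_INT);
  PetscMalloc(rows*sizeof(PetscScalar),&values);
  PetscBinaryRead(fd,values,rows,PETSC_SCALAR);
  PetscBinaryClose(fd);
.ve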
  Note for Cray users: the ints stored in the binary file are 32 bit
  integers, not 64 bit as they are represented in memory, so if you
  write your own routines to read/write these binary files from the Cray
  you need to adjust the integer sizes that you read in; see
  PetscBinaryRead() and PetscBinaryWrite() to see how this may be
  done.

  In addition, PETSc automatically does the byte swapping for
  machines that store the bytes reversed, e.g. DEC alpha, freebsd,
  linux, nt and the paragon; thus if you write your own binary
  read/write routines you have to swap the bytes; see PetscBinaryRead()
  and PetscBinaryWrite() to see how this may be done.

  Concepts: vector^loading from file

.seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
@*/
int VecLoad(PetscViewer viewer,const VecType outtype,Vec *newvec)
{
  int        ierr;
  PetscTruth isbinary,isnetcdf,flg;
  char       vtype[256],*prefix;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);CHKERRQ(ierr);
  if (!isbinary && !isnetcdf) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");

#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  if (isnetcdf) {
    ierr = VecLoad_Netcdf(viewer,newvec);CHKERRQ(ierr);
  } else {
    Vec      factory;
    MPI_Comm comm;
    int      (*r)(PetscViewer,const VecType,Vec*),size;

    /* the command line options -vec_type and -vecload_type override outtype */
    ierr = PetscObjectGetOptionsPrefix((PetscObject)viewer,&prefix);CHKERRQ(ierr);
    ierr = PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);CHKERRQ(ierr);
    if (flg) {
      outtype = vtype;
    }
    ierr = PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);CHKERRQ(ierr);
    if (flg) {
      outtype = vtype;
    }
    ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
    if (!outtype) {
      ierr    = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
      outtype = (size > 1) ? VECMPI : VECSEQ;
    }

    /* create a throw-away vector of the requested type just to look up
       its load function pointer, then dispatch to that routine */
    ierr = VecCreate(comm,&factory);CHKERRQ(ierr);
    ierr = VecSetSizes(factory,1,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetType(factory,outtype);CHKERRQ(ierr);
    r    = factory->ops->load;
    ierr = VecDestroy(factory);CHKERRQ(ierr);
    if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
    ierr = (*r)(viewer,outtype,newvec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
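/*
   For example, running an application with the (illustrative) option

       -vecload_type mpi

   makes the vector created by VecLoad() a VECMPI even on a single
   process, overriding the size-based default chosen above.
*/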
int VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
{
#if defined(PETSC_HAVE_PNETCDF)
  int         ierr,N,n,rank,bs;
  int         ncid,start;
  Vec         vec;
  PetscScalar *avec;
  MPI_Comm    comm;
  PetscTruth  flag;
  char        name[NC_MAX_NAME];

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(VEC_Load,viewer,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = PetscViewerNetcdfGetID(viewer,&ncid);CHKERRQ(ierr);
  ierr = ncmpi_inq_dim(ncid,0,name,(size_t*)&N);CHKERRQ(ierr); /* N gets the global vector size */
  ierr = VecCreate(comm,&vec);CHKERRQ(ierr);
  ierr = VecSetSizes(vec,PETSC_DECIDE,N);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = VecSetBlockSize(vec,bs);CHKERRQ(ierr);
    }
  }
  ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
  ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(vec,&start,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
  /* each process reads its own contiguous piece of the NetCDF variable */
  ierr = ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double*)avec);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);
  *newvec = vec;
  ierr = VecAssemblyBegin(vec);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(vec);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(1,"Build PETSc with NetCDF to use this viewer");
#endif
}
int VecLoad_Binary(PetscViewer viewer,const VecType itype,Vec *newvec)
{
  int         i,rows,ierr,type,fd,rank,size,n,*range,tag,bs,nierr;
  Vec         vec;
  PetscScalar *avec;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
  PetscMap    map;
  PetscTruth  flag;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(VEC_Load,viewer,0,0,0);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (!rank) {
    /* Read vector header; on failure jump to handleerror so the other
       processes are not left waiting in the broadcast below */
    ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
    if (type != VEC_FILE_COOKIE) {ierr = PETSC_ERR_ARG_WRONG; goto handleerror;}
    ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
    ierr = MPI_Bcast(&rows,1,MPI_INT,0,comm);CHKERRQ(ierr);
    ierr = VecCreate(comm,&vec);CHKERRQ(ierr);
    ierr = VecSetSizes(vec,PETSC_DECIDE,rows);CHKERRQ(ierr);
    ierr = PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = VecSetBlockSize(vec,bs);CHKERRQ(ierr);
    }
    ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
    ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,avec,n,PETSC_SCALAR);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);

    if (size > 1) {
      /* read in other chunks and send to other processes */
      /* determine the maximum chunk owned by another process */
      ierr = VecGetPetscMap(vec,&map);CHKERRQ(ierr);
      ierr = PetscMapGetGlobalRange(map,&range);CHKERRQ(ierr);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      ierr = PetscMalloc(n*sizeof(PetscScalar),&avec);CHKERRQ(ierr);
      ierr = PetscObjectGetNewTag((PetscObject)viewer,&tag);CHKERRQ(ierr);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        ierr = PetscBinaryRead(fd,avec,n,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);CHKERRQ(ierr);
        ierr = MPI_Wait(&request,&status);CHKERRQ(ierr);
      }
      ierr = PetscFree(avec);CHKERRQ(ierr);
    }
  } else {
    ierr = MPI_Bcast(&rows,1,MPI_INT,0,comm);CHKERRQ(ierr);
    if (rows == -1) SETERRQ(1,"Error loading vector");
    ierr = VecCreate(comm,&vec);CHKERRQ(ierr);
    ierr = VecSetSizes(vec,PETSC_DECIDE,rows);CHKERRQ(ierr);
    ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
    ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)viewer,&tag);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
    ierr = MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);
  }
  *newvec = vec;
  ierr = VecAssemblyBegin(vec);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(vec);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);

  /* tell the other processes we've had an error by broadcasting rows = -1,
     which they test for above */
  handleerror:
  nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
  rows  = -1;
  nierr = MPI_Bcast(&rows,1,MPI_INT,0,comm);CHKERRQ(nierr);
  SETERRQ(ierr,"Error loading vector");
}
int VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
{
  PetscTruth isbinary,isnetcdf;
  int        ierr;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);CHKERRQ(ierr);
  if (!isbinary && !isnetcdf) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");

  if (isnetcdf) {
    ierr = VecLoadIntoVector_Netcdf(viewer,vec);CHKERRQ(ierr);
  } else {
    ierr = VecLoadIntoVector_Binary(viewer,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
int VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
{
#if defined(PETSC_HAVE_PNETCDF)
  int         ierr,N,rows,n,rank,bs;
  int         ncid,start;
  PetscScalar *avec;
  MPI_Comm    comm;
  PetscTruth  flag;
  char        name[NC_MAX_NAME];

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(VEC_Load,viewer,vec,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = PetscViewerNetcdfGetID(viewer,&ncid);CHKERRQ(ierr);
  ierr = ncmpi_inq_dim(ncid,0,name,(size_t*)&N);CHKERRQ(ierr); /* N gets the global vector size */
  if (!rank) {
    ierr = VecGetSize(vec,&rows);CHKERRQ(ierr);
    if (N != rows) SETERRQ(1,"Vector in file different length than the input vector");
    ierr = PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = VecSetBlockSize(vec,bs);CHKERRQ(ierr);
    }
  }
  ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
  ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(vec,&start,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
  /* each process reads its own contiguous piece of the NetCDF variable */
  ierr = ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double*)avec);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(vec);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(vec);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(VEC_Load,viewer,vec,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
#else
  SETERRQ(1,"Build PETSc with NetCDF to use this viewer");
#endif
}
int VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
{
  int         i,rows,ierr,type,fd,rank,size,n,*range,tag,bs;
  PetscScalar *avec;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
  PetscMap    map;
  PetscTruth  flag;
  char        *prefix;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(VEC_Load,viewer,vec,0,0);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (!rank) {
    /* Read vector header. */
    ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);CHKERRQ(ierr);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);CHKERRQ(ierr);
    ierr = VecGetSize(vec,&n);CHKERRQ(ierr);
    if (n != rows) SETERRQ(1,"Vector in file different length than the input vector");
    ierr = MPI_Bcast(&rows,1,MPI_INT,0,comm);CHKERRQ(ierr);

    ierr = PetscObjectGetOptionsPrefix((PetscObject)vec,&prefix);CHKERRQ(ierr);
    ierr = PetscOptionsGetInt(prefix,"-vecload_block_size",&bs,&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = VecSetBlockSize(vec,bs);CHKERRQ(ierr);
    }
    ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
    ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,avec,n,PETSC_SCALAR);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);

    if (size > 1) {
      /* read in other chunks and send to other processes */
      /* determine the maximum chunk owned by another process */
      ierr = VecGetPetscMap(vec,&map);CHKERRQ(ierr);
      ierr = PetscMapGetGlobalRange(map,&range);CHKERRQ(ierr);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      ierr = PetscMalloc(n*sizeof(PetscScalar),&avec);CHKERRQ(ierr);
      ierr = PetscObjectGetNewTag((PetscObject)viewer,&tag);CHKERRQ(ierr);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        ierr = PetscBinaryRead(fd,avec,n,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);CHKERRQ(ierr);
        ierr = MPI_Wait(&request,&status);CHKERRQ(ierr);
      }
      ierr = PetscFree(avec);CHKERRQ(ierr);
    }
  } else {
    ierr = MPI_Bcast(&rows,1,MPI_INT,0,comm);CHKERRQ(ierr);
    ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
    ierr = VecGetLocalSize(vec,&n);CHKERRQ(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)viewer,&tag);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);
    ierr = MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(vec);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(vec);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(VEC_Load,viewer,vec,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}