/* vecio.c */
1: /*$Id: vecio.c,v 1.74 2001/08/07 03:02:17 balay Exp $*/
3: /*
4: This file contains simple binary input routines for vectors. The
5: analogous output routines are within each vector implementation's
6: VecView (with viewer types PETSC_VIEWER_BINARY)
7: */
#include "petsc.h"
#include "petscsys.h"
#include "petscvec.h"
#undef __FUNCT__
#define __FUNCT__ "VecLoad"
15: /*@C
16: VecLoad - Loads a vector that has been stored in binary format
17: with VecView().
19: Collective on PetscViewer
21: Input Parameters:
22: . viewer - binary file viewer, obtained from PetscViewerBinaryOpen()
24: Output Parameter:
25: . newvec - the newly loaded vector
27: Level: intermediate
29: Notes:
30: The input file must contain the full global vector, as
31: written by the routine VecView().
33: Notes for advanced users:
34: Most users should not need to know the details of the binary storage
35: format, since VecLoad() and VecView() completely hide these details.
36: But for anyone who's interested, the standard binary matrix storage
37: format is
38: .vb
39: int VEC_FILE_COOKIE
40: int number of rows
41: PetscScalar *values of all nonzeros
42: .ve
44: Note for Cray users, the int's stored in the binary file are 32 bit
45: integers; not 64 as they are represented in the memory, so if you
46: write your own routines to read/write these binary files from the Cray
   you need to adjust the integer sizes that you read in, see
   PetscBinaryRead() and PetscBinaryWrite() to see how this may be
   done.
51: In addition, PETSc automatically does the byte swapping for
52: machines that store the bytes reversed, e.g. DEC alpha, freebsd,
53: linux, nt and the paragon; thus if you write your own binary
   read/write routines you have to swap the bytes; see PetscBinaryRead()
   and PetscBinaryWrite() to see how this may be done.
57: Concepts: vector^loading from file
59: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
60: @*/
61: int VecLoad(PetscViewer viewer,Vec *newvec)
62: {
63: int i,rows,ierr,type,fd,rank,size,n,*range,tag,bs,nierr;
64: Vec vec;
65: PetscScalar *avec;
66: MPI_Comm comm;
67: MPI_Request request;
68: MPI_Status status;
69: PetscMap map;
70: PetscTruth isbinary,flag;
74: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
75: if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
76: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
77: PetscViewerBinaryGetDescriptor(viewer,&fd);
78: PetscObjectGetComm((PetscObject)viewer,&comm);
79: MPI_Comm_rank(comm,&rank);
80: MPI_Comm_size(comm,&size);
82: if (!rank) {
83: /* Read vector header. */
84: PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
85: if (type != VEC_FILE_COOKIE) {PETSC_ERR_ARG_WRONG; goto handleerror;}
86: PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
87: MPI_Bcast(&rows,1,MPI_INT,0,comm);
88: VecCreate(comm,&vec);
89: VecSetSizes(vec,PETSC_DECIDE,rows);
90: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
91: if (flag) {
92: VecSetBlockSize(vec,bs);
93: }
94: VecSetFromOptions(vec);
95: VecGetLocalSize(vec,&n);
96: VecGetArray(vec,&avec);
97: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
98: VecRestoreArray(vec,&avec);
100: if (size > 1) {
101: /* read in other chuncks and send to other processors */
102: /* determine maximum chunck owned by other */
103: VecGetPetscMap(vec,&map);
104: PetscMapGetGlobalRange(map,&range);
105: n = 1;
106: for (i=1; i<size; i++) {
107: n = PetscMax(n,range[i] - range[i-1]);
108: }
109: PetscMalloc(n*sizeof(PetscScalar),&avec);
110: PetscObjectGetNewTag((PetscObject)viewer,&tag);
111: for (i=1; i<size; i++) {
112: n = range[i+1] - range[i];
113: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
114: MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
115: MPI_Wait(&request,&status);
116: }
117: PetscFree(avec);
118: }
119: } else {
120: MPI_Bcast(&rows,1,MPI_INT,0,comm);
121: if (rows == -1) SETERRQ(1,"Error loading vector");
122: VecCreate(comm,&vec);
123: VecSetSizes(vec,PETSC_DECIDE,rows);
124: VecSetFromOptions(vec);
125: VecGetLocalSize(vec,&n);
126: PetscObjectGetNewTag((PetscObject)viewer,&tag);
127: VecGetArray(vec,&avec);
128: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
129: VecRestoreArray(vec,&avec);
130: }
131: *newvec = vec;
132: VecAssemblyBegin(vec);
133: VecAssemblyEnd(vec);
134: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
135: return(0);
136: /* tell the other processors we've had an error */
137: handleerror:
138: nPetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
139: MPI_Bcast(&ierr,1,MPI_INT,0,comm);
140: SETERRQ(ierr,"Error loading vector");
141: }
#undef __FUNCT__
#define __FUNCT__ "VecLoadIntoVector_Default"
145: int VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
146: {
147: int i,rows,ierr,type,fd,rank,size,n,*range,tag,bs;
148: PetscScalar *avec;
149: MPI_Comm comm;
150: MPI_Request request;
151: MPI_Status status;
152: PetscMap map;
153: PetscTruth isbinary,flag;
157: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
158: if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
159: PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
161: PetscViewerBinaryGetDescriptor(viewer,&fd);
162: PetscObjectGetComm((PetscObject)viewer,&comm);
163: MPI_Comm_rank(comm,&rank);
164: MPI_Comm_size(comm,&size);
166: if (!rank) {
167: /* Read vector header. */
168: PetscBinaryRead(fd,&type,1,PETSC_INT);
169: if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
170: PetscBinaryRead(fd,&rows,1,PETSC_INT);
171: VecGetSize(vec,&n);
172: if (n != rows) SETERRQ(1,"Vector in file different length then input vector");
173: MPI_Bcast(&rows,1,MPI_INT,0,comm);
175: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
176: if (flag) {
177: VecSetBlockSize(vec,bs);
178: }
179: VecSetFromOptions(vec);
180: VecGetLocalSize(vec,&n);
181: VecGetArray(vec,&avec);
182: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
183: VecRestoreArray(vec,&avec);
185: if (size > 1) {
186: /* read in other chuncks and send to other processors */
187: /* determine maximum chunck owned by other */
188: VecGetPetscMap(vec,&map);
189: PetscMapGetGlobalRange(map,&range);
190: n = 1;
191: for (i=1; i<size; i++) {
192: n = PetscMax(n,range[i] - range[i-1]);
193: }
194: PetscMalloc(n*sizeof(PetscScalar),&avec);
195: PetscObjectGetNewTag((PetscObject)viewer,&tag);
196: for (i=1; i<size; i++) {
197: n = range[i+1] - range[i];
198: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
199: MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
200: MPI_Wait(&request,&status);
201: }
202: PetscFree(avec);
203: }
204: } else {
205: MPI_Bcast(&rows,1,MPI_INT,0,comm);
206: VecSetFromOptions(vec);
207: VecGetLocalSize(vec,&n);
208: PetscObjectGetNewTag((PetscObject)viewer,&tag);
209: VecGetArray(vec,&avec);
210: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
211: VecRestoreArray(vec,&avec);
212: }
213: VecAssemblyBegin(vec);
214: VecAssemblyEnd(vec);
215: PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
216: return(0);
217: }