Actual source code: shvec.c

  1: /*$Id: shvec.c,v 1.53 2001/09/07 20:09:02 bsmith Exp $*/

  3: /*
  4:    This file contains routines for parallel vector operations that use shared memory
  5:  */
 6:  #include "src/vec/impls/mpi/pvecimpl.h"

  8: /*
  9:      Could not get the include files to work properly on the SGI with 
 10:   the C++ compiler.
 11: */
 12: #if defined(PETSC_USE_SHARED_MEMORY) && !defined(__cplusplus)

 14: EXTERN int PetscSharedMalloc(MPI_Comm,int,int,void**);

 16: #undef __FUNCT__
 17: #define __FUNCT__ "VecDuplicate_Shared"
 18: int VecDuplicate_Shared(Vec win,Vec *v)
 19: {
 20:   int          ierr;
 21:   Vec_MPI      *w = (Vec_MPI *)win->data;
 22:   PetscScalar  *array;


 26:   /* first processor allocates entire array and sends its address to the others */
 27:   PetscSharedMalloc(win->comm,win->n*sizeof(PetscScalar),win->N*sizeof(PetscScalar),(void**)&array);

 29:   VecCreate(win->comm,v);
 30:   VecSetSizes(*v,win->n,win->N);
 31:   VecCreate_MPI_Private(*v,w->nghost,array,win->map);

 33:   /* New vector should inherit stashing property of parent */
 34:   (*v)->stash.donotstash = win->stash.donotstash;
 35: 
 36:   PetscOListDuplicate(win->olist,&(*v)->olist);
 37:   PetscFListDuplicate(win->qlist,&(*v)->qlist);

 39:   if (win->mapping) {
 40:     (*v)->mapping = win->mapping;
 41:     PetscObjectReference((PetscObject)win->mapping);
 42:   }
 43:   (*v)->ops->duplicate = VecDuplicate_Shared;
 44:   (*v)->bs        = win->bs;
 45:   (*v)->bstash.bs = win->bstash.bs;
 46:   return(0);
 47: }


 50: EXTERN_C_BEGIN
 51: #undef __FUNCT__
 52: #define __FUNCT__ "VecCreate_Shared"
 53: int VecCreate_Shared(Vec vv)
 54: {
 55:   int          ierr;
 56:   PetscScalar  *array;

 59:   PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
 60:   PetscSharedMalloc(vv->comm,vv->n*sizeof(PetscScalar),vv->N*sizeof(PetscScalar),(void**)&array);

 62:   VecCreate_MPI_Private(vv,0,array,PETSC_NULL);
 63:   vv->ops->duplicate = VecDuplicate_Shared;

 65:   return(0);
 66: }
 67: EXTERN_C_END


 70: /* ----------------------------------------------------------------------------------------
 71:      Code to manage shared memory allocation under the SGI with MPI

 73:   We associate with a communicator a shared memory "arena" from which memory may be shmalloced.
 74: */
 75:  #include "petscsys.h"
 76: #include "petscfix.h"
 77: #if defined(PETSC_HAVE_PWD_H)
 78: #include <pwd.h>
 79: #endif
 80: #include <ctype.h>
 81: #include <sys/types.h>
 82: #include <sys/stat.h>
 83: #if defined(PETSC_HAVE_UNISTD_H)
 84: #include <unistd.h>
 85: #endif
 86: #if defined(PETSC_HAVE_STDLIB_H)
 87: #include <stdlib.h>
 88: #endif
 89: #if !defined(PARCH_win32)
 90: #include <sys/param.h>
 91: #include <sys/utsname.h>
 92: #endif
 93: #if defined(PARCH_win32)
 94: #include <windows.h>
 95: #include <io.h>
 96: #include <direct.h>
 97: #endif
 98: #if defined (PARCH_win32_gnu)
 99: #include <windows.h>
100: #endif
101: #include <fcntl.h>
102: #include <time.h>  
103: #if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
104: #include <sys/systeminfo.h>
105: #endif
106: #include "petscfix.h"

108: static int Petsc_Shared_keyval = MPI_KEYVAL_INVALID;
109: static int Petsc_Shared_size   = 100000000;

111: #undef __FUNCT__
112: #define __FUNCT__ "Petsc_DeleteShared"
113: /*
114:    Private routine to delete internal storage when a communicator is freed.
115:   This is called by MPI, not by users.

117:   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
118:   it was MPI_Comm *comm.  
119: */
120: static int Petsc_DeleteShared(MPI_Comm comm,int keyval,void* attr_val,void* extra_state)
121: {

125:   PetscFree(attr_val);
126:   PetscFunctionReturn(MPI_SUCCESS);
127: }
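
The comment above refers to the MPI attribute-caching machinery that PetscSharedInitialize() uses below: a delete callback registered with MPI_Keyval_create() is invoked by MPI itself when a communicator carrying the attribute is freed. The following standalone sketch (plain MPI-1 calls, no PETSc error handling; DeleteFn and the printed text are illustrative names only, not part of shvec.c) shows that life cycle.

#include <mpi.h>
#include <stdio.h>
#include <stdlib.h>

/* Delete callback: MPI calls this when a communicator holding the attribute is freed */
static int DeleteFn(MPI_Comm comm,int keyval,void *attr_val,void *extra_state)
{
  printf("delete callback invoked; freeing cached attribute\n");
  free(attr_val);
  return MPI_SUCCESS;
}

int main(int argc,char **argv)
{
  int      keyval,*value;
  MPI_Comm dup;

  MPI_Init(&argc,&argv);
  MPI_Keyval_create(MPI_NULL_COPY_FN,DeleteFn,&keyval,0);
  value  = (int*)malloc(sizeof(int));
  *value = 42;
  MPI_Comm_dup(MPI_COMM_WORLD,&dup);
  MPI_Attr_put(dup,keyval,value);       /* cache the value on the duplicated communicator */
  MPI_Comm_free(&dup);                  /* MPI invokes DeleteFn on the cached value */
  MPI_Keyval_free(&keyval);
  MPI_Finalize();
  return 0;
}

PetscSharedInitialize() follows the same pattern, caching a pointer to the usinit() arena so that the arena is created only once per communicator and released automatically when the communicator is freed.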

129: #undef __FUNCT__
130: #define __FUNCT__ "PetscSharedMemorySetSize"
131: int PetscSharedMemorySetSize(int s)
132: {
134:   Petsc_Shared_size = s;
135:   return(0);
136: }
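
Note that the size set here only matters before the arena exists, i.e. before the first shared allocation reaches PetscSharedInitialize() below; afterwards it is ignored. A hypothetical caller-side fragment (PETSc already initialized, v and ierr declared elsewhere, and assuming PetscSharedMemorySetSize() is visible to the caller):

/* Request a larger arena before any shared vector is created; running with
   -shared_size <bytes> has the same effect (see PetscSharedInitialize() below). */
ierr = PetscSharedMemorySetSize(200000000);CHKERRQ(ierr);
ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr);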

138: #include "petscfix.h"

140: #include <ulocks.h>

142: #undef __FUNCT__
143: #define __FUNCT__ "PetscSharedInitialize"
144: int PetscSharedInitialize(MPI_Comm comm)
145: {
146:   int     rank,len,ierr,flag;
147:   char    filename[PETSC_MAX_PATH_LEN];
148:   usptr_t **arena;


152:   if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
153:     /* 
154:        The calling sequence of the 2nd argument to this function changed
155:        between MPI Standard 1.0 and revision 1.1. Here we match the
156:        new standard; if you are using an MPI implementation that uses
157:        the older version you will get a warning message about the next line.
158:        It is only a warning and should do no harm.
159:     */
160:     MPI_Keyval_create(MPI_NULL_COPY_FN,Petsc_DeleteShared,&Petsc_Shared_keyval,0);
161:   }

163:   MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);

165:   if (!flag) {
166:     /* This communicator does not yet have a shared memory arena */
167:     PetscMalloc(sizeof(usptr_t*),&arena);

169:     MPI_Comm_rank(comm,&rank);
170:     if (!rank) {
171:       PetscStrcpy(filename,"/tmp/PETScArenaXXXXXX");
172: #ifdef PETSC_HAVE_MKSTEMP
173:       if (mkstemp(filename) < 0) {
174:         SETERRQ1(PETSC_ERR_FILE_OPEN, "Unable to open temporary file %s", filename);
175:       }
176: #else
177:       if (mktemp(filename) == PETSC_NULL) {
178:         SETERRQ1(PETSC_ERR_FILE_OPEN, "Unable to open temporary file %s", filename);
179:       }
180: #endif
181:       PetscStrlen(filename,&len);
182:     }
183:     ierr     = MPI_Bcast(&len,1,MPI_INT,0,comm);
184:     ierr     = MPI_Bcast(filename,len+1,MPI_CHAR,0,comm);
185:     ierr     = PetscOptionsGetInt(PETSC_NULL,"-shared_size",&Petsc_Shared_size,&flag);
186:     usconfig(CONF_INITSIZE,Petsc_Shared_size);
187:     *arena   = usinit(filename);
188:     ierr     = MPI_Attr_put(comm,Petsc_Shared_keyval,arena);
189:   }

191:   return(0);
192: }

194: #undef __FUNCT__
195: #define __FUNCT__ "PetscSharedMalloc"
196: int PetscSharedMalloc(MPI_Comm comm,int llen,int len,void **result)
197: {
198:   char    *value;
199:   int     ierr,shift,rank,flag;
200:   usptr_t **arena;

203:   *result = 0;
204:   if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
205:     PetscSharedInitialize(comm);
206:   }
207:   MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);
208:   if (!flag) {
209:     PetscSharedInitialize(comm);
210:     MPI_Attr_get(comm,Petsc_Shared_keyval,(void**)&arena,&flag);
211:     if (!flag) SETERRQ(1,"Unable to initialize shared memory");
212:   }

214:   ierr   = MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);
215:   shift -= llen;

217:   MPI_Comm_rank(comm,&rank);
218:   if (!rank) {
219:     value = (char*)usmalloc((size_t) len,*arena);
220:     if (!value) {
221:       (*PetscErrorPrintf)("PETSC ERROR: Unable to allocate shared memory location\n");
222:       (*PetscErrorPrintf)("PETSC ERROR: Run with option -shared_size <size> \n");
223:       (*PetscErrorPrintf)("PETSC ERROR: with size > %d \n",(int)(1.2*(Petsc_Shared_size+len)));
224:       SETERRQ(1,"Unable to malloc shared memory");
225:     }
226:   }
227:   MPI_Bcast(&value,sizeof(char*),MPI_BYTE,0,comm);  /* broadcast rank 0's base address */
228:   value   += shift;
229:   *result  = value;

230:   return(0);
231: }
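
The MPI_Scan() above computes an inclusive prefix sum of the local lengths; subtracting llen turns it into an exclusive prefix sum, so every process addresses a disjoint slice of the one block that rank 0 allocated from the arena. A standalone sketch of just that offset computation (plain MPI, hypothetical local lengths; with lengths 2, 3 and 4 on three ranks the offsets come out 0, 2 and 5):

#include <mpi.h>
#include <stdio.h>

int main(int argc,char **argv)
{
  int rank,llen,shift;

  MPI_Init(&argc,&argv);
  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
  llen = 2 + rank;                                 /* hypothetical local length */
  MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
  shift -= llen;                                   /* exclusive prefix sum, as in PetscSharedMalloc() */
  printf("rank %d: local length %d starts at offset %d\n",rank,llen,shift);
  MPI_Finalize();
  return 0;
}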

233: #else

235: EXTERN_C_BEGIN
236: extern int VecCreate_Seq(Vec);
237: EXTERN_C_END

239: EXTERN_C_BEGIN
240: #undef __FUNCT__
241: #define __FUNCT__ "VecCreate_Shared"
242: int VecCreate_Shared(Vec vv)
243: {
244:   int ierr,size;

247:   MPI_Comm_size(vv->comm,&size);
248:   if (size > 1) {
249:     SETERRQ(1,"Not supported for shared memory vector objects on this machine");
250:   }
251:   VecCreate_Seq(vv);
252:   return(0);
253: }
254: EXTERN_C_END

256: #endif

258: #undef __FUNCT__
259: #define __FUNCT__ "VecCreateShared"
260: /*@C
261:    VecCreateShared - Creates a parallel vector that uses shared memory.

263:    Input Parameters:
264: +  comm - the MPI communicator to use
265: .  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
266: -  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

268:    Output Parameter:
269: .  vv - the vector

271:    Collective on MPI_Comm
272:  
273:    Notes:
274:    Currently VecCreateShared() is available only on the SGI; otherwise,
275:    this routine is the same as VecCreateMPI().

277:    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
278:    same type as an existing vector.

280:    Level: advanced

282:    Concepts: vectors^creating with shared memory

284: .seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(), 
285:           VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

287: @*/
288: int VecCreateShared(MPI_Comm comm,int n,int N,Vec *v)
289: {

293:   VecCreate(comm,v);
294:   VecSetSizes(*v,n,N);
295:   VecSetType(*v,VECSHARED);
296:   return(0);
297: }
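
A hypothetical usage fragment (error checking abbreviated; assumes PETSc has been initialized and that Vec v,w and int ierr are declared): create a shared-memory vector with a chosen global length and duplicate it. The duplicate dispatches to VecDuplicate_Shared() above when the shared implementation is compiled in (PETSC_USE_SHARED_MEMORY); on other machines the fallback above only supports a single process.

ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr);
ierr = VecDuplicate(v,&w);CHKERRQ(ierr);   /* inherits layout, block size, and stash settings */

Both vectors should be released with VecDestroy() when no longer needed.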