Actual source code: shvec.c


/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  /* first processor allocates entire array and sends its address to the others */
  PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate(PetscObjectComm((PetscObject)win),v);
  VecSetSizes(*v,win->map->n,win->map->N);
  VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);
  PetscLayoutReference(win->map,&(*v)->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);
  PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  return(0);
}

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar    *array;

  PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);
  PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);
  vv->ops->duplicate = VecDuplicate_Shared;
  return(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>

static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscMPIInt keyval,void *attr_val,void *extra_state)
{
  PetscFunctionBegin;
  PetscFree(attr_val);
  PetscFunctionReturn(MPI_SUCCESS);
}
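
/*
   Sketch (not part of shvec.c): how a delete callback like Petsc_DeleteShared is typically
   hooked up through the standard MPI-2 attribute-caching interface.  The helper name
   PetscShmAttachCleanup_Sketch is hypothetical; attr_val is assumed to have been obtained
   with PetscMalloc() so that the PetscFree() in the callback is valid.
*/
static PetscErrorCode PetscShmAttachCleanup_Sketch(MPI_Comm comm,void *attr_val)
{
  if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
    /* create the keyval once; MPI invokes the delete callback when comm is freed */
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,(MPI_Comm_delete_attr_function*)Petsc_DeleteShared,&Petsc_ShmComm_keyval,(void*)0);
  }
  MPI_Comm_set_attr(comm,Petsc_ShmComm_keyval,attr_val);
  return(0);
}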

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory when the program ends. Use the Unix command ipcs to see the shared memory
    segments in use and ipcrm to remove them.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id,key = 0;
  char           *value;

  *result = 0;

  /* compute this process's byte offset into the shared array */
  MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);
  shift -= llen;

  MPI_Comm_rank(comm,&rank);
  if (rank == 0) {
    /* first process creates the segment; key 0 is IPC_PRIVATE, so the resulting id must be broadcast to the others */
    id = shmget(key,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  MPI_Bcast(&id,1,MPI_INT,0,comm);
  value = (char*)shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*)(value + shift);
  return(0);
}
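
/*
   Sketch (not part of shvec.c): the explicit cleanup that PetscSharedMalloc currently omits,
   so that ipcs/ipcrm are not needed by hand.  The function name is hypothetical; addr must be
   the address returned by shmat() (the result pointer minus the MPI_Scan shift) and id the
   segment id returned by shmget().
*/
static PetscErrorCode PetscSharedFree_Sketch(MPI_Comm comm,void *addr,int id)
{
  PetscMPIInt rank;

  MPI_Comm_rank(comm,&rank);
  if (shmdt(addr)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to detach shared memory");
  MPI_Barrier(comm);
  /* mark the segment for deletion; the system removes it once every process has detached */
  if (rank == 0 && shmctl(id,IPC_RMID,NULL)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to remove shared memory segment");
  return(0);
}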

#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Shared memory vector objects are not supported on this machine");
  VecCreate_Seq(vv);
  return(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()
@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,N);
  VecSetType(*v,VECSHARED);
  return(0);
}
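
A minimal usage sketch (not part of shvec.c), assuming a standard PETSc build: create a shared-memory vector across PETSC_COMM_WORLD with an arbitrary global length of 100, fill it, view it, and destroy it.

#include <petscvec.h>

int main(int argc,char **argv)
{
  Vec            v;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  /* let PETSc decide the local length for a global length of 100 */
  ierr = VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr);
  ierr = VecSet(v,1.0);CHKERRQ(ierr);
  ierr = VecView(v,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}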