Actual source code: shvec.c
/*
   This file contains routines for parallel vector operations that use shared memory
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm, PetscInt, PetscInt, void **);
PetscErrorCode VecDuplicate_Shared(Vec win, Vec *v)
{
  Vec_MPI     *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  /* first processor allocates entire array and sends its address to the others */
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)win), win->map->n * sizeof(PetscScalar), win->map->N * sizeof(PetscScalar), (void **)&array));

  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(VecSetSizes(*v, win->map->n, win->map->N));
  PetscCall(VecCreate_MPI_Private(*v, PETSC_FALSE, w->nghost, array));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)*v)->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)*v)->qlist));

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(PetscSplitOwnership(PetscObjectComm((PetscObject)vv), &vv->map->n, &vv->map->N));
  PetscCall(PetscSharedMalloc(PetscObjectComm((PetscObject)vv), vv->map->n * sizeof(PetscScalar), vv->map->N * sizeof(PetscScalar), (void **)&array));

  PetscCall(VecCreate_MPI_Private(vv, PETSC_FALSE, 0, array));
  vv->ops->duplicate = VecDuplicate_Shared;
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>
static PetscMPIInt Petsc_ShmComm_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm, PetscMPIInt keyval, void *attr_val, void *extra_state)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(attr_val));
  PetscFunctionReturn(MPI_SUCCESS);
}
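
/*
   A minimal sketch (not part of the original routines in this file) of how Petsc_ShmComm_keyval
   would typically be created and Petsc_DeleteShared() registered as the MPI delete callback.
   The function name PetscSharedKeyvalCreate_Sketch is hypothetical; the cast is needed because
   Petsc_DeleteShared() returns PetscErrorCode rather than int.
*/
static PetscErrorCode PetscSharedKeyvalCreate_Sketch(void)
{
  PetscFunctionBegin;
  if (Petsc_ShmComm_keyval == MPI_KEYVAL_INVALID) {
    /* create the keyval once; attributes stored under it are freed by Petsc_DeleteShared() when the communicator is freed */
    PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, (MPI_Comm_delete_attr_function *)Petsc_DeleteShared, &Petsc_ShmComm_keyval, NULL));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}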
/*
   This routine is still incomplete and needs work.

   For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf
       cat /etc/sysctl.conf
       kern.sysv.shmmax=67108864
       kern.sysv.shmmin=1
       kern.sysv.shmmni=32
       kern.sysv.shmseg=512
       kern.sysv.shmall=1024

   This does not currently free the shared memory after the program runs. Use the Unix command ipcs to see the shared memory in use and
   ipcrm to remove the shared memory in use.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm, PetscInt llen, PetscInt len, void **result)
{
  PetscInt    shift;
  PetscMPIInt rank;
  int         id, key = 0;
  char       *value;

  PetscFunctionBegin;
  *result = NULL;

  /* compute this process's offset into the shared array from the local lengths of the preceding ranks */
  PetscCallMPI(MPI_Scan(&llen, &shift, 1, MPIU_INT, MPI_SUM, comm));
  shift -= llen;

  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank == 0) {
    /* rank 0 creates the shared memory segment; the other ranks request the segment associated with the same key */
    id = shmget(key, len, 0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to malloc shared memory");
    }
  } else {
    id = shmget(key, len, 0666);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to malloc shared memory");
    }
  }
  value = (char *)shmat(id, (void *)0, 0);
  if (value == (char *)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to access shared memory allocated");
  }
  *result = (void *)(value + shift);
  PetscFunctionReturn(PETSC_SUCCESS);
}
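
/*
   Hypothetical sketch (not part of PETSc) of the missing cleanup noted in the comment above:
   each process detaches its mapping with shmdt() and the first process marks the segment for
   removal with shmctl(IPC_RMID). Using it would require PetscSharedMalloc() to also return the
   shmget() id and the base address returned by shmat(), which it currently does not.
*/
static PetscErrorCode PetscSharedFree_Sketch(MPI_Comm comm, int id, void *base)
{
  PetscMPIInt rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (shmdt(base) == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to detach shared memory");
  PetscCallMPI(MPI_Barrier(comm)); /* every process must detach before the segment is removed */
  if (rank == 0 && shmctl(id, IPC_RMID, NULL) == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Unable to remove shared memory segment");
  PetscFunctionReturn(PETSC_SUCCESS);
}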
#else

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)vv), &size));
  PetscCheck(size <= 1, PETSC_COMM_SELF, PETSC_ERR_SUP_SYS, "No support for shared memory vector objects on this machine");
  PetscCall(VecCreate_Seq(vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

#endif
/*@
  VecCreateShared - Creates a parallel vector that uses shared memory.

  Collective

  Input Parameters:
+ comm - the MPI communicator to use
. n    - local vector length (or `PETSC_DECIDE` to have it calculated if `N` is given)
- N    - global vector length (or `PETSC_DECIDE` to have it calculated if `n` is given)

  Output Parameter:
. v - the vector

  Level: advanced

  Notes:
  Currently `VecCreateShared()` is available only on the SGI; otherwise,
  this routine is the same as `VecCreateMPI()`.

  Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
  same type as an existing vector.
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecCreateMPI()`, `VecDuplicate()`, `VecDuplicateVecs()`,
          `VecCreateGhost()`, `VecCreateMPIWithArray()`, `VecCreateGhostWithArray()`
@*/
PetscErrorCode VecCreateShared(MPI_Comm comm, PetscInt n, PetscInt N, Vec *v)
{
  PetscFunctionBegin;
  PetscCall(VecCreate(comm, v));
  PetscCall(VecSetSizes(*v, n, N));
  PetscCall(VecSetType(*v, VECSHARED));
  PetscFunctionReturn(PETSC_SUCCESS);
}