// Copyright (C) 2006-2010 David Sugar, Tycho Softworks.
//
// This file is part of GNU uCommon C++.
//
// GNU uCommon C++ is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// GNU uCommon C++ is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with GNU uCommon C++. If not, see <http://www.gnu.org/licenses/>.

/**
 * Support for memory mapped objects.
 * Memory mapped objects can be used to publish information so that it may be
 * accessible directly by external programs. The mapped memory objects will
 * usually be built as a mapped vector or a reusable type factory, in the latter
 * case using the allocated shared memory block itself as a local heap. A
 * simple template can be used to view the mapped contents that have been
 * published by another process.
 * @file ucommon/mapped.h
 */

#ifndef _UCOMMON_MAPPED_H_
#define _UCOMMON_MAPPED_H_

#ifndef _UCOMMON_LINKED_H_
#include <ucommon/linked.h>
#endif

#ifndef _UCOMMON_THREAD_H_
#include <ucommon/thread.h>
#endif

#ifndef _UCOMMON_STRING_H_
#include <ucommon/string.h>
#endif

#ifndef _MSWINDOWS_
#include <signal.h>
#endif

NAMESPACE_UCOMMON

/**
 * Construct or access a named section of memory. A logical name is used
 * which might map to something that is invoked from a call like shm_open
 * or a named w32 mapped swap segment. This is meant to support mapping a
 * vector onto shared memory and is often used as a supporting class for our
 * shared memory access templates.
 * @author David Sugar <dyfet@gnutelephony.org>
 */
class __EXPORT MappedMemory
{
private:
    size_t mapsize;
    caddr_t map;
    fd_t fd;

protected:
    size_t size, used;
    char idname[65];
    bool erase;

    MappedMemory();

    /**
     * Supporting function to construct a new or access an existing
     * shared memory segment. Used by primary constructors.
     * @param name of segment to create or access.
     * @param size of segment if creating new. Use 0 for read-only access.
     */
    void create(const char *name, size_t size = (size_t)0);

    /**
     * Handler to invoke in derived class when accessing outside the
     * shared memory segment boundary.
     */
    virtual void *invalid(void) const;

    /**
     * Handler for failure to map (allocate) memory.
     */
    virtual void fault(void) const;

public:
    /**
     * Construct a read/write access mapped shared segment of memory of a
     * known size. This constructs a new memory segment.
     * @param name of segment.
     * @param size of segment.
     */
    MappedMemory(const char *name, size_t size);

    /**
     * Provide read-only mapped access to an existing named shared memory
     * segment. The size of the map is found by the size of the already
     * existing segment.
     * @param name of existing segment.
     */
    MappedMemory(const char *name);

    /**
     * Unmap memory segment.
     */
    virtual ~MappedMemory();

    /**
     * Unmap memory segment.
     */
    void release(void);

    /**
     * Destroy a previously existing memory segment under the specified name.
     * This is used both before creating a new one, and after a publishing
     * process unmaps the segment it created.
     * @param name of segment to remove.
     */
    static void remove(const char *name);

    /**
     * Test if map active.
     * @return true if active map.
     */
    inline operator bool() const
        {return (size != 0);};

    /**
     * Test if map is inactive.
     * @return true if map inactive.
     */
    inline bool operator!() const
        {return (size == 0);};

    /**
     * Extend size of managed heap on shared memory segment. This does not
     * change the size of the mapped segment in any way, only that of any
     * heap space that is being allocated and used from the mapped segment.
     * @return start of space from map.
     * @param size of space requested. Will fault if past end of segment.
     */
    void *sbrk(size_t size);

    /**
     * Get memory from a specific offset within the mapped memory segment.
     * @param offset from start of segment. Will fault if past end.
     * @return address of offset.
     */
    void *offset(size_t offset) const;

    /**
     * Copy memory from specific offset within the mapped memory segment.
     * This function assures the copy is not in the middle of being modified.
     * @param offset from start of segment.
     * @param buffer to copy into.
     * @param size of object to copy.
     * @return true on success.
     */
    bool copy(size_t offset, void *buffer, size_t size) const;

    /**
     * Get size of mapped segment.
     * @return size of mapped segment.
     */
    inline size_t len(void)
        {return size;};

    /**
     * Get starting address of mapped segment.
     * @return starting address of mapped segment.
     */
    inline caddr_t addr(void)
        {return map;};

    /**
     * Disable publishing of shared memory maps. This is useful when an
     * application does not want to use shared memory as a runtime or build
     * option, yet should not have to be recoded for the non-shared case
     * either; a dummy map allocated from the local heap is substituted
     * instead.
     */
    static void disable(void);
};
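
/*
 * A minimal usage sketch (illustrative, not part of this header): one process
 * publishes a writable segment and carves space from it with sbrk(), while a
 * second process attaches to the same name for read-only access and copies a
 * record out. The segment name "/sample", the sample_t type, and report()
 * are assumptions made only for this example, and the first record is assumed
 * to live at offset 0 of the segment.
 *
 *   struct sample_t { unsigned long counter; };
 *
 *   void publisher(void)
 *   {
 *       MappedMemory shm("/sample", sizeof(sample_t)); // new read/write map
 *       sample_t *rec = (sample_t *)shm.sbrk(sizeof(sample_t));
 *       rec->counter = 1;                              // visible to readers
 *   }
 *
 *   void reader(void)
 *   {
 *       MappedMemory view("/sample");                  // existing map, read-only
 *       sample_t snapshot;
 *       if (view && view.copy(0, &snapshot, sizeof(snapshot)))
 *           report(snapshot.counter);                  // hypothetical consumer
 *   }
 */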

/**
 * Map a reusable allocator over a named shared memory segment. This may be
 * used to form a resource-bound, fixed-size managed heap in shared memory.
 * A request can be fulfilled either from the object reuse pool or from a
 * new section of memory, and if all memory in the segment has been exhausted,
 * it can wait until another thread returns objects to the reuse pool.
 * @author David Sugar <dyfet@gnutelephony.org>
 */
class __EXPORT MappedReuse : protected ReusableAllocator, protected MappedMemory
{
private:
    unsigned objsize;
    unsigned reading;
    mutex_t mutex;

protected:
    MappedReuse(size_t osize);

    inline void create(const char *fname, unsigned count)
        {MappedMemory::create(fname, count * objsize);};

public:
    /**
     * Construct a named memory segment for use with managed fixed size
     * reusable objects. The segment is created as writable. There is no
     * read-only version of mapped reuse since the mapped segment can be read
     * by another process directly as a mapped read-only vector. The actual
     * mapped type will be derived from ReusableObject to meet the needs of
     * the reusable allocator. The template version should be used to
     * assure type correctness rather than using this class directly.
     * @param name of shared memory segment.
     * @param size of the object type being mapped.
     * @param count of the maximum number of active mapped objects.
     */
    MappedReuse(const char *name, size_t size, unsigned count);

    /**
     * Check whether there are objects available to be allocated.
     * @return true if objects are available.
     */
    bool avail(void);

    /**
     * Request a reusable object from the free list or mapped space.
     * @return free object or NULL if pool is exhausted.
     */
    ReusableObject *request(void);

    /**
     * Request a reusable object from the free list or mapped space.
     * This method blocks until an object becomes available.
     * @return free object.
     */
    ReusableObject *get(void);

    /**
     * Request a reusable object from the free list or mapped space.
     * This method blocks until an object becomes available or the
     * timeout has expired.
     * @param timeout to wait in milliseconds.
     * @return free object or NULL if timeout.
     */
    ReusableObject *getTimed(timeout_t timeout);

    /**
     * Used to get an object from the reuse pool when the mutex lock is
     * already held.
     * @return object from pool or NULL if exhausted.
     */
    ReusableObject *getLocked(void);

    /**
     * Used to return an object to the reuse pool when the mutex lock is
     * already held.
     * @param object being returned.
     */
    void removeLocked(ReusableObject *object);
};
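
/*
 * Sketch of the allocation pattern this base class provides (the typed
 * mapped_reuse template below is the preferred interface, and "pool" is an
 * illustrative instance, not part of this header). request() never blocks
 * and returns NULL when the segment is exhausted; get() waits for another
 * thread to return an object; getTimed() bounds that wait:
 *
 *   ReusableObject *obj = pool.request();   // NULL if nothing is free now
 *   if (!obj)
 *       obj = pool.getTimed(500);           // wait up to 500 ms instead
 *   if (obj) {
 *       // use the object, then hand it back through the typed template's
 *       // release() so it re-enters the reuse pool
 *   }
 */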

/**
 * Template class to map typed vector into shared memory. This is used to
 * construct a typed read/write vector of objects that are held in a named
 * shared memory segment.
 * @author David Sugar <dyfet@gnutelephony.org>
 */
template <class T>
class mapped_array : public MappedMemory
{
protected:
    inline mapped_array() : MappedMemory() {};

    inline void create(const char *fn, unsigned members)
        {MappedMemory::create(fn, members * sizeof(T));};

public:
    /**
     * Construct mapped vector array of typed objects. This is constructed
     * for read/write access. mapped_view is used in all cases for read-only
     * access to mapped data. Member objects are linearly allocated from
     * the shared memory segment, or may simply be directly accessed by offset.
     * @param name of mapped segment to construct.
     * @param number of objects in the mapped vector.
     */
    inline mapped_array(const char *name, unsigned number) :
        MappedMemory(name, number * sizeof(T)) {};

    /**
     * Initialize typed data in mapped array. Assumes default constructor
     * for type.
     */
    inline void initialize(void)
        {new((caddr_t)offset(0)) T[size / sizeof(T)];};

    /**
     * Add mapped space while holding lock for one object.
     * @return address of object.
     */
    inline void *addLock(void)
        {return sbrk(sizeof(T));};

    /**
     * Get typed pointer to member object of vector in mapped segment.
     * @param member to access.
     * @return typed pointer or NULL if past end of array.
     */
    inline T *operator()(unsigned member)
        {return static_cast<T*>(offset(member * sizeof(T)));}

    /**
     * Allocate mapped space for one object.
     * @return address of object.
     */
    inline T *operator()(void)
        {return static_cast<T*>(sbrk(sizeof(T)));};

    /**
     * Reference typed object of vector in mapped segment.
     * @param member to access.
     * @return typed reference.
     */
    inline T& operator[](unsigned member)
        {return *(operator()(member));};

    /**
     * Get the number of typed objects that can be held in the mapped vector.
     * @return number of members mapped in the segment.
     */
    inline unsigned max(void)
        {return (unsigned)(size / sizeof(T));};
};
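
/*
 * Sketch of publishing a typed vector with mapped_array (the segment name
 * "/stats" and the stat_t type are illustrative assumptions). The publisher
 * constructs the segment, initializes the members, and then updates them in
 * place; a reader would use the mapped_view template defined later:
 *
 *   struct stat_t { unsigned long calls; unsigned long errors; };
 *
 *   mapped_array<stat_t> stats("/stats", 32);  // room for 32 records
 *   stats.initialize();                        // default-construct members
 *   stats[0].calls = 1;                        // update a record in place
 *   unsigned capacity = stats.max();           // 32
 */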

/**
 * Template class to map typed reusable objects into shared memory heap.
 * This is used to construct a read/write heap of objects that are held in a
 * named shared memory segment. Member objects are allocated from a reusable
 * heap but are stored in the shared memory segment as a vector.
 * @author David Sugar <dyfet@gnutelephony.org>
 */
template <class T>
class mapped_reuse : public MappedReuse
{
protected:
    inline mapped_reuse() :
        MappedReuse(sizeof(T)) {};

public:
    /**
     * Construct mapped reuse array of typed objects. This is constructed
     * for read/write access. mapped_view is used in all cases for read-only
     * access to mapped data.
     * @param name of mapped segment to construct.
     * @param number of objects in the mapped vector.
     */
    inline mapped_reuse(const char *name, unsigned number) :
        MappedReuse(name, sizeof(T), number) {};

    /**
     * Initialize typed data in mapped array. Assumes default constructor
     * for type.
     */
    inline void initialize(void)
        {new((caddr_t)pos(0)) T[size / sizeof(T)];};

    /**
     * Check whether there are typed objects available to be allocated.
     * @return true if objects are available.
     */
    inline operator bool() const
        {return MappedReuse::avail();};

    /**
     * Check whether the typed object pool has been exhausted.
     * @return true if no more typed objects are available.
     */
    inline bool operator!() const
        {return !MappedReuse::avail();};

    /**
     * Request a typed reusable object from the free list or mapped space.
     * This method blocks until an object becomes available.
     * @return free object.
     */
    inline operator T*()
        {return mapped_reuse::get();};

    /**
     * Request a typed reusable object from the free list or mapped space
     * through the pointer dereference operator. This method blocks until an
     * object becomes available.
     * @return free object.
     */
    inline T* operator*()
        {return mapped_reuse::get();};

    /**
     * Get typed object from a specific member offset within the mapped segment.
     * @param member offset from start of segment. Will fault if past end.
     * @return typed object pointer.
     */
    inline T *pos(size_t member)
        {return static_cast<T*>(MappedReuse::offset(member * sizeof(T)));};

    /**
     * Request a typed reusable object from the free list or mapped space.
     * This method blocks until an object becomes available.
     * @return free typed object.
     */
    inline T *get(void)
        {return static_cast<T*>(MappedReuse::get());};

    /**
     * Request a typed reusable object from the free list or mapped space.
     * This method blocks until an object becomes available from another
     * thread or the timeout expires.
     * @param timeout in milliseconds.
     * @return free typed object or NULL if the timeout expired.
     */
    inline T *getTimed(timeout_t timeout)
        {return static_cast<T*>(MappedReuse::getTimed(timeout));};

    /**
     * Request a typed reusable object from the free list or mapped space.
     * This method does not block or wait.
     * @return free typed object if available or NULL.
     */
    inline T *request(void)
        {return static_cast<T*>(MappedReuse::request());};

    /**
     * Used to return a typed object to the reuse pool when the mutex lock is
     * already held.
     * @param object being returned.
     */
    inline void removeLocked(T *object)
        {MappedReuse::removeLocked(object);};

    /**
     * Used to get a typed object from the reuse pool when the mutex lock is
     * already held.
     * @return typed object from pool or NULL if exhausted.
     */
    inline T *getLocked(void)
        {return static_cast<T*>(MappedReuse::getLocked());};

    /**
     * Used to release a typed object back to the reuse typed object pool.
     * @param object being released.
     */
    inline void release(T *object)
        {ReusableAllocator::release(object);};
};
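
/*
 * Sketch of a fixed-size shared-memory object pool built with mapped_reuse
 * (session_t and the segment name "/sessions" are illustrative assumptions).
 * Objects handed out by request()/getTimed() come from the shared segment and
 * are returned to the reuse pool with release():
 *
 *   struct session_t : public ReusableObject { unsigned id; };
 *
 *   mapped_reuse<session_t> pool("/sessions", 100);
 *
 *   session_t *s = pool.request();   // non-blocking; NULL when exhausted
 *   if (!s)
 *       s = pool.getTimed(250);      // or wait up to 250 ms for a free slot
 *   if (s) {
 *       s->id = 42;
 *       pool.release(s);             // return the object for reuse
 *   }
 */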

/**
 * Class to access a named mapped segment published from another process.
 * This offers a simple typed vector interface to access the shared memory
 * segment in read-only mode.
 * @author David Sugar <dyfet@gnutelephony.org>
 */
template <class T>
class mapped_view : protected MappedMemory
{
public:
    /**
     * Map existing named memory segment. The size of the map is derived
     * from the existing map alone.
     * @param name of memory segment to map.
     */
    inline mapped_view(const char *name) :
        MappedMemory(name) {};

    /**
     * Access typed member object in the mapped segment.
     * @param member to access.
     * @return typed object pointer.
     */
    inline volatile const T *operator()(unsigned member)
        {return static_cast<const T*>(offset(member * sizeof(T)));}

    /**
     * Reference typed member object in the mapped segment.
     * @param member to access.
     * @return typed object reference.
     */
    inline volatile const T &operator[](unsigned member)
        {return *(operator()(member));};

    /**
     * Get typed member object pointer from the mapped segment.
     * @param member to access.
     * @return typed object pointer.
     */
    inline volatile const T *get(unsigned member)
        {return static_cast<const T*>(offset(member * sizeof(T)));};

    /**
     * Copy a typed member object out of the mapped segment. This assures the
     * copy is not made while the member is in the middle of being modified.
     * @param member to copy.
     * @param buffer to copy into.
     */
    inline void copy(unsigned member, T& buffer)
        {MappedMemory::copy(member * sizeof(T), &buffer, sizeof(T));};

    /**
     * Get count of typed member objects held in this map.
     * @return count of typed member objects.
     */
    inline unsigned count(void)
        {return (unsigned)(size / sizeof(T));};
};
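
/*
 * Sketch of a separate monitoring process reading the vector published by the
 * mapped_array example above (the segment name "/stats", stat_t, and
 * process() remain illustrative assumptions). The view maps the existing
 * segment read-only and sizes itself from it:
 *
 *   mapped_view<stat_t> view("/stats");
 *   for (unsigned i = 0; i < view.count(); ++i) {
 *       stat_t snapshot;
 *       view.copy(i, snapshot);          // consistent copy of one member
 *       process(snapshot);               // hypothetical consumer
 *   }
 */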

END_NAMESPACE

#endif