Nvwa  1.1
static_mem_pool.h
// -*- Mode: C++; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
// vim:tabstop=4:shiftwidth=4:expandtab:

/*
 * Copyright (C) 2004-2014 Wu Yongwei <adah at users dot sourceforge dot net>
 *
 * This software is provided 'as-is', without any express or implied
 * warranty.  In no event will the authors be held liable for any
 * damages arising from the use of this software.
 *
 * Permission is granted to anyone to use this software for any purpose,
 * including commercial applications, and to alter it and redistribute
 * it freely, subject to the following restrictions:
 *
 * 1. The origin of this software must not be misrepresented; you must
 *    not claim that you wrote the original software.  If you use this
 *    software in a product, an acknowledgement in the product
 *    documentation would be appreciated but is not required.
 * 2. Altered source versions must be plainly marked as such, and must
 *    not be misrepresented as being the original software.
 * 3. This notice may not be removed or altered from any source
 *    distribution.
 *
 * This file is part of Stones of Nvwa:
 *      http://sourceforge.net/projects/nvwa
 *
 */

#ifndef NVWA_STATIC_MEM_POOL_H
#define NVWA_STATIC_MEM_POOL_H

#include <new>                  // std::bad_alloc
#include <stdexcept>            // std::runtime_error
#include <string>               // std::string
#include <vector>               // std::vector
#include <assert.h>             // assert
#include <stddef.h>             // size_t/NULL
#include "_nvwa.h"              // NVWA/NVWA_NAMESPACE_*
#include "c++11.h"              // _NOEXCEPT/_NULLPTR/_OVERRIDE
#include "class_level_lock.h"   // nvwa::class_level_lock
#include "mem_pool_base.h"      // nvwa::mem_pool_base

/* Defines the macro for debugging output */
# ifdef _STATIC_MEM_POOL_DEBUG
#   include <iostream>
#   define _STATIC_MEM_POOL_TRACE(_Lck, _Msg) \
        { \
            if (_Lck) { \
                static_mem_pool_set::lock guard; \
                std::cerr << "static_mem_pool: " << _Msg << std::endl; \
            } else { \
                std::cerr << "static_mem_pool: " << _Msg << std::endl; \
            } \
        }
# else
#   define _STATIC_MEM_POOL_TRACE(_Lck, _Msg) \
        ((void)0)
# endif
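
/*
 * Example (illustrative): enabling the trace output above.  The build
 * command is an assumption about a typical GCC/Clang invocation; any
 * way of defining _STATIC_MEM_POOL_DEBUG when compiling code that
 * includes this header works.
 *
 *     g++ -D_STATIC_MEM_POOL_DEBUG -I<path-to-nvwa> test.cpp
 *
 * With the macro defined, messages prefixed with "static_mem_pool: "
 * are written to std::cerr when a pool is created, destroyed, or
 * recycled.
 */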

NVWA_NAMESPACE_BEGIN

/** Singleton class to maintain a set of existing instantiations of
 *  static_mem_pool. */
class static_mem_pool_set
{
public:
    typedef class_level_lock<static_mem_pool_set>::lock lock;
    static static_mem_pool_set& instance();
    void recycle();
    void add(mem_pool_base* memory_pool_p);

private:
    static_mem_pool_set();
    ~static_mem_pool_set();

    typedef std::vector<mem_pool_base*> container_type;
    container_type _M_memory_pool_set;

    /* Forbid their use */
    static_mem_pool_set(const static_mem_pool_set&);
    const static_mem_pool_set& operator=(const static_mem_pool_set&);
};

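/*
 * Example (illustrative): returning the free blocks of every registered
 * pool to the system, e.g. under memory pressure.  Pools register
 * themselves via add() when they are created, so application code
 * normally only needs recycle().
 *
 *     NVWA::static_mem_pool_set::instance().recycle();
 */
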
/**
 * Singleton class template to manage the allocation/deallocation of
 * memory blocks of one specific size.
 *
 * @param _Sz   size of elements in the static_mem_pool
 * @param _Gid  group ID of the pool: if it is negative, simultaneous
 *              accesses to the pool are protected from each other;
 *              otherwise no protection is given
 */
template <size_t _Sz, int _Gid = -1>
class static_mem_pool : public mem_pool_base
{
    typedef typename class_level_lock<static_mem_pool<_Sz, _Gid>, (_Gid < 0)>
            ::lock lock;
public:
    /** Gets the instance of the static memory pool, creating it on
     *  first use. */
    static static_mem_pool& instance()
    {
        lock guard;
        if (!_S_instance_p)
        {
            _S_instance_p = _S_create_instance();
        }
        return *_S_instance_p;
    }

    /** Gets the known instance of the static memory pool.  The instance
     *  must already exist. */
    static static_mem_pool& instance_known()
    {
        assert(_S_instance_p != _NULLPTR);
        return *_S_instance_p;
    }

    /** Allocates memory and returns its pointer. */
    void* allocate()
    {
        {
            lock guard;
            if (_S_memory_block_p)
            {
                void* result = _S_memory_block_p;
                _S_memory_block_p = _S_memory_block_p->_M_next;
                return result;
            }
        }
        return _S_alloc_sys(_S_align(_Sz));
    }

    /** Deallocates memory by putting the memory block into the pool. */
    void deallocate(void* ptr)
    {
        assert(ptr != _NULLPTR);
        lock guard;
        _Block_list* block = reinterpret_cast<_Block_list*>(ptr);
        block->_M_next = _S_memory_block_p;
        _S_memory_block_p = block;
    }
    virtual void recycle() _OVERRIDE;

private:
    static_mem_pool()
    {
        _STATIC_MEM_POOL_TRACE(true, "static_mem_pool<" << _Sz << ','
                                     << _Gid << "> is created");
    }
    ~static_mem_pool()
    {
# ifdef _DEBUG
        // Empty the pool to avoid false memory leakage alarms.  This is
        // generally not necessary for release binaries.
        _Block_list* block = _S_memory_block_p;
        while (block)
        {
            _Block_list* next = block->_M_next;
            dealloc_sys(block);
            block = next;
        }
        _S_memory_block_p = _NULLPTR;
# endif
        _S_instance_p = _NULLPTR;
        _S_destroyed = true;
        _STATIC_MEM_POOL_TRACE(false, "static_mem_pool<" << _Sz << ','
                                      << _Gid << "> is destroyed");
    }
    static size_t _S_align(size_t size)
    {
        return size >= sizeof(_Block_list) ? size : sizeof(_Block_list);
    }
    static void* _S_alloc_sys(size_t size);
    static static_mem_pool* _S_create_instance();

    static bool _S_destroyed;
    static static_mem_pool* _S_instance_p;
    static mem_pool_base::_Block_list* _S_memory_block_p;

    /* Forbid their use */
    static_mem_pool(const static_mem_pool&);
    const static_mem_pool& operator=(const static_mem_pool&);
};

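/*
 * Example (illustrative sketch): using a pool directly.  The element
 * size 32 is arbitrary; the DECLARE_STATIC_MEM_POOL* macros below are
 * normally used instead of calling allocate/deallocate by hand.
 *
 *     void* ptr = NVWA::static_mem_pool<32>::instance().allocate();
 *     if (ptr != _NULLPTR)
 *     {
 *         // ... use the 32-byte block ...
 *         NVWA::static_mem_pool<32>::instance_known().deallocate(ptr);
 *     }
 */
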
template <size_t _Sz, int _Gid> bool
        static_mem_pool<_Sz, _Gid>::_S_destroyed = false;
template <size_t _Sz, int _Gid> mem_pool_base::_Block_list*
        static_mem_pool<_Sz, _Gid>::_S_memory_block_p = _NULLPTR;
template <size_t _Sz, int _Gid> static_mem_pool<_Sz, _Gid>*
        static_mem_pool<_Sz, _Gid>::_S_instance_p = _S_create_instance();

/**
 * Recycles half of the free memory blocks in the pool back to the
 * system.  It is called when a memory request to the system fails.
 */
template <size_t _Sz, int _Gid>
void static_mem_pool<_Sz, _Gid>::recycle()
{
    // Only here the global lock in static_mem_pool_set is obtained
    // before the pool-specific lock.  However, no race conditions are
    // found so far.
    lock guard;
    _Block_list* block = _S_memory_block_p;
    while (block)
    {
        if (_Block_list* temp = block->_M_next)
        {
            _Block_list* next = temp->_M_next;
            block->_M_next = next;
            dealloc_sys(temp);
            block = next;
        }
        else
        {
            break;
        }
    }
    _STATIC_MEM_POOL_TRACE(false, "static_mem_pool<" << _Sz << ','
                                  << _Gid << "> is recycled");
}

// Allocates memory from the system.  It is called when there is no free
// memory block in the pool; on failure it asks all pools to recycle and
// tries once more.
template <size_t _Sz, int _Gid>
void* static_mem_pool<_Sz, _Gid>::_S_alloc_sys(size_t size)
{
    static_mem_pool_set::lock guard;
    void* result = mem_pool_base::alloc_sys(size);
    if (!result)
    {
        static_mem_pool_set::instance().recycle();
        result = mem_pool_base::alloc_sys(size);
    }
    return result;
}

// Creates the instance of the static memory pool and registers it with
// static_mem_pool_set.
template <size_t _Sz, int _Gid>
static_mem_pool<_Sz, _Gid>* static_mem_pool<_Sz, _Gid>::_S_create_instance()
{
    if (_S_destroyed)
        throw std::runtime_error("dead reference detected");

    static_mem_pool_set::instance();    // Force its creation
    static_mem_pool* inst_p = new static_mem_pool();
    try
    {
        static_mem_pool_set::instance().add(inst_p);
    }
    catch (...)
    {
        _STATIC_MEM_POOL_TRACE(true,
                "Exception occurs in static_mem_pool_set::add");
        // The strange cast below is to work around a bug in GCC 2.95.3
        delete static_cast<mem_pool_base*>(inst_p);
        throw;
    }
    return inst_p;
}

NVWA_NAMESPACE_END

/**
 * Declares the class-specific allocation and deallocation functions
 * that use the static memory pool of the default group.  The operator
 * new throws std::bad_alloc on failure.
 */
#define DECLARE_STATIC_MEM_POOL(_Cls) \
public: \
    static void* operator new(size_t size) \
    { \
        assert(size == sizeof(_Cls)); \
        void* ptr; \
        ptr = NVWA::static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().allocate(); \
        if (ptr == _NULLPTR) \
            throw std::bad_alloc(); \
        return ptr; \
    } \
    static void operator delete(void* ptr) \
    { \
        if (ptr) \
            NVWA::static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().deallocate(ptr); \
    }

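/*
 * Example (illustrative): a class whose objects are allocated from a
 * static memory pool.  The class name MyObj is hypothetical.
 *
 *     class MyObj
 *     {
 *     public:
 *         // ... data members and member functions ...
 *         DECLARE_STATIC_MEM_POOL(MyObj)
 *     };
 *
 *     MyObj* obj_p = new MyObj();  // block comes from static_mem_pool<sizeof(MyObj)>
 *     delete obj_p;                // block goes back to the pool, not the system
 */
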
/**
 * Declares the non-throwing allocation and deallocation functions that
 * use the static memory pool of the default group.  The operator new
 * returns a null pointer on failure instead of throwing.
 */
#define DECLARE_STATIC_MEM_POOL__NOTHROW(_Cls) \
public: \
    static void* operator new(size_t size) _NOEXCEPT \
    { \
        assert(size == sizeof(_Cls)); \
        return NVWA::static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().allocate(); \
    } \
    static void operator delete(void* ptr) \
    { \
        if (ptr) \
            NVWA::static_mem_pool<sizeof(_Cls)>:: \
                           instance_known().deallocate(ptr); \
    }

/**
 * Declares the class-specific allocation and deallocation functions
 * that use the static memory pool of a specific group.  The operator
 * new throws std::bad_alloc on failure.
 */
#define DECLARE_STATIC_MEM_POOL_GROUPED(_Cls, _Gid) \
public: \
    static void* operator new(size_t size) \
    { \
        assert(size == sizeof(_Cls)); \
        void* ptr; \
        ptr = NVWA::static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().allocate(); \
        if (ptr == _NULLPTR) \
            throw std::bad_alloc(); \
        return ptr; \
    } \
    static void operator delete(void* ptr) \
    { \
        if (ptr) \
            NVWA::static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().deallocate(ptr); \
    }

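/*
 * Example (illustrative): two classes of the same size sharing one pool
 * by using the same non-negative group ID.  The class names and the
 * group ID 1 are hypothetical.  Because the group ID is non-negative,
 * the (_Gid < 0) flag in the class_level_lock typedef above disables
 * locking, so this form is only suitable when the pool is not accessed
 * concurrently.
 *
 *     class NodeA
 *     {
 *         void* _M_data[4];
 *         DECLARE_STATIC_MEM_POOL_GROUPED(NodeA, 1)
 *     };
 *
 *     class NodeB
 *     {
 *         void* _M_data[4];
 *         DECLARE_STATIC_MEM_POOL_GROUPED(NodeB, 1)
 *     };
 */
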
/**
 * Declares the non-throwing allocation and deallocation functions that
 * use the static memory pool of a specific group.  The operator new
 * returns a null pointer on failure instead of throwing.
 */
#define DECLARE_STATIC_MEM_POOL_GROUPED__NOTHROW(_Cls, _Gid) \
public: \
    static void* operator new(size_t size) _NOEXCEPT \
    { \
        assert(size == sizeof(_Cls)); \
        return NVWA::static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().allocate(); \
    } \
    static void operator delete(void* ptr) \
    { \
        if (ptr) \
            NVWA::static_mem_pool<sizeof(_Cls), (_Gid)>:: \
                           instance_known().deallocate(ptr); \
    }

#endif // NVWA_STATIC_MEM_POOL_H