pool_allocator.h
// Allocators -*- C++ -*-
// (GNU libstdc++ copyright and license header omitted from this listing.)
#ifndef _POOL_ALLOCATOR_H
#define _POOL_ALLOCATOR_H 1

#include <bits/c++config.h>
#include <cstdlib>
#include <new>
#include <bits/functexcept.h>
#include <ext/atomicity.h>
#include <ext/concurrence.h>
#include <bits/move.h>

_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)

  using std::size_t;
  using std::ptrdiff_t;

  /**
   *  @brief  Base class for __pool_alloc.
   *
   *  Small requests (at most _S_max_bytes) are rounded up to a multiple
   *  of _S_align and served from per-size free lists, which are refilled
   *  from larger chunks obtained with ::operator new.  Larger requests
   *  go directly to ::operator new.
   */
  class __pool_alloc_base
  {
  protected:

    enum { _S_align = 8 };
    enum { _S_max_bytes = 128 };
    enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

    union _Obj
    {
      union _Obj* _M_free_list_link;
      char        _M_client_data[1];    // The client sees this.
    };

    static _Obj* volatile _S_free_list[_S_free_list_size];

    // Chunk allocation state.
    static char*  _S_start_free;
    static char*  _S_end_free;
    static size_t _S_heap_size;

    size_t
    _M_round_up(size_t __bytes)
    { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }

    _Obj* volatile*
    _M_get_free_list(size_t __bytes);

    __mutex&
    _M_get_mutex();

    // Returns an object of size __n, and optionally adds entries to the
    // size-__n free list.
    void*
    _M_refill(size_t __n);

    // Allocates a chunk for __nobjs objects of size __n.  __nobjs may
    // be reduced if it is inconvenient to allocate that many.
    char*
    _M_allocate_chunk(size_t __n, int& __nobjs);
  };
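
  // Worked example: _M_round_up rounds a request up to the next multiple
  // of _S_align, so each of the _S_free_list_size (128/8 == 16) free
  // lists serves exactly one size class:
  //
  //   _M_round_up(1)   == 8
  //   _M_round_up(13)  == 16
  //   _M_round_up(128) == 128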

  /**
   *  @brief  Allocator using a memory pool with a single lock.
   *  @ingroup allocators
   */
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    private:
      static _Atomic_word _S_force_new;

    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

      __pool_alloc() throw() { }

      __pool_alloc(const __pool_alloc&) throw() { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) throw() { }

      ~__pool_alloc() throw() { }

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      size_type
      max_size() const throw()
      { return size_t(-1) / sizeof(_Tp); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 402. wrong new expression in [some_] allocator::construct
      void
      construct(pointer __p, const _Tp& __val)
      { ::new((void *)__p) _Tp(__val); }

#ifdef __GXX_EXPERIMENTAL_CXX0X__
      template<typename... _Args>
        void
        construct(pointer __p, _Args&&... __args)
        { ::new((void *)__p) _Tp(std::forward<_Args>(__args)...); }
#endif

      void
      destroy(pointer __p) { __p->~_Tp(); }

      pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);
    };
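
  // Rebind sketch: containers use rebind to obtain an allocator for their
  // internal node type rather than the element type.  For instance (the
  // _Node type below is hypothetical):
  //
  //   typedef __gnu_cxx::__pool_alloc<int>        _IntAlloc;
  //   typedef _IntAlloc::rebind<_Node>::other     _NodeAlloc;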

  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }

  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }
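
  // Usage sketch: the free lists are static, so every __pool_alloc
  // instance draws from the same pool and any two instances compare
  // equal (see the operators above).  A typical use is as a container
  // allocator for many small nodes:
  //
  //   #include <list>
  //   #include <ext/pool_allocator.h>
  //
  //   int main()
  //   {
  //     std::list<int, __gnu_cxx::__pool_alloc<int> > __l;
  //     for (int __i = 0; __i < 1000; ++__i)
  //       __l.push_back(__i);   // node memory comes from the pool
  //   }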

  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;
  template<typename _Tp>
    _Tp*
    __pool_alloc<_Tp>::allocate(size_type __n, const void*)
    {
      pointer __ret = 0;
      if (__builtin_expect(__n != 0, true))
        {
          if (__builtin_expect(__n > this->max_size(), false))
            std::__throw_bad_alloc();

          // Check once whether GLIBCXX_FORCE_NEW is set in the
          // environment; if there is a race through here, the answer
          // from getenv will resolve in the same direction as has
          // already been seen.
          if (_S_force_new == 0)
            {
              if (std::getenv("GLIBCXX_FORCE_NEW"))
                __atomic_add_dispatch(&_S_force_new, 1);
              else
                __atomic_add_dispatch(&_S_force_new, -1);
            }

          const size_t __bytes = __n * sizeof(_Tp);
          if (__bytes > size_t(_S_max_bytes) || _S_force_new > 0)
            __ret = static_cast<_Tp*>(::operator new(__bytes));
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);

              __scoped_lock sentry(_M_get_mutex());
              _Obj* __restrict__ __result = *__free_list;
              if (__builtin_expect(__result == 0, 0))
                __ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
              else
                {
                  *__free_list = __result->_M_free_list_link;
                  __ret = reinterpret_cast<_Tp*>(__result);
                }
              if (__builtin_expect(__ret == 0, 0))
                std::__throw_bad_alloc();
            }
        }
      return __ret;
    }
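
  // Note: setting the GLIBCXX_FORCE_NEW environment variable makes
  // allocate() bypass the pool and call ::operator new directly, which
  // helps memory debuggers such as valgrind see every allocation:
  //
  //   $ GLIBCXX_FORCE_NEW=1 ./my_program
  //
  // Otherwise a small request (say 12 bytes for __n == 3 ints) is
  // rounded up to 16 and popped off the 16-byte free list, falling
  // back to _M_refill when that list is empty.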

  template<typename _Tp>
    void
    __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
    {
      if (__builtin_expect(__n != 0 && __p != 0, true))
        {
          const size_t __bytes = __n * sizeof(_Tp);
          if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
            ::operator delete(__p);
          else
            {
              _Obj* volatile* __free_list = _M_get_free_list(__bytes);
              _Obj* __q = reinterpret_cast<_Obj*>(__p);

              __scoped_lock sentry(_M_get_mutex());
              __q->_M_free_list_link = *__free_list;
              *__free_list = __q;
            }
        }
    }
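
  // Note: deallocate() must be called with the same __n that was passed
  // to allocate(), since __n * sizeof(_Tp) selects the free list the
  // block is pushed back onto.  For example (illustrative only):
  //
  //   __gnu_cxx::__pool_alloc<int> __a;
  //   int* __p = __a.allocate(4);   // 16 bytes, from the 16-byte list
  //   __a.deallocate(__p, 4);       // must pass 4 again, not another n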

_GLIBCXX_END_NAMESPACE

#endif