epoc32/include/stdapis/stlport/stl/_pthread_alloc.h
branch: Symbian3
changeset: 4:837f303aceeb (file added Wed Mar 31 12:33:34 2010 +0100)
/*
 *
 * Copyright (c) 1994
 * Hewlett-Packard Company
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */

#ifndef _STLP_PTHREAD_ALLOC_H
#define _STLP_PTHREAD_ALLOC_H

// Pthread-specific node allocator.
// This is similar to the default allocator, except that free-list
// information is kept separately for each thread, avoiding locking.
// This should be reasonably fast even in the presence of threads.
// The downside is that storage may not be well-utilized.
// It is not an error to allocate memory in thread A and deallocate
// it in thread B, but doing so transfers ownership of the memory,
// which can then only be reallocated by thread B.  Done on a regular
// basis, this effectively amounts to a storage leak.  It can also
// result in frequent sharing of cache lines among processors, with
// potentially serious performance consequences.
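
// A minimal usage sketch of the raw interface (illustrative only; the
// pthread_alloc typedef is defined further down in this header):
//
//   void* __p = pthread_alloc::allocate(24);   // served from this thread's
//                                              // size-24 free list
//   pthread_alloc::deallocate(__p, 24);        // the same size must be
//                                              // passed back when freeing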

#include <pthread.h>

#ifndef _STLP_INTERNAL_ALLOC_H
#include <stl/_alloc.h>
#endif

#ifndef __RESTRICT
#  define __RESTRICT
#endif

_STLP_BEGIN_NAMESPACE

#define _STLP_DATA_ALIGNMENT 8

union _Pthread_alloc_obj {
    union _Pthread_alloc_obj * __free_list_link;
    char __client_data[_STLP_DATA_ALIGNMENT];    /* The client sees this.    */
};
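
// While a chunk sits on a free list, its first word is reused as the link
// pointer; once handed to a client, the same bytes are __client_data.  A
// sketch of the pop operation this union enables (hypothetical names):
//
//   _Pthread_alloc_obj* __head = __free_list[__i];   // first free chunk, if any
//   if (__head != 0)
//     __free_list[__i] = __head->__free_list_link;   // unlink; __head is now
//                                                    // plain client storage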

// Pthread allocators don't appear to the client to have meaningful
// instances.  We do in fact need to associate some state with each
// thread.  That state is represented by
// _Pthread_alloc_per_thread_state<_Max_size>.

template<size_t _Max_size>
struct _Pthread_alloc_per_thread_state {
  typedef _Pthread_alloc_obj __obj;
  enum { _S_NFREELISTS = _Max_size/_STLP_DATA_ALIGNMENT };

  // Free list link for list of available per thread structures.
  // When one of these becomes available for reuse due to thread
  // termination, any objects in its free list remain associated
  // with it.  The whole structure may then be used by a newly
  // created thread.
  _Pthread_alloc_per_thread_state() : __next(0)
  {
    memset((void *)__free_list, 0, (size_t)_S_NFREELISTS * sizeof(__obj *));
  }
  // Returns an object of size __n, and possibly adds entries to the
  // size-__n free list.
  void *_M_refill(size_t __n);

  _Pthread_alloc_obj* volatile __free_list[_S_NFREELISTS];
  // __next is only to be used by per_thread_allocator, which returns
  // memory to the originating thread.
  _Pthread_alloc_per_thread_state<_Max_size> * __next;
  _STLP_mutex _M_lock;
};

// Pthread-specific allocator.
// The argument specifies the largest object size allocated from per-thread
// free lists.  Larger objects are allocated using malloc_alloc.
// _Max_size must be a power of 2.
template < __DFL_NON_TYPE_PARAM(size_t, _Max_size, _MAX_BYTES) >
class _Pthread_alloc {

public: // but only for internal use:

  typedef _Pthread_alloc_obj __obj;
  typedef _Pthread_alloc_per_thread_state<_Max_size> __state_type;
  typedef char value_type;

  // Allocates a chunk for __nobjs objects of size __size.  __nobjs may be
  // reduced if it is inconvenient to allocate the requested number.
  static char *_S_chunk_alloc(size_t __size, size_t &__nobjs);

  enum {_S_ALIGN = _STLP_DATA_ALIGNMENT};

  static size_t _S_round_up(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1) & ~((int)_S_ALIGN - 1));
  }
  static size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1)/(int)_S_ALIGN - 1);
  }
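
  // Worked examples with _S_ALIGN == 8 (the values follow directly from
  // the two expressions above):
  //   _S_round_up(1)  == 8    _S_round_up(8)  == 8    _S_round_up(9)  == 16
  //   _S_freelist_index(1) == 0   _S_freelist_index(8) == 0   _S_freelist_index(9) == 1
  // i.e. a request for __n bytes is rounded up to the next multiple of 8
  // and served from __free_list[(__n + 7)/8 - 1].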

private:
  // Chunk allocation state. And other shared state.
  // Protected by _S_chunk_allocator_lock.
  static _STLP_mutex_base _S_chunk_allocator_lock;
  static char *_S_start_free;
  static char *_S_end_free;
  static size_t _S_heap_size;
  // Per thread states that are currently unclaimed by any thread.
  static _Pthread_alloc_per_thread_state<_Max_size>* _S_free_per_thread_states;
  // Pthread key under which the current thread's state is stored.
  static pthread_key_t _S_key;
  static bool _S_key_initialized;
  // Function to be called on thread exit to reclaim per thread state.
  static void _S_destructor(void *instance);
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_new_per_thread_state();
public:
  // Return a recycled or new per thread state, ensuring that the current
  // thread has an associated per thread state.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_get_per_thread_state();
private:
  // Scoped lock: acquires _S_chunk_allocator_lock on construction and
  // releases it on destruction.
  class _M_lock;
  friend class _M_lock;
  class _M_lock {
      public:
        _M_lock () { _S_chunk_allocator_lock._M_acquire_lock(); }
        ~_M_lock () { _S_chunk_allocator_lock._M_release_lock(); }
  };
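
  // Usage sketch for the scoped lock above (illustrative only): construct
  // an _M_lock on the stack and the shared chunk-allocator state stays
  // protected until it goes out of scope, even on early return:
  //
  //   { _M_lock __guard;          // acquires _S_chunk_allocator_lock
  //     /* touch _S_start_free, _S_end_free, _S_heap_size ... */
  //   }                           // destructor releases the lock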

public:

  /* __n must be > 0 */
  static void * allocate(size_t __n)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;
    __state_type* __a;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    __a = _S_get_per_thread_state();

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  }

  /* __p may not be 0 */
  static void deallocate(void *__p, size_t __n)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;
    __state_type* __a;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    __a = _S_get_per_thread_state();

    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }
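
  // The ownership transfer described at the top of this file falls out of
  // the deallocate() above: a block is always filed under the *calling*
  // thread's free list, whichever thread allocated it.  Illustrative
  // sketch (pthread_alloc is a typedef for this class, defined below):
  //
  //   void* __p = pthread_alloc::allocate(24);   // in thread A
  //   /* ... hand __p over to thread B ... */
  //   pthread_alloc::deallocate(__p, 24);        // in thread B: the block now
  //                                              // sits in B's size-24 free list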

  // boris : versions for per_thread_allocator
  /* __n must be > 0 */
  static void * allocate(size_t __n, __state_type* __a)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    // boris : here we have to lock the per thread state, as we may be
    // getting memory from a different thread's pool.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  }

  /* __p may not be 0 */
  static void deallocate(void *__p, size_t __n, __state_type* __a)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    // boris : here we have to lock the per thread state, as we may be
    // returning memory allocated by a different thread.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  static void * reallocate(void *__p, size_t __old_sz, size_t __new_sz);

};

# if defined (_STLP_USE_TEMPLATE_EXPORT)
_STLP_EXPORT_TEMPLATE_CLASS _Pthread_alloc<_MAX_BYTES>;
# endif

typedef _Pthread_alloc<_MAX_BYTES> __pthread_alloc;
typedef __pthread_alloc pthread_alloc;

template <class _Tp>
class pthread_allocator {
  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
#endif

  pthread_allocator() _STLP_NOTHROW {}
  pthread_allocator(const pthread_allocator<_Tp>&) _STLP_NOTHROW {}

#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
  template <class _OtherType> pthread_allocator(const pthread_allocator<_OtherType>&)
    _STLP_NOTHROW {}
#endif

  ~pthread_allocator() _STLP_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp)))
                    : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const _STLP_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
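
// A minimal container usage sketch (illustrative only; assumes a standard
// <vector> is available):
//
//   #include <vector>
//   std::vector<int, pthread_allocator<int> > __v;
//   __v.push_back(42);   // element storage comes from the calling
//                        // thread's free lists, with no locking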

_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC pthread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
#endif
};

// All pthread_allocator instances compare equal: the allocator keeps no
// per-instance state, so memory obtained through one instance may be
// freed through any other.
template <class _T1, class _T2>
inline bool operator==(const pthread_allocator<_T1>&,
                       const pthread_allocator<_T2>&)
{
  return true;
}

#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template <class _T1, class _T2>
inline bool operator!=(const pthread_allocator<_T1>&,
                       const pthread_allocator<_T2>&)
{
  return false;
}
#endif

#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION

# ifdef _STLP_USE_RAW_SGI_ALLOCATORS
template <class _Tp, size_t _Max_size>
struct _Alloc_traits<_Tp, _Pthread_alloc<_Max_size> >
{
  typedef __allocator<_Tp, _Pthread_alloc<_Max_size> >
          allocator_type;
};
# endif

template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, pthread_allocator<_Atype> >
{
  typedef pthread_allocator<_Tp> allocator_type;
};

#endif

#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)

template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>&
__stl_alloc_rebind(pthread_allocator<_Tp1>& __x, const _Tp2*) {
  return (pthread_allocator<_Tp2>&)__x;
}

template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>
__stl_alloc_create(pthread_allocator<_Tp1>&, const _Tp2*) {
  return pthread_allocator<_Tp2>();
}

#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */

//
// per_thread_allocator<> : this allocator always returns memory to the
// same thread it was allocated from.
//

template <class _Tp>
class per_thread_allocator {
  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
  typedef pthread_alloc::__state_type __state_type;
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef per_thread_allocator<_NewType> other;
  };
#endif

  per_thread_allocator() _STLP_NOTHROW {
    _M_state = _S_Alloc::_S_get_per_thread_state();
  }
  per_thread_allocator(const per_thread_allocator<_Tp>& __a) _STLP_NOTHROW : _M_state(__a._M_state) {}

#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
  template <class _OtherType> per_thread_allocator(const per_thread_allocator<_OtherType>& __a)
    _STLP_NOTHROW : _M_state(__a._M_state) {}
#endif

  ~per_thread_allocator() _STLP_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp), _M_state)) : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp), _M_state); }

  size_type max_size() const _STLP_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }

  // The per thread state captured at construction; deallocate() always
  // returns memory to this pool.
  __state_type* _M_state;
};
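
// Hypothetical sketch of the cross-thread behaviour: each instance pins the
// per thread state that was current when it was constructed, so blocks go
// back to their originating pool (under _M_lock) even when deallocate()
// runs on another thread:
//
//   per_thread_allocator<int> __a;    // captures thread A's state
//   int* __p = __a.allocate(1);       // drawn from A's free lists
//   /* ... hand __p and a copy of __a to thread B ... */
//   __a.deallocate(__p, 1);           // still returns the block to A's pool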

_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC per_thread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef per_thread_allocator<_NewType> other;
  };
#endif
};

// Two per_thread_allocator instances compare equal only if they share the
// same per thread state: memory from one may be freed through the other
// only when _M_state matches.
template <class _T1, class _T2>
inline bool operator==(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2)
{
  return __a1._M_state == __a2._M_state;
}

#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template <class _T1, class _T2>
inline bool operator!=(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2)
{
  return __a1._M_state != __a2._M_state;
}
#endif

#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION

template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, per_thread_allocator<_Atype> >
{
  typedef per_thread_allocator<_Tp> allocator_type;
};

#endif

#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)

template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>&
__stl_alloc_rebind(per_thread_allocator<_Tp1>& __x, const _Tp2*) {
  return (per_thread_allocator<_Tp2>&)__x;
}

template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>
__stl_alloc_create(per_thread_allocator<_Tp1>&, const _Tp2*) {
  return per_thread_allocator<_Tp2>();
}

#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */

_STLP_END_NAMESPACE

# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION) && !defined (_STLP_LINK_TIME_INSTANTIATION)
#  include <stl/_pthread_alloc.c>
# endif

#endif /* _STLP_PTHREAD_ALLOC_H */

// Local Variables:
// mode:C++
// End: