epoc32/include/stdapis/stlport/stl/_pthread_alloc.h
branchSymbian2
changeset 2 2fe1408b6811
parent 0 061f57f2323e
     1.1 --- a/epoc32/include/stdapis/stlport/stl/_pthread_alloc.h	Tue Nov 24 13:55:44 2009 +0000
     1.2 +++ b/epoc32/include/stdapis/stlport/stl/_pthread_alloc.h	Tue Mar 16 16:12:26 2010 +0000
     1.3 @@ -1,1 +1,489 @@
     1.4 -_pthread_alloc.h
     1.5 +/*
     1.6 + *
     1.7 + * Copyright (c) 1994
     1.8 + * Hewlett-Packard Company
     1.9 + *
    1.10 + * Copyright (c) 1996,1997
    1.11 + * Silicon Graphics Computer Systems, Inc.
    1.12 + *
    1.13 + * Copyright (c) 1997
    1.14 + * Moscow Center for SPARC Technology
    1.15 + *
    1.16 + * Copyright (c) 1999 
    1.17 + * Boris Fomitchev
    1.18 + *
    1.19 + * This material is provided "as is", with absolutely no warranty expressed
    1.20 + * or implied. Any use is at your own risk.
    1.21 + *
    1.22 + * Permission to use or copy this software for any purpose is hereby granted 
    1.23 + * without fee, provided the above notices are retained on all copies.
    1.24 + * Permission to modify the code and to distribute modified code is granted,
    1.25 + * provided the above notices are retained, and a notice that the code was
    1.26 + * modified is included with the above copyright notice.
    1.27 + *
    1.28 + */
    1.29 +
    1.30 +#ifndef _STLP_PTHREAD_ALLOC_H
    1.31 +#define _STLP_PTHREAD_ALLOC_H
    1.32 +
    1.33 +// Pthread-specific node allocator.
    1.34 +// This is similar to the default allocator, except that free-list
    1.35 +// information is kept separately for each thread, avoiding locking.
    1.36 +// This should be reasonably fast even in the presence of threads.
    1.37 +// The down side is that storage may not be well-utilized.
    1.38 +// It is not an error to allocate memory in thread A and deallocate
    1.39 +// it in thread B.  But this effectively transfers ownership of the memory,
    1.40 +// so that it can only be reallocated by thread B.  Thus this can effectively
    1.41 +// result in a storage leak if it's done on a regular basis.
    1.42 +// It can also result in frequent sharing of
    1.43 +// cache lines among processors, with potentially serious performance
    1.44 +// consequences.
    1.45 +
    1.46 +#include <pthread.h>
    1.47 +
    1.48 +#ifndef _STLP_INTERNAL_ALLOC_H
    1.49 +#include <stl/_alloc.h>
    1.50 +#endif
    1.51 +
    1.52 +#ifndef __RESTRICT
    1.53 +#  define __RESTRICT
    1.54 +#endif
    1.55 +
    1.56 +_STLP_BEGIN_NAMESPACE
    1.57 +
    1.58 +#define _STLP_DATA_ALIGNMENT 8
    1.59 +
// Free-list node for the pthread allocator.  While a chunk sits on a
// free list, __free_list_link points at the next free chunk; once the
// chunk is handed out, the same bytes become the client's data.
// _STLP_DATA_ALIGNMENT is therefore both the minimum allocation unit
// and the granularity of the free lists.
union _Pthread_alloc_obj {
    union _Pthread_alloc_obj * __free_list_link;
    char __client_data[_STLP_DATA_ALIGNMENT];    /* The client sees this.    */
};
    1.64 +
    1.65 +// Pthread allocators don't appear to the client to have meaningful
    1.66 +// instances.  We do in fact need to associate some state with each
    1.67 +// thread.  That state is represented by
    1.68 +// _Pthread_alloc_per_thread_state<_Max_size>.
    1.69 +
template<size_t _Max_size>
struct _Pthread_alloc_per_thread_state {
  typedef _Pthread_alloc_obj __obj;
  // One free list per multiple of _STLP_DATA_ALIGNMENT, up to _Max_size.
  enum { _S_NFREELISTS = _Max_size/_STLP_DATA_ALIGNMENT };

  // Free list link for list of available per thread structures.
  // When one of these becomes available for reuse due to thread
  // termination, any objects in its free list remain associated
  // with it.  The whole structure may then be used by a newly
  // created thread.
  // Constructor: all free lists start empty; not on the recycle list.
  _Pthread_alloc_per_thread_state() : __next(0)
  {
    memset((void *)__free_list, 0, (size_t)_S_NFREELISTS * sizeof(__obj *));
  }
  // Returns an object of size __n, and possibly adds to size n free list.
  // (Implemented in <stl/_pthread_alloc.c>; may draw on the shared chunk pool.)
  void *_M_refill(size_t __n);
  
  // Heads of the per-size free lists; volatile because the owning thread
  // and (via per_thread_allocator) other threads may touch them.
  _Pthread_alloc_obj* volatile __free_list[_S_NFREELISTS]; 
  _Pthread_alloc_per_thread_state<_Max_size> * __next; 
  // this data member is only to be used by per_thread_allocator, which returns memory to the originating thread.
  _STLP_mutex _M_lock;

 };
    1.93 +
    1.94 +// Pthread-specific allocator.
    1.95 +// The argument specifies the largest object size allocated from per-thread
    1.96 +// free lists.  Larger objects are allocated using malloc_alloc.
    1.97 +// Max_size must be a power of 2.
template < __DFL_NON_TYPE_PARAM(size_t, _Max_size, _MAX_BYTES) >
class _Pthread_alloc {

public: // but only for internal use:

  typedef _Pthread_alloc_obj __obj;
  typedef _Pthread_alloc_per_thread_state<_Max_size> __state_type;
  // This is a node allocator: the unit of allocation is raw bytes.
  typedef char value_type;

  // Allocates a chunk for nobjs of size size.  nobjs may be reduced
  // if it is inconvenient to allocate the requested number.
  static char *_S_chunk_alloc(size_t __size, size_t &__nobjs);

  enum {_S_ALIGN = _STLP_DATA_ALIGNMENT};

  // Round __bytes up to the next multiple of _S_ALIGN
  // (relies on _S_ALIGN being a power of two).
  static size_t _S_round_up(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1) & ~((int)_S_ALIGN - 1));
  }
  // Map a request size to its free-list index: sizes 1.._S_ALIGN map to
  // list 0, _S_ALIGN+1..2*_S_ALIGN to list 1, and so on.
  static size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1)/(int)_S_ALIGN - 1);
  }

private:
  // Chunk allocation state. And other shared state.
  // Protected by _S_chunk_allocator_lock.
  static _STLP_mutex_base _S_chunk_allocator_lock;
  static char *_S_start_free;
  static char *_S_end_free;
  static size_t _S_heap_size;
  static _Pthread_alloc_per_thread_state<_Max_size>* _S_free_per_thread_states;
  static pthread_key_t _S_key;
  static bool _S_key_initialized;
        // Pthread key under which per thread state is stored. 
        // Allocator instances that are currently unclaimed by any thread.
  // Registered as the pthread key destructor; reclaims an exiting
  // thread's per-thread state (definitions live in _pthread_alloc.c).
  static void _S_destructor(void *instance);
        // Function to be called on thread exit to reclaim per thread
        // state.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_new_per_thread_state();
public:
        // Return a recycled or new per thread state.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_get_per_thread_state();
private:
        // ensure that the current thread has an associated
        // per thread state.
  // RAII-style scoped guard for _S_chunk_allocator_lock.
  class _M_lock;
  friend class _M_lock;
  class _M_lock {
      public:
        _M_lock () { _S_chunk_allocator_lock._M_acquire_lock(); }
        ~_M_lock () { _S_chunk_allocator_lock._M_release_lock(); }
  };

public:

  /* n must be > 0      */
  // Allocate __n bytes from the calling thread's own free lists; requests
  // larger than _Max_size fall straight through to __malloc_alloc.  No
  // locking on this path: the lists touched belong to the current thread.
  static void * allocate(size_t __n)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;
    __state_type* __a;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    __a = _S_get_per_thread_state();

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        // Free list empty: refill it (may take the shared chunk lock).
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    // Pop the head of the free list.
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  };

  /* p may not be 0 */
  // Return __p to the *calling* thread's free list — which transfers
  // ownership when __p was allocated by a different thread (see the
  // header comment about cross-thread deallocation).
  static void deallocate(void *__p, size_t __n)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;
    __state_type* __a;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    __a = _S_get_per_thread_state();
    
    // Push __p onto the head of the matching free list.
    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  // boris : versions for per_thread_allocator
  /* n must be > 0      */
  // As allocate(__n), but draws from the supplied state __a, which may
  // belong to another thread — hence the per-state lock.
  static void * allocate(size_t __n, __state_type* __a)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    // boris : here, we have to lock per thread state, as we may be getting memory from
    // different thread pool.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        // Refill while still holding __a's lock (released by __lock's dtor).
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  };

  /* p may not be 0 */
  // As deallocate(__p, __n), but returns memory to the supplied state
  // __a (the originating thread's pool), under __a's lock.
  static void deallocate(void *__p, size_t __n, __state_type* __a)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    // boris : here, we have to lock per thread state, as we may be returning memory from
    // different thread.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  // Resize __p from __old_sz to __new_sz (defined in _pthread_alloc.c).
  static void * reallocate(void *__p, size_t __old_sz, size_t __new_sz);

} ;
   1.242 +
# if defined (_STLP_USE_TEMPLATE_EXPORT)
_STLP_EXPORT_TEMPLATE_CLASS _Pthread_alloc<_MAX_BYTES>;
# endif

// Convenience names for the default instantiation (_MAX_BYTES cap).
typedef _Pthread_alloc<_MAX_BYTES> __pthread_alloc;
typedef __pthread_alloc pthread_alloc;
   1.249 +
   1.250 +template <class _Tp>
   1.251 +class pthread_allocator {
   1.252 +  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
   1.253 +public:
   1.254 +  typedef size_t     size_type;
   1.255 +  typedef ptrdiff_t  difference_type;
   1.256 +  typedef _Tp*       pointer;
   1.257 +  typedef const _Tp* const_pointer;
   1.258 +  typedef _Tp&       reference;
   1.259 +  typedef const _Tp& const_reference;
   1.260 +  typedef _Tp        value_type;
   1.261 +
   1.262 +#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
   1.263 +  template <class _NewType> struct rebind {
   1.264 +    typedef pthread_allocator<_NewType> other;
   1.265 +  };
   1.266 +#endif
   1.267 +
   1.268 +  pthread_allocator() _STLP_NOTHROW {}
   1.269 +  pthread_allocator(const pthread_allocator<_Tp>& a) _STLP_NOTHROW {}
   1.270 +
   1.271 +#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
   1.272 +  template <class _OtherType> pthread_allocator(const pthread_allocator<_OtherType>&)
   1.273 +		_STLP_NOTHROW {}
   1.274 +#endif
   1.275 +
   1.276 +  ~pthread_allocator() _STLP_NOTHROW {}
   1.277 +
   1.278 +  pointer address(reference __x) const { return &__x; }
   1.279 +  const_pointer address(const_reference __x) const { return &__x; }
   1.280 +
   1.281 +  // __n is permitted to be 0.  The C++ standard says nothing about what
   1.282 +  // the return value is when __n == 0.
   1.283 +  _Tp* allocate(size_type __n, const void* = 0) {
   1.284 +    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp)))
   1.285 +                    : 0;
   1.286 +  }
   1.287 +
   1.288 +  // p is not permitted to be a null pointer.
   1.289 +  void deallocate(pointer __p, size_type __n)
   1.290 +    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp)); }
   1.291 +
   1.292 +  size_type max_size() const _STLP_NOTHROW 
   1.293 +    { return size_t(-1) / sizeof(_Tp); }
   1.294 +
   1.295 +  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
   1.296 +  void destroy(pointer _p) { _p->~_Tp(); }
   1.297 +};
   1.298 +
// Specialization for void: provides the pointer/rebind machinery only,
// since objects of type void cannot be allocated or constructed.
_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC pthread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
#endif
};
   1.313 +
   1.314 +template <class _T1, class _T2>
   1.315 +inline bool operator==(const pthread_allocator<_T1>&,
   1.316 +                       const pthread_allocator<_T2>& a2) 
   1.317 +{
   1.318 +  return true;
   1.319 +}
   1.320 +
   1.321 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// Complement of the stateless equality above: pthread_allocator
// instances never compare unequal.
template <class _T1, class _T2>
inline bool operator!=(const pthread_allocator<_T1>&,
                       const pthread_allocator<_T2>&)
{
  return false;
}
   1.328 +#endif
   1.329 +
   1.330 +
   1.331 +#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION
   1.332 +
   1.333 +# ifdef _STLP_USE_RAW_SGI_ALLOCATORS
// Traits hook: wrap the raw SGI-style _Pthread_alloc in the __allocator
// adaptor so it presents a standard allocator interface for _Tp.
template <class _Tp, size_t _Max_size>
struct _Alloc_traits<_Tp, _Pthread_alloc<_Max_size> >
{
  typedef __allocator<_Tp, _Pthread_alloc<_Max_size> > 
          allocator_type;
};
   1.340 +# endif
   1.341 +
// Traits hook: rebinding a pthread_allocator of any element type to _Tp
// yields pthread_allocator<_Tp>.
template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, pthread_allocator<_Atype> >
{
  typedef pthread_allocator<_Tp> allocator_type;
};
   1.347 +
   1.348 +#endif
   1.349 +
   1.350 +#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)
   1.351 +
// Rebind an allocator object in place for compilers without member
// templates.  The cast merely reinterprets the same object, which is
// safe only because pthread_allocator carries no per-instance state.
template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>&
__stl_alloc_rebind(pthread_allocator<_Tp1>& __x, const _Tp2*) {
  return (pthread_allocator<_Tp2>&)__x;
}
   1.357 +
// Create a pthread_allocator for element type _Tp2 (the _Tp2* argument
// is a tag used only for overload selection).
template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>
__stl_alloc_create(pthread_allocator<_Tp1>&, const _Tp2*) {
  return pthread_allocator<_Tp2>();
}
   1.363 +
   1.364 +#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */
   1.365 +
   1.366 +//
   1.367 +// per_thread_allocator<> : this allocator always return memory to the same thread 
   1.368 +// it was allocated from.
   1.369 +//
   1.370 +
   1.371 +template <class _Tp>
   1.372 +class per_thread_allocator {
   1.373 +  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
   1.374 +  typedef pthread_alloc::__state_type __state_type;
   1.375 +public:
   1.376 +  typedef size_t     size_type;
   1.377 +  typedef ptrdiff_t  difference_type;
   1.378 +  typedef _Tp*       pointer;
   1.379 +  typedef const _Tp* const_pointer;
   1.380 +  typedef _Tp&       reference;
   1.381 +  typedef const _Tp& const_reference;
   1.382 +  typedef _Tp        value_type;
   1.383 +
   1.384 +#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
   1.385 +  template <class _NewType> struct rebind {
   1.386 +    typedef per_thread_allocator<_NewType> other;
   1.387 +  };
   1.388 +#endif
   1.389 +
   1.390 +  per_thread_allocator() _STLP_NOTHROW { 
   1.391 +    _M_state = _S_Alloc::_S_get_per_thread_state();
   1.392 +  }
   1.393 +  per_thread_allocator(const per_thread_allocator<_Tp>& __a) _STLP_NOTHROW : _M_state(__a._M_state){}
   1.394 +
   1.395 +#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
   1.396 +  template <class _OtherType> per_thread_allocator(const per_thread_allocator<_OtherType>& __a)
   1.397 +		_STLP_NOTHROW : _M_state(__a._M_state) {}
   1.398 +#endif
   1.399 +
   1.400 +  ~per_thread_allocator() _STLP_NOTHROW {}
   1.401 +
   1.402 +  pointer address(reference __x) const { return &__x; }
   1.403 +  const_pointer address(const_reference __x) const { return &__x; }
   1.404 +
   1.405 +  // __n is permitted to be 0.  The C++ standard says nothing about what
   1.406 +  // the return value is when __n == 0.
   1.407 +  _Tp* allocate(size_type __n, const void* = 0) {
   1.408 +    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp), _M_state)): 0;
   1.409 +  }
   1.410 +
   1.411 +  // p is not permitted to be a null pointer.
   1.412 +  void deallocate(pointer __p, size_type __n)
   1.413 +    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp), _M_state); }
   1.414 +
   1.415 +  size_type max_size() const _STLP_NOTHROW 
   1.416 +    { return size_t(-1) / sizeof(_Tp); }
   1.417 +
   1.418 +  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
   1.419 +  void destroy(pointer _p) { _p->~_Tp(); }
   1.420 +
   1.421 +  // state is being kept here
   1.422 +  __state_type* _M_state;
   1.423 +};
   1.424 +
// Specialization for void: provides the pointer/rebind machinery only,
// since objects of type void cannot be allocated or constructed.
_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC per_thread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef per_thread_allocator<_NewType> other;
  };
#endif
};
   1.439 +
// Two per_thread_allocator objects are equal iff they are bound to the
// same per-thread pool, i.e. memory from one may be freed via the other.
template <class _T1, class _T2>
inline bool operator==(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2) 
{
  return __a1._M_state == __a2._M_state;
}
   1.446 +
   1.447 +#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// Complement of the equality above: unequal iff bound to different pools.
template <class _T1, class _T2>
inline bool operator!=(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2)
{
  return __a1._M_state != __a2._M_state;
}
   1.454 +#endif
   1.455 +
   1.456 +
   1.457 +#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION
   1.458 +
// Traits hook: rebinding a per_thread_allocator of any element type to
// _Tp yields per_thread_allocator<_Tp>.
template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, per_thread_allocator<_Atype> >
{
  typedef per_thread_allocator<_Tp> allocator_type;
};
   1.464 +
   1.465 +#endif
   1.466 +
   1.467 +#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)
   1.468 +
// Rebind an allocator object in place for compilers without member
// templates.  NOTE(review): the cast reinterprets the same object; this
// works because _M_state is the sole data member and is element-type
// independent, so all instantiations share one layout.
template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>&
__stl_alloc_rebind(per_thread_allocator<_Tp1>& __x, const _Tp2*) {
  return (per_thread_allocator<_Tp2>&)__x;
}
   1.474 +
// Create a per_thread_allocator for element type _Tp2 (the _Tp2*
// argument is a tag used only for overload selection).  The result is
// bound to the calling thread's pool by its default constructor.
template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>
__stl_alloc_create(per_thread_allocator<_Tp1>&, const _Tp2*) {
  return per_thread_allocator<_Tp2>();
}
   1.480 +
   1.481 +#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */
   1.482 +
   1.483 +_STLP_END_NAMESPACE
   1.484 +
   1.485 +# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION) && !defined (_STLP_LINK_TIME_INSTANTIATION)
   1.486 +#  include <stl/_pthread_alloc.c>
   1.487 +# endif
   1.488 +
   1.489 +#endif /* _STLP_PTHREAD_ALLOC */
   1.490 +
   1.491 +// Local Variables:
   1.492 +// mode:C++
   1.493 +// End: