epoc32/include/stdapis/stlport/stl/_pthread_alloc.h
author William Roberts <williamr@symbian.org>
Tue, 16 Mar 2010 16:12:26 +0000
branch Symbian2
changeset 2 2fe1408b6811
parent 0 061f57f2323e
permissions -rw-r--r--
Final list of Symbian^2 public API header files
/*
 *
 * Copyright (c) 1994
 * Hewlett-Packard Company
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */

#ifndef _STLP_PTHREAD_ALLOC_H
#define _STLP_PTHREAD_ALLOC_H

// Pthread-specific node allocator.
// This is similar to the default allocator, except that free-list
// information is kept separately for each thread, avoiding locking.
// This should be reasonably fast even in the presence of threads.
// The down side is that storage may not be well-utilized.
// It is not an error to allocate memory in thread A and deallocate
// it in thread B.  But this effectively transfers ownership of the memory,
// so that it can only be reallocated by thread B.  Thus this can effectively
// result in a storage leak if it's done on a regular basis.
// It can also result in frequent sharing of
// cache lines among processors, with potentially serious performance
// consequences.
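//
// Added illustration (not part of the original header): the pthread_alloc
// typedef defined below exposes a static allocate/deallocate interface, so
// the ownership transfer described above looks roughly like this:
//
//   void* __p = pthread_alloc::allocate(32);  // in thread A: from A's lists
//   // ... hand __p over to thread B ...
//   pthread_alloc::deallocate(__p, 32);       // in thread B: the 32-byte node
//                                             // now sits on B's free list and
//                                             // only B will reuse it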

#include <pthread.h>

#ifndef _STLP_INTERNAL_ALLOC_H
#include <stl/_alloc.h>
#endif

#ifndef __RESTRICT
#  define __RESTRICT
#endif

_STLP_BEGIN_NAMESPACE

#define _STLP_DATA_ALIGNMENT 8

union _Pthread_alloc_obj {
    union _Pthread_alloc_obj * __free_list_link;
    char __client_data[_STLP_DATA_ALIGNMENT];    /* The client sees this.    */
};
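
// Added commentary (not part of the original header): while a block sits on a
// free list its own storage holds the link pointer, so the free lists need no
// extra memory; once handed out, the same bytes are viewed as raw client data:
//
//   _Pthread_alloc_obj* __node = ...;                       // block on a free list
//   _Pthread_alloc_obj* __next = __node->__free_list_link;  // link view
//   char*               __mem  = __node->__client_data;     // client view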

// Pthread allocators don't appear to the client to have meaningful
// instances.  We do in fact need to associate some state with each
// thread.  That state is represented by
// _Pthread_alloc_per_thread_state<_Max_size>.

template<size_t _Max_size>
struct _Pthread_alloc_per_thread_state {
  typedef _Pthread_alloc_obj __obj;
  enum { _S_NFREELISTS = _Max_size/_STLP_DATA_ALIGNMENT };

  _Pthread_alloc_per_thread_state() : __next(0)
  {
    memset((void *)__free_list, 0, (size_t)_S_NFREELISTS * sizeof(__obj *));
  }
  // Returns an object of size __n, and possibly adds objects to the
  // free list for size __n.
  void *_M_refill(size_t __n);

  _Pthread_alloc_obj* volatile __free_list[_S_NFREELISTS];
  // Free list link for the list of available per thread structures.
  // When one of these becomes available for reuse due to thread
  // termination, any objects in its free list remain associated
  // with it.  The whole structure may then be used by a newly
  // created thread.
  _Pthread_alloc_per_thread_state<_Max_size> * __next;
  // This data member is only used by per_thread_allocator, which
  // returns memory to the originating thread.
  _STLP_mutex _M_lock;
};
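
// Added commentary (a sketch of the usual SGI-style refill, not the actual
// implementation, which lives in <stl/_pthread_alloc.c>): _M_refill typically
// asks _Pthread_alloc<_Max_size>::_S_chunk_alloc for a batch of equally sized
// objects, hands the first one to the caller and threads the remainder onto
// the matching free list:
//
//   size_t __nobjs = 20;   // illustrative batch size; the real value may differ
//   char* __chunk = _Pthread_alloc<_Max_size>::_S_chunk_alloc(__n, __nobjs);
//   // chain objects 1..__nobjs-1 through their __free_list_link members onto
//   // __free_list[_S_freelist_index(__n)], then return object 0 to the caller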

// Pthread-specific allocator.
// The argument specifies the largest object size allocated from per-thread
// free lists.  Larger objects are allocated using malloc_alloc.
// Max_size must be a power of 2.
template < __DFL_NON_TYPE_PARAM(size_t, _Max_size, _MAX_BYTES) >
class _Pthread_alloc {

public: // but only for internal use:

  typedef _Pthread_alloc_obj __obj;
  typedef _Pthread_alloc_per_thread_state<_Max_size> __state_type;
  typedef char value_type;

  // Allocates a chunk for nobjs of size size.  nobjs may be reduced
  // if it is inconvenient to allocate the requested number.
  static char *_S_chunk_alloc(size_t __size, size_t &__nobjs);

  enum {_S_ALIGN = _STLP_DATA_ALIGNMENT};

  static size_t _S_round_up(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1) & ~((int)_S_ALIGN - 1));
  }
  static size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (int)_S_ALIGN-1)/(int)_S_ALIGN - 1);
  }
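
  // Added worked example (illustrative, not part of the original header):
  // with _S_ALIGN == 8, requests are rounded up to the next multiple of 8 and
  // mapped onto a small number of free lists, e.g.
  //
  //   _S_round_up(1)  == 8     _S_freelist_index(1)  == 0
  //   _S_round_up(13) == 16    _S_freelist_index(13) == 1
  //   _S_round_up(64) == 64    _S_freelist_index(64) == 7
  //
  // so with _Max_size == 128 there are 16 lists serving sizes 8, 16, ..., 128.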

private:
  // Chunk allocation state. And other shared state.
  // Protected by _S_chunk_allocator_lock.
  static _STLP_mutex_base _S_chunk_allocator_lock;
  static char *_S_start_free;
  static char *_S_end_free;
  static size_t _S_heap_size;
  static _Pthread_alloc_per_thread_state<_Max_size>* _S_free_per_thread_states;
        // Allocator instances that are currently unclaimed by any thread.
  static pthread_key_t _S_key;
  static bool _S_key_initialized;
        // Pthread key under which per thread state is stored.
  static void _S_destructor(void *instance);
        // Function to be called on thread exit to reclaim per thread
        // state.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_new_per_thread_state();
        // Return a recycled or new per thread state.
public:
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_get_per_thread_state();
        // Ensure that the current thread has an associated
        // per thread state, and return it.
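  //
  // Added commentary (a sketch, not the actual implementation, which lives in
  // <stl/_pthread_alloc.c>): the per thread state is conventionally reached
  // through the pthread key declared above, roughly as follows:
  //
  //   __state_type* __s = (__state_type*)pthread_getspecific(_S_key);
  //   if (__s == 0) {                      // first use in this thread
  //     __s = _S_new_per_thread_state();   // recycle or build a state
  //     pthread_setspecific(_S_key, __s);  // _S_destructor reclaims it on exit
  //   }
  //   return __s;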
private:
  // Scoped lock guard for _S_chunk_allocator_lock.
  class _M_lock;
  friend class _M_lock;
  class _M_lock {
      public:
        _M_lock () { _S_chunk_allocator_lock._M_acquire_lock(); }
        ~_M_lock () { _S_chunk_allocator_lock._M_release_lock(); }
  };

public:

  /* n must be > 0      */
  static void * allocate(size_t __n)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;
    __state_type* __a;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    __a = _S_get_per_thread_state();

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  }

  /* p may not be 0 */
  static void deallocate(void *__p, size_t __n)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;
    __state_type* __a;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    __a = _S_get_per_thread_state();

    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  // boris : versions for per_thread_allocator
  /* n must be > 0      */
  static void * allocate(size_t __n, __state_type* __a)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;

    if (__n > _Max_size) {
        return(__malloc_alloc<0>::allocate(__n));
    }

    // boris : here, we have to lock the per thread state, as we may be
    // getting memory from a different thread's pool.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
        void *__r = __a -> _M_refill(_S_round_up(__n));
        return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  }

  /* p may not be 0 */
  static void deallocate(void *__p, size_t __n, __state_type* __a)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;

    if (__n > _Max_size) {
        __malloc_alloc<0>::deallocate(__p, __n);
        return;
    }

    // boris : here, we have to lock the per thread state, as we may be
    // returning memory allocated by a different thread.
    _STLP_mutex_lock __lock(__a->_M_lock);

    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  static void * reallocate(void *__p, size_t __old_sz, size_t __new_sz);

};

# if defined (_STLP_USE_TEMPLATE_EXPORT)
_STLP_EXPORT_TEMPLATE_CLASS _Pthread_alloc<_MAX_BYTES>;
# endif

typedef _Pthread_alloc<_MAX_BYTES> __pthread_alloc;
typedef __pthread_alloc pthread_alloc;

template <class _Tp>
class pthread_allocator {
  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
#endif

  pthread_allocator() _STLP_NOTHROW {}
  pthread_allocator(const pthread_allocator<_Tp>&) _STLP_NOTHROW {}

#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
  template <class _OtherType> pthread_allocator(const pthread_allocator<_OtherType>&)
		_STLP_NOTHROW {}
#endif

  ~pthread_allocator() _STLP_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp)))
                    : 0;
  }

  // p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const _STLP_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
  void destroy(pointer _p) { _p->~_Tp(); }
};

_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC pthread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
#endif
};
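
// Added usage sketch (illustrative, not part of the original header; it
// assumes the usual STLport configuration in which this namespace is made
// visible as std): pthread_allocator is a standard-conforming allocator and
// can be supplied to any container as its allocator argument, e.g.
//
//   #include <vector>
//   std::vector<int, std::pthread_allocator<int> > __v;
//   __v.push_back(42);   // small buffers come from the calling thread's
//                        // free lists, large ones fall back to malloc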

template <class _T1, class _T2>
inline bool operator==(const pthread_allocator<_T1>&,
                       const pthread_allocator<_T2>&)
{
  return true;
}

#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template <class _T1, class _T2>
inline bool operator!=(const pthread_allocator<_T1>&,
                       const pthread_allocator<_T2>&)
{
  return false;
}
#endif


#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION

# ifdef _STLP_USE_RAW_SGI_ALLOCATORS
template <class _Tp, size_t _Max_size>
struct _Alloc_traits<_Tp, _Pthread_alloc<_Max_size> >
{
  typedef __allocator<_Tp, _Pthread_alloc<_Max_size> >
          allocator_type;
};
# endif

template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, pthread_allocator<_Atype> >
{
  typedef pthread_allocator<_Tp> allocator_type;
};

#endif

#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)

template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>&
__stl_alloc_rebind(pthread_allocator<_Tp1>& __x, const _Tp2*) {
  return (pthread_allocator<_Tp2>&)__x;
}

template <class _Tp1, class _Tp2>
inline pthread_allocator<_Tp2>
__stl_alloc_create(pthread_allocator<_Tp1>&, const _Tp2*) {
  return pthread_allocator<_Tp2>();
}

#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */

//
// per_thread_allocator<> : this allocator always returns memory to the same
// thread it was allocated from.
//

template <class _Tp>
class per_thread_allocator {
  typedef pthread_alloc _S_Alloc;          // The underlying allocator.
  typedef pthread_alloc::__state_type __state_type;
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef per_thread_allocator<_NewType> other;
  };
#endif

  per_thread_allocator() _STLP_NOTHROW {
    _M_state = _S_Alloc::_S_get_per_thread_state();
  }
  per_thread_allocator(const per_thread_allocator<_Tp>& __a) _STLP_NOTHROW : _M_state(__a._M_state){}

#if defined (_STLP_MEMBER_TEMPLATES) /* && defined (_STLP_FUNCTION_PARTIAL_ORDER) */
  template <class _OtherType> per_thread_allocator(const per_thread_allocator<_OtherType>& __a)
		_STLP_NOTHROW : _M_state(__a._M_state) {}
#endif

  ~per_thread_allocator() _STLP_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? __STATIC_CAST(_Tp*,_S_Alloc::allocate(__n * sizeof(_Tp), _M_state)): 0;
  }

  // p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _S_Alloc::deallocate(__p, __n * sizeof(_Tp), _M_state); }

  size_type max_size() const _STLP_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { _STLP_PLACEMENT_NEW (__p) _Tp(__val); }
  void destroy(pointer _p) { _p->~_Tp(); }

  // The per thread state this allocator allocates from and returns memory to.
  __state_type* _M_state;
};
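
// Added usage sketch (illustrative, not part of the original header; it
// assumes the usual STLport configuration in which this namespace is made
// visible as std): unlike pthread_allocator above, every per_thread_allocator
// object is bound to the per thread state of the thread that constructed it
// (copies share that state), and deallocate() always hands memory back to
// that state under its lock, e.g.
//
//   #include <list>
//   std::list<int, std::per_thread_allocator<int> > __l; // binds to this
//   __l.push_back(1);                                    // thread's pool
//   // nodes freed later, even from another thread, return to this pool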

_STLP_TEMPLATE_NULL
class _STLP_CLASS_DECLSPEC per_thread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;
#ifdef _STLP_MEMBER_TEMPLATE_CLASSES
  template <class _NewType> struct rebind {
    typedef per_thread_allocator<_NewType> other;
  };
#endif
};

template <class _T1, class _T2>
inline bool operator==(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2)
{
  return __a1._M_state == __a2._M_state;
}

#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template <class _T1, class _T2>
inline bool operator!=(const per_thread_allocator<_T1>& __a1,
                       const per_thread_allocator<_T2>& __a2)
{
  return __a1._M_state != __a2._M_state;
}
#endif


#ifdef _STLP_CLASS_PARTIAL_SPECIALIZATION

template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, per_thread_allocator<_Atype> >
{
  typedef per_thread_allocator<_Tp> allocator_type;
};

#endif

#if !defined (_STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM)

template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>&
__stl_alloc_rebind(per_thread_allocator<_Tp1>& __x, const _Tp2*) {
  return (per_thread_allocator<_Tp2>&)__x;
}

template <class _Tp1, class _Tp2>
inline per_thread_allocator<_Tp2>
__stl_alloc_create(per_thread_allocator<_Tp1>&, const _Tp2*) {
  return per_thread_allocator<_Tp2>();
}

#endif /* _STLP_USE_NESTED_TCLASS_THROUGHT_TPARAM */

_STLP_END_NAMESPACE

# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION) && !defined (_STLP_LINK_TIME_INSTANTIATION)
#  include <stl/_pthread_alloc.c>
# endif

#endif /* _STLP_PTHREAD_ALLOC_H */

// Local Variables:
// mode:C++
// End: