epoc32/include/stdapis/stlport/stl/_alloc.c
author William Roberts <williamr@symbian.org>
Tue, 16 Mar 2010 16:12:26 +0000
branch Symbian2
changeset 2 2fe1408b6811
parent 0 061f57f2323e
permissions -rw-r--r--
Final list of Symbian^2 public API header files
     1 /*
     2  *
     3  * Copyright (c) 1996,1997
     4  * Silicon Graphics Computer Systems, Inc.
     5  *
     6  * Copyright (c) 1997
     7  * Moscow Center for SPARC Technology
     8  *
     9  * Copyright (c) 1999 
    10  * Boris Fomitchev
    11  *
    12  * This material is provided "as is", with absolutely no warranty expressed
    13  * or implied. Any use is at your own risk.
    14  *
    15  * Permission to use or copy this software for any purpose is hereby granted 
    16  * without fee, provided the above notices are retained on all copies.
    17  * Permission to modify the code and to distribute modified code is granted,
    18  * provided the above notices are retained, and a notice that the code was
    19  * modified is included with the above copyright notice.
    20  *
    21  */
    22 #ifndef _STLP_ALLOC_C
    23 #define _STLP_ALLOC_C
    24 
    25 #ifdef __WATCOMC__
    26 #pragma warning 13 9
    27 #pragma warning 367 9
    28 #pragma warning 368 9
    29 #endif
    30 
    31 #ifndef _STLP_INTERNAL_ALLOC_H
    32 #  include <stl/_alloc.h>
    33 #endif
    34 
    35 # if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION)
    36 
    37 # ifdef _STLP_SGI_THREADS
    38   // We test whether threads are in use before locking.
    39   // Perhaps this should be moved into stl_threads.h, but that
    40   // probably makes it harder to avoid the procedure call when
    41   // it isn't needed.
    42     extern "C" {
    43       extern int __us_rsthread_malloc;
    44     }
    45 # endif
    46 
    47 
// Specialised debug form of malloc which does not provide "false"
// memory leaks when run with debug CRT libraries.
// Exactly one of the three __stlp_chunk_malloc definitions below is
// selected at preprocessing time; all chunk allocations in this file
// go through it.
#if defined(_STLP_MSVC) && (_STLP_MSVC>=1020 && defined(_STLP_DEBUG_ALLOC)) && ! defined (_STLP_WINCE)
#  include <crtdbg.h>
// MSVC debug build: tag blocks as _CRT_BLOCK so the CRT leak dumper
// ignores allocator pool memory.  _STLP_CHECK_NULL_ALLOC supplies the
// null check and the return statement.
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_malloc_dbg(__bytes, _CRT_BLOCK, __FILE__, __LINE__)); }
#else	// !_DEBUG
# ifdef _STLP_NODE_ALLOC_USE_MALLOC
#  include <cstdlib>
// Plain malloc variant (opt-in); _STLP_CHECK_NULL_ALLOC handles failure.
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); }
# else
// Default: route through STLport's operator-new wrapper so the installed
// new-handler / bad_alloc machinery applies.
inline void* __stlp_chunk_malloc(size_t __bytes) { return _STLP_STD::__stl_new(__bytes); }
# endif
#endif	// !_DEBUG
    61 
    62 
    63 #define _S_FREELIST_INDEX(__bytes) ((__bytes-size_t(1))>>(int)_ALIGN_SHIFT)
    64 
    65 _STLP_BEGIN_NAMESPACE
    66 
    67 #ifndef _STLP_NO_NODE_ALLOC
    68 
    69 template <int __inst>
    70 void *  _STLP_CALL __malloc_alloc<__inst>::_S_oom_malloc(size_t __n)
    71 {
    72   __oom_handler_type __my_malloc_handler;
    73   void * __result;
    74 
    75   for (;;) {
    76     __my_malloc_handler = __oom_handler;
    77     if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
    78     (*__my_malloc_handler)();
    79     __result = malloc(__n);
    80     if (__result) return(__result);
    81   }
    82 #if defined(_STLP_NEED_UNREACHABLE_RETURN)
    83   return 0;
    84 #endif
    85 
    86 }
    87 
    88 #endif
    89 
template <class _Alloc>
void *  _STLP_CALL __debug_alloc<_Alloc>::allocate(size_t __n) {
  // Allocate __n elements plus guard zones before and after the user area;
  // the leading zone begins with an __alloc_header that deallocate() later
  // validates to catch double-free, size mismatch and buffer overruns.
  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();
  __alloc_header *__result = (__alloc_header *)__allocator_type::allocate(__real_n);
  // Pre-fill the whole chunk with the shred pattern so any byte outside the
  // user area that changes is detectable at deallocation time.
  memset((char*)__result, __shred_byte, __real_n*sizeof(value_type));
  // Record the bookkeeping fields checked by deallocate().
  __result->__magic = __magic;
  __result->__type_size = sizeof(value_type);
  __result->_M_size = (_STLP_UINT32_T)__n;
  // Return the address just past the leading guard zone.
  return ((char*)__result) + (long)__extra_before;
}
   100 
template <class _Alloc>
void  _STLP_CALL
__debug_alloc<_Alloc>::deallocate(void *__p, size_t __n) {
  // Step back over the leading guard zone to recover the header written
  // by allocate().
  __alloc_header * __real_p = (__alloc_header*)((char *)__p -(long)__extra_before);
  // check integrity
  _STLP_VERBOSE_ASSERT(__real_p->__magic != __deleted_magic, _StlMsg_DBA_DELETED_TWICE)
  _STLP_VERBOSE_ASSERT(__real_p->__magic == __magic, _StlMsg_DBA_NEVER_ALLOCATED)
  _STLP_VERBOSE_ASSERT(__real_p->__type_size == 1,_StlMsg_DBA_TYPE_MISMATCH)
  _STLP_VERBOSE_ASSERT(__real_p->_M_size == __n, _StlMsg_DBA_SIZE_MISMATCH)
  // check pads on both sides
  // Leading pad: every byte between the header and the user pointer must
  // still hold the shred pattern, otherwise the caller underran the block.
  unsigned char* __tmp;
  for (__tmp= (unsigned char*)(__real_p+1); __tmp < (unsigned char*)__p; __tmp++) {  
    _STLP_VERBOSE_ASSERT(*__tmp==__shred_byte, _StlMsg_DBA_UNDERRUN)
      }
  
  size_t __real_n= __n + __extra_before_chunk() + __extra_after_chunk();
  
  // Trailing pad: bytes past the user area up to the end of the real chunk
  // must likewise be untouched, otherwise the caller overran the block.
  for (__tmp= ((unsigned char*)__p)+__n*sizeof(value_type); 
       __tmp < ((unsigned char*)__real_p)+__real_n ; __tmp++) {
    _STLP_VERBOSE_ASSERT(*__tmp==__shred_byte, _StlMsg_DBA_OVERRUN)
      }
  
  // that may be unfortunate, just in case
  // Mark the header deleted and re-shred the user area so a later
  // double-free or use-after-free is caught/obvious.
  __real_p->__magic=__deleted_magic;
  memset((char*)__p, __shred_byte, __n*sizeof(value_type));
  __allocator_type::deallocate(__real_p, __real_n);
}
   128 
   129 #ifndef _STLP_NO_NODE_ALLOC
   130 
   131 // # ifdef _STLP_THREADS
   132 
// Scoped lock for the node allocator's shared free lists: the constructor
// acquires the static mutex and the destructor releases it, so the lock is
// dropped on normal exit and during stack unwinding.  When __threads is
// false the compile-time condition makes both operations no-ops.
template <bool __threads, int __inst>
class _Node_Alloc_Lock {
public:
  _Node_Alloc_Lock() { 
    
#  ifdef _STLP_SGI_THREADS
    // On SGI, additionally skip locking while the process is still
    // single-threaded (__us_rsthread_malloc is set by the threads runtime).
    if (__threads && __us_rsthread_malloc)
#  else /* !_STLP_SGI_THREADS */
      if (__threads) 
#  endif
    	_S_lock._M_acquire_lock(); 
  }
  
  ~_Node_Alloc_Lock() {
#  ifdef _STLP_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
#  else /* !_STLP_SGI_THREADS */
      if (__threads)
#  endif
        _S_lock._M_release_lock(); 
  }
  
  // Single mutex shared by all users of this instantiation's free lists.
  static _STLP_STATIC_MUTEX _S_lock;
};
   157 
   158 // # endif  /* _STLP_THREADS */
   159 
   160 
   161 template <bool __threads, int __inst>
   162 void* _STLP_CALL
   163 __node_alloc<__threads, __inst>::_M_allocate(size_t __n) {
   164   void*  __r;
   165   _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
   166   // #       ifdef _STLP_THREADS
   167   /*REFERENCED*/
   168   _Node_Alloc_Lock<__threads, __inst> __lock_instance;
   169   // #       endif
   170   // Acquire the lock here with a constructor call.
   171   // This ensures that it is released in exit or during stack
   172   // unwinding.
   173   if ( (__r  = *__my_free_list) != 0 ) {
   174     *__my_free_list = ((_Obj*)__r) -> _M_free_list_link;
   175   } else {
   176     __r = _S_refill(__n);
   177   }
   178   // lock is released here
   179   return __r;
   180 }
   181 
   182 template <bool __threads, int __inst>
   183 void _STLP_CALL
   184 __node_alloc<__threads, __inst>::_M_deallocate(void *__p, size_t __n) {
   185   _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
   186   // #       ifdef _STLP_THREADS
   187   /*REFERENCED*/
   188   _Node_Alloc_Lock<__threads, __inst> __lock_instance;
   189   // #       endif /* _STLP_THREADS */
   190   // acquire lock
   191   ((_Obj *)__p) -> _M_free_list_link = *__my_free_list;
   192   *__my_free_list = (_Obj *)__p;
   193   // lock is released here
   194 }
   195 
/* We allocate memory in large chunks in order to avoid fragmenting     */
/* the malloc heap too much.                                            */
/* We assume that size is properly aligned.                             */
/* We hold the allocation lock.                                         */
// Returns a pointer to space for up to __nobjs objects of _p_size bytes;
// on return __nobjs is reduced to the number actually provided.
template <bool __threads, int __inst>
char* _STLP_CALL
__node_alloc<__threads, __inst>::_S_chunk_alloc(size_t _p_size, 
						int& __nobjs)
{
  char* __result;
  size_t __total_bytes = _p_size * __nobjs;
  // Bytes remaining in the current pool [_S_start_free, _S_end_free).
  size_t __bytes_left = _S_end_free - _S_start_free;

  if (__bytes_left >= __total_bytes) {
    // Pool satisfies the full request.
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return(__result);
  } else if (__bytes_left >= _p_size) {
    // Pool holds at least one object: hand out as many as fit and tell
    // the caller how many it got via __nobjs.
    __nobjs = (int)(__bytes_left/_p_size);
    __total_bytes = _p_size * __nobjs;
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return(__result);
  } else {
    // Pool can't hold even one object: grow it.  Request twice the ask
    // plus an amount proportional to total heap usage so the pool grows
    // geometrically over time.
    size_t __bytes_to_get = 
      2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
    // Try to make use of the left-over piece.
    if (__bytes_left > 0) {
      // The remainder is always a multiple of _ALIGN, so it fits exactly
      // on some smaller size-class free list.
      _Obj* _STLP_VOLATILE* __my_free_list =
	_S_free_list + _S_FREELIST_INDEX(__bytes_left);

      ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
      *__my_free_list = (_Obj*)_S_start_free;
    }
    _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
    if (0 == _S_start_free) {
      size_t __i;
      _Obj* _STLP_VOLATILE* __my_free_list;
      _Obj* __p;
      // Try to make do with what we have.  That can't
      // hurt.  We do not try smaller requests, since that tends
      // to result in disaster on multi-process machines.
      // Scavenge: steal one node from any free list holding blocks of at
      // least _p_size bytes and recycle it as the new pool.
      for (__i = _p_size; __i <= (size_t)_MAX_BYTES; __i += (size_t)_ALIGN) {
	__my_free_list = _S_free_list + _S_FREELIST_INDEX(__i);
	__p = *__my_free_list;
	if (0 != __p) {
	  *__my_free_list = __p -> _M_free_list_link;
	  _S_start_free = (char*)__p;
	  _S_end_free = _S_start_free + __i;
	  // Recurse: the refreshed pool now covers at least one object.
	  return(_S_chunk_alloc(_p_size, __nobjs));
	  // Any leftover piece will eventually make it to the
	  // right free list.
	}
      }
      _S_end_free = 0;	// In case of exception.
      // Last resort: retry the chunk allocation and let it throw.
      _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
    /*
      (char*)malloc_alloc::allocate(__bytes_to_get);
      */

      // This should either throw an
      // exception or remedy the situation.  Thus we assume it
      // succeeded.
    }
    _S_heap_size += __bytes_to_get;
    _S_end_free = _S_start_free + __bytes_to_get;
    // Recurse with the enlarged pool; this time a fast path will hit.
    return(_S_chunk_alloc(_p_size, __nobjs));
  }
}
   265 
   266 
   267 /* Returns an object of size __n, and optionally adds to size __n free list.*/
   268 /* We assume that __n is properly aligned.                                */
   269 /* We hold the allocation lock.                                         */
   270 template <bool __threads, int __inst>
   271 void* _STLP_CALL
   272 __node_alloc<__threads, __inst>::_S_refill(size_t __n)
   273 {
   274   int __nobjs = 20;
   275   __n = _S_round_up(__n);
   276   char* __chunk = _S_chunk_alloc(__n, __nobjs);
   277   _Obj* _STLP_VOLATILE* __my_free_list;
   278   _Obj* __result;
   279   _Obj* __current_obj;
   280   _Obj* __next_obj;
   281   int __i;
   282 
   283   if (1 == __nobjs) return(__chunk);
   284   __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
   285 
   286   /* Build free list in chunk */
   287   __result = (_Obj*)__chunk;
   288   *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
   289   for (__i = 1; ; __i++) {
   290     __current_obj = __next_obj;
   291     __next_obj = (_Obj*)((char*)__next_obj + __n);
   292     if (__nobjs - 1 == __i) {
   293       __current_obj -> _M_free_list_link = 0;
   294       break;
   295     } else {
   296       __current_obj -> _M_free_list_link = __next_obj;
   297     }
   298   }
   299   return(__result);
   300 }
   301 
# if ( _STLP_STATIC_TEMPLATE_DATA > 0 )
// Compiler supports template static data members: define them directly.
// malloc_alloc out-of-memory handling
template <int __inst>
__oom_handler_type __malloc_alloc<__inst>::__oom_handler=(__oom_handler_type)0 ;

#ifdef _STLP_THREADS
    // Mutex guarding the node allocator free lists (one per instantiation).
    template <bool __threads, int __inst>
    _STLP_STATIC_MUTEX
    _Node_Alloc_Lock<__threads, __inst>::_S_lock _STLP_MUTEX_INITIALIZER;
#endif

// Per-size-class free lists; indexed via _S_FREELIST_INDEX.
template <bool __threads, int __inst>
_Node_alloc_obj * _STLP_VOLATILE
__node_alloc<__threads, __inst>::_S_free_list[_STLP_NFREELISTS]
= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy.  Otherwise it appears to allocate too little
// space for the array.

// Chunk pool bounds and running heap-size counter used by _S_chunk_alloc.
template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __node_alloc<__threads, __inst>::_S_heap_size = 0;


# else /* ( _STLP_STATIC_TEMPLATE_DATA > 0 ) */
// No template static data support: explicitly instantiate the statics for
// the <false,0> and <true,0> instantiations via __DECLARE_INSTANCE.

__DECLARE_INSTANCE(__oom_handler_type, __malloc_alloc<0>::__oom_handler, =0);

# define _STLP_ALLOC_NOTHREADS __node_alloc<false, 0>
# define _STLP_ALLOC_THREADS   __node_alloc<true, 0>
# define _STLP_ALLOC_NOTHREADS_LOCK _Node_Alloc_Lock<false, 0>
# define _STLP_ALLOC_THREADS_LOCK   _Node_Alloc_Lock<true, 0>

__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_start_free,=0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_end_free,=0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_NOTHREADS::_S_heap_size,=0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_NOTHREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_start_free,=0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_end_free,=0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_THREADS::_S_heap_size,=0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_THREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});
// #   ifdef _STLP_THREADS
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_NOTHREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_THREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
// #   endif

# undef _STLP_ALLOC_THREADS
# undef _STLP_ALLOC_NOTHREADS

#  endif /* _STLP_STATIC_TEMPLATE_DATA */
   365 
   366 #endif
   367 
   368 _STLP_END_NAMESPACE
   369 
   370 # undef _S_FREELIST_INDEX
   371 
   372 # endif /* _STLP_EXPOSE_GLOBALS_IMPLEMENTATION */
   373 
   374 #endif /*  _STLP_ALLOC_C */
   375 
   376 // Local Variables:
   377 // mode:C++
   378 // End: