epoc32/include/stdapis/stlportv5/stl/_alloc.c
/*
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */
#ifndef _STLP_ALLOC_C
#define _STLP_ALLOC_C

#ifdef __WATCOMC__
#pragma warning 13 9
#pragma warning 367 9
#pragma warning 368 9
#endif

#ifndef _STLP_INTERNAL_ALLOC_H
#  include <stl/_alloc.h>
#endif

# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION)

# ifdef _STLP_SGI_THREADS
  // We test whether threads are in use before locking.
  // Perhaps this should be moved into stl_threads.h, but that
  // probably makes it harder to avoid the procedure call when
  // it isn't needed.
extern "C" {
  extern int __us_rsthread_malloc;
}
# endif

// Specialised debug form of malloc which does not provide "false"
// memory leaks when run with debug CRT libraries.
#if defined(_STLP_MSVC) && (_STLP_MSVC>=1020 && defined(_STLP_DEBUG_ALLOC)) && ! defined (_STLP_WINCE)
#  include <crtdbg.h>
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_malloc_dbg(__bytes, _CRT_BLOCK, __FILE__, __LINE__)); }
#else  // !_DEBUG
# ifdef _STLP_NODE_ALLOC_USE_MALLOC
#  include <cstdlib>
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); }
# else
inline void* __stlp_chunk_malloc(size_t __bytes) { return _STLP_STD::__stl_new(__bytes); }
# endif
#endif  // !_DEBUG

#define _S_FREELIST_INDEX(__bytes) ((__bytes-size_t(1))>>(int)_ALIGN_SHIFT)
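
// Illustration (an assumption based on the usual STLport/SGI node-allocator
// configuration of _ALIGN == 8 and _ALIGN_SHIFT == 3): the macro above maps
// a request of __bytes to the free list whose node size is __bytes rounded
// up to the next multiple of _ALIGN, e.g.
//   _S_FREELIST_INDEX(1)  == (1 - 1) >> 3  == 0   // 8-byte list
//   _S_FREELIST_INDEX(13) == (13 - 1) >> 3 == 1   // 16-byte list
//   _S_FREELIST_INDEX(64) == (64 - 1) >> 3 == 7   // 64-byte list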

_STLP_BEGIN_NAMESPACE

#ifndef _STLP_NO_NODE_ALLOC

template <int __inst>
void *  _STLP_CALL __malloc_alloc<__inst>::_S_oom_malloc(size_t __n)
{
  __oom_handler_type __my_malloc_handler;
  void * __result;

  for (;;) {
    __my_malloc_handler = __oom_handler;
    if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
    (*__my_malloc_handler)();
    __result = malloc(__n);
    if (__result) return(__result);
  }
#if defined(_STLP_NEED_UNREACHABLE_RETURN)
  return 0;
#endif
}
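
// _S_oom_malloc is only reached once plain malloc() has failed: it keeps
// invoking the installed __oom_handler (which is expected to release some
// memory or throw) and retrying until malloc() succeeds.  A minimal sketch
// of such a handler, assuming it is installed through the handler-setting
// member that __malloc_alloc declares in stl/_alloc.h (the exact name lives
// there); the reserve-pool idea below is purely hypothetical:
//
//   static char* __reserve = new char[64 * 1024];   // hypothetical emergency pool
//   static void __release_reserve() {
//     if (__reserve == 0) { __THROW_BAD_ALLOC; }    // nothing left to free
//     delete[] __reserve;                           // give the heap something back
//     __reserve = 0;
//   }
//
// With no handler installed, the loop above throws bad_alloc immediately.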

#endif

template <class _Alloc>
void *  _STLP_CALL __debug_alloc<_Alloc>::allocate(size_t __n) {
  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();
  __alloc_header *__result = (__alloc_header *)__allocator_type::allocate(__real_n);
  memset((char*)__result, __shred_byte, __real_n*sizeof(value_type));
  __result->__magic = __magic;
  __result->__type_size = sizeof(value_type);
  __result->_M_size = (_STLP_UINT32_T)__n;
  return ((char*)__result) + (long)__extra_before;
}
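
// Layout produced by the two __debug_alloc functions above and below (a
// reading aid inferred from the code, not an authoritative specification):
//
//   | __alloc_header | front pad ... | __n user objects | back pad ... |
//   ^ __real_p       ^ filled with   ^ pointer handed     ^ filled with
//                      __shred_byte    to the caller        __shred_byte
//
// allocate() shreds the whole block and records __magic, the element size
// and __n in the header; deallocate() re-checks all of that plus both pads,
// so buffer under/overruns and double frees are caught at release time.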

template <class _Alloc>
void  _STLP_CALL
__debug_alloc<_Alloc>::deallocate(void *__p, size_t __n) {
  __alloc_header * __real_p = (__alloc_header*)((char *)__p - (long)__extra_before);
  // check integrity
  _STLP_VERBOSE_ASSERT(__real_p->__magic != __deleted_magic, _StlMsg_DBA_DELETED_TWICE)
  _STLP_VERBOSE_ASSERT(__real_p->__magic == __magic, _StlMsg_DBA_NEVER_ALLOCATED)
  _STLP_VERBOSE_ASSERT(__real_p->__type_size == 1, _StlMsg_DBA_TYPE_MISMATCH)
  _STLP_VERBOSE_ASSERT(__real_p->_M_size == __n, _StlMsg_DBA_SIZE_MISMATCH)
  // check pads on both sides
  unsigned char* __tmp;
  for (__tmp = (unsigned char*)(__real_p+1); __tmp < (unsigned char*)__p; __tmp++) {
    _STLP_VERBOSE_ASSERT(*__tmp==__shred_byte, _StlMsg_DBA_UNDERRUN)
  }

  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();

  for (__tmp = ((unsigned char*)__p)+__n*sizeof(value_type);
       __tmp < ((unsigned char*)__real_p)+__real_n; __tmp++) {
    _STLP_VERBOSE_ASSERT(*__tmp==__shred_byte, _StlMsg_DBA_OVERRUN)
  }

  // mark the header as deleted and shred the user area, so double frees
  // and use of stale pointers are more likely to be detected
  __real_p->__magic = __deleted_magic;
  memset((char*)__p, __shred_byte, __n*sizeof(value_type));
  __allocator_type::deallocate(__real_p, __real_n);
}

#ifndef _STLP_NO_NODE_ALLOC

// # ifdef _STLP_THREADS

template <bool __threads, int __inst>
class _Node_Alloc_Lock {
public:
  _Node_Alloc_Lock() {
#  ifdef _STLP_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
#  else /* !_STLP_SGI_THREADS */
    if (__threads)
#  endif
      _S_lock._M_acquire_lock();
  }

  ~_Node_Alloc_Lock() {
#  ifdef _STLP_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
#  else /* !_STLP_SGI_THREADS */
    if (__threads)
#  endif
      _S_lock._M_release_lock();
  }

  static _STLP_STATIC_MUTEX _S_lock;
};

// # endif  /* _STLP_THREADS */

template <bool __threads, int __inst>
void* _STLP_CALL
__node_alloc<__threads, __inst>::_M_allocate(size_t __n) {
  void*  __r;
  _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
  // #       ifdef _STLP_THREADS
  /*REFERENCED*/
  _Node_Alloc_Lock<__threads, __inst> __lock_instance;
  // #       endif
  // Acquire the lock here with a constructor call.
  // This ensures that it is released in exit or during stack
  // unwinding.
  if ( (__r  = *__my_free_list) != 0 ) {
    *__my_free_list = ((_Obj*)__r) -> _M_free_list_link;
  } else {
    __r = _S_refill(__n);
  }
  // lock is released here
  return __r;
}

template <bool __threads, int __inst>
void _STLP_CALL
__node_alloc<__threads, __inst>::_M_deallocate(void *__p, size_t __n) {
  _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
  // #       ifdef _STLP_THREADS
  /*REFERENCED*/
  _Node_Alloc_Lock<__threads, __inst> __lock_instance;
  // #       endif /* _STLP_THREADS */
  // acquire lock
  ((_Obj *)__p) -> _M_free_list_link = *__my_free_list;
  *__my_free_list = (_Obj *)__p;
  // lock is released here
}
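
// Taken together, _M_allocate and _M_deallocate treat each free list as a
// LIFO stack of _Obj nodes (a reading aid inferred from the code above):
//   _M_allocate(24)      -> looks at _S_free_list[_S_FREELIST_INDEX(24)],
//                           pops the head node if the list is non-empty,
//                           otherwise calls _S_refill to carve new nodes;
//   _M_deallocate(p, 24) -> pushes p back as the new head of that same list.
// Both operations run under the scoped _Node_Alloc_Lock, so on threaded
// builds the pop/push is serialised.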

/* We allocate memory in large chunks in order to avoid fragmenting     */
/* the malloc heap too much.                                            */
/* We assume that size is properly aligned.                             */
/* We hold the allocation lock.                                         */
template <bool __threads, int __inst>
char* _STLP_CALL
__node_alloc<__threads, __inst>::_S_chunk_alloc(size_t _p_size,
                                                int& __nobjs)
{
  char* __result;
  size_t __total_bytes = _p_size * __nobjs;
  size_t __bytes_left = _S_end_free - _S_start_free;

  if (__bytes_left >= __total_bytes) {
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return(__result);
  } else if (__bytes_left >= _p_size) {
    __nobjs = (int)(__bytes_left/_p_size);
    __total_bytes = _p_size * __nobjs;
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return(__result);
  } else {
    size_t __bytes_to_get =
      2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
    // Try to make use of the left-over piece.
    if (__bytes_left > 0) {
      _Obj* _STLP_VOLATILE* __my_free_list =
        _S_free_list + _S_FREELIST_INDEX(__bytes_left);

      ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
      *__my_free_list = (_Obj*)_S_start_free;
    }
    _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
    if (0 == _S_start_free) {
      size_t __i;
      _Obj* _STLP_VOLATILE* __my_free_list;
      _Obj* __p;
      // Try to make do with what we have.  That can't
      // hurt.  We do not try smaller requests, since that tends
      // to result in disaster on multi-process machines.
      for (__i = _p_size; __i <= (size_t)_MAX_BYTES; __i += (size_t)_ALIGN) {
        __my_free_list = _S_free_list + _S_FREELIST_INDEX(__i);
        __p = *__my_free_list;
        if (0 != __p) {
          *__my_free_list = __p -> _M_free_list_link;
          _S_start_free = (char*)__p;
          _S_end_free = _S_start_free + __i;
          return(_S_chunk_alloc(_p_size, __nobjs));
          // Any leftover piece will eventually make it to the
          // right free list.
        }
      }
      _S_end_free = 0;  // In case of exception.
      _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
      /*
        (char*)malloc_alloc::allocate(__bytes_to_get);
      */
      // This should either throw an
      // exception or remedy the situation.  Thus we assume it
      // succeeded.
    }
    _S_heap_size += __bytes_to_get;
    _S_end_free = _S_start_free + __bytes_to_get;
    return(_S_chunk_alloc(_p_size, __nobjs));
  }
}
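
// Growth policy of _S_chunk_alloc, as a worked example (the numbers are
// purely illustrative): with an empty pool and _S_heap_size == 0, a refill
// of twenty 64-byte nodes asks for __total_bytes = 1280, so
// __bytes_to_get = 2*1280 + _S_round_up(0 >> 4) = 2560 bytes; half satisfies
// the current request and the other half stays in the pool for the next one.
// As _S_heap_size grows, the _S_round_up(_S_heap_size >> 4) term makes each
// new chunk somewhat larger, and any too-small leftover piece is parked on
// the free list matching its size before fresh memory is requested.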

/* Returns an object of size __n, and optionally adds to size __n free list.*/
/* We assume that __n is properly aligned.                                  */
/* We hold the allocation lock.                                             */
template <bool __threads, int __inst>
void* _STLP_CALL
__node_alloc<__threads, __inst>::_S_refill(size_t __n)
{
  int __nobjs = 20;
  __n = _S_round_up(__n);
  char* __chunk = _S_chunk_alloc(__n, __nobjs);
  _Obj* _STLP_VOLATILE* __my_free_list;
  _Obj* __result;
  _Obj* __current_obj;
  _Obj* __next_obj;
  int __i;

  if (1 == __nobjs) return(__chunk);
  __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);

  /* Build free list in chunk */
  __result = (_Obj*)__chunk;
  *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
  for (__i = 1; ; __i++) {
    __current_obj = __next_obj;
    __next_obj = (_Obj*)((char*)__next_obj + __n);
    if (__nobjs - 1 == __i) {
      __current_obj -> _M_free_list_link = 0;
      break;
    } else {
      __current_obj -> _M_free_list_link = __next_obj;
    }
  }
  return(__result);
}
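
// _S_refill in one pass (a reading aid inferred from the code above): it
// asks _S_chunk_alloc for room for 20 nodes of the rounded-up size __n, but
// accepts whatever count comes back in __nobjs.  The first node is returned
// to the caller; nodes 2..__nobjs are threaded together through
// _M_free_list_link and become the new contents of the size-__n free list,
// with the last node's link set to 0 to terminate the list.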

# if ( _STLP_STATIC_TEMPLATE_DATA > 0 )
// malloc_alloc out-of-memory handling
template <int __inst>
__oom_handler_type __malloc_alloc<__inst>::__oom_handler = (__oom_handler_type)0;

#ifdef _STLP_THREADS
template <bool __threads, int __inst>
_STLP_STATIC_MUTEX
_Node_Alloc_Lock<__threads, __inst>::_S_lock _STLP_MUTEX_INITIALIZER;
#endif

template <bool __threads, int __inst>
_Node_alloc_obj * _STLP_VOLATILE
__node_alloc<__threads, __inst>::_S_free_list[_STLP_NFREELISTS]
= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy.  Otherwise it appears to allocate too little
// space for the array.

template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __node_alloc<__threads, __inst>::_S_heap_size = 0;

# else /* ( _STLP_STATIC_TEMPLATE_DATA > 0 ) */

__DECLARE_INSTANCE(__oom_handler_type, __malloc_alloc<0>::__oom_handler, =0);

# define _STLP_ALLOC_NOTHREADS __node_alloc<false, 0>
# define _STLP_ALLOC_THREADS   __node_alloc<true, 0>
# define _STLP_ALLOC_NOTHREADS_LOCK _Node_Alloc_Lock<false, 0>
# define _STLP_ALLOC_THREADS_LOCK   _Node_Alloc_Lock<true, 0>

__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_NOTHREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_NOTHREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_THREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_THREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});
// #   ifdef _STLP_THREADS
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_NOTHREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_THREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
// #   endif

# undef _STLP_ALLOC_THREADS
# undef _STLP_ALLOC_NOTHREADS

#  endif /* _STLP_STATIC_TEMPLATE_DATA */

#endif

_STLP_END_NAMESPACE

# undef _S_FREELIST_INDEX

# endif /* _STLP_EXPOSE_GLOBALS_IMPLEMENTATION */

#endif /*  _STLP_ALLOC_C */

// Local Variables:
// mode:C++
// End: