/*
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */
#ifndef _STLP_ALLOC_C
#define _STLP_ALLOC_C

#ifndef _STLP_INTERNAL_ALLOC_H
# include <stl/_alloc.h>
#endif

# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION)
# ifdef _STLP_SGI_THREADS
// We test whether threads are in use before locking.
// Perhaps this should be moved into stl_threads.h, but that
// probably makes it harder to avoid the procedure call when
// it isn't needed.
extern "C" {
  extern int __us_rsthread_malloc;
}
# endif

_STLP_BEGIN_NAMESPACE
// Specialised debug form of malloc which does not provide "false"
// memory leaks when run with debug CRT libraries.
#if defined (_STLP_MSVC) && (_STLP_MSVC >= 1020 && defined (_STLP_DEBUG_ALLOC)) && ! defined (_STLP_WINCE)
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_malloc_dbg(__bytes, _CRT_BLOCK, __FILE__, __LINE__)); }
#else
# ifdef _STLP_NODE_ALLOC_USE_MALLOC
inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); }
# else
inline void* __stlp_chunk_malloc(size_t __bytes) { return _STLP_STD::__stl_new(__bytes); }
# endif
#endif
#define _S_FREELIST_INDEX(__bytes) ((__bytes - size_t(1)) >> (int)_ALIGN_SHIFT)
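// Worked example (assuming the usual configuration in _alloc.h, where
// _ALIGN == 8 and therefore _ALIGN_SHIFT == 3): requests of 1..8 bytes
// map to free list 0, 9..16 bytes to free list 1, and so on, e.g.
//
//   _S_FREELIST_INDEX(24) == (24 - 1) >> 3 == 2
//
// so 24-byte nodes live on the third free list.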
#ifndef _STLP_NO_NODE_ALLOC
template <int __inst>
void * _STLP_CALL __malloc_alloc<__inst>::_S_oom_malloc(size_t __n)
{
  __oom_handler_type __my_malloc_handler;
  void * __result;
  for (;;) {
    __my_malloc_handler = __oom_handler;
    if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
    (*__my_malloc_handler)();
    __result = malloc(__n);
    if (__result) return (__result);
  }
#if defined (_STLP_NEED_UNREACHABLE_RETURN)
  return 0;
#endif
}
#endif /* _STLP_NO_NODE_ALLOC */
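// The loop above implements the standard new-handler protocol: keep
// invoking the installed handler (which is expected to free some memory,
// install a different handler, or throw) until malloc() succeeds.
// Illustrative sketch only; __release_caches is a hypothetical
// application callback, installed via set_malloc_handler from _alloc.h:
//
//   void __release_caches();  // assumed to exist in the application
//   __malloc_alloc<0>::set_malloc_handler(__release_caches);
//
// After this, a failed malloc() retries via __release_caches() instead
// of failing immediately.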
template <class _Alloc>
void * _STLP_CALL __debug_alloc<_Alloc>::allocate(size_t __n) {
  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();
  __alloc_header *__result = (__alloc_header *)__allocator_type::allocate(__real_n);
  memset((char*)__result, __shred_byte, __real_n * sizeof(value_type));
  __result->__magic = __magic;
  __result->__type_size = sizeof(value_type);
  __result->_M_size = (_STLP_UINT32_T)__n;
  return ((char*)__result) + (long)__extra_before;
}
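// Layout of a debug allocation (as built by the code above): the
// underlying allocator provides one block holding a header, a front
// guard zone, the user area, and a back guard zone, all pre-filled
// with __shred_byte:
//
//   | __alloc_header | front pad | user data (__n items) | back pad |
//   ^ __real_p                   ^ pointer returned to the caller
//
// deallocate() below verifies that both pads still contain __shred_byte,
// which catches buffer underruns and overruns by the caller.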
template <class _Alloc>
void _STLP_CALL
__debug_alloc<_Alloc>::deallocate(void *__p, size_t __n) {
  __alloc_header * __real_p = (__alloc_header*)((char *)__p - (long)__extra_before);
  _STLP_VERBOSE_ASSERT(__real_p->__magic != __deleted_magic, _StlMsg_DBA_DELETED_TWICE)
  _STLP_VERBOSE_ASSERT(__real_p->__magic == __magic, _StlMsg_DBA_NEVER_ALLOCATED)
  _STLP_VERBOSE_ASSERT(__real_p->__type_size == 1, _StlMsg_DBA_TYPE_MISMATCH)
  _STLP_VERBOSE_ASSERT(__real_p->_M_size == __n, _StlMsg_DBA_SIZE_MISMATCH)
  // check pads on both sides
  unsigned char* __tmp;
  for (__tmp = (unsigned char*)(__real_p + 1); __tmp < (unsigned char*)__p; __tmp++) {
    _STLP_VERBOSE_ASSERT(*__tmp == __shred_byte, _StlMsg_DBA_UNDERRUN)
  }
  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();
  for (__tmp = ((unsigned char*)__p) + __n * sizeof(value_type);
       __tmp < ((unsigned char*)__real_p) + __real_n; __tmp++) {
    _STLP_VERBOSE_ASSERT(*__tmp == __shred_byte, _StlMsg_DBA_OVERRUN)
  }
  // Mark the header as deleted and shred the user area, so that a
  // stale pointer used after deallocation is likely to be caught.
  __real_p->__magic = __deleted_magic;
  memset((char*)__p, __shred_byte, __n * sizeof(value_type));
  __allocator_type::deallocate(__real_p, __real_n);
}
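// Usage sketch (illustrative only): __debug_alloc can be layered over
// any allocator exposing the same static allocate/deallocate interface,
// for example the node allocator defined below:
//
//   typedef __debug_alloc<__node_alloc<true, 0> > __checked_alloc;
//   void* __p = __checked_alloc::allocate(32);
//   __checked_alloc::deallocate(__p, 32);  // pads and magic verified here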
#ifndef _STLP_NO_NODE_ALLOC

// # ifdef _STLP_THREADS

template <bool __threads, int __inst>
class _Node_Alloc_Lock {
public:
  _Node_Alloc_Lock() {
# ifdef _STLP_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
# else /* !_STLP_SGI_THREADS */
    if (__threads)
# endif
      _S_lock._M_acquire_lock();
  }

  ~_Node_Alloc_Lock() {
# ifdef _STLP_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
# else /* !_STLP_SGI_THREADS */
    if (__threads)
# endif
      _S_lock._M_release_lock();
  }

  static _STLP_STATIC_MUTEX _S_lock;
};

// # endif /* _STLP_THREADS */
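// Note on the template parameters (follows from the definitions above):
// each <__threads, __inst> combination gets its own _S_lock and, below,
// its own set of free lists. When __threads is false, the compile-time
// constant makes the acquire/release branches dead code, so the
// single-threaded instantiation pays no locking cost.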
template <bool __threads, int __inst>
void* _STLP_CALL
__node_alloc<__threads, __inst>::_M_allocate(size_t __n) {
  void* __r;
  _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
  // # ifdef _STLP_THREADS
  _Node_Alloc_Lock<__threads, __inst> __lock_instance;
  // # endif
  // Acquire the lock here with a constructor call.
  // This ensures that it is released on exit or during stack
  // unwinding.
  if ((__r = *__my_free_list) != 0) {
    *__my_free_list = ((_Obj*)__r)->_M_free_list_link;
  } else {
    __r = _S_refill(__n);
  }
  // lock is released here
  return __r;
}
template <bool __threads, int __inst>
void _STLP_CALL
__node_alloc<__threads, __inst>::_M_deallocate(void *__p, size_t __n) {
  _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
  // # ifdef _STLP_THREADS
  _Node_Alloc_Lock<__threads, __inst> __lock_instance;
  // # endif /* _STLP_THREADS */
  // push the block onto the front of its free list
  ((_Obj *)__p)->_M_free_list_link = *__my_free_list;
  *__my_free_list = (_Obj *)__p;
  // lock is released here
}
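// Round-trip sketch (illustrative, assuming the public allocate and
// deallocate forwarders that _alloc.h declares for __node_alloc, which
// route small requests to _M_allocate/_M_deallocate):
//
//   typedef __node_alloc<true, 0> __pool;
//   void* __p = __pool::allocate(24);  // pops a node from free list 2
//   __pool::deallocate(__p, 24);       // pushes it back onto free list 2
//
// A free block stores the link pointer in its own first bytes
// (_M_free_list_link), so the free lists need no bookkeeping memory.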
/* We allocate memory in large chunks in order to avoid fragmenting */
/* the malloc heap too much.                                        */
/* We assume that size is properly aligned.                         */
/* We hold the allocation lock.                                     */
template <bool __threads, int __inst>
char* _STLP_CALL
__node_alloc<__threads, __inst>::_S_chunk_alloc(size_t _p_size,
                                                int& __nobjs)
{
  char* __result;
  size_t __total_bytes = _p_size * __nobjs;
  size_t __bytes_left = _S_end_free - _S_start_free;

  if (__bytes_left >= __total_bytes) {
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return (__result);
  } else if (__bytes_left >= _p_size) {
    // Not enough room for the full request: hand back as many
    // objects as still fit and report the reduced count via __nobjs.
    __nobjs = (int)(__bytes_left / _p_size);
    __total_bytes = _p_size * __nobjs;
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return (__result);
  } else {
    size_t __bytes_to_get =
      2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
    // Try to make use of the left-over piece.
    if (__bytes_left > 0) {
      _Obj* _STLP_VOLATILE* __my_free_list =
        _S_free_list + _S_FREELIST_INDEX(__bytes_left);
      ((_Obj*)_S_start_free)->_M_free_list_link = *__my_free_list;
      *__my_free_list = (_Obj*)_S_start_free;
    }
    _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
    if (0 == _S_start_free) {
      size_t __i;
      _Obj* _STLP_VOLATILE* __my_free_list;
      _Obj* __p;
      // Try to make do with what we have. That can't
      // hurt. We do not try smaller requests, since that tends
      // to result in disaster on multi-process machines.
      for (__i = _p_size; __i <= (size_t)_MAX_BYTES; __i += (size_t)_ALIGN) {
        __my_free_list = _S_free_list + _S_FREELIST_INDEX(__i);
        __p = *__my_free_list;
        if (0 != __p) {
          *__my_free_list = __p->_M_free_list_link;
          _S_start_free = (char*)__p;
          _S_end_free = _S_start_free + __i;
          return (_S_chunk_alloc(_p_size, __nobjs));
          // Any leftover piece will eventually make it to the
          // right free list.
        }
      }
      _S_end_free = 0; // In case of exception.
      _S_start_free = (char*)__stlp_chunk_malloc(__bytes_to_get);
      /* (char*)malloc_alloc::allocate(__bytes_to_get); */
      // This should either throw an
      // exception or remedy the situation. Thus we assume it
      // succeeded.
    }
    _S_heap_size += __bytes_to_get;
    _S_end_free = _S_start_free + __bytes_to_get;
    return (_S_chunk_alloc(_p_size, __nobjs));
  }
}
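// Worked example of the growth policy above (assuming _ALIGN == 8 and a
// first-ever request for 32-byte nodes with __nobjs == 20): __total_bytes
// is 32 * 20 = 640 and, with _S_heap_size still 0, __bytes_to_get is
// 2 * 640 + _S_round_up(0) = 1280. Half the new chunk satisfies the 20
// objects; the remaining 640 bytes stay between _S_start_free and
// _S_end_free to be carved up by later calls. As _S_heap_size grows, the
// _S_heap_size >> 4 term makes each new chunk progressively larger.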
/* Returns an object of size __n, and optionally adds to size __n free list.*/
/* We assume that __n is properly aligned.                                  */
/* We hold the allocation lock.                                             */
template <bool __threads, int __inst>
void* _STLP_CALL
__node_alloc<__threads, __inst>::_S_refill(size_t __n)
{
  int __nobjs = 20;
  __n = _S_round_up(__n);
  char* __chunk = _S_chunk_alloc(__n, __nobjs);
  _Obj* _STLP_VOLATILE* __my_free_list;
  _Obj* __result;
  _Obj* __current_obj;
  _Obj* __next_obj;
  int __i;

  if (1 == __nobjs) return (__chunk);
  __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);

  /* Build free list in chunk */
  __result = (_Obj*)__chunk;
  *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
  for (__i = 1; ; __i++) {
    __current_obj = __next_obj;
    __next_obj = (_Obj*)((char*)__next_obj + __n);
    if (__nobjs - 1 == __i) {
      __current_obj->_M_free_list_link = 0;
      break;
    } else {
      __current_obj->_M_free_list_link = __next_obj;
    }
  }
  return (__result);
}
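// Resulting chunk layout (for the common case where _S_chunk_alloc
// delivers all 20 objects): object 0 is returned to the caller and
// objects 1..19 are threaded onto the free list:
//
//   __chunk: [obj 0][obj 1][obj 2] ... [obj 19]
//             ^ returned
//   *__my_free_list == obj 1 -> obj 2 -> ... -> obj 19 -> 0
//
// If _S_chunk_alloc could only supply one object (__nobjs == 1), the
// chunk is returned directly and the free list is left untouched.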
# if ( _STLP_STATIC_TEMPLATE_DATA > 0 )
// malloc_alloc out-of-memory handling
template <int __inst>
__oom_handler_type __malloc_alloc<__inst>::__oom_handler = (__oom_handler_type)0;

template <bool __threads, int __inst>
_STLP_STATIC_MUTEX
_Node_Alloc_Lock<__threads, __inst>::_S_lock _STLP_MUTEX_INITIALIZER;

template <bool __threads, int __inst>
_Node_alloc_obj * _STLP_VOLATILE
__node_alloc<__threads, __inst>::_S_free_list[_STLP_NFREELISTS]
= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy. Otherwise it appears to allocate too little
// space for the array.
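// The count of 16 matches the configuration in _alloc.h, where
// _STLP_NFREELISTS is 16: with _ALIGN == 8 the lists cover node sizes
// 8, 16, ..., up to _MAX_BYTES == 128, and larger requests bypass the
// pool and go straight to the underlying malloc/new.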
template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char *__node_alloc<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __node_alloc<__threads, __inst>::_S_heap_size = 0;
# else /* ( _STLP_STATIC_TEMPLATE_DATA > 0 ) */

__DECLARE_INSTANCE(__oom_handler_type, __malloc_alloc<0>::__oom_handler, =0);

# define _STLP_ALLOC_NOTHREADS      __node_alloc<false, 0>
# define _STLP_ALLOC_THREADS        __node_alloc<true, 0>
# define _STLP_ALLOC_NOTHREADS_LOCK _Node_Alloc_Lock<false, 0>
# define _STLP_ALLOC_THREADS_LOCK   _Node_Alloc_Lock<true, 0>

__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_NOTHREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_NOTHREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_NOTHREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char *, _STLP_ALLOC_THREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, _STLP_ALLOC_THREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj * _STLP_VOLATILE,
                   _STLP_ALLOC_THREADS::_S_free_list[_STLP_NFREELISTS],
                   ={0});

// # ifdef _STLP_THREADS
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_NOTHREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
__DECLARE_INSTANCE(_STLP_STATIC_MUTEX,
                   _STLP_ALLOC_THREADS_LOCK::_S_lock,
                   _STLP_MUTEX_INITIALIZER);
// # endif /* _STLP_THREADS */

# undef _STLP_ALLOC_THREADS
# undef _STLP_ALLOC_NOTHREADS

# endif /* _STLP_STATIC_TEMPLATE_DATA */
# endif /* _STLP_NO_NODE_ALLOC */

_STLP_END_NAMESPACE

# undef _S_FREELIST_INDEX

# endif /* _STLP_EXPOSE_GLOBALS_IMPLEMENTATION */

#endif /* _STLP_ALLOC_C */