Update contrib.
1 // Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
2 // All rights reserved.
3 // This component and the accompanying materials are made available
4 // under the terms of the License "Eclipse Public License v1.0"
5 // which accompanies this distribution, and is available
6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
8 // Initial Contributors:
9 // Nokia Corporation - initial contribution.
14 // e32\common\heap.cpp
// Build configuration and heap-debugging macros.
// NOTE(review): this excerpt is missing interleaving source lines (e.g. the
// #else/#endif closing __KERNEL_MODE__ and the _DEBUG guards that select the
// checked vs. empty macro variants) - confirm against the full file.
19 #ifdef __KERNEL_MODE__
20 #include <kernel/kern_priv.h>
// Debug variants: allocation-failure simulation, cell validation and
// nesting-level bookkeeping for heap-checking support.
24 #define __SIMULATE_ALLOC_FAIL(s) if (CheckForSimulatedAllocFail()) {s}
25 #define __CHECK_CELL(p) CheckCell(p)
// Fill a freed cell's payload with 0xde so use-after-free is recognisable.
26 #define __ZAP_CELL(p) memset( ((TUint8*)p) + RHeap::EAllocCellSize, 0xde, p->len - RHeap::EAllocCellSize)
27 #define __DEBUG_SAVE(p) TInt dbgNestLevel = ((SDebugCell*)p)->nestingLevel
28 #define __DEBUG_RESTORE(p) ((SDebugCell*)(((TUint8*)p)-EAllocCellSize))->nestingLevel = dbgNestLevel
// Release variants: the debug hooks compile away to nothing.
30 #define __SIMULATE_ALLOC_FAIL(s)
31 #define __CHECK_CELL(p)
33 #define __DEBUG_SAVE(p)
34 #define __DEBUG_RESTORE(p)
// Address of the cell physically following p (p->len includes the header).
37 #define __NEXT_CELL(p) ((SCell*)(((TUint8*)p)+p->len))
// For x != 0: true iff x is a power of two (x^(x-1) yields a mask of all bits
// up to and including x's single set bit, which is >= x only in that case).
39 #define __POWER_OF_2(x) ((TUint32)((x)^((x)-1))>=(TUint32)(x))
// Validate a user cell pointer (alignment, minimum size, heap bounds) and emit
// a BTrace heap-corruption event on failure. The macro continues on lines not
// visible in this excerpt - do not insert text between the continuations.
41 #define __MEMORY_MONITOR_CHECK_CELL(p) \
43 TLinAddr m = TLinAddr(iAlign-1); \
44 SCell* c = (SCell*)(((TUint8*)p)-EAllocCellSize); \
45 if((c->len & m) || (c->len<iMinCell) || ((TUint8*)c<iBase) || ((TUint8*)__NEXT_CELL(c)>iTop)) \
46 BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)p, (TUint32)c->len-EAllocCellSize); \
54 Defines the minimum cell size of a heap.
56 The constant can be changed at ROM build time using patchdata OBY keyword.
58 #ifdef __X86GCC__ // For X86GCC we dont use the proper data import attribute
59 #undef IMPORT_D // since the constant is not really imported. GCC doesn't
60 #define IMPORT_D // allow imports from self.
62 IMPORT_D extern const TInt KHeapMinCellSize;
69 This constant defines the ratio that determines the amount of hysteresis between heap growing and heap
71 It is a 32-bit fixed point number where the radix point is defined to be
72 between bits 7 and 8 (where the LSB is bit 0) i.e. using standard notation, a Q8 or a fx24.8
73 fixed point number. For example, for a ratio of 2.0, set KHeapShrinkHysRatio=0x200.
75 The heap shrinking hysteresis value is calculated to be:
77 KHeapShrinkHysRatio*(iGrowBy>>8)
79 where iGrowBy is a page aligned value set by the argument, aGrowBy, to the RHeap constructor.
80 The default hysteresis value is iGrowBy bytes i.e. KHeapShrinkHysRatio=0x200 (a ratio of 2.0 in the Q8 fixed-point representation described above).
82 Memory usage may be improved by reducing the heap shrinking hysteresis
83 by setting 1.0 < KHeapShrinkHysRatio < 2.0. Heap shrinking hysteresis is disabled/removed
84 when KHeapShrinkHysRatio <= 1.0.
86 The constant can be changed at ROM build time using patchdata OBY keyword.
88 IMPORT_D extern const TInt KHeapShrinkHysRatio;
90 #pragma warning( disable : 4705 ) // statement has no effect
91 UEXPORT_C RHeap::RHeap(TInt aMaxLength, TInt aAlign, TBool aSingleThread)
96 // Constructor for fixed size heap
98 : iMinLength(aMaxLength), iMaxLength(aMaxLength), iOffset(0), iGrowBy(0), iChunkHandle(0),
99 iNestingLevel(0), iAllocCount(0), iFailType(ENone), iTestData(NULL)
101 iAlign = aAlign ? aAlign : ECellAlignment;
103 iFlags = aSingleThread ? (ESingleThreaded|EFixedSize) : EFixedSize;
106 #pragma warning( default : 4705 )
// Growable chunk-heap constructor. Rounds the requested minimum/maximum
// lengths and the grow increment up to whole pages, and guarantees the
// minimum length can at least hold the RHeap header plus one cell header.
// NOTE(review): some constructor lines are missing from this excerpt.
111 UEXPORT_C RHeap::RHeap(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread)
116 // Constructor for chunk heaps.
118 : iOffset(aOffset), iChunkHandle(aChunkHandle),
119 iNestingLevel(0), iAllocCount(0), iFailType(ENone), iTestData(NULL)
// sz = bytes occupied by the RHeap object itself plus the chunk offset
// (iBase was set to just past the RHeap object by operator new).
121 TInt sz = iBase - ((TUint8*)this - iOffset);
122 GET_PAGE_SIZE(iPageSize);
123 __ASSERT_ALWAYS(iOffset>=0, HEAP_PANIC(ETHeapNewBadOffset));
124 iMinLength = Max(aMinLength, sz + EAllocCellSize);
125 iMinLength = _ALIGN_UP(iMinLength, iPageSize);
// Maximum can never be below the (page-rounded) minimum.
126 iMaxLength = Max(aMaxLength, iMinLength);
127 iMaxLength = _ALIGN_UP(iMaxLength, iPageSize);
128 iGrowBy = _ALIGN_UP(aGrowBy, iPageSize);
129 iFlags = aSingleThread ? ESingleThreaded : 0;
130 iAlign = aAlign ? aAlign : ECellAlignment;
// Placement new: constructs the RHeap object at aBase. aSize must be at least
// sizeof(RHeap); iBase is pointed just past the reserved aSize bytes so the
// cell area starts after the header. iAlign is poisoned with a garbage value
// until Initialise() sets it properly.
137 UEXPORT_C TAny* RHeap::operator new(TUint aSize, TAny* aBase) __NO_THROW
142 __ASSERT_ALWAYS(aSize>=sizeof(RHeap), HEAP_PANIC(ETHeapNewBadSize));
143 RHeap* h = (RHeap*)aBase;
144 h->iAlign = 0x80000000; // garbage value
145 h->iBase = ((TUint8*)aBase) + aSize;
// One-time heap initialisation: validates the alignment, aligns the cell
// area, computes the minimum cell size, debug-fills the arena and sets up the
// free list as one single free cell spanning the whole usable area.
// NOTE(review): some lines (e.g. the iTop assignment) are missing from this
// excerpt.
149 void RHeap::Initialise()
151 // Initialise the heap.
// Alignment must be at least pointer-sized and a power of two.
155 __ASSERT_ALWAYS((TUint32)iAlign>=sizeof(TAny*) && __POWER_OF_2(iAlign), HEAP_PANIC(ETHeapNewBadAlignment));
// Align the *payload* (iBase + header) then back the base up by the header
// size, so that user pointers come out aligned.
158 iBase = (TUint8*)Align(iBase + EAllocCellSize);
159 iBase -= EAllocCellSize;
160 TInt b = iBase - ((TUint8*)this - iOffset);
161 TInt len = _ALIGN_DOWN(iMinLength - b, iAlign);
163 iMinLength = iTop - ((TUint8*)this - iOffset);
// Smallest cell: patchable KHeapMinCellSize plus the larger header variant.
164 iMinCell = Align(KHeapMinCellSize + Max((TInt)EAllocCellSize, (TInt)EFreeCellSize));
// Debug fill pattern for never-allocated memory.
166 memset(iBase, 0xa5, len);
168 SCell* pM=(SCell*)iBase; // First free cell
169 iFree.next=pM; // Free list points to first free cell
170 iFree.len=0; // Stop free from joining this with a free block
171 pM->next=NULL; // Terminate the free list
172 pM->len=len; // Set the size of the free cell
// Debug-build sanity check of a cell header: length must be aligned, at
// least iMinCell, and the cell must lie entirely within [iBase, iTop].
176 void RHeap::CheckCell(const SCell* aCell) const
178 TLinAddr m = TLinAddr(iAlign - 1);
180 __ASSERT_DEBUG(!(aCell->len & m), HEAP_PANIC(ETHeapBadCellAddress));
181 __ASSERT_DEBUG(aCell->len >= iMinCell, HEAP_PANIC(ETHeapBadCellAddress));
182 __ASSERT_DEBUG((TUint8*)aCell>=iBase, HEAP_PANIC(ETHeapBadCellAddress));
183 __ASSERT_DEBUG((TUint8*)__NEXT_CELL(aCell)<=iTop, HEAP_PANIC(ETHeapBadCellAddress));
// Converts a user payload pointer into its SCell header pointer, panicking
// (USER 42 / ETHeapBadCellAddress) if the pointer is misaligned.
// NOTE(review): further validation lines are missing from this excerpt.
187 UEXPORT_C RHeap::SCell* RHeap::GetAddress(const TAny* aCell) const
189 // As much as possible, check a cell address and backspace it
190 // to point at the cell header.
194 TLinAddr m = TLinAddr(iAlign - 1);
195 __ASSERT_ALWAYS(!(TLinAddr(aCell)&m), HEAP_PANIC(ETHeapBadCellAddress));
// Header sits EAllocCellSize bytes before the payload.
197 SCell* pC = (SCell*)(((TUint8*)aCell)-EAllocCellSize);
// Payload length of an allocated cell: stored cell length minus the header.
206 UEXPORT_C TInt RHeap::AllocLen(const TAny* aCell) const
208 Gets the length of the available space in the specified allocated cell.
210 @param aCell A pointer to the allocated cell.
212 @return The length of the available space in the allocated cell.
214 @panic USER 42 if aCell does not point to a valid cell.
218 SCell* pC = GetAddress(aCell);
219 return pC->len - EAllocCellSize;
// First-fit allocation from the free list, without growing the heap.
// On success the chosen cell is unlinked (or split, leaving the remainder on
// the free list) and its length word set. Caller must hold the heap lock.
// NOTE(review): the declarations of pP/pE (free-list cursor starting at
// &iFree) are on lines missing from this excerpt.
226 #if !defined(__HEAP_MACHINE_CODED__) || defined(_DEBUG)
227 RHeap::SCell* RHeap::DoAlloc(TInt aSize, SCell*& aLastFree)
229 // Allocate without growing. aSize includes cell header and alignment.
230 // Lock already held.
234 SCell* pC = pP->next;
235 for (; pC; pP=pC, pC=pC->next) // Scan the free list
239 if (pC->len >= aSize) // Block size bigger than request
241 if (pC->len - aSize < iMinCell) // Leftover must be large enough to hold an SCell
243 aSize = pC->len; // It isn't, so take it all
244 pE = pC->next; // Set the next field
// Split: carve aSize off the front, the tail becomes a new free cell.
248 pE = (SCell*)(((TUint8*)pC)+aSize); // Take amount required
249 pE->len = pC->len - aSize; // Initialize new free cell
252 pP->next = pE; // Update previous pointer
253 pC->len = aSize; // Set control size word
// Debug-build bookkeeping used by heap-checking (__DEBUG_SAVE/RESTORE).
255 ((SDebugCell*)pC)->nestingLevel = iNestingLevel;
256 ((SDebugCell*)pC)->allocCount = ++iAllocCount;
// Public allocation entry point: rounds the request up to an aligned cell
// (including header, at least iMinCell), tries DoAlloc, and for growable
// heaps attempts TryToGrowHeap before retrying. Emits BTrace events when
// ETraceAllocs is set; kernel-mode zero-fills the returned payload.
// NOTE(review): lock/unlock and some brace lines are missing from this
// excerpt.
269 UEXPORT_C TAny* RHeap::Alloc(TInt aSize)
271 Allocates a cell of the specified size from the heap.
273 If there is insufficient memory available on the heap from which to allocate
274 a cell of the required size, the function returns NULL.
276 The cell is aligned according to the alignment value specified at construction,
277 or the default alignment value, if an explicit value was not specified.
279 The resulting size of the allocated cell may be rounded up to a
280 value greater than aSize, but is guaranteed to be not less than aSize.
283 size of the cell to be allocated from the heap
285 @return A pointer to the allocated cell. NULL if there is insufficient memory
288 @panic USER 47 if the maximum unsigned value of aSize is greater than or equal
289 to the value of KMaxTInt/2; for example, calling Alloc(-1) raises
296 __CHECK_THREAD_STATE;
297 __ASSERT_ALWAYS((TUint)aSize<(KMaxTInt/2),HEAP_PANIC(ETHeapBadAllocatedCellSize));
298 __SIMULATE_ALLOC_FAIL(return NULL;)
// Keep the caller's requested size for tracing before rounding it up.
300 TInt origSize = aSize;
301 aSize = Max(Align(aSize + EAllocCellSize), iMinCell);
304 SCell* pC = (SCell*)DoAlloc(aSize, pL);
305 if (!pC && !(iFlags & EFixedSize))
307 // try to grow chunk heap
308 TInt r = TryToGrowHeap(aSize, pL);
310 pC = DoAlloc(aSize, pL);
// Success path: update allocation statistics.
313 ++iCellCount, iTotalAllocSize += (pC->len - EAllocCellSize);
317 TAny* result=((TUint8*)pC) + EAllocCellSize;
318 if (iFlags & ETraceAllocs)
320 TUint32 traceData[2];
321 traceData[0] = AllocLen(result);
322 traceData[1] = origSize;
323 BTraceContextN(BTrace::EHeap, BTrace::EHeapAlloc, (TUint32)this, (TUint32)result, traceData, sizeof(traceData));
// Kernel-side allocations are returned zero-initialised.
325 #ifdef __KERNEL_MODE__
326 memclr(result, pC->len - EAllocCellSize);
// Failure path: trace the failed request.
330 if (iFlags & ETraceAllocs)
331 BTraceContext8(BTrace::EHeap, BTrace::EHeapAllocFail, (TUint32)this, (TUint32)origSize);
// Grows a chunk heap to satisfy an allocation of aSize. If the last free
// cell is at the top of the heap, only the shortfall needs committing;
// otherwise a whole new free cell of aSize is appended at iTop.
// NOTE(review): lines between 342 and 344 are missing from this excerpt -
// presumably the rounding of 'extra' back up to a multiple of iGrowBy and
// the SetBrk call; confirm against the full file.
338 TInt RHeap::TryToGrowHeap(TInt aSize, SCell* aLastFree)
340 TBool at_end = IsLastCell(aLastFree);
341 TInt extra = at_end ? aSize - aLastFree->len : aSize;
// Number of whole iGrowBy increments needed (rounding up).
342 extra = (extra + iGrowBy - 1) / iGrowBy;
344 TInt cur_len = _ALIGN_UP(iTop - ((TUint8*)this - iOffset), iPageSize);
345 TInt new_len = cur_len + extra;
346 TInt r = KErrNoMemory;
347 if (new_len <= iMaxLength)
// Case 1: extend the existing top free cell in place.
353 aLastFree->len += extra;
// Case 2: append a brand-new free cell at the old top.
356 SCell* pC = (SCell*)iTop;
359 aLastFree->next = pC;
370 #ifndef __KERNEL_MODE__
// Releases committed memory from the top of a growable heap back to the
// system, never shrinking below iMinLength. Fixed-size heaps cannot be
// compressed. NOTE(review): the walk below locates the last free cell; the
// subsequent Reduce()/unlock lines are missing from this excerpt.
371 EXPORT_C TInt RHeap::Compress()
375 The function frees excess committed space from the top
376 of the heap. The size of the heap is never reduced below the minimum size
377 specified during creation of the heap.
379 @return The space reclaimed. If no space can be reclaimed, then this value
384 if (iFlags & EFixedSize)
// Walk to the final free cell - only top-of-heap space can be released.
389 for (; pC->next; pC=pC->next) {}
404 #if !defined(__HEAP_MACHINE_CODED__) || defined(_DEBUG)
// Returns cell pC to the free list, coalescing with the adjacent following
// and/or preceding free cells, and triggers a heap shrink when the resulting
// top free cell exceeds the hysteresis threshold
// KHeapShrinkHysRatio*(iGrowBy>>8) (see the Q8 fixed-point doc above).
// Caller must hold the heap lock.
405 void RHeap::DoFree(SCell* pC)
// Find the free-list position for pC: pP = last free cell before it,
// pE = first free cell at or after it (NULL if none).
410 SCell* pE = pP->next;
411 for (; pE && pE<pC; pP=pE, pE=pE->next) {}
412 if (pE) // Is there a following free cell?
414 SCell* pN = __NEXT_CELL(pC);
415 __ASSERT_ALWAYS(pN<=pE, HEAP_PANIC(ETHeapFreeBadNextCell)); // Following cell overlaps
416 if (pN==pE) // Is it adjacent
418 pC->len += pE->len; // Yes - coalesce adjacent free cells
421 else // pN<pE, non-adjacent free cells
422 pC->next = pE; // Otherwise just point to it
425 pC->next = NULL; // No following free cell
426 SCell* pN = __NEXT_CELL(pP); // pN=pP=&iFree if no preceding free cell
427 __ASSERT_ALWAYS(pN<=pC, HEAP_PANIC(ETHeapFreeBadPrevCell)); // Previous cell overlaps
428 if (pN==pC) // Is it adjacent
430 pP->len += pC->len; // Yes - coalesce adjacent free cells
432 pC = pP; // for size reduction check
434 else // pN<pC, non-adjacent free cells
435 pP->next = pC; // point previous cell to the one being freed
436 pN = __NEXT_CELL(pC); // End of amalgamated free cell
// Shrink only growable heaps, only when the free cell reaches the top and
// its size passes the hysteresis threshold.
437 if ((TUint8*)pN==iTop && !(iFlags & EFixedSize) &&
438 pC->len >= KHeapShrinkHysRatio*(iGrowBy>>8))
// Public free entry point: validates the pointer, updates statistics and
// emits a BTrace free event. NOTE(review): the NULL-pointer early return,
// lock/unlock, cell zap and the DoFree call are on lines missing from this
// excerpt.
446 UEXPORT_C void RHeap::Free(TAny* aCell)
448 Frees the specified cell and returns it to the heap.
450 @param aCell A pointer to a valid cell; this pointer can also be NULL,
451 in which case the function does nothing and just returns.
453 @panic USER 42 if aCell points to an invalid cell.
456 __CHECK_THREAD_STATE;
// Optional full-cell validation when memory monitoring is enabled.
460 if (iFlags & EMonitorMemory)
461 __MEMORY_MONITOR_CHECK_CELL(aCell);
462 SCell* pC = GetAddress(aCell);
464 iTotalAllocSize -= (pC->len - EAllocCellSize);
466 if (iFlags & ETraceAllocs)
467 BTraceContext8(BTrace::EHeap, BTrace::EHeapFree, (TUint32)this, (TUint32)aCell);
// Shrinks the heap by decommitting memory above the given top free cell,
// keeping at least iMinLength committed. The free cell is either removed
// entirely from the free list or trimmed (but never below iMinCell).
// Panics with ETHeapReduceFailed if the underlying SetBrk fails.
474 TInt RHeap::Reduce(SCell* aCell)
// Offset of the free cell from the start of the heap region.
477 TInt offset=((TUint8*)aCell)-((TUint8*)this - iOffset);
478 if (offset>=iMinLength)
479 reduce = aCell->len; // length of entire free cell
481 reduce = offset + aCell->len - iMinLength; // length of free cell past minimum heap size
482 reduce = _ALIGN_DOWN(reduce, iPageSize); // round down to page multiple
484 return 0; // can't reduce this heap
485 TInt new_cell_len = aCell->len - reduce; // length of last free cell after reduction
486 if (new_cell_len == 0)
488 // the free cell can be entirely eliminated
// Unlink aCell: walk the free list to the cell pointing at it.
490 for (; pP->next!=aCell; pP=pP->next) {}
495 if (new_cell_len < iMinCell)
497 // max reduction would leave a cell too small
// Give back one page less so the remaining cell stays valid.
499 new_cell_len += iPageSize;
501 aCell->len = new_cell_len; // reduce the cell length
504 TInt new_len = _ALIGN_UP(iTop - ((TUint8*)this - iOffset), iPageSize);
505 TInt r = SetBrk(new_len);
506 __ASSERT_ALWAYS(r==KErrNone, HEAP_PANIC(ETHeapReduceFailed));
513 #ifndef __KERNEL_MODE__
// Frees every allocated cell by shrinking a growable heap back to its
// minimum length and re-initialising it. NOTE(review): the free-list /
// statistics re-initialisation lines are missing from this excerpt.
514 EXPORT_C void RHeap::Reset()
516 Frees all allocated cells on this heap.
521 if (!(iFlags & EFixedSize))
523 TInt r = SetBrk(iMinLength);
524 __ASSERT_ALWAYS(r==KErrNone, HEAP_PANIC(ETHeapResetFailed));
// Locates the free cell that starts exactly at the end of aCell, if any.
// aNext is set to that cell (or NULL); aPrev is the free cell (or the dummy
// head &iFree) preceding aCell on the free list. Panics (after unlocking) if
// a free cell is found to overlap aCell.
534 inline void RHeap::FindFollowingFreeCell(SCell* aCell, SCell*& aPrev, SCell*& aNext)
536 // Find the free cell that immediately follows aCell, if one exists
537 // If found, aNext is set to point to it, else it is set to NULL.
538 // aPrev is set to the free cell before aCell or the dummy free cell where there are no free cells before aCell.
539 // Called with lock enabled.
544 for (; aNext && aNext<aCell; aPrev=aNext, aNext=aNext->next) {}
546 if (aNext) // If there is a following free cell, check its directly after aCell.
548 SCell* pNextCell = __NEXT_CELL(aCell); // end of this cell
549 __ASSERT_ALWAYS(pNextCell<=aNext, (Unlock(), HEAP_PANIC(ETHeapReAllocBadNextCell))); // Following free cell overlaps
// Not physically adjacent: report "no following free cell" (aNext reset on
// a line missing from this excerpt).
550 if (pNextCell!=aNext)
// Grows aCell in place to aSize by consuming (part of) the physically
// adjacent free cell aNext. Returns KErrNone on success, KErrGeneral when
// the adjacent free space is insufficient. Kernel-mode zero-fills the newly
// acquired bytes, matching Alloc's zero-fill guarantee.
558 TInt RHeap::TryToGrowCell(SCell* aCell,SCell* aPrev, SCell* aNext, TInt aSize)
560 // Try to grow the heap cell 'aCell' in place, to size 'aSize'.
561 // Requires the free cell immediately after aCell (aNext), and the free cell prior to
562 // that (aPrev), to be provided. (As found by FindFollowingFreeCell)
566 TInt extra = aSize - aCell->len;
567 if (aNext && (aNext->len>=extra)) // Is there a following free cell big enough?
569 if (aNext->len - extra >= iMinCell) // take part of free cell ?
571 SCell* pX = (SCell*)((TUint8*)aNext + extra); // remainder of free cell
572 pX->next = aNext->next; // remainder->next = original free cell->next
573 pX->len = aNext->len - extra; // remainder length = original free cell length - extra
574 aPrev->next = pX; // put remainder into free chain
578 extra = aNext->len; // Take whole free cell
579 aPrev->next = aNext->next; // remove from free chain
581 #ifdef __KERNEL_MODE__
582 memclr(((TUint8*)aCell) + aCell->len, extra);
584 aCell->len += extra; // update reallocated cell length
585 iTotalAllocSize += extra;
588 return KErrGeneral; // No space to grow cell
594 // UEXPORT_C TAny* RHeap::ReAlloc(TAny* aCell, TInt aSize, TInt aMode)
596 Increases or decreases the size of an existing cell in the heap.
598 If the cell is being decreased in size, then it is guaranteed not to move,
599 and the function returns the pointer originally passed in aCell. Note that the
600 length of the cell will be the same if the difference between the old size
601 and the new size is smaller than the minimum cell size.
603 If the cell is being increased in size, i.e. aSize is bigger than its
604 current size, then the function tries to grow the cell in place.
605 If successful, then the function returns the pointer originally
606 passed in aCell. If unsuccessful, then:
608 1. if the cell cannot be moved, i.e. aMode has the ENeverMove bit set, then
609 the function returns NULL.
610 2. if the cell can be moved, i.e. aMode does not have the ENeverMove bit set,
611 then the function tries to allocate a new replacement cell, and, if
612 successful, returns a pointer to the new cell; if unsuccessful, it
615 Note that in debug mode, the function returns NULL if the cell cannot be grown
616 in place, regardless of whether the ENeverMove bit is set.
618 If the reallocated cell is at a different location from the original cell, then
619 the content of the original cell is copied to the reallocated cell.
621 If the supplied pointer, aCell is NULL, then the function attempts to allocate
622 a new cell, but only if the cell can be moved, i.e. aMode does not have
623 the ENeverMove bit set.
625 Note the following general points:
627 1. If reallocation fails, the content of the original cell is preserved.
629 2. The resulting size of the re-allocated cell may be rounded up to a value
630 greater than aSize, but is guaranteed to be not less than aSize.
632 @param aCell A pointer to the cell to be reallocated. This may be NULL.
634 @param aSize The new size of the cell. This may be bigger or smaller than the
635 size of the original cell.
637 @param aMode Flags controlling the reallocation. The only bit which has any
638 effect on this function is that defined by the enumeration
639 ENeverMove of the enum RAllocator::TReAllocMode.
640 If this is set, then any successful reallocation guarantees not
641 to have changed the start address of the cell.
642 By default, this parameter is zero.
644 @return A pointer to the reallocated cell. This may be the same as the original
645 pointer supplied through aCell. NULL if there is insufficient memory to
646 reallocate the cell, or to grow it in place.
648 @panic USER 42, if aCell is not NULL, and does not point to a valid cell.
649 @panic USER 47, if the maximum unsigned value of aSize is greater
650 than or equal to KMaxTInt/2. For example,
651 calling ReAlloc(someptr,-1) raises this panic.
653 @see RAllocator::TReAllocMode
// Public realloc entry point: optional memory-monitor validation, then the
// real work in ReAllocImpl, then BTrace success/failure events when
// ETraceAllocs is set. (Contract documented in the comment block above.)
655 UEXPORT_C TAny* RHeap::ReAlloc(TAny* aCell, TInt aSize, TInt aMode)
657 if (aCell && iFlags&EMonitorMemory)
658 __MEMORY_MONITOR_CHECK_CELL(aCell);
659 TAny* retval = ReAllocImpl(aCell, aSize, aMode);
660 if (iFlags & ETraceAllocs)
// Success trace carries: actual new length, requested size, old address.
664 TUint32 traceData[3];
665 traceData[0] = AllocLen(retval);
666 traceData[1] = aSize;
667 traceData[2] = (TUint32)aCell;
668 BTraceContextN(BTrace::EHeap, BTrace::EHeapReAlloc,(TUint32)this, (TUint32)retval,traceData, sizeof(traceData));
671 BTraceContext12(BTrace::EHeap, BTrace::EHeapReAllocFail, (TUint32)this, (TUint32)aCell, (TUint32)aSize);
// Core realloc logic. Order of attempts when growing: (1) grow in place via
// the following free cell; (2) if moving is allowed, allocate a new cell
// (growing the heap if necessary), copy and free the old one; (3) if moving
// is forbidden but the cell is at the end of the heap, grow the heap and
// then the cell. Shrinking splits off the tail as a new free cell when the
// remainder is at least iMinCell. NOTE(review): lock/unlock, early returns
// and several brace lines are missing from this excerpt.
675 inline TAny* RHeap::ReAllocImpl(TAny* aCell, TInt aSize, TInt aMode)
677 __CHECK_THREAD_STATE;
// NULL input degenerates to Alloc (or NULL when moving is forbidden).
679 return (aMode & ENeverMove) ? NULL : Alloc(aSize);
680 __ASSERT_ALWAYS((TUint)aSize<(KMaxTInt/2),HEAP_PANIC(ETHeapBadAllocatedCellSize));
682 SCell* pC = GetAddress(aCell);
683 TInt old_len = pC->len;
685 aSize = Max(Align(aSize + EAllocCellSize), iMinCell);
686 if (aSize > old_len) // Trying to grow cell
688 __SIMULATE_ALLOC_FAIL({ Unlock(); return NULL;})
690 // Try to grow cell in place, without reallocation
693 FindFollowingFreeCell(pC,pPrev, pNext);
694 TInt r = TryToGrowCell(pC, pPrev, pNext, aSize);
702 if (!(aMode & ENeverMove))
703 // If moving allowed, try re-alloc.
704 // If we need to extend heap,and cell is at the end, try and grow in place
707 SCell* pNewCell = (SCell*)DoAlloc(aSize, pLastFree);
708 if (!pNewCell && !(iFlags & EFixedSize))
709 // if we need to extend the heap to alloc
711 if (IsLastCell(pC) || (pNext && IsLastCell(pNext)))
712 // if last used Cell, try and extend heap and then cell
714 TInt r = TryToGrowHeap(aSize - old_len, pLastFree);
717 r = TryToGrowCell(pC, pPrev, pPrev->next, aSize);
// Heap growth succeeded, so the in-place grow must succeed too.
719 __ASSERT_DEBUG(r == KErrNone, HEAP_PANIC(ETHeapCellDidntGrow));
724 // try to grow chunk heap and Alloc on it
726 TInt r = TryToGrowHeap(aSize, pLastFree);
728 pNewCell = DoAlloc(aSize, pLastFree);
733 // if we created a new cell, adjust tellies, copy the contents and delete old cell.
736 iTotalAllocSize += (pNewCell->len - EAllocCellSize);
739 TUint8* raw = ((TUint8*) pNewCell);
// Copy only the old payload; kernel-mode zero-fills the tail.
741 memcpy(raw + EAllocCellSize, aCell, old_len - EAllocCellSize);
742 #ifdef __KERNEL_MODE__
743 memclr(raw + old_len, pNewCell->len - old_len);
// Preserve the debug nesting level across the move.
746 __DEBUG_RESTORE(raw + EAllocCellSize);
747 return raw + EAllocCellSize;
751 // No moving, but still possible to extend the heap (if heap extendable)
753 if (!(iFlags & EFixedSize) && (IsLastCell(pC) || (pNext && IsLastCell(pNext))))
755 SCell* pLastFree = pNext ? pNext : pPrev;
756 TInt r = TryToGrowHeap(aSize - old_len, pLastFree);
759 r = TryToGrowCell(pC, pPrev, pPrev->next, aSize);
761 __ASSERT_DEBUG(r==KErrNone, HEAP_PANIC(ETHeapCellDidntGrow));
// Shrinking: split off the tail when it can form a valid free cell;
// otherwise the cell keeps its original length (documented behaviour).
769 if (old_len - aSize >= iMinCell)
771 // cell shrinking, remainder big enough to form a new free cell
772 SCell* pX = (SCell*)((TUint8*)pC + aSize); // pointer to new free cell
773 pC->len = aSize; // update cell size
774 pX->len = old_len - aSize; // size of remainder
775 iTotalAllocSize -= pX->len;
776 DoFree(pX); // link new free cell into chain, shrink heap if necessary
785 #ifndef __KERNEL_MODE__
// Walks the free list accumulating the allocatable space per free cell
// (length minus an allocation header). NOTE(review): the accumulation of
// the total and of aBiggestBlock is on lines missing from this excerpt.
787 EXPORT_C TInt RHeap::Available(TInt& aBiggestBlock) const
789 Gets the total free space currently available on the heap and the space
790 available in the largest free block.
792 The space available represents the total space which can be allocated.
794 Note that compressing the heap may reduce the total free space available and
795 the space available in the largest free block.
797 @param aBiggestBlock On return, contains the space available
798 in the largest free block on the heap.
800 @return The total free space currently available on the heap.
807 SCell* pC = iFree.next;
808 for (; pC; pC=pC->next)
810 TInt l = pC->len - EAllocCellSize;
// Reports allocation statistics maintained by Alloc/Free/ReAlloc
// (iTotalAllocSize here; the iCellCount return is on a line missing from
// this excerpt).
823 EXPORT_C TInt RHeap::AllocSize(TInt& aTotalAllocSize) const
825 Gets the number of cells allocated on this heap, and the total space
828 @param aTotalAllocSize On return, contains the total space allocated
831 @return The number of cells allocated on this heap.
836 aTotalAllocSize = iTotalAllocSize;
// Constructs a non-growable RHeap directly in caller-supplied memory via
// placement new, then creates the heap mutex (unless single-threaded - the
// surrounding conditional lines are missing from this excerpt).
844 EXPORT_C RHeap* UserHeap::FixedHeap(TAny* aBase, TInt aMaxLength, TInt aAlign, TBool aSingleThread)
846 Creates a fixed length heap at a specified location.
848 On successful return from this function, aMaxLength bytes are committed by the chunk.
849 The heap cannot be extended.
851 @param aBase A pointer to the location where the heap is to be constructed.
852 @param aMaxLength The length of the heap. If the supplied value is less
853 than KMinHeapSize, it is discarded and the value KMinHeapSize
855 @param aAlign The alignment of heap cells.
856 @param aSingleThread Indicates whether single threaded or not.
858 @return A pointer to the new heap, or NULL if the heap could not be created.
860 @panic USER 56 if aMaxLength is negative.
861 @panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
864 // Force construction of the fixed memory.
868 __ASSERT_ALWAYS(aMaxLength>=0, ::Panic(ETHeapMaxLengthNegative));
869 if (aMaxLength<KMinHeapSize)
870 aMaxLength=KMinHeapSize;
871 RHeap* h = new(aBase) RHeap(aMaxLength, aAlign, aSingleThread);
874 TInt r = h->iLock.CreateLocal();
// The handle table points at the lock (fixed heaps own no chunk handle).
877 h->iHandles = (TInt*)&h->iLock;
885 Constructor where minimum and maximum length of the heap can be defined.
886 It defaults the chunk heap to be created to have use a new local chunk,
887 to have a grow by value of KMinHeapGrowBy, to be unaligned, not to be
888 single threaded and not to have any mode flags set.
890 @param aMinLength The minimum length of the heap to be created.
891 @param aMaxLength The maximum length to which the heap to be created can grow.
892 If the supplied value is less than KMinHeapSize, then it
893 is discarded and the value KMinHeapSize used instead.
// Initialises every field to its documented default: version 0, new local
// chunk (no name), grow-by of 1 (rounded up to a page by the RHeap chunk
// constructor), default alignment, multi-threaded, no offset, unspecified
// paging, no mode flags.
895 EXPORT_C TChunkHeapCreateInfo::TChunkHeapCreateInfo(TInt aMinLength, TInt aMaxLength) :
896 iVersionNumber(EVersion0), iMinLength(aMinLength), iMaxLength(aMaxLength),
897 iAlign(0), iGrowBy(1), iSingleThread(EFalse),
898 iOffset(0), iPaging(EUnspecified), iMode(0), iName(NULL)
904 Sets the chunk heap to create a new chunk with the specified name.
906 This overrides any previous call to TChunkHeapCreateInfo::SetNewChunkHeap() or
907 TChunkHeapCreateInfo::SetExistingChunkHeap() for this TChunkHeapCreateInfo object.
909 @param aName The name to be given to the chunk heap to be created
910 If NULL, the function constructs a local chunk to host the heap.
911 If not NULL, a pointer to a descriptor containing the name to be
912 assigned to the global chunk hosting the heap.
// Selects create-new-chunk mode: records the (optional) global chunk name
// and clears any previously-set existing-chunk handle so
// UserHeap::ChunkHeap takes the chunk-creation path.
914 EXPORT_C void TChunkHeapCreateInfo::SetCreateChunk(const TDesC* aName)
916 iName = (TDesC*)aName;
917 iChunk.SetHandle(KNullHandle);
922 Sets the chunk heap to be created to use the chunk specified.
924 This overrides any previous call to TChunkHeapCreateInfo::SetNewChunkHeap() or
925 TChunkHeapCreateInfo::SetExistingChunkHeap() for this TChunkHeapCreateInfo object.
927 @param aChunk A handle to the chunk to use for the heap.
929 EXPORT_C void TChunkHeapCreateInfo::SetUseChunk(const RChunk aChunk)
937 Creates a chunk heap of the type specified by the parameter aCreateInfo.
939 @param aCreateInfo A reference to a TChunkHeapCreateInfo object specifying the
940 type of chunk heap to create.
942 @return A pointer to the new heap or NULL if the heap could not be created.
944 @panic USER 41 if the heap's specified minimum length is greater than the specified maximum length.
945 @panic USER 55 if the heap's specified minimum length is negative.
946 @panic USER 172 if the heap's specified alignment is not a power of 2 or is less than the size of a TAny*.
// Creates a chunk heap from a TChunkHeapCreateInfo: either creates a fresh
// (local or named global) chunk and builds the heap in it, or builds the
// heap in the caller-supplied existing chunk. Both paths delegate to
// OffsetChunkHeap. NOTE(review): some lines (e.g. the RChunk declaration
// and chunk handle close) are missing from this excerpt.
948 EXPORT_C RHeap* UserHeap::ChunkHeap(const TChunkHeapCreateInfo& aCreateInfo)
950 // aCreateInfo must have been configured to use a new chunk or an existing chunk.
951 __ASSERT_ALWAYS(!(aCreateInfo.iMode & (TUint32)~EChunkHeapMask), ::Panic(EHeapCreateInvalidMode));
954 if (aCreateInfo.iChunk.Handle() == KNullHandle)
955 {// A new chunk is to be created for this heap.
956 __ASSERT_ALWAYS(aCreateInfo.iMinLength >= 0, ::Panic(ETHeapMinLengthNegative));
957 __ASSERT_ALWAYS(aCreateInfo.iMaxLength >= aCreateInfo.iMinLength, ::Panic(ETHeapCreateMaxLessThanMin));
959 TInt maxLength = aCreateInfo.iMaxLength;
960 if (maxLength < KMinHeapSize)
961 maxLength = KMinHeapSize;
963 TChunkCreateInfo chunkInfo;
964 chunkInfo.SetNormal(0, maxLength);
965 chunkInfo.SetOwner((aCreateInfo.iSingleThread)? EOwnerThread : EOwnerProcess);
// A name makes the chunk global (visible to other processes).
966 if (aCreateInfo.iName)
967 chunkInfo.SetGlobal(*aCreateInfo.iName);
968 // Set the paging attributes of the chunk.
969 if (aCreateInfo.iPaging == TChunkHeapCreateInfo::EPaged)
970 chunkInfo.SetPaging(TChunkCreateInfo::EPaged)
971 if (aCreateInfo.iPaging == TChunkHeapCreateInfo::EUnpaged)
972 chunkInfo.SetPaging(TChunkCreateInfo::EUnpaged);
975 if (chunk.Create(chunkInfo) != KErrNone)
977 // Create the heap using the new chunk.
978 TUint mode = aCreateInfo.iMode | EChunkHeapDuplicate; // Must duplicate the handle.
979 h = OffsetChunkHeap(chunk, aCreateInfo.iMinLength, aCreateInfo.iOffset,
980 aCreateInfo.iGrowBy, maxLength, aCreateInfo.iAlign,
981 aCreateInfo.iSingleThread, mode);
// Existing-chunk path: caller keeps ownership semantics from iMode.
986 h = OffsetChunkHeap(aCreateInfo.iChunk, aCreateInfo.iMinLength, aCreateInfo.iOffset,
987 aCreateInfo.iGrowBy, aCreateInfo.iMaxLength, aCreateInfo.iAlign,
988 aCreateInfo.iSingleThread, aCreateInfo.iMode);
// Convenience overload: packages the legacy parameter list into a
// TChunkHeapCreateInfo (new chunk, named global if aName is non-NULL) and
// delegates to ChunkHeap(const TChunkHeapCreateInfo&).
994 EXPORT_C RHeap* UserHeap::ChunkHeap(const TDesC* aName, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread)
996 Creates a heap in a local or global chunk.
998 The chunk hosting the heap can be local or global.
1000 A local chunk is one which is private to the process creating it and is not
1001 intended for access by other user processes.
1002 A global chunk is one which is visible to all processes.
1004 The hosting chunk is local, if the pointer aName is NULL, otherwise
1005 the hosting chunk is global and the descriptor *aName is assumed to contain
1006 the name to be assigned to it.
1008 Ownership of the host chunk is vested in the current process.
1010 A minimum and a maximum size for the heap can be specified. On successful
1011 return from this function, the size of the heap is at least aMinLength.
1012 If subsequent requests for allocation of memory from the heap cannot be
1013 satisfied by compressing the heap, the size of the heap is extended in
1014 increments of aGrowBy until the request can be satisfied. Attempts to extend
1015 the heap causes the size of the host chunk to be adjusted.
1017 Note that the size of the heap cannot be adjusted by more than aMaxLength.
1019 @param aName If NULL, the function constructs a local chunk to host
1021 If not NULL, a pointer to a descriptor containing the name
1022 to be assigned to the global chunk hosting the heap.
1023 @param aMinLength The minimum length of the heap.
1024 @param aMaxLength The maximum length to which the heap can grow.
1025 If the supplied value is less than KMinHeapSize, then it
1026 is discarded and the value KMinHeapSize used instead.
1027 @param aGrowBy The increments to the size of the host chunk. If a value is
1028 not explicitly specified, the value KMinHeapGrowBy is taken
1030 @param aAlign The alignment of heap cells.
1031 @param aSingleThread Indicates whether single threaded or not.
1033 @return A pointer to the new heap or NULL if the heap could not be created.
1035 @panic USER 41 if aMinLength is greater than the supplied value of aMaxLength.
1036 @panic USER 55 if aMinLength is negative.
1037 @panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
1040 // Allocate a Chunk of the requested size and force construction.
1043 TChunkHeapCreateInfo createInfo(aMinLength, aMaxLength);
1044 createInfo.SetCreateChunk(aName);
1045 createInfo.SetGrowBy(aGrowBy);
1046 createInfo.SetAlignment(aAlign);
1047 createInfo.SetSingleThread(aSingleThread);
1048 return ChunkHeap(createInfo);
// Existing-chunk overload: thin wrapper over OffsetChunkHeap with a zero
// offset (heap starts at the chunk base).
1054 EXPORT_C RHeap* UserHeap::ChunkHeap(RChunk aChunk, TInt aMinLength, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode)
1056 Creates a heap in an existing chunk.
1058 This function is intended to be used to create a heap in a user writable code
1059 chunk as created by a call to RChunk::CreateLocalCode().
1060 This type of heap can be used to hold code fragments from a JIT compiler.
1062 The maximum length to which the heap can grow is the same as
1063 the maximum size of the chunk.
1065 @param aChunk The chunk that will host the heap.
1066 @param aMinLength The minimum length of the heap.
1067 @param aGrowBy The increments to the size of the host chunk.
1068 @param aMaxLength The maximum length to which the heap can grow.
1069 @param aAlign The alignment of heap cells.
1070 @param aSingleThread Indicates whether single threaded or not.
1071 @param aMode Flags controlling the heap creation. This should be set
1072 from one or more of the values in TChunkHeapCreateMode.
1074 @return A pointer to the new heap or NULL if the heap could not be created.
1076 @panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
1079 // Construct a heap in an already existing chunk
1083 return OffsetChunkHeap(aChunk, aMinLength, 0, aGrowBy, aMaxLength, aAlign, aSingleThread, aMode);
// Builds a heap inside an existing chunk at aOffset bytes from its base:
// normalises sizes/alignment, commits the minimum length, placement-news the
// RHeap, creates its lock, optionally switches the calling thread to the new
// heap, and fixes up chunk/lock handles to the required ownership. Returns
// NULL on any failure. NOTE(review): several lines (error cleanup paths,
// some conditionals and braces) are missing from this excerpt.
1089 EXPORT_C RHeap* UserHeap::OffsetChunkHeap(RChunk aChunk, TInt aMinLength, TInt aOffset, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode)
1091 Creates a heap in an existing chunk, offset from the beginning of the chunk.
1093 This function is intended to be used to create a heap where a fixed amount of
1094 additional data must be stored at a known location. The additional data can be
1095 placed at the base address of the chunk, allowing it to be located without
1096 depending on the internals of the heap structure.
1098 The maximum length to which the heap can grow is the maximum size of the chunk,
1101 @param aChunk The chunk that will host the heap.
1102 @param aMinLength The minimum length of the heap.
1103 @param aOffset The offset from the start of the chunk, to the start of the heap.
1104 @param aGrowBy The increments to the size of the host chunk.
1105 @param aMaxLength The maximum length to which the heap can grow.
1106 @param aAlign The alignment of heap cells.
1107 @param aSingleThread Indicates whether single threaded or not.
1108 @param aMode Flags controlling the heap creation. This should be set
1109 from one or more of the values in TChunkHeapCreateMode.
1111 @return A pointer to the new heap or NULL if the heap could not be created.
1113 @panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
1116 // Construct a heap in an already existing chunk
1121 UserHal::PageSizeInBytes(page_size);
// aAlign==0 selects the default cell alignment (guard line not visible).
1123 aAlign = RHeap::ECellAlignment;
1124 TInt maxLength = aChunk.MaxSize();
1125 TInt round_up = Max(aAlign, page_size);
1126 TInt min_cell = _ALIGN_UP(Max((TInt)RHeap::EAllocCellSize, (TInt)RHeap::EFreeCellSize), aAlign);
// Offset is forced to 8-byte alignment regardless of cell alignment.
1127 aOffset = _ALIGN_UP(aOffset, 8);
1128 if (aMaxLength && aMaxLength+aOffset<maxLength)
1129 maxLength = _ALIGN_UP(aMaxLength+aOffset, round_up);
1130 __ASSERT_ALWAYS(aMinLength>=0, ::Panic(ETHeapMinLengthNegative));
1131 __ASSERT_ALWAYS(maxLength>=aMinLength, ::Panic(ETHeapCreateMaxLessThanMin));
// Minimum must fit the RHeap object plus at least one cell header.
1132 aMinLength = _ALIGN_UP(Max(aMinLength, (TInt)sizeof(RHeap) + min_cell) + aOffset, round_up);
// Commit the initial memory before constructing the heap in it.
1133 TInt r=aChunk.Adjust(aMinLength);
1137 RHeap* h = new (aChunk.Base() + aOffset) RHeap(aChunk.Handle(), aOffset, aMinLength, maxLength, aGrowBy, aAlign, aSingleThread);
1139 TBool duplicateLock = EFalse;
// When switching to this heap, the lock is created thread-owned first and
// duplicated to process ownership below.
1142 duplicateLock = aMode & EChunkHeapSwitchTo;
1143 if(h->iLock.CreateLocal(duplicateLock ? EOwnerThread : EOwnerProcess)!=KErrNone)
1145 h->iChunkHandle = 0;
1150 if (aMode & EChunkHeapSwitchTo)
1151 User::SwitchHeap(h);
1153 h->iHandles = &h->iChunkHandle;
1156 // now change the thread-relative chunk/semaphore handles into process-relative handles
1157 h->iHandleCount = 2;
1160 RHandleBase s = h->iLock;
1161 r = h->iLock.Duplicate(RThread());
1164 if (r==KErrNone && (aMode & EChunkHeapDuplicate))
1166 r = ((RChunk*)&h->iChunkHandle)->Duplicate(RThread());
// Duplication failed: release the lock and forget the chunk handle.
1168 h->iLock.Close(), h->iChunkHandle=0;
1173 h->iHandleCount = 1;
1174 if (aMode & EChunkHeapDuplicate)
1175 r = ((RChunk*)&h->iChunkHandle)->Duplicate(RThread(), EOwnerThread);
1178 // return the heap address
1179 return (r==KErrNone) ? h : NULL;
// Test a single bit in the kernel debug mask words (bit>>5 selects the word,
// bit&31 the bit within it).
1184 #define UserTestDebugMaskBit(bit) (TBool)(UserSvr::DebugMask(bit>>5) & (1<<(bit&31)))
// Name given to every thread's heap chunk.
1186 _LIT(KLitDollarHeap,"$HEAP");
1187 EXPORT_C TInt UserHeap::CreateThreadHeap(SStdEpocThreadCreateInfo& aInfo, RHeap*& aHeap, TInt aAlign, TBool aSingleThread)
1192 // Create a user-side heap
// Creates the "$HEAP" chunk for a new thread and constructs the thread's heap
// inside it. On success aHeap receives the heap pointer. Returns KErrNoMemory
// if the heap could not be created. NOTE(review): some lines (braces, early
// returns, the RChunk declaration) are elided from this view.
1196 UserHal::PageSizeInBytes(page_size);
// Initial committed size is page-rounded; the maximum can never be below it.
1197 TInt minLength = _ALIGN_UP(aInfo.iHeapInitialSize, page_size);
1198 TInt maxLength = Max(aInfo.iHeapMaxSize, minLength);
// Enable per-allocation tracing when the kernel trace flag is set.
1199 if (UserTestDebugMaskBit(96)) // 96 == KUSERHEAPTRACE in nk_trace.h
1200 aInfo.iFlags |= ETraceHeapAllocs;
1202 // Create the thread's heap chunk.
1204 TChunkCreateInfo createInfo;
1205 createInfo.SetThreadHeap(0, maxLength, KLitDollarHeap()); // Initialise with no memory committed.
1207 // Set the paging policy of the heap chunk based on the thread's paging policy.
1208 TUint pagingflags = aInfo.iFlags & EThreadCreateFlagPagingMask;
1209 switch (pagingflags)
1211 case EThreadCreateFlagPaged:
1212 createInfo.SetPaging(TChunkCreateInfo::EPaged);
1214 case EThreadCreateFlagUnpaged:
1215 createInfo.SetPaging(TChunkCreateInfo::EUnpaged);
1217 case EThreadCreateFlagPagingUnspec:
1218 // Leave the chunk paging policy unspecified so the process's
1219 // paging policy is used.
1223 TInt r = c.Create(createInfo);
// Build the heap in the chunk and make it the current thread's heap; the heap
// duplicates and owns the chunk handle.
1227 aHeap = ChunkHeap(c, minLength, page_size, maxLength, aAlign, aSingleThread, EChunkHeapSwitchTo|EChunkHeapDuplicate);
1230 return KErrNoMemory;
// Emit the initial BTrace events describing the new heap when tracing is on.
1231 if (aInfo.iFlags & ETraceHeapAllocs)
1233 aHeap->iFlags |= RHeap::ETraceAllocs;
1234 BTraceContext8(BTrace::EHeap, BTrace::EHeapCreate,(TUint32)aHeap, RHeap::EAllocCellSize);
1235 TInt handle = aHeap->ChunkHandle();
1236 TInt chunkId = ((RHandleBase&)handle).BTraceId();
1237 BTraceContext8(BTrace::EHeap, BTrace::EHeapChunkCreate, (TUint32)aHeap, chunkId);
1239 if (aInfo.iFlags & EMonitorHeapMemory)
1240 aHeap->iFlags |= RHeap::EMonitorMemory;
1244 #endif // __KERNEL_MODE__
1246 void RHeap::WalkCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen)
// Walk() callback used by heap checking: accumulates statistics into the
// SHeapCellInfo passed via aPtr and panics on any bad-cell report.
// NOTE(review): the enclosing switch header and some braces/case labels are
// elided from this view.
1249 SHeapCellInfo& info = *(SHeapCellInfo*)aPtr;
1252 case EGoodAllocatedCell:
// Accumulate payload size (cell length minus the allocated-cell header).
1255 info.iTotalAllocSize += (aLen-EAllocCellSize);
1257 RHeap& h = *info.iHeap;
// Cells allocated at the current nesting level are leak candidates; remember
// the first one ("stranded") for diagnostics.
1258 if ( ((SDebugCell*)aCell)->nestingLevel == h.iNestingLevel )
1260 if (++info.iLevelAlloc==1)
1261 info.iStranded = (SDebugCell*)aCell;
1262 #ifdef __KERNEL_MODE__
1263 if (KDebugNum(KSERVER) || KDebugNum(KTESTFAST))
1265 // __KTRACE_OPT(KSERVER,Kern::Printf("LEAKED KERNEL HEAP CELL @ %08x : len=%d", aCell, aLen));
1266 Kern::Printf("LEAKED KERNEL HEAP CELL @ %08x : len=%d", aCell, aLen);
// Hex-dump the leaked cell, 16 bytes per line, starting on a 16-byte boundary.
1267 TLinAddr base = ((TLinAddr)aCell)&~0x0f;
1268 TLinAddr end = ((TLinAddr)aCell)+(TLinAddr)aLen;
1271 const TUint32* p = (const TUint32*)base;
1272 Kern::Printf("%08x: %08x %08x %08x %08x", p, p[0], p[1], p[2], p[3]);
// Any corruption reported by Walk() is fatal.
1284 case EBadAllocatedCellSize:
1285 HEAP_PANIC(ETHeapBadAllocatedCellSize);
1286 case EBadAllocatedCellAddress:
1287 HEAP_PANIC(ETHeapBadAllocatedCellAddress);
1288 case EBadFreeCellAddress:
1289 HEAP_PANIC(ETHeapBadFreeCellAddress);
1290 case EBadFreeCellSize:
1291 HEAP_PANIC(ETHeapBadFreeCellSize);
1293 HEAP_PANIC(ETHeapWalkBadCellType);
1297 TInt RHeap::DoCountAllocFree(TInt& aFree)
// Walks the heap counting allocated and free cells. On return aFree holds the
// total free count and the allocated total is returned.
// NOTE(review): the local SHeapCellInfo declaration is on an elided line.
1300 memclr(&info, sizeof(info));
1302 Walk(&WalkCheckCell, &info);
1303 aFree = info.iTotalFree;
1304 return info.iTotalAlloc;
1308 UEXPORT_C TInt RHeap::DebugFunction(TInt aFunc, TAny* a1, TAny* a2)
// Dispatcher for the RAllocator debug API: aFunc selects the operation and
// a1/a2 carry operation-specific arguments. Unknown codes return
// KErrNotSupported. NOTE(review): the switch header, braces and break
// statements are elided from this view.
1316 case RAllocator::ECount:
// a1 is an out-parameter receiving the free-cell total.
1317 r = DoCountAllocFree(*(TInt*)a1);
1319 case RAllocator::EMarkStart:
// Mark/fail operations are debug-build only.
1320 __DEBUG_ONLY(DoMarkStart());
1322 case RAllocator::EMarkEnd:
1323 __DEBUG_ONLY( r = DoMarkEnd((TInt)a1) );
1325 case RAllocator::ECheck:
1326 r = DoCheckHeap((SCheckInfo*)a1);
1328 case RAllocator::ESetFail:
1329 __DEBUG_ONLY(DoSetAllocFail((TAllocFail)(TInt)a1, (TInt)a2));
1331 case RAllocator::ESetBurstFail:
// a2 points to the rate/burst pair for burst-mode failure simulation.
1334 SRAllocatorBurstFail* fail = (SRAllocatorBurstFail*) a2;
1335 DoSetAllocFail((TAllocFail)(TInt)a1, fail->iRate, fail->iBurst);
1340 case RAllocator::ECheckFailure:
1341 // iRand will be incremented for each EFailNext, EBurstFailNext,
1342 // EDeterministic and EBurstDeterministic failure.
1346 case RAllocator::ECopyDebugInfo:
// Copy the nesting level from the debug header of cell a1 to cell a2
// ([-1] indexes back from the payload to the SDebugCell header).
1348 TInt nestingLevel = ((SDebugCell*)a1)[-1].nestingLevel;
1349 ((SDebugCell*)a2)[-1].nestingLevel = nestingLevel;
1353 Walk((TWalkFunc)a1, a2);
1356 return KErrNotSupported;
1364 void RHeap::Walk(TWalkFunc aFunc, TAny* aPtr)
1366 // Walk the heap calling the info function.
// Visits every cell in address order: allocated cells are the gaps between
// consecutive free-list entries. Each cell is validated (alignment, minimum
// size, bounds) and reported to aFunc with its TCellType; corruption is also
// reported via BTrace when allocation tracing is enabled.
// NOTE(review): loop braces and some continue/return lines are elided.
1371 SCell* pC = (SCell*)iBase; // allocated cells
1372 SCell* pF = &iFree; // free cells
1375 pF = pF->next; // next free cell
// No more free cells: use the heap top as a sentinel "free cell".
1377 pF = (SCell*)iTop; // to make size checking work
// Free-list pointer must stay in bounds and strictly increase.
1378 else if ( (TUint8*)pF>=iTop || (pF->next && pF->next<=pF) )
1380 if (iFlags & ETraceAllocs)
1381 BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pF+EFreeCellSize, 0);
1382 // free cell pointer off the end or going backwards
1384 (*aFunc)(aPtr, EBadFreeCellAddress, pF, 0);
// Validate the free cell's length: at least a minimal cell, properly aligned.
1390 if (l<iMinCell || (l & (iAlign-1)))
1392 if (iFlags & ETraceAllocs)
1393 BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pF+EFreeCellSize, l-EFreeCellSize);
1394 // free cell length invalid
1396 (*aFunc)(aPtr, EBadFreeCellSize, pF, l);
1400 while (pC!=pF) // walk allocated cells up to next free cell
// Same length validation for each allocated cell.
1403 if (l<iMinCell || (l & (iAlign-1)))
1405 if (iFlags & ETraceAllocs)
1406 BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pC+EAllocCellSize, l-EAllocCellSize);
1407 // allocated cell length invalid
1409 (*aFunc)(aPtr, EBadAllocatedCellSize, pC, l);
1412 (*aFunc)(aPtr, EGoodAllocatedCell, pC, l);
1413 SCell* pN = __NEXT_CELL(pC);
// An allocated cell must not run past the start of the next free cell
// (the comparison itself is on an elided line).
1416 if (iFlags & ETraceAllocs)
1417 BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pC+EAllocCellSize, l-EAllocCellSize);
1418 // cell overlaps next free cell
1420 (*aFunc)(aPtr, EBadAllocatedCellAddress, pC, l);
1425 if ((TUint8*)pF == iTop)
1426 break; // reached end of heap
1427 pC = __NEXT_CELL(pF); // step to next allocated cell
1428 (*aFunc)(aPtr, EGoodFreeCell, pF, pF->len);
1433 TInt RHeap::DoCheckHeap(SCheckInfo* aInfo)
// Performs a full heap-integrity walk (WalkCheckCell panics on corruption).
// If aInfo is supplied, additionally verifies the allocated-cell count.
// NOTE(review): the SHeapCellInfo declaration, the aInfo NULL-check and the
// return statement are on elided lines.
1437 memclr(&info, sizeof(info));
1439 Walk(&WalkCheckCell, &info);
// Compare either the whole-heap count or only the current nesting level.
1443 TInt expected = aInfo->iCount;
1444 TInt actual = aInfo->iAll ? info.iTotalAlloc : info.iLevelAlloc;
// iTestData suppresses the fault/panic while heap self-tests are running.
1445 if (actual!=expected && !iTestData)
1447 #ifdef __KERNEL_MODE__
1448 Kern::Fault("KERN-ALLOC COUNT", (expected<<16)|actual );
1450 User::Panic(_L("ALLOC COUNT"), (expected<<16)|actual );
1458 void RHeap::DoMarkStart()
// Begins a heap-mark nesting level (__UHEAP_MARK). The statements executed on
// the first nesting level, and the level increment, are on elided lines.
1460 if (iNestingLevel==0)
1465 TUint32 RHeap::DoMarkEnd(TInt aExpected)
// Ends a heap-mark nesting level (__UHEAP_MARKEND): walks the heap and
// compares the number of cells allocated at this level against aExpected.
// Returns a non-zero value derived from the first "stranded" cell on a
// mismatch; remaining paths are on elided lines.
1467 if (iNestingLevel==0)
// Heap self-tests supply their own info buffer via iTestData.
1470 SHeapCellInfo* p = iTestData ? (SHeapCellInfo*)iTestData : &info;
1471 memclr(p, sizeof(info));
1473 Walk(&WalkCheckCell, p);
1474 if (p->iLevelAlloc != aExpected && !iTestData)
// +1 keeps the result non-zero even when iStranded is NULL.
1475 return (TUint32)(p->iStranded + 1);
1476 if (--iNestingLevel == 0)
1481 void ResetAllocCellLevels(TAny* aPtr, RHeap::TCellType aType, TAny* aCell, TInt aLen)
// Walk() callback: clears the debug nesting level of every good allocated
// cell so later leak checks start from a clean slate. aPtr and aLen are
// unused (required only to match the TWalkFunc signature).
1485 RHeap::SDebugCell* cell = (RHeap::SDebugCell*)aCell;
1486 if (aType == RHeap::EGoodAllocatedCell)
1488 cell->nestingLevel = 0;
// Two-argument overload: forwards to the three-argument version with a
// burst count of 1 so burst fail types still behave sensibly.
1492 void RHeap::DoSetAllocFail(TAllocFail aType, TInt aRate)
1493 {// Default to a burst mode of 1, as aType may be a burst type.
1494 DoSetAllocFail(aType, aRate, 1);
1497 // Don't change as the ETHeapBadDebugFailParameter check below and the API
1498 // documentation rely on this being 16 for RHeap.
// For burst fail modes iFailRate packs (burst << 16) | rate; these constants
// define that split.
1499 LOCAL_D const TInt KBurstFailRateShift = 16;
1500 LOCAL_D const TInt KBurstFailRateMask = (1 << KBurstFailRateShift) - 1;
1502 void RHeap::DoSetAllocFail(TAllocFail aType, TInt aRate, TUint aBurst)
// Configures simulated allocation failure (debug builds): aType selects the
// failure pattern, aRate how often to fail, aBurst how many consecutive
// failures for the burst modes. NOTE(review): the switch header and several
// case labels/breaks are on elided lines.
1506 // reset levels of all allocated cells to 0
1507 // this should prevent subsequent tests failing unnecessarily
1508 iFailed = EFalse; // Reset for ECheckFailure relies on this.
1509 Walk(&ResetAllocCellLevels, NULL);
1510 // reset heap allocation mark as well
1519 case EBurstTrueRandom:
1520 case EBurstDeterministic:
1521 case EBurstFailNext:
1522 // If the fail type is a burst type then iFailRate is split in 2:
1523 // the 16 lsbs are the fail rate and the 16 msbs are the burst length.
1524 if (TUint(aRate) > (TUint)KMaxTUint16 || aBurst > KMaxTUint16)
1525 HEAP_PANIC(ETHeapBadDebugFailParameter);
// Pack rate (low 16 bits) and burst (high 16 bits) into iFailRate;
// counting iFailAllocCount up from -rate delays the first failure by
// "rate" allocations.
1529 iFailRate = (aRate == 0) ? 1 : aRate;
1530 iFailAllocCount = -iFailRate;
1531 iFailRate = iFailRate | (aBurst << KBurstFailRateShift);
// Non-burst modes: plain rate, counter starts at zero.
1537 iFailRate = (aRate == 0) ? 1 : aRate; // A rate of <1 is meaningless
1538 iFailAllocCount = 0;
1542 // Set up iRand for either:
1543 // - random seed value, or
1544 // - a count of the number of failures so far.
// True-random seeding needs TTime, which is user-side only.
1546 #ifndef __KERNEL_MODE__
1550 case EBurstTrueRandom:
1554 TInt64 seed = time.Int64();
1555 iRand = Math::Rand(seed);
// Fixed seed gives reproducible "random" failure sequences.
1561 TInt64 seed = 12345;
1562 iRand = Math::Rand(seed);
1571 TBool RHeap::CheckForSimulatedAllocFail()
1573 // Check to see if the user has requested simulated alloc failure, and if so possibly
1574 // Return ETrue indicating a failure.
// Consulted on each allocation (debug builds) using the state configured by
// DoSetAllocFail(). NOTE(review): the switch header, several case labels and
// the return statements are on elided lines.
1577 // For burst mode failures iFailRate is shared
// Unpack the rate/burst pair packed by DoSetAllocFail().
1578 TUint16 rate = (TUint16)(iFailRate & KBurstFailRateMask);
1579 TUint16 burst = (TUint16)(iFailRate >> KBurstFailRateShift);
1583 #ifndef __KERNEL_MODE__
// Random mode (case label elided): fail at a random point within each
// iFailRate-allocation window.
1586 if (++iFailAllocCount>=iFailRate)
1589 if (!iFailed) // haven't failed yet after iFailRate allocations so fail now
1598 iRand=Math::Rand(seed);
1599 if (iRand%iFailRate==0)
1609 case EBurstTrueRandom:
// Negative count means we are still in the "not failing" phase.
1610 if (++iFailAllocCount < 0)
1612 // We haven't started failing yet so should we now?
1613 TInt64 seed = iRand;
1614 iRand = Math::Rand(seed);
1615 if (iRand % rate == 0)
1616 {// Fail now. Reset iFailAllocCount so we fail burst times
1617 iFailAllocCount = 0;
1623 if (iFailAllocCount < burst)
1624 {// Keep failing for burst times
1628 {// We've now failed burst times so start again.
1629 iFailAllocCount = -(rate - 1);
1634 case EDeterministic:
// Fail exactly every iFailRate-th allocation.
1635 if (++iFailAllocCount%iFailRate==0)
1638 iRand++; // Keep count of how many times we have failed
1642 case EBurstDeterministic:
1643 // This will fail burst number of times, every rate attempts.
1644 if (++iFailAllocCount >= 0)
1646 if (iFailAllocCount == burst - 1)
1647 {// This is the burst time we have failed so make it the last by
1648 // reseting counts so we next fail after rate attempts.
1649 iFailAllocCount = -rate;
1652 iRand++; // Keep count of how many times we have failed
// Fail-next mode (case label elided): fail once when the count hits the rate.
1657 if ((++iFailAllocCount%iFailRate)==0)
1661 iRand++; // Keep count of how many times we have failed
1665 case EBurstFailNext:
1666 if (++iFailAllocCount >= 0)
1668 if (iFailAllocCount == burst - 1)
1669 {// This is the burst time we have failed so make it the last.
1673 iRand++; // Keep count of how many times we have failed
1681 #endif // ifdef _DEBUG
1683 UEXPORT_C TInt RHeap::Extension_(TUint aExtensionId, TAny*& a0, TAny* a1)
// RHeap defines no extensions of its own; delegate to the base class.
1685 return RAllocator::Extension_(aExtensionId, a0, a1);
1688 #if defined(__HEAP_MACHINE_CODED__) && !defined(_DEBUG)
// Out-of-line panic helpers with global (GLDEF_C) linkage, presumably called
// from the machine-coded heap primitives in release builds -- confirm against
// the assembler sources.
1689 GLDEF_C void RHeap_PanicBadAllocatedCellSize()
1691 HEAP_PANIC(ETHeapBadAllocatedCellSize);
1694 GLDEF_C void RHeap_PanicBadNextCell()
1696 HEAP_PANIC(ETHeapFreeBadNextCell);
1699 GLDEF_C void RHeap_PanicBadPrevCell()
1701 HEAP_PANIC(ETHeapFreeBadPrevCell);
1704 GLDEF_C void RHeap_PanicBadCellAddress()
1706 HEAP_PANIC(ETHeapBadCellAddress);