Update contrib.
1 // Copyright (c) 2002-2009 Nokia Corporation and/or its subsidiary(-ies).
2 // All rights reserved.
3 // This component and the accompanying materials are made available
4 // under the terms of the License "Eclipse Public License v1.0"
5 // which accompanies this distribution, and is available
6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
8 // Initial Contributors:
9 // Nokia Corporation - initial contribution.
14 // e32test\heap\t_heap2.cpp
16 // Tests RHeap class, including a stress test and a "grow in place"
21 // - Test allocation on fixed length heaps in local, disconnected chunks for
22 // different heap sizes and alignments. Assumes knowledge of heap
24 // - Test allocation, free, reallocation and compression on chunk heaps with
25 // different maximum and minimum lengths and alignments. Assumes knowledge
26 // of heap implementation.
27 // - Stress test heap implementation with a single thread that allocates, frees
28 // and reallocates cells, and checks the heap.
29 // - Stress test heap implementation with two threads that run concurrently.
30 // - Create a chunk heap, test growing in place by allocating a cell and
31 // then reallocating additional space until failure, verify that the cell
32 // did not move and the size was increased.
33 // - The heap is checked to verify that no cells remain allocated after the
34 // tests are complete.
35 // Platforms/Drives/Compatibility:
37 // Assumptions/Requirement/Pre-requisites:
38 // Failures and causes:
39 // Base Port information:
46 #include <e32def_private.h>
48 // Needed for KHeapShrinkHysRatio which is now ROM 'patchdata'
49 #include "TestRHeapShrink.h"
// Accessor-generator macros: expose protected RHeap members (i<name>) as
// inline const getters. DECL_GET2 lets the getter name differ from the member.
51 #define DECL_GET(T,x) inline T x() const {return i##x;}
52 #define DECL_GET2(T,x,y) inline T y() const {return i##x;}
// KHeapMinCellSize is ROM 'patchdata' on target hardware; a local 0 fallback
// is used otherwise. NOTE(review): the surrounding #if/#else preprocessor
// lines are not visible in this view — presumably these are two branches.
56 IMPORT_D extern const TInt KHeapMinCellSize;
58 const TInt KHeapMinCellSize = 0;
// Global test harness object; all test() assertions report under "T_HEAP2".
61 RTest test(_L("T_HEAP2"));
// Asserts that pointer/length p is aligned to a (a must be a power of two).
63 #define TEST_ALIGN(p,a) test((TLinAddr(p)&((a)-1))==0)
// Magic constant XOR-mixed into cell fill patterns (see STestCell::Set/Verify).
67 enum {EMagic = 0xb8aa3b29};
// STestCell member declarations: Set stamps a deterministic pattern into an
// allocated cell; Verify checks it. The 3-arg Verify allows checking a cell
// that was stamped at a different address/length (after ReAlloc moves).
// NOTE(review): the enclosing struct/class header is not visible in this view.
72 	void Set(TInt aLength);
73 	void Verify(TInt aLength);
74 	void Verify(const TAny* aInitPtr, TInt aInitLength, TInt aLength);
// Fills the cell payload with a pattern derived from (address ^ length ^
// EMagic) so later Verify calls can detect corruption or unexpected moves.
// NOTE(review): interior lines (loop body, closing brace) are missing from
// this view.
77 void STestCell::Set(TInt aLength)
80 	TUint32 x = (TUint32)this ^ (TUint32)aLength ^ (TUint32)EMagic;
// aLength arrives as the full cell length including the header; strip it.
81 	aLength -= RHeap::EAllocCellSize;
85 	aLength /= sizeof(TUint32);
86 	for (i=0; i<aLength-1; ++i)
// Verifies a cell that has not moved: init pointer/length are this cell's own.
94 void STestCell::Verify(TInt aLength)
96 	Verify(this, aLength, aLength);
// Verifies the fill pattern stamped by Set. aInitPtr/aInitLength are the
// address/length at stamping time (may differ from current after a move);
// aLength bounds how many words to check. NOTE(review): loop body and
// closing lines are missing from this view.
99 void STestCell::Verify(const TAny* aInitPtr, TInt aInitLength, TInt aLength)
102 	TUint32 x = (TUint32)aInitPtr ^ (TUint32)aInitLength ^ (TUint32)EMagic;
103 	aLength -= RHeap::EAllocCellSize;
107 	aLength /= sizeof(TUint32);
108 	for (i=0; i<aLength-1; ++i)
// White-box test wrapper over RHeap: exposes protected heap internals through
// DECL_GET accessors and adds checked alloc/free/realloc helpers that stamp
// and verify cell contents. NOTE(review): access specifiers and some
// declarations are missing from this view.
116 class RTestHeap : public RHeap
119 	DECL_GET(TInt,AccessCount)
120 	DECL_GET(TInt,HandleCount)
121 	DECL_GET(TInt*,Handles)
122 	DECL_GET(TUint32,Flags)
123 	DECL_GET(TInt,CellCount)
124 	DECL_GET(TInt,TotalAllocSize)
125 	DECL_GET(TInt,MinLength)
126 	DECL_GET(TInt,Offset)
127 	DECL_GET(TInt,GrowBy)
128 	DECL_GET(TInt,ChunkHandle)
129 	DECL_GET2(const RFastLock&,Lock,LockRef)
130 	DECL_GET(TUint8*,Top)
132 	DECL_GET(TInt,MinCell)
133 	DECL_GET(TInt,PageSize)
134 	DECL_GET2(const SCell&,Free,FreeRef)
// Checked operations: validate heap invariants around each basic operation.
136 	TInt CheckAllocatedCell(const TAny* aCell) const;
137 	void FullCheckAllocatedCell(const TAny* aCell) const;
138 	TAny* TestAlloc(TInt aSize);
139 	void TestFree(TAny* aPtr);
140 	TAny* TestReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
142 	static void WalkFullCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen);
143 	TInt FreeCellLen(const TAny* aPtr) const;
144 	static RTestHeap* FixedHeap(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
145 	void TakeChunkOwnership(RChunk aChunk);
146 	TInt LastFreeCellLen(void) const;
147 	TInt CalcComp(TInt aCompSize);
148 	void ForceCompress(TInt aFreed);
// Validates an allocated cell's header: alignment of address and length,
// minimum cell size, and containment within [iBase, iTop). Returns the cell
// length (including header) — presumably via a return line missing from this
// view.
151 TInt RTestHeap::CheckAllocatedCell(const TAny* aCell) const
153 	SCell* pC = GetAddress(aCell);
155 	TUint8* pEnd = (TUint8*)pC + len;
156 	TEST_ALIGN(aCell, iAlign);
157 	TEST_ALIGN(len, iAlign);
158 	test(len >= iMinCell);
159 	test((TUint8*)pC>=iBase && pEnd<=iTop);
// Header check plus content check: verifies the Set() fill pattern intact.
163 void RTestHeap::FullCheckAllocatedCell(const TAny* aCell) const
165 	((STestCell*)aCell)->Verify(CheckAllocatedCell(aCell));
// Alloc wrapper: on success validates the new cell and stamps it with the
// verification pattern. Returns the allocation (NULL handling presumably on
// lines missing from this view).
168 TAny* RTestHeap::TestAlloc(TInt aSize)
170 	TAny* p = Alloc(aSize);
173 		TInt len = CheckAllocatedCell(p);
// The usable payload must be at least the requested size.
174 		test((len-RHeap::EAllocCellSize)>=aSize);
175 		((STestCell*)p)->Set(len);
// Free wrapper: fully verifies the cell (header + contents) before freeing.
180 void RTestHeap::TestFree(TAny* aPtr)
183 		FullCheckAllocatedCell(aPtr);
187 TAny* RTestHeap::TestReAlloc(TAny* aPtr, TInt aSize, TInt aMode)
189 TInt old_len = aPtr ? CheckAllocatedCell(aPtr) : 0;
191 ((STestCell*)aPtr)->Verify(old_len);
192 TAny* p = ReAlloc(aPtr, aSize, aMode);
195 ((STestCell*)aPtr)->Verify(old_len);
198 TInt new_len = CheckAllocatedCell(p);
199 test((new_len-RHeap::EAllocCellSize)>=aSize);
202 ((STestCell*)p)->Verify(p, old_len, Min(old_len, new_len));
203 if (new_len != old_len)
204 ((STestCell*)p)->Set(new_len);
207 test(!(aMode & ENeverMove));
208 test((new_len > old_len) || (aMode & EAllowMoveOnShrink));
210 ((STestCell*)p)->Verify(aPtr, old_len, Min(old_len, new_len));
211 if (new_len != old_len)
212 ((STestCell*)p)->Set(new_len);
// Fragment of struct SHeapCellInfo (walk accumulator); other members —
// presumably iNextCell and iTotalAlloc, used below — are missing from this view.
220 	TInt iTotalAllocSize;
// DebugFunction(EWalk) callback: checks each walked cell is contiguous with
// the previous one and accumulates totals into the SHeapCellInfo passed as
// aPtr. NOTE(review): the switch statement and several case labels/braces are
// missing from this view.
225 void RTestHeap::WalkFullCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen)
228 	::SHeapCellInfo& info = *(::SHeapCellInfo*)aPtr;
231 		case EGoodAllocatedCell:
// Allocated cell must start exactly where the previous cell ended.
233 			test(aCell == info.iNextCell);
234 			TInt len = ((SCell*)aCell)->len;
236 			info.iNextCell += len;
238 			info.iTotalAllocSize += (aLen-EAllocCellSize);
239 			STestCell* pT = (STestCell*)((TUint8*)aCell + EAllocCellSize);
// Free-cell case (label missing from this view): same contiguity check.
245 			test(aCell == info.iNextCell);
246 			TInt len = ((SCell*)aCell)->len;
248 			info.iNextCell += len;
// Any other cell type is a walk corruption — report it.
253 			test.Printf(_L("TYPE=%d ??\n"),aType);
// Walks the whole heap and cross-checks the accumulated cell count and total
// allocated size against the heap's own bookkeeping (iCellCount,
// iTotalAllocSize); also checks the walk ended exactly at iTop.
259 void RTestHeap::FullCheck()
261 	::SHeapCellInfo info;
262 	Mem::FillZ(&info, sizeof(info));
264 	info.iNextCell = iBase;
265 	DebugFunction(EWalk, (TAny*)&WalkFullCheckCell, &info);
266 	test(info.iNextCell == iTop);
267 	test(info.iTotalAlloc == iCellCount);
268 	test(info.iTotalAllocSize == iTotalAllocSize);
// If aPtr is the payload of a cell on the free list, returns its usable
// length; presumably returns a negative value when not found (the not-found
// return line is missing from this view).
271 TInt RTestHeap::FreeCellLen(const TAny* aPtr) const
273 	SCell* p = iFree.next;
274 	SCell* q = (SCell*)((TUint8*)aPtr - EAllocCellSize);
// Linear scan of the singly-linked free list for the candidate cell.
275 	for (; p && p!=q; p = p->next) {}
277 		return p->len - EAllocCellSize;
// Returns the length of the last (highest-address) cell on the free list —
// the cell heap compression would shrink from. NOTE(review): the empty-list
// check and return statement are missing from this view.
281 TInt RTestHeap::LastFreeCellLen(void) const
283 	SCell* p = iFree.next;
286 	for (; p->next; p=p->next){}
291 /** Checks whether a call to Compress() will actually perform a reduction
293 Relies on the free last cell on the heap being the cell that has just been freed
295 Intended for use by t_heap2.cpp - DoTest4().
296 @param aFreedSize The size in bytes of the cell that was freed
@return The number of bytes Compress() is expected to release (0 if none).
298 TInt RTestHeap::CalcComp(TInt aFreedSize)
301 	largestCell = LastFreeCellLen();
302 	// if the largest cell is too small or it would have been compressed by the
303 	// free operation then return 0.
// KHeapShrinkHysRatio is a fixed-point (x256) ratio, hence the >>8 scaling.
304 	if (largestCell < iPageSize || aFreedSize >= KHeapShrinkHysRatio*(iGrowBy>>8))
310 	return _ALIGN_DOWN(aFreedSize,iPageSize);
314 /** compress the heap if the KHeapShrinkRatio is too large for what we are
315 expecting in DoTest4().
@param aFreed Size in bytes of the cell that was just freed.
317 void RTestHeap::ForceCompress(TInt aFreed)
// Only compress when the free did not already trigger an automatic shrink.
319 	if (aFreed < KHeapShrinkHysRatio*(iGrowBy>>8))
// Creates a fixed-length heap inside a local disconnected chunk (committed
// region starting at offset 0x40000) and validates every documented field of
// the freshly constructed heap against the expected fixed-heap layout.
// Ownership of the chunk is transferred to the heap before returning.
324 RTestHeap* RTestHeap::FixedHeap(TInt aMaxLength, TInt aAlign, TBool aSingleThread)
327 	TInt bottom = 0x40000;
328 	TInt top = bottom + aMaxLength;
329 	TInt r = c.CreateDisconnectedLocal(bottom, top, top + bottom, EOwnerThread);
332 	TUint8* base = c.Base() + bottom;
333 	RTestHeap* h = (RTestHeap*)UserHeap::FixedHeap(base, aMaxLength, aAlign, aSingleThread);
// aAlign==0 means "default": the heap reports ECellAlignment in that case.
335 		aAlign = RHeap::ECellAlignment;
336 	test((TUint8*)h == base);
337 	test(h->AccessCount() == 1);
// Multi-threaded heaps hold one handle (the lock); single-threaded hold none.
338 	test(h->HandleCount() == (aSingleThread ? 0 : 1));
339 	test(h->Handles() == (aSingleThread ? NULL : (TInt*)&h->LockRef()));
340 	test(h->Flags() == TUint32(RAllocator::EFixedSize | (aSingleThread ? RAllocator::ESingleThreaded : 0)));
341 	test(h->CellCount() == 0);
342 	test(h->TotalAllocSize() == 0);
343 	test(h->MaxLength() == aMaxLength);
344 	test(h->MinLength() == h->Top() - (TUint8*)h);
345 	test(h->Offset() == 0);
// Fixed heaps never grow: GrowBy is 0 and no chunk handle is recorded yet.
346 	test(h->GrowBy() == 0);
347 	test(h->ChunkHandle() == 0);
348 	test(h->Align() == aAlign);
// Recompute the expected layout (min cell, header length, usable span) from
// first principles and compare with what the heap reports.
349 	TInt min_cell = _ALIGN_UP((KHeapMinCellSize + Max((TInt)RHeap::EAllocCellSize, (TInt)RHeap::EFreeCellSize)), aAlign);
350 	TInt hdr_len = _ALIGN_UP(sizeof(RHeap) + RHeap::EAllocCellSize, aAlign) - RHeap::EAllocCellSize;
351 	TInt user_len = _ALIGN_DOWN(aMaxLength - hdr_len, aAlign);
352 	test(h->Base() == base + hdr_len);
353 	test(h->MinCell() == min_cell);
354 	test(h->Top() - h->Base() == user_len);
// A new heap has exactly one free cell spanning the whole user area.
355 	test(h->FreeRef().next == (RHeap::SCell*)h->Base());
356 	h->TakeChunkOwnership(c);
// Records the chunk handle inside the heap so the heap's close path releases
// the chunk; the handle array points at the stored handle member.
360 void RTestHeap::TakeChunkOwnership(RChunk aChunk)
362 	iChunkHandle = aChunk.Handle();
364 	iHandles = &iChunkHandle;
// Convenience macros for reading RTestHeap internals from a plain RHeap*.
368 #define ACCESS_COUNT(h) (((RTestHeap*)h)->AccessCount())
369 #define HANDLE_COUNT(h) (((RTestHeap*)h)->HandleCount())
370 #define HANDLES(h) (((RTestHeap*)h)->Handles())
371 #define FLAGS(h) (((RTestHeap*)h)->Flags())
372 #define CELL_COUNT(h) (((RTestHeap*)h)->CellCount())
373 #define TOTAL_ALLOC_SIZE(h) (((RTestHeap*)h)->TotalAllocSize())
374 #define MIN_LENGTH(h) (((RTestHeap*)h)->MinLength())
375 #define OFFSET(h) (((RTestHeap*)h)->Offset())
376 #define GROW_BY(h) (((RTestHeap*)h)->GrowBy())
377 #define CHUNK_HANDLE(h) (((RTestHeap*)h)->ChunkHandle())
378 #define LOCK_REF(h) (((RTestHeap*)h)->LockRef())
379 #define TOP(h) (((RTestHeap*)h)->Top())
380 #define ALIGN(h) (((RTestHeap*)h)->Align())
381 #define MIN_CELL(h) (((RTestHeap*)h)->MinCell())
382 #define PAGE_SIZE(h) (((RTestHeap*)h)->PageSize())
383 #define FREE_REF(h) (((RTestHeap*)h)->FreeRef())
// Test 1: allocation. Allocates cells of every length 1..1024 and checks each
// lands exactly where the heap layout predicts (next expected address,
// cell-size rounding, growth behaviour, fixed-heap exhaustion).
// NOTE(review): loop braces and several statements are missing from this view.
385 void DoTest1(RHeap* aH)
387 	RTestHeap* h = (RTestHeap*)aH;
388 	test.Printf(_L("Test Alloc: min=%x max=%x align=%d growby=%d\n"),
389 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
392 	TUint8* next = h->Base();
393 	TUint8* top = h->Top();
394 	TUint8* limit = (TUint8*)h + h->MaxLength();
395 	TBool fixed = h->Flags() & RAllocator::EFixedSize;
396 	for (l=1; l<=1024; ++l)
398 		TInt remain1 = top - next;
// Expected rounded cell length for a request of l bytes.
399 		TInt xl1 = _ALIGN_UP(Max((l+RHeap::EAllocCellSize), h->MinCell()), h->Align());
// Allocation must fail once a fixed heap is full / the chunk limit is hit.
401 		if ( (fixed && remain1 < xl1) || (next + xl1 > limit) )
404 			test(top == h->Top());
405 			test.Printf(_L("Alloc failed at l=%d next=%08x\n"), l, next);
// Payload starts immediately after the cell header.
408 		test(p == next + RHeap::EAllocCellSize);
411 			// no room for this cell
412 			TInt g = h->GrowBy();
// Grow until the cell fits (growable chunk heaps only).
413 			while (xl1 > remain1)
419 			test(top == h->Top());
420 		if (xl1 + h->MinCell() > remain1)
422 			// this cell fits but remainder is too small or nonexistent
425 			test(h->FreeRef().next == NULL);
429 			// this cell fits and remainder can be reused
432 		test(aH->AllocLen(p) == xl1 - RHeap::EAllocCellSize);
// Test 2: free and coalescing. Allocates arrays of equal-size cells then
// frees them in singles, pairs and triples, verifying that adjacent free
// cells are merged, that reallocation reuses the freed space at the same
// addresses, and that heap shrink/grow behaviour matches KHeapShrinkHysRatio.
// NOTE(review): many interior lines (braces, frees, loop headers) are missing
// from this view.
437 void DoTest2(RHeap* aH)
439 	RTestHeap* h = (RTestHeap*)aH;
440 	test.Printf(_L("Test Free: min=%x max=%x align=%d growby=%d\n"),
441 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
443 	TInt min = h->MinCell();
// Iterate cell sizes al = 1,3,7,... with and without a trailing pad cell.
445 	for (al=1; al<256; (void)((pad=!pad)!=0 || (al+=al+1)) )
451 		test.Printf(_L("al=%d pad=%d\n"), al, pad);
454 		TBool heapReduced = EFalse;
457 		// Check whether the cell created for the allocation of al would end up
458 		// including extra bytes from the last free cell that aren't enough
459 		// to create a new free cell.
461 		TInt freeLen=h->LastFreeCellLen();
462 		TInt actualAllocBytes = Max(_ALIGN_UP(al + RHeap::EAllocCellSize, h->Align()), min);
463 		TInt remainingBytes = freeLen - actualAllocBytes;
464 		if (remainingBytes < min)
466 			// Force the heap to grow so that once this allocation is freed
467 			// the free cell left will be large enough to include the al allocation
468 			// and to create a new free cell if necessary.
469 			actualAllocBytes = _ALIGN_UP(actualAllocBytes + min, h->Align());
470 			TAny* q = h->TestAlloc(actualAllocBytes);
471 			// Check heap has grown
472 			test(top < h->Top());
475 			// Have grown the heap so allocate a cell as a place holder to stop
476 			// the heap being shrunk and the actual cell we want to allocate from being the
478 			spare=h->TestAlloc(8);
480 			// Ensure heap wasn't shrunk after free
481 			test(top == h->Top());
// Allocate the 32-cell working array.
485 			p[i] = h->TestAlloc(al);
487 		if (remainingBytes < min)
488 			{// now safe to free any padding as p[i] now allocated and its size can't change
491 		TInt tmp1=h->AllocLen(p[i]);
492 		TInt tmp2=Max(_ALIGN_UP(al+RHeap::EAllocCellSize,h->Align()), min)-RHeap::EAllocCellSize;
// 'last' is the free remainder immediately after the final array cell.
495 		last = (TUint8*)p[31] + _ALIGN_UP(Max((al + RHeap::EAllocCellSize), min), h->Align());
496 		last_len = h->FreeCellLen(last);
// With padding, consume the remainder so the free list is empty.
500 			test(h->TestAlloc(last_len) == last);
501 			test(h->FreeRef().next == NULL);
// Phase 1: free single cells and check the hole matches the cell size.
506 		for (i=0,heapReduced=EFalse; i<32; ++i)
509 			TInt fl = h->FreeCellLen(p[i]);
510 			TInt xfl = _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
511 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
// Last cell's hole coalesces with the trailing free remainder.
522 					test(fl == xfl + RHeap::EAllocCellSize + last_len);
525 				heapReduced = EFalse;
// Reallocating must reuse the identical address.
528 			test(h->TestAlloc(al)==p[i]);
// Phase 2: free adjacent pairs and check the combined free cell.
530 		for (i=0,heapReduced=EFalse; i<31; ++i)
534 			// Free two adjacent cells and check that the free cell left is the combined
535 			// size of the 2 adjacent cells just freed
538 			TInt fl = h->FreeCellLen(p[i]);
539 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
544 			TInt xfl = 2 * _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
550 					test(fl == xfl + RHeap::EAllocCellSize + last_len);
553 				heapReduced = EFalse;
// The second cell of the pair must no longer appear as its own free cell.
556 			test(h->FreeCellLen(p[j]) < 0);
557 			test(h->TestAlloc(fl)==p[i]);
558 			test(h->Top() == top);
560 			test(h->FreeCellLen(p[i]) == fl);
561 			// test when you alloc a cell that is larger than cells just freed
562 			// that its position is not the same as the freed cells
563 			// will hold for all cells except top/last one
564 			if (j < 31 && !pad && fl < last_len)
566 				q = (TUint8*)h->TestAlloc(fl+1);
569 				test(h->Top() == top);
572 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
578 			// check cell that is just smaller than space but not small enough
579 			// for a new free cell to be created, is the size of whole free cell
580 			test(h->TestAlloc(fl-min+1)==p[i]);
581 			test(h->Top() == top);
582 			test(h->AllocLen(p[i])==fl);
584 			// Check cell that is small enough for new free cell and alloc'd cell to be
585 			// created at p[i] cell is created at p[i]
586 			test(h->TestAlloc(fl-min)==p[i]);
587 			test(h->Top() == top);
588 			// check free cell is at expected position
589 			q = (TUint8*)p[i] + fl - min + RHeap::EAllocCellSize;
590 			test(h->FreeCellLen(q) == min - RHeap::EAllocCellSize);
591 			// alloc 0 length cell at q, will work as new cell of min length will be created
592 			test(h->TestAlloc(0) == q);
593 			test(h->Top() == top);
595 			test(h->FreeCellLen(p[i]) == fl - min);
597 			// again check free cells are combined
598 			test(h->FreeCellLen(q) < 0);
599 			test(h->FreeCellLen(p[i]) == fl);
600 			// check reallocating the cells places them back to same positions
601 			test(h->TestAlloc(al)==p[i]);
602 			test(h->Top() == top);
603 			test(h->TestAlloc(al)==p[j]);
604 			test(h->Top() == top);
606 			test(h->FreeRef().next == NULL);
// Phase 3: free adjacent triples and repeat the same checks.
608 		for (i=0,heapReduced=EFalse; i<30; ++i)
613 			// Free 3 adjacent cells and check free cell created is combined size
618 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
623 			TInt fl = h->FreeCellLen(p[i]);
624 			TInt xfl = 3 * _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
630 					test(fl == xfl + RHeap::EAllocCellSize + last_len);
633 				heapReduced = EFalse;
636 			test(h->FreeCellLen(p[j]) < 0);
637 			test(h->FreeCellLen(p[k]) < 0);
638 			//ensure created free cell is allocated to new cell of free cell size
639 			test(h->TestAlloc(fl)==p[i]);
640 			test(h->Top() == top);
642 			test(h->FreeCellLen(p[i]) == fl);
643 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
645 			if (k < 31 && !pad && fl < last_len)
647 				// Test new cell one larger than free cell size is allocated somewhere else
648 				q = (TUint8*)h->TestAlloc(fl+1);
651 				test(h->Top() == top);
654 			if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
660 			// check allocating cell just smaller than free cell size but
661 			// too large for new free cell to be created, is size of whole free cell
662 			test(h->TestAlloc(fl-min+1)==p[i]);
663 			test(h->Top() == top);
664 			test(h->AllocLen(p[i])==fl);
666 			// ensure free cell is created this time as well as alloc'd cell
667 			test(h->TestAlloc(fl-min)==p[i]);
668 			test(h->Top() == top);
669 			q = (TUint8*)p[i] + fl - min + RHeap::EAllocCellSize;
670 			test(h->FreeCellLen(q) == min - RHeap::EAllocCellSize);
671 			test(h->TestAlloc(0) == q);
672 			test(h->Top() == top);
674 			test(h->FreeCellLen(p[i]) == fl - min);
676 			test(h->FreeCellLen(q) < 0);
677 			test(h->FreeCellLen(p[i]) == fl);
678 			// realloc all cells and check heap not expanded
679 			test(h->TestAlloc(al)==p[i]);
680 			test(h->Top() == top);
681 			test(h->TestAlloc(al)==p[j]);
682 			test(h->Top() == top);
683 			test(h->TestAlloc(al)==p[k]);
684 			test(h->Top() == top);
685 			// If padding then no space should be left on the heap
687 			test(h->FreeRef().next == NULL);
689 		// when padding this will free padding from top of heap
// Test 3: reallocation. Checks ReAlloc against Alloc for fresh cells, then
// builds an alternating allocated/free pattern and exercises in-place grow,
// in-place shrink (with/without a viable remainder free cell), ENeverMove
// semantics, moves into later free cells, and page-aligned shrink of a large
// cell. NOTE(review): many interior lines are missing from this view.
695 void DoTest3(RHeap* aH)
697 	RTestHeap* h = (RTestHeap*)aH;
698 	test.Printf(_L("Test ReAlloc: min=%x max=%x align=%d growby=%d\n"),
699 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
700 	// allocate continuous heap cell, then free them and reallocate again
702 	for (al=1; al<256; al+=al+1)
// ReAlloc(NULL, al) must behave exactly like Alloc(al).
704 		TAny* p0 = h->TestAlloc(al);
705 		TInt al0 = h->AllocLen(p0);
707 		TAny* p1 = h->TestReAlloc(NULL, al, 0);
708 		TInt al1 = h->AllocLen(p1);
// Growing a 1-byte cell to al must give the same length as a fresh alloc.
712 		TAny* p2 = h->TestAlloc(1);
713 		TAny* p3 = h->TestReAlloc(p2, al, 0);
715 		TInt al3 = h->AllocLen(p3);
// Shrinking a 1K cell down to al likewise.
718 		TAny* p4 = h->TestAlloc(1024);
719 		TAny* p5 = h->TestReAlloc(p4, al, 0);
721 		TInt al5 = h->AllocLen(p5);
// Build 30 cells of increasing size, then free every third to leave holes.
727 	for (j=0; j<30; j+=3)
735 			p[i] = h->TestAlloc(8*i*i);
736 			ala[i] = h->AllocLen(p[i]);
739 		for (i=1; i<30; i+=3)
742 			fla[i] = h->FreeCellLen(p[i]);
743 			test(fla[i] == ala[i]);
744 			test(h->FreeCellLen(p[i-1]) < 0);
745 			test(h->FreeCellLen(p[i+1]) < 0);
748 		TInt al1 = _ALIGN_UP(Max((RHeap::EAllocCellSize + 1), h->MinCell()), h->Align());
749 		// adjust al1 for some case when reallocated heap cell will not be shrunk because remainder will not be big enough
750 		// to form a new free cell due to a big KHeapMinCellSize value
751 		TInt alaj = ala[j] + RHeap::EAllocCellSize;
752 		if (al1 < alaj && alaj - al1 < h->MinCell())
// Shrink-in-place to 1 byte; cell must not move (ENeverMove).
754 		TAny* p1 = h->TestReAlloc(p[j], 1, RHeap::ENeverMove);
756 		test(h->AllocLen(p1) == al1 - RHeap::EAllocCellSize);
// Remainder of the shrink merges with the following free cell.
757 		TAny* p1b = (TUint8*)p1 + al1;
758 		test(h->FreeCellLen(p1b) == fla[j+1] + RHeap::EAllocCellSize + ala[j] - al1);
759 		TInt l2 = ala[j] + fla[j+1] + RHeap::EAllocCellSize;	// max without moving
760 		TInt l3 = l2 - h->MinCell();
761 		TAny* p3 = h->TestReAlloc(p[j], l3, RHeap::ENeverMove);
763 		TAny* p3b = (TUint8*)p3 + h->AllocLen(p3) + RHeap::EAllocCellSize;
764 		test(h->FreeCellLen(p3b) == h->MinCell() - RHeap::EAllocCellSize);
// Grow to the exact maximum possible without moving.
765 		TAny* p2 = h->TestReAlloc(p[j], l2, RHeap::ENeverMove);
767 		test(h->AllocLen(p2) == l2);
// One byte more cannot be satisfied in place under ENeverMove.
768 		TAny* p4 = h->TestReAlloc(p[j], l2+1, RHeap::ENeverMove);
770 		test(h->AllocLen(p2) == l2);
// Without ENeverMove the cell moves into a later hole / past the array.
771 		TAny* p5 = h->TestReAlloc(p[j], l2+1, 0);
773 		for (; k<30 && fla[k] <= l2; ++k) {}
777 			test(p5 >= (TUint8*)p[29] + ala[29]);
778 		test(h->FreeCellLen(p2) == ala[j] + ala[j+1] + RHeap::EAllocCellSize);
779 		TInt ali = _ALIGN_UP(RHeap::EAllocCellSize,h->Align());
780 		TAny* p6b = (TUint8*)p[j+2] + ala[j+2] - ali + RHeap::EAllocCellSize;
781 		test(h->FreeCellLen(p6b) < 0);
782 		TAny* p6 = h->TestReAlloc(p[j+2], ala[j+2] - ali , 0);
784 		if (h->AllocLen(p6) != ala[j+2]) // allocated heap cell size changed
785 			test(h->FreeCellLen(p6b) == h->MinCell() - RHeap::EAllocCellSize);
// Grow a cell to 8 chunks then shrink to 1 byte: top must land on the
// page boundary the shrink rounds to.
786 		TInt g = h->GrowBy();
787 		TAny* p7 = h->TestReAlloc(p5, 8*g, 0);
789 		TUint8* p8 = (TUint8*)p7 - RHeap::EAllocCellSize + al1;
790 		TUint8* p9 = (TUint8*)_ALIGN_UP(TLinAddr(p8), h->PageSize());
791 		if (p9-p8 < h->MinCell())
793 		TAny* p7b = h->TestReAlloc(p7, 1, 0);
795 		test(h->Top() + (RHeap::EAllocCellSize & (h->Align()-1)) == p9);
802 // {1 free cell, >1 free cell} x {reduce cell, eliminate cell, reduce cell but too small}
// Test 4: compression (heap shrinking). Two passes — pass 0 with one free
// cell, pass 1 with an extra low free cell — exercising Compress(),
// automatic shrink on Free/ReAlloc, and the KHeapShrinkHysRatio hysteresis.
// NOTE(review): many interior lines (frees, braces, else-arms) are missing
// from this view.
804 void DoTest4(RHeap* aH)
806 	RTestHeap* h = (RTestHeap*)aH;
807 	test.Printf(_L("Test Compress: min=%x max=%x align=%d growby=%d\n"),
808 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
810 	UserHal::PageSizeInBytes(page_size);
811 	test(page_size == h->PageSize());
812 	TInt g = h->GrowBy();
813 	TEST_ALIGN(g, page_size);
814 	test(g >= page_size);
816 	c.SetHandle(h->ChunkHandle());
817 	TInt align = h->Align();
818 	TInt minc = h->MinCell();
820 	TInt orig_size = c.Size();
821 	TUint8* orig_top = h->Top();
823 	// size in bytes that last free cell on the top of the heap must be
824 	// before the heap will be shrunk, size must include the no of bytes to
825 	// store the cell data/header i.e RHeap::EAllocCellSize
826 	TInt shrinkThres = KHeapShrinkHysRatio*(g>>8);
829 	for (pass=0; pass<2; ++pass)
831 		TUint8* p0 = (TUint8*)h->TestAlloc(4);
832 		test(p0 == h->Base() + RHeap::EAllocCellSize);
833 		TInt l1 = h->Top() - (TUint8*)h->FreeRef().next;
834 		TEST_ALIGN(l1, align);
835 		l1 -= RHeap::EAllocCellSize;
837 		// Grow heap by 2*iGrowBy bytes
838 		p1 = (TUint8*)h->TestAlloc(l1 + 2*g);
839 		test(p1 == p0 + h->AllocLen(p0) + RHeap::EAllocCellSize);
840 		test(h->Top() - orig_top == 2*g);
841 		test(c.Size() - orig_size == 2*g);
842 		// May compress heap, may not
844 		h->ForceCompress(2*g);
845 		test(h->Top() == orig_top);
846 		test(c.Size() == orig_size);
847 		test((TUint8*)h->FreeRef().next == p1 - RHeap::EAllocCellSize);
849 		//if KHeapShrinkHysRatio is > 2.0 then heap compression will occur here
850 		test(h->Compress() == 0);
851 		test(h->TestAlloc(l1) == p1);
852 		test(h->FreeRef().next == NULL);
854 			h->TestFree(p0);	// leave another free cell on second pass
855 		TInt l2 = g - RHeap::EAllocCellSize;
856 		// Will grow heap by iGrowBy bytes
857 		TUint8* p2 = (TUint8*)h->TestAlloc(l2);
858 		test(p2 == orig_top + RHeap::EAllocCellSize);
859 		test(h->Top() - orig_top == g);
860 		test(c.Size() - orig_size == g);
861 		// may or may not compress heap
863 		if (l2+RHeap::EAllocCellSize >= shrinkThres)
865 			// When KHeapShrinkRatio small enough heap will have been compressed
866 			test(h->Top() == orig_top);
// On pass 1 the low free cell (from p0) remains; on pass 0 the list is empty.
869 				test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
870 				test((TUint8*)h->FreeRef().next->next == NULL);
873 				test((TUint8*)h->FreeRef().next == NULL);
877 			test(h->Top() - orig_top == g);
880 				test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
881 				test((TUint8*)h->FreeRef().next->next == orig_top);
884 				test((TUint8*)h->FreeRef().next == orig_top);
886 		// this compress will only do anything if the KHeapShrinkRatio is large
887 		// enough to introduce hysteresis otherwise the heap would have been compressed
888 		// by the free operation itself
893 		test(h->Top() == orig_top);
894 		test(c.Size() == orig_size);
896 		// shouldn't compress heap as already compressed
897 		test(h->Compress() == 0);
898 		//grow heap by iGrowBy bytes
899 		test(h->TestAlloc(l2) == p2);
900 		//grow heap by iGrowBy bytes
901 		TUint8* p3 = (TUint8*)h->TestAlloc(l2);
903 		test(h->Top() - orig_top == 2*g);
904 		test(c.Size() - orig_size == 2*g);
905 		// may or may not reduce heap
907 		// may or may not reduce heap
909 		h->ForceCompress(2*g);
910 		test(h->Top() == orig_top);
911 		test(c.Size() == orig_size);
915 			test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
916 			test((TUint8*)h->FreeRef().next->next == NULL);
919 			test((TUint8*)h->FreeRef().next == NULL);
920 		//grow heap by iGrowBy bytes
921 		test(h->TestAlloc(l2) == p2);
922 		//grow heap by iGrowBy*2 + page size bytes
923 		test(h->TestAlloc(l2 + g + page_size) == p3);
924 		test(h->Top() - orig_top == 4*g);
925 		test(c.Size() - orig_size == 4*g);
926 		// will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatioDflt
927 		test(h->TestReAlloc(p3, page_size - RHeap::EAllocCellSize, 0) == p3);
928 		h->ForceCompress(g+page_size);
929 		test(h->Top() - orig_top == g + page_size);
930 		test(c.Size() - orig_size == g + page_size);
932 		// will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatio1
934 		// will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatio1 && g<=page_size
935 		// or KHeapShrinkHysRatio >= 2.0 and g==page_size
937 		// may or may not perform further compression
938 		tmp1=h->CalcComp(g+page_size);
941 		test(h->Top() == orig_top);
942 		test(c.Size() == orig_size);
// Re-establish the layout with a min-cell-shifted p3 and free/compress it.
944 		test(h->TestAlloc(l2 - minc) == p2);
945 		test(h->TestAlloc(l2 + g + page_size + minc) == p3 - minc);
946 		test(h->Top() - orig_top == 4*g);
947 		test(c.Size() - orig_size == 4*g);
948 		h->TestFree(p3 - minc);
949 		h->ForceCompress(l2 + g + page_size + minc);
950 		test(h->Top() - orig_top == g);
951 		test(c.Size() - orig_size == g);
955 			test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
956 			test((TUint8*)h->FreeRef().next->next == p3 - minc - RHeap::EAllocCellSize);
959 			test((TUint8*)h->FreeRef().next == p3 - minc - RHeap::EAllocCellSize);
961 		if (l2+RHeap::EAllocCellSize >= shrinkThres)
963 			// When KHeapShrinkRatio small enough heap will have been compressed
964 			test(h->Top() == orig_top);
965 			test(c.Size() - orig_size == 0);
969 			test(h->Top() - orig_top == g);
970 			test(c.Size() - orig_size == g);
// The remaining checks need a page-aligned heap top to predict shrink sizes.
973 		if ( ((TLinAddr)orig_top & (align-1)) == 0)
976 			TEST_ALIGN(p2 - RHeap::EAllocCellSize, page_size);
977 			// will have free space of g-minc
978 			test(h->TestAlloc(l2 + minc) == p2);
979 			test(h->Top() - orig_top == 2*g);
980 			test(c.Size() - orig_size == 2*g);
981 			free = pass ? h->FreeRef().next->next : h->FreeRef().next;
// Shrink p2 by 4 bytes: whether the heap shrinks depends on hysteresis.
983 			test(h->TestReAlloc(p2, l2 - 4, 0) == p2);
984 			TInt freeSp = g-minc + (l2+minc - (l2-4));
986 			if (freeSp >= shrinkThres && freeSp-page_size >= minc)
988 				// if page_size is less than growBy (g) then heap will be shrunk
989 				// by less than a whole g.
990 				adjust = g-((page_size<g)?page_size:0);
992 			test(h->Top() - orig_top == 2*g - adjust);
993 			test(c.Size() - orig_size == 2*g - adjust);
994 			free = pass ? h->FreeRef().next->next : h->FreeRef().next;
996 			TEST_ALIGN(TLinAddr(free)+4, page_size);
997 			test(h->TestAlloc(l2 + g + page_size + 4) == p3 - 4);
998 			test(h->Top() - orig_top == 4*g - adjust);
999 			test(c.Size() - orig_size == 4*g - adjust);
1000 			h->TestFree(p3 - 4);
1001 			h->ForceCompress(l2 + g + page_size + 4);
1002 			test(h->Top() - orig_top == g + page_size);
1003 			test(c.Size() - orig_size == g + page_size);
1006 			h->ForceCompress(l2-4);
1007 			test(h->Compress() == 0);
1008 			// check heap is grown, will have free space of g-minc
1009 			test(h->TestAlloc(l2 + minc) == p2);
1010 			test(h->Top() - orig_top == 2*g);
1011 			test(c.Size() - orig_size == 2*g);
1012 			free = pass ? h->FreeRef().next->next : h->FreeRef().next;
1014 			// may shrink heap as will now have g+minc free bytes
1015 			test(h->TestReAlloc(p2, l2 - minc, 0) == p2);
1016 			if (g+minc >= shrinkThres)
1018 				test(h->Top() - orig_top == g);
1019 				test(c.Size() - orig_size == g);
1023 				test(h->Top() - orig_top == 2*g);
1024 				test(c.Size() - orig_size == 2*g);
1026 			free = pass ? h->FreeRef().next->next : h->FreeRef().next;
1028 			TEST_ALIGN(TLinAddr(free)+minc, page_size);
1029 			test(h->TestAlloc(l2 + g + page_size + minc) == p3 - minc);
1030 			test(h->Top() - orig_top == 4*g);
1031 			test(c.Size() - orig_size == 4*g);
1032 			h->TestFree(p3 - minc);
1033 			h->ForceCompress(l2 + g + page_size + minc);
1034 			test(h->Top() - orig_top == g);
1035 			test(c.Size() - orig_size == g);
// Fragment of the main test sequence: runs DoTest1-4 (call sites not visible
// in this view) over fixed heaps and chunk heaps of varying max length,
// growby and alignment. The enclosing function header is not visible here.
1051 	h = RTestHeap::FixedHeap(0x1000, 0);
1055 	h = RTestHeap::FixedHeap(0x1000, 0, EFalse);
1059 	h = RTestHeap::FixedHeap(0x10000, 64);
1063 	h = RTestHeap::FixedHeap(0x100000, 4096);
1067 	h = RTestHeap::FixedHeap(0x100000, 8192);
1071 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x1000, 0x1000, 4);
1075 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x10000, 0x1000, 4);
1079 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 4096);
1083 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 4);
1093 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 8);
1103 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 16);
1113 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 32);
1123 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x3000, 0x100000, 0x3000, 4);
// Fragment of struct SHeapStress: per-thread stress-test control block and
// statistics counters. Other members (thread handle, seed, counters) are on
// lines missing from this view.
1138 	volatile TBool iStop;
1143 	TInt iFailedReAllocs;
1146 	RAllocator* iAllocator;
// Pseudo-random number generator for the stress test; body (presumably an
// LFSR/LCG updating iSeed) is on lines missing from this view.
1151 TUint32 SHeapStress::Random()
// Maps a random word to an allocation length. The visible path scales the
// low 7 bits by 128 (<<7); a second path for small lengths is presumably on
// the line missing from this view.
1158 TInt RandomLength(TUint32 aRandom)
1160 	TUint8 x = (TUint8)aRandom;
1162 		return (x & 0x7f) << 7;
// Stress-thread entry point: fills the thread's own heap with up to 256
// random-size cells, then loops freeing and reallocating random subsets,
// verifying lengths and contents each time, until iStop is set.
// NOTE(review): the outer while loop, counters increments and closing lines
// are missing from this view.
1166 TInt HeapStress(TAny* aPtr)
1168 	SHeapStress& hs = *(SHeapStress*)aPtr;
1169 	RTestHeap* h = (RTestHeap*)&User::Allocator();
1173 	Mem::FillZ(cell, sizeof(cell));
1174 	Mem::FillZ(len, sizeof(len));
// Signal the creator that the thread is up and running.
1176 	RThread::Rendezvous(KErrNone);
1179 		// allocate all cells
1181 		for (i=0; i<256; ++i)
1186 				cell[i] = (TUint8*)h->TestAlloc(RandomLength(hs.Random()));
1188 					len[i] = h->AllocLen(cell[i]);
// Free a random selection of 64..191 cells.
1195 		TInt n = 64 + (hs.Random() & 127);
1198 			i = hs.Random() & 0xff;
1201 				test(h->AllocLen(cell[i]) == len[i]);
1202 				h->TestFree(cell[i]);
1209 		// realloc some cells
1210 		n = 64 + (hs.Random() & 127);
1213 			TUint32 rn = hs.Random();
1214 			i = (rn >> 8) & 0xff;
1215 			TInt new_len = RandomLength(rn);
1218 				test(h->AllocLen(cell[i]) == len[i]);
// High bits of rn double as the ReAlloc mode flags.
1220 				TUint8* p = (TUint8*)h->TestReAlloc(cell[i], new_len, rn >> 16);
1224 					len[i] = h->AllocLen(p);
1227 					++hs.iFailedReAllocs;
// Creates and starts a stress thread running HeapStress on aInfo.iAllocator,
// waits for its rendezvous, then drops its priority. Zeroes only the counter
// prefix of the struct (up to iSeed) so seed/allocator set by the caller
// survive.
1238 void CreateStressThread(SHeapStress& aInfo)
1240 	Mem::FillZ(&aInfo, _FOFF(SHeapStress, iSeed));
1241 	RThread& t = aInfo.iThread;
1242 	TInt r = t.Create(KNullDesC(), &HeapStress, 0x2000, aInfo.iAllocator, &aInfo);
1244 	t.SetPriority(EPriorityLess);
1247 	test(s == KRequestPending);
1249 	User::WaitForRequest(s);
1250 	test(s == KErrNone);
1251 	test(t.ExitType() == EExitPending);
1252 	t.SetPriority(EPriorityMuchLess);
// Signals a stress thread to stop, waits for it to exit cleanly (EExitKill,
// KErrNone) and prints its accumulated statistics.
1255 void StopStressThread(SHeapStress& aInfo)
1257 	RThread& t = aInfo.iThread;
1260 	aInfo.iStop = ETrue;
1261 	User::WaitForRequest(s);
1262 	const TDesC& exitCat = t.ExitCategory();
1263 	TInt exitReason = t.ExitReason();
1264 	TInt exitType = t.ExitType();
1265 	test.Printf(_L("Exit type %d,%d,%S\n"), exitType, exitReason, &exitCat);
1266 	test(exitType == EExitKill);
1267 	test(exitReason == KErrNone);
1268 	test(s == KErrNone);
1269 	test.Printf(_L("Total Allocs    : %d\n"), aInfo.iAllocs);
1270 	test.Printf(_L("Failed Allocs   : %d\n"), aInfo.iFailedAllocs);
1271 	test.Printf(_L("Total Frees     : %d\n"), aInfo.iFrees);
1272 	test.Printf(_L("Total ReAllocs  : %d\n"), aInfo.iReAllocs);
1273 	test.Printf(_L("Failed ReAllocs : %d\n"), aInfo.iFailedReAllocs);
1274 	test.Printf(_L("Heap checks     : %d\n"), aInfo.iChecks);
// Single-threaded stress test: one stress thread hammers the heap for 10s.
1277 void DoStressTest1(RAllocator* aAllocator)
1279 	RTestHeap* h = (RTestHeap*)aAllocator;
1280 	test.Printf(_L("Test Stress 1: min=%x max=%x align=%d growby=%d\n"),
1281 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
1283 	hs.iSeed = 0xb504f334;
1284 	hs.iAllocator = aAllocator;
1285 	CreateStressThread(hs);
1286 	User::After(10*1000000);
1287 	StopStressThread(hs);
1288 	CLOSE_AND_WAIT(hs.iThread);
// Concurrent stress test: two stress threads with different seeds share the
// same allocator for 20s, exercising the heap's locking.
1292 void DoStressTest2(RAllocator* aAllocator)
1294 	RTestHeap* h = (RTestHeap*)aAllocator;
1295 	test.Printf(_L("Test Stress 2: min=%x max=%x align=%d growby=%d\n"),
1296 		h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
1299 	hs1.iSeed = 0xb504f334;
1300 	hs1.iAllocator = aAllocator;
1301 	hs2.iSeed = 0xddb3d743;
1302 	hs2.iAllocator = aAllocator;
1303 	CreateStressThread(hs1);
1304 	CreateStressThread(hs2);
1305 	User::After(20*1000000);
1306 	StopStressThread(hs1);
1307 	StopStressThread(hs2);
1308 	CLOSE_AND_WAIT(hs1.iThread);
1309 	CLOSE_AND_WAIT(hs2.iThread);
// Fragment of the stress-test driver: creates chunk heaps for the stress
// tests above. The enclosing function header is not visible in this view.
1316 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 4);
1322 	h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 8);
// Tests "grow in place": repeatedly ReAllocs a single cell in 0x800-byte
// steps on a 0x1000..0x4000 chunk heap and checks the cell never moves, the
// heap actually grows, and a reasonable total size is reached before failure.
// @param aMode ReAlloc mode flags under test (0 or RHeap::ENeverMove).
// @return KErrNone on success; presumably an error code otherwise (failure
//         return lines are missing from this view).
1330 TInt TestHeapGrowInPlace(TInt aMode)
1332 	TBool reAllocs=EFalse;
1333 	TBool heapGrew=EFalse;
1337 	myHeap = UserHeap::ChunkHeap(NULL,0x1000,0x4000,0x1000);
1339 	TAny *testBuffer,*testBuffer2;
1340 	// Start size chosen so that 1st realloc will use up exactly all the heap.
1341 	// Later iterations wont, and there will be a free cell at the end of the heap.
1342 	TInt currentSize = ((0x800) - sizeof(RHeap)) - RHeap::EAllocCellSize;
1343 	TInt growBy = 0x800;
1344 	TInt newSpace, space;
1346 	testBuffer2 = myHeap->Alloc(currentSize);
1348 	newSpace = myHeap->Size();
// Keep growing the same buffer until the heap maximum is hit.
1352 		testBuffer = testBuffer2;
1353 		currentSize+=growBy;
1354 		testBuffer2 = myHeap->ReAlloc(testBuffer,currentSize,aMode);
1356 			newSpace = myHeap->Size();
1361 			if (testBuffer!=testBuffer2)
1368 	} while (testBuffer2);
1369 	currentSize-=growBy;
1371 	myHeap->Free(testBuffer);
1377 		test.Printf(_L("Failure - Memory was moved!\n"));
1382 		test.Printf(_L("Failure - Heap Never Grew!\n"));
1385 	if (currentSize<= 0x3000)
1387 		test.Printf(_L("Failed to grow by a reasonable amount!\n"));
// Fragment of the driver: runs the grow-in-place test with and without
// ENeverMove. The enclosing function header is not visible in this view.
1396 	test.Next(_L("Testing Grow In Place"));
1397 	test(TestHeapGrowInPlace(0)==KErrNone);
1398 	test(TestHeapGrowInPlace(RHeap::ENeverMove)==KErrNone);
// Heap created by the DEF078391 regression thread; inspected by the creator.
1401 RHeap* TestDEF078391Heap = 0;
// Thread entry: creates a multi-threaded chunk heap and publishes it via the
// global above. Returns KErrGeneral if creation failed.
1403 TInt TestDEF078391ThreadFunction(TAny*)
1405 	TestDEF078391Heap = UserHeap::ChunkHeap(NULL,0x1000,0x100000,KMinHeapGrowBy,0,EFalse);
1406 	return TestDEF078391Heap ? KErrNone : KErrGeneral;
1409 void TestDEF078391()
1411 	// Test that creating a multithreaded heap with UserHeap::ChunkHeap
1412 	// doesn't create any reference counts on the creating thread.
1413 	// This is done by creating a heap in a named thread, then exiting
1414 	// the thread and re-creating it with the same name.
1415 	// This will fail with KErrAlreadyExists if the original thread has
1416 	// not died because of an unclosed reference count.
1417 	test.Next(_L("Test that creating a multithreaded heap doesn't open references of creator"));
1418 	_LIT(KThreadName,"ThreadName");
1420 	TInt r=t.Create(KThreadName,TestDEF078391ThreadFunction,0x1000,0x1000,0x100000,NULL);
1422 	TRequestStatus status;
1425 	User::WaitForRequest(status);
1426 	test(status==KErrNone);
1427 	test(t.ExitType()==EExitKill);
1428 	test(t.ExitReason()==KErrNone);
1430 	test(TestDEF078391Heap!=0);
1431 	User::After(1000000); // give more opportunity for thread cleanup to happen
1433 	// create thread a second time
1434 	r=t.Create(KThreadName,TestDEF078391ThreadFunction,0x1000,0x1000,0x100000,NULL);
1439 	// close the heap that got created earlier
1440 	TestDEF078391Heap->Close();
// Fragment of the top-level test entry (presumably E32Main, header not
// visible in this view): starts the test banner.
1447 	test.Start(_L("Testing heaps"));