os/kernelhwsrv/kernel/eka/common/heap.cpp
author:      sl
date:        Tue, 10 Jun 2014 14:32:02 +0200
changeset:   1 260cb5ec6c19
permissions: -rw-r--r--
comment:     Update contrib.
// Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\common\heap.cpp
//
//

#include "common.h"
#ifdef __KERNEL_MODE__
#include <kernel/kern_priv.h>
#endif

#ifdef _DEBUG
#define __SIMULATE_ALLOC_FAIL(s)	if (CheckForSimulatedAllocFail()) {s}
#define	__CHECK_CELL(p)				CheckCell(p)
#define	__ZAP_CELL(p)				memset( ((TUint8*)p) + RHeap::EAllocCellSize, 0xde, p->len - RHeap::EAllocCellSize)
#define __DEBUG_SAVE(p)				TInt dbgNestLevel = ((SDebugCell*)p)->nestingLevel
#define __DEBUG_RESTORE(p)			((SDebugCell*)(((TUint8*)p)-EAllocCellSize))->nestingLevel = dbgNestLevel
#else
#define __SIMULATE_ALLOC_FAIL(s)
#define	__CHECK_CELL(p)
#define	__ZAP_CELL(p)
#define __DEBUG_SAVE(p)
#define __DEBUG_RESTORE(p)
#endif

#define __NEXT_CELL(p)				((SCell*)(((TUint8*)p)+p->len))

#define __POWER_OF_2(x)				((TUint32)((x)^((x)-1))>=(TUint32)(x))
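// Illustrative note (not in the original source): for a power of two x, x^(x-1) sets every
// bit up to and including the single set bit of x, giving 2x-1 >= x; for any other non-zero
// x the result only covers the bits below the lowest set bit and is therefore < x.
// For example, x=8: 8^7 = 0xF >= 8, but x=12: 12^11 = 7 < 12.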

#define __MEMORY_MONITOR_CHECK_CELL(p) \
					{ \
					TLinAddr m = TLinAddr(iAlign-1); \
					SCell* c = (SCell*)(((TUint8*)p)-EAllocCellSize); \
					if((c->len & m) || (c->len<iMinCell) || ((TUint8*)c<iBase) || ((TUint8*)__NEXT_CELL(c)>iTop)) \
						BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)p, (TUint32)c->len-EAllocCellSize); \
					}

/**
@SYMPatchable
@publishedPartner
@released

Defines the minimum cell size of a heap.

The constant can be changed at ROM build time using the patchdata OBY keyword.
*/
#ifdef __X86GCC__	// For X86GCC we don't use the proper data import attribute
#undef IMPORT_D		// since the constant is not really imported. GCC doesn't
#define IMPORT_D	// allow imports from self.
#endif
IMPORT_D extern const TInt KHeapMinCellSize;

/**
@SYMPatchable
@publishedPartner
@released

This constant defines the ratio that determines the amount of hysteresis between heap growing
and heap shrinking.
It is a 32-bit fixed point number where the radix point is defined to be
between bits 7 and 8 (where the LSB is bit 0), i.e. using standard notation, a Q8 or fx24.8
fixed point number.  For example, for a ratio of 2.0, set KHeapShrinkHysRatio=0x200.

The heap shrinking hysteresis value is calculated to be:
@code
KHeapShrinkHysRatio*(iGrowBy>>8)
@endcode
where iGrowBy is a page-aligned value set from the argument, aGrowBy, to the RHeap constructor.
The default hysteresis value is iGrowBy bytes, i.e. KHeapShrinkHysRatio=2.0.

Memory usage may be improved by reducing the heap shrinking hysteresis
by setting 1.0 < KHeapShrinkHysRatio < 2.0.  Heap shrinking hysteresis is disabled/removed
when KHeapShrinkHysRatio <= 1.0.

The constant can be changed at ROM build time using the patchdata OBY keyword.
*/
IMPORT_D extern const TInt KHeapShrinkHysRatio;
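// Worked example (illustrative, not part of the original source): with an assumed
// page-sized aGrowBy of 4096 bytes and KHeapShrinkHysRatio = 0x200 (i.e. 2.0), the
// shrink threshold used by RHeap::DoFree() below is
//     KHeapShrinkHysRatio*(iGrowBy>>8) = 0x200*(4096>>8) = 512*16 = 8192 bytes,
// so the heap is only shrunk when the topmost free cell is at least 8192 bytes long.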

#pragma warning( disable : 4705 )	// statement has no effect
UEXPORT_C RHeap::RHeap(TInt aMaxLength, TInt aAlign, TBool aSingleThread)
/**
@internalComponent
*/
//
// Constructor for fixed size heap
//
	:	iMinLength(aMaxLength), iMaxLength(aMaxLength), iOffset(0), iGrowBy(0), iChunkHandle(0),
		iNestingLevel(0), iAllocCount(0), iFailType(ENone), iTestData(NULL)
	{
	iAlign = aAlign ? aAlign : ECellAlignment;
	iPageSize = 0;
	iFlags = aSingleThread ? (ESingleThreaded|EFixedSize) : EFixedSize;
	Initialise();
	}
#pragma warning( default : 4705 )



UEXPORT_C RHeap::RHeap(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread)
/**
@internalComponent
*/
//
// Constructor for chunk heaps.
//
	:	iOffset(aOffset), iChunkHandle(aChunkHandle),
		iNestingLevel(0), iAllocCount(0), iFailType(ENone), iTestData(NULL)
	{
	TInt sz = iBase - ((TUint8*)this - iOffset);
	GET_PAGE_SIZE(iPageSize);
	__ASSERT_ALWAYS(iOffset>=0, HEAP_PANIC(ETHeapNewBadOffset));
	iMinLength = Max(aMinLength, sz + EAllocCellSize);
	iMinLength = _ALIGN_UP(iMinLength, iPageSize);
	iMaxLength = Max(aMaxLength, iMinLength);
	iMaxLength = _ALIGN_UP(iMaxLength, iPageSize);
	iGrowBy = _ALIGN_UP(aGrowBy, iPageSize);
	iFlags = aSingleThread ? ESingleThreaded : 0;
	iAlign = aAlign ? aAlign : ECellAlignment;
	Initialise();
	}



UEXPORT_C TAny* RHeap::operator new(TUint aSize, TAny* aBase) __NO_THROW
/**
@internalComponent
*/
	{
	__ASSERT_ALWAYS(aSize>=sizeof(RHeap), HEAP_PANIC(ETHeapNewBadSize));
	RHeap* h = (RHeap*)aBase;
	h->iAlign = 0x80000000;	// garbage value
	h->iBase = ((TUint8*)aBase) + aSize;
	return aBase;
	}

void RHeap::Initialise()
//
// Initialise the heap.
//
	{

	__ASSERT_ALWAYS((TUint32)iAlign>=sizeof(TAny*) && __POWER_OF_2(iAlign), HEAP_PANIC(ETHeapNewBadAlignment));
	iCellCount = 0;
	iTotalAllocSize = 0;
	iBase = (TUint8*)Align(iBase + EAllocCellSize);
	iBase -= EAllocCellSize;
	TInt b = iBase - ((TUint8*)this - iOffset);
	TInt len = _ALIGN_DOWN(iMinLength - b, iAlign);
	iTop = iBase + len;
	iMinLength = iTop - ((TUint8*)this - iOffset);
	iMinCell = Align(KHeapMinCellSize + Max((TInt)EAllocCellSize, (TInt)EFreeCellSize));
#ifdef _DEBUG
	memset(iBase, 0xa5, len);
#endif
	SCell* pM=(SCell*)iBase; // First free cell
	iFree.next=pM; // Free list points to first free cell
	iFree.len=0; // Stop free from joining this with a free block
	pM->next=NULL; // Terminate the free list
	pM->len=len; // Set the size of the free cell
	}

#ifdef _DEBUG
void RHeap::CheckCell(const SCell* aCell) const
	{
	TLinAddr m = TLinAddr(iAlign - 1);

	__ASSERT_DEBUG(!(aCell->len & m), HEAP_PANIC(ETHeapBadCellAddress));
	__ASSERT_DEBUG(aCell->len >= iMinCell, HEAP_PANIC(ETHeapBadCellAddress));
	__ASSERT_DEBUG((TUint8*)aCell>=iBase, HEAP_PANIC(ETHeapBadCellAddress));
	__ASSERT_DEBUG((TUint8*)__NEXT_CELL(aCell)<=iTop, HEAP_PANIC(ETHeapBadCellAddress));
	}
#endif

UEXPORT_C RHeap::SCell* RHeap::GetAddress(const TAny* aCell) const
//
// As much as possible, check a cell address and backspace it
// to point at the cell header.
//
	{

	TLinAddr m = TLinAddr(iAlign - 1);
	__ASSERT_ALWAYS(!(TLinAddr(aCell)&m), HEAP_PANIC(ETHeapBadCellAddress));

	SCell* pC = (SCell*)(((TUint8*)aCell)-EAllocCellSize);
	__CHECK_CELL(pC);

	return pC;
	}



UEXPORT_C TInt RHeap::AllocLen(const TAny* aCell) const
/**
Gets the length of the available space in the specified allocated cell.

@param aCell A pointer to the allocated cell.

@return The length of the available space in the allocated cell.

@panic USER 42 if aCell does not point to a valid cell.
*/
	{

	SCell* pC = GetAddress(aCell);
	return pC->len - EAllocCellSize;
	}



#if !defined(__HEAP_MACHINE_CODED__) || defined(_DEBUG)
RHeap::SCell* RHeap::DoAlloc(TInt aSize, SCell*& aLastFree)
//
// Allocate without growing. aSize includes cell header and alignment.
// Lock already held.
//
	{
	SCell* pP = &iFree;
	SCell* pC = pP->next;
	for (; pC; pP=pC, pC=pC->next) // Scan the free list
		{
		__CHECK_CELL(pC);
		SCell* pE;
		if (pC->len >= aSize)				// Block size bigger than request
			{
			if (pC->len - aSize < iMinCell)	// Leftover must be large enough to hold an SCell
				{
				aSize = pC->len;			// It isn't, so take it all
				pE = pC->next;				// Set the next field
				}
			else
				{
				pE = (SCell*)(((TUint8*)pC)+aSize); // Take amount required
				pE->len = pC->len - aSize;	// Initialize new free cell
				pE->next = pC->next;
				}
			pP->next = pE;					// Update previous pointer
			pC->len = aSize;				// Set control size word
#if defined(_DEBUG)
			((SDebugCell*)pC)->nestingLevel = iNestingLevel;
			((SDebugCell*)pC)->allocCount = ++iAllocCount;
#endif
			return pC;
			}
		}
	aLastFree = pP;
	return NULL;
	}
#endif



UEXPORT_C TAny* RHeap::Alloc(TInt aSize)
/**
Allocates a cell of the specified size from the heap.

If there is insufficient memory available on the heap from which to allocate
a cell of the required size, the function returns NULL.

The cell is aligned according to the alignment value specified at construction,
or the default alignment value, if an explicit value was not specified.

The resulting size of the allocated cell may be rounded up to a
value greater than aSize, but is guaranteed to be not less than aSize.

@param aSize The size of the cell to be allocated from the heap.

@return A pointer to the allocated cell. NULL if there is insufficient memory
        available.

@panic USER 47 if the maximum unsigned value of aSize is greater than or equal
       to the value of KMaxTInt/2; for example, calling Alloc(-1) raises
       this panic.

@see KMaxTInt
*/
	{

	__CHECK_THREAD_STATE;
	__ASSERT_ALWAYS((TUint)aSize<(KMaxTInt/2),HEAP_PANIC(ETHeapBadAllocatedCellSize));
	__SIMULATE_ALLOC_FAIL(return NULL;)

	TInt origSize = aSize;
	aSize = Max(Align(aSize + EAllocCellSize), iMinCell);
	SCell* pL = NULL;
	Lock();
	SCell* pC = (SCell*)DoAlloc(aSize, pL);
	if (!pC && !(iFlags & EFixedSize))
		{
		// try to grow chunk heap
		TInt r = TryToGrowHeap(aSize, pL);
		if (r==KErrNone)
			pC = DoAlloc(aSize, pL);
		}
	if (pC)
		++iCellCount, iTotalAllocSize += (pC->len - EAllocCellSize);
	Unlock();
	if (pC)
		{
		TAny* result=((TUint8*)pC) + EAllocCellSize;
		if (iFlags & ETraceAllocs)
			{
			TUint32 traceData[2];
			traceData[0] = AllocLen(result);
			traceData[1] = origSize;
			BTraceContextN(BTrace::EHeap, BTrace::EHeapAlloc, (TUint32)this, (TUint32)result, traceData, sizeof(traceData));
			}
#ifdef __KERNEL_MODE__
		memclr(result, pC->len - EAllocCellSize);
#endif
		return result;
		}
	if (iFlags & ETraceAllocs)
		BTraceContext8(BTrace::EHeap, BTrace::EHeapAllocFail, (TUint32)this, (TUint32)origSize);
	return NULL;
	}
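
/*
Usage sketch (illustrative only, not part of the original source): allocating from a chunk
heap created with UserHeap::ChunkHeap().  The maximum length 0x100000 is an assumed example
value; tear-down of the heap itself is omitted because it goes through RAllocator's API,
which is outside this file.

@code
	RHeap* heap = UserHeap::ChunkHeap(NULL, KMinHeapSize, 0x100000, KMinHeapGrowBy, 0, EFalse);
	if (heap)
		{
		TAny* p = heap->Alloc(100);			// NULL if the heap cannot be grown any further
		if (p)
			{
			TInt len = heap->AllocLen(p);	// >= 100: Alloc() rounds the cell size up
			heap->Free(p);
			}
		}
@endcode
*/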



TInt RHeap::TryToGrowHeap(TInt aSize, SCell* aLastFree)
	{
	TBool at_end = IsLastCell(aLastFree);
	TInt extra = at_end ? aSize - aLastFree->len : aSize;
	extra = (extra + iGrowBy - 1) / iGrowBy;
	extra *= iGrowBy;
	TInt cur_len = _ALIGN_UP(iTop - ((TUint8*)this - iOffset), iPageSize);
	TInt new_len = cur_len + extra;
	TInt r = KErrNoMemory;
	if (new_len <= iMaxLength)
		{
		r = SetBrk(new_len);
		if (r == KErrNone)
			{
			if (at_end)
				aLastFree->len += extra;
			else
				{
				SCell* pC = (SCell*)iTop;
				pC->len = extra;
				pC->next = NULL;
				aLastFree->next = pC;
				}
			iTop += extra;
			}
		}
	return r;
	}
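
// Illustrative note (not in the original source): the amount committed is rounded up to a
// whole number of iGrowBy increments.  For example, assuming iGrowBy = 4096 and a request
// that needs 5000 bytes beyond the existing last free cell, extra becomes
// ((5000 + 4096 - 1) / 4096) * 4096 = 8192, and the growth only happens if the resulting
// heap length still fits within iMaxLength.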



#ifndef __KERNEL_MODE__
EXPORT_C TInt RHeap::Compress()
/**
Compresses the heap.

The function frees excess committed space from the top
of the heap. The size of the heap is never reduced below the minimum size
specified during creation of the heap.

@return The space reclaimed. If no space can be reclaimed, then this value
        is zero.
*/
	{

	if (iFlags & EFixedSize)
		return 0;
	TInt r = 0;
	Lock();
	SCell* pC = &iFree;
	for (; pC->next; pC=pC->next) {}
	if (pC!=&iFree)
		{
		__CHECK_CELL(pC);
		if (IsLastCell(pC))
			r = Reduce(pC);
		}
	Unlock();
	return r;
	}
#endif



#if !defined(__HEAP_MACHINE_CODED__) || defined(_DEBUG)
void RHeap::DoFree(SCell* pC)
	{
	__ZAP_CELL(pC);

	SCell* pP = &iFree;
	SCell* pE = pP->next;
	for (; pE && pE<pC; pP=pE, pE=pE->next) {}
	if (pE)			// Is there a following free cell?
		{
		SCell* pN = __NEXT_CELL(pC);
		__ASSERT_ALWAYS(pN<=pE, HEAP_PANIC(ETHeapFreeBadNextCell)); // Following cell overlaps
		if (pN==pE) // Is it adjacent
			{
			pC->len += pE->len; // Yes - coalesce adjacent free cells
			pC->next = pE->next;
			}
		else					// pN<pE, non-adjacent free cells
			pC->next = pE;		// Otherwise just point to it
		}
	else
		pC->next = NULL;		// No following free cell
	SCell* pN = __NEXT_CELL(pP);	// pN=pP=&iFree if no preceding free cell
	__ASSERT_ALWAYS(pN<=pC, HEAP_PANIC(ETHeapFreeBadPrevCell)); // Previous cell overlaps
	if (pN==pC) // Is it adjacent
		{
		pP->len += pC->len;		// Yes - coalesce adjacent free cells
		pP->next = pC->next;
		pC = pP;				// for size reduction check
		}
	else						// pN<pC, non-adjacent free cells
		pP->next = pC;			// point previous cell to the one being freed
	pN = __NEXT_CELL(pC);		// End of amalgamated free cell
	if ((TUint8*)pN==iTop && !(iFlags & EFixedSize) &&
		pC->len >= KHeapShrinkHysRatio*(iGrowBy>>8))
		Reduce(pC);
	}
#endif



UEXPORT_C void RHeap::Free(TAny* aCell)
/**
Frees the specified cell and returns it to the heap.

@param aCell A pointer to a valid cell; this pointer can also be NULL,
             in which case the function does nothing and just returns.

@panic USER 42 if aCell points to an invalid cell.
*/
	{
	__CHECK_THREAD_STATE;
	if (!aCell)
		return;
	Lock();
	if (iFlags & EMonitorMemory)
		__MEMORY_MONITOR_CHECK_CELL(aCell);
	SCell* pC = GetAddress(aCell);
	--iCellCount;
	iTotalAllocSize -= (pC->len - EAllocCellSize);
	DoFree(pC);
	if (iFlags & ETraceAllocs)
		BTraceContext8(BTrace::EHeap, BTrace::EHeapFree, (TUint32)this, (TUint32)aCell);
	Unlock();
	}



TInt RHeap::Reduce(SCell* aCell)
	{
	TInt reduce=0;
	TInt offset=((TUint8*)aCell)-((TUint8*)this - iOffset);
	if (offset>=iMinLength)
		reduce = aCell->len;						// length of entire free cell
	else
		reduce = offset + aCell->len - iMinLength;	// length of free cell past minimum heap size
	reduce = _ALIGN_DOWN(reduce, iPageSize);		// round down to page multiple
	if (reduce<=0)
		return 0;									// can't reduce this heap
	TInt new_cell_len = aCell->len - reduce;		// length of last free cell after reduction
	if (new_cell_len == 0)
		{
		// the free cell can be entirely eliminated
		SCell* pP = &iFree;
		for (; pP->next!=aCell; pP=pP->next) {}
		pP->next = NULL;
		}
	else
		{
		if (new_cell_len < iMinCell)
			{
			// max reduction would leave a cell too small
			reduce -= iPageSize;
			new_cell_len += iPageSize;
			}
		aCell->len = new_cell_len;	// reduce the cell length
		}
	iTop -= reduce;
	TInt new_len = _ALIGN_UP(iTop - ((TUint8*)this - iOffset), iPageSize);
	TInt r = SetBrk(new_len);
	__ASSERT_ALWAYS(r==KErrNone, HEAP_PANIC(ETHeapReduceFailed));
	return reduce;
	}



#ifndef __KERNEL_MODE__
EXPORT_C void RHeap::Reset()
/**
Frees all allocated cells on this heap.
*/
	{

	Lock();
	if (!(iFlags & EFixedSize))
		{
		TInt r = SetBrk(iMinLength);
		__ASSERT_ALWAYS(r==KErrNone, HEAP_PANIC(ETHeapResetFailed));
		}
	Initialise();
	Unlock();
	}
#endif



inline void RHeap::FindFollowingFreeCell(SCell* aCell, SCell*& aPrev, SCell*& aNext)
//
// Find the free cell that immediately follows aCell, if one exists.
// If found, aNext is set to point to it, else it is set to NULL.
// aPrev is set to the free cell before aCell, or to the dummy free cell if there are no free cells before aCell.
// Called with the lock held.
//
	{
	aPrev = &iFree;
	aNext = aPrev->next;
	for (; aNext && aNext<aCell; aPrev=aNext, aNext=aNext->next) {}

	if (aNext) // If there is a following free cell, check it is directly after aCell.
		{
		SCell* pNextCell = __NEXT_CELL(aCell);			// end of this cell
		__ASSERT_ALWAYS(pNextCell<=aNext, (Unlock(), HEAP_PANIC(ETHeapReAllocBadNextCell)));	// Following free cell overlaps
		if (pNextCell!=aNext)
			aNext=NULL;
		}
	}



TInt RHeap::TryToGrowCell(SCell* aCell,SCell* aPrev, SCell* aNext, TInt aSize)
//
// Try to grow the heap cell 'aCell' in place, to size 'aSize'.
// Requires the free cell immediately after aCell (aNext), and the free cell prior to
// that (aPrev), to be provided.  (As found by FindFollowingFreeCell)
//

	{
	TInt extra = aSize - aCell->len;
	if (aNext && (aNext->len>=extra)) // Is there a following free cell big enough?
		{
		if (aNext->len - extra >= iMinCell)	// take part of free cell ?
			{
			SCell* pX = (SCell*)((TUint8*)aNext + extra);	// remainder of free cell
			pX->next = aNext->next;			// remainder->next = original free cell->next
			pX->len = aNext->len - extra;		// remainder length = original free cell length - extra
			aPrev->next = pX;					// put remainder into free chain
			}
		else
			{
			extra = aNext->len;					// Take whole free cell
			aPrev->next = aNext->next;			// remove from free chain
			}
#ifdef __KERNEL_MODE__
		memclr(((TUint8*)aCell) + aCell->len, extra);
#endif
		aCell->len += extra;					// update reallocated cell length
		iTotalAllocSize += extra;
		return KErrNone;
		}
	return KErrGeneral;  // No space to grow cell
	}



// UEXPORT_C TAny* RHeap::ReAlloc(TAny* aCell, TInt aSize, TInt aMode)
/**
Increases or decreases the size of an existing cell in the heap.

If the cell is being decreased in size, then it is guaranteed not to move,
and the function returns the pointer originally passed in aCell. Note that the
length of the cell will be the same if the difference between the old size
and the new size is smaller than the minimum cell size.

If the cell is being increased in size, i.e. aSize is bigger than its
current size, then the function tries to grow the cell in place.
If successful, then the function returns the pointer originally
passed in aCell. If unsuccessful, then:

1. if the cell cannot be moved, i.e. aMode has the ENeverMove bit set, then
   the function returns NULL.
2. if the cell can be moved, i.e. aMode does not have the ENeverMove bit set,
   then the function tries to allocate a new replacement cell, and, if
   successful, returns a pointer to the new cell; if unsuccessful, it
   returns NULL.

Note that in debug mode, the function returns NULL if the cell cannot be grown
in place, regardless of whether the ENeverMove bit is set.

If the reallocated cell is at a different location from the original cell, then
the content of the original cell is copied to the reallocated cell.

If the supplied pointer, aCell is NULL, then the function attempts to allocate
a new cell, but only if the cell can be moved, i.e. aMode does not have
the ENeverMove bit set.

Note the following general points:

1. If reallocation fails, the content of the original cell is preserved.

2. The resulting size of the re-allocated cell may be rounded up to a value
   greater than aSize, but is guaranteed to be not less than aSize.

@param aCell A pointer to the cell to be reallocated. This may be NULL.

@param aSize The new size of the cell. This may be bigger or smaller than the
             size of the original cell.

@param aMode Flags controlling the reallocation. The only bit which has any
             effect on this function is that defined by the enumeration
             ENeverMove of the enum RAllocator::TReAllocMode.
             If this is set, then any successful reallocation guarantees not
             to have changed the start address of the cell.
             By default, this parameter is zero.

@return A pointer to the reallocated cell. This may be the same as the original
        pointer supplied through aCell. NULL if there is insufficient memory to
        reallocate the cell, or to grow it in place.

@panic USER 42, if aCell is not NULL, and does not point to a valid cell.
@panic USER 47, if the maximum unsigned value of aSize is greater
                than or equal to KMaxTInt/2. For example,
                calling ReAlloc(someptr,-1) raises this panic.

@see RAllocator::TReAllocMode
*/
UEXPORT_C TAny* RHeap::ReAlloc(TAny* aCell, TInt aSize, TInt aMode)
	{
	if (aCell && iFlags&EMonitorMemory)
		__MEMORY_MONITOR_CHECK_CELL(aCell);
	TAny* retval = ReAllocImpl(aCell, aSize, aMode);
	if (iFlags & ETraceAllocs)
		{
		if (retval)
			{
			TUint32 traceData[3];
			traceData[0] = AllocLen(retval);
			traceData[1] = aSize;
			traceData[2] = (TUint32)aCell;
			BTraceContextN(BTrace::EHeap, BTrace::EHeapReAlloc,(TUint32)this, (TUint32)retval,traceData, sizeof(traceData));
			}
		else
			BTraceContext12(BTrace::EHeap, BTrace::EHeapReAllocFail, (TUint32)this, (TUint32)aCell, (TUint32)aSize);
		}
	return retval;
	}
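
/*
Usage sketch for ReAlloc (illustrative only, not part of the original source).  'heap' is an
assumed RHeap pointer and the sizes are arbitrary example values; RAllocator::ENeverMove is
the mode bit described in the documentation above.

@code
	TAny* p = heap->Alloc(64);
	if (p)
		{
		// Grow, allowing the cell to move; the old contents are copied if it does move.
		TAny* q = heap->ReAlloc(p, 256, 0);
		if (q)
			p = q;

		// Grow without moving: returns NULL if the cell cannot be grown in place,
		// in which case p is still valid and unchanged.
		TAny* r = heap->ReAlloc(p, 1024, RAllocator::ENeverMove);
		if (r)
			p = r;
		}
@endcode
*/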
inline TAny* RHeap::ReAllocImpl(TAny* aCell, TInt aSize, TInt aMode)
	{
	__CHECK_THREAD_STATE;
	if (!aCell)
		return (aMode & ENeverMove) ? NULL : Alloc(aSize);
	__ASSERT_ALWAYS((TUint)aSize<(KMaxTInt/2),HEAP_PANIC(ETHeapBadAllocatedCellSize));
	Lock();
	SCell* pC = GetAddress(aCell);
	TInt old_len = pC->len;
	__DEBUG_SAVE(pC);
	aSize = Max(Align(aSize + EAllocCellSize), iMinCell);
	if (aSize > old_len)	// Trying to grow cell
		{
		__SIMULATE_ALLOC_FAIL({	Unlock(); return NULL;})

		// Try to grow cell in place, without reallocation
		SCell* pPrev;
		SCell* pNext;
		FindFollowingFreeCell(pC,pPrev, pNext);
		TInt r = TryToGrowCell(pC, pPrev, pNext, aSize);

		if (r==KErrNone)
			{
			Unlock();
			return aCell;
			}

		if (!(aMode & ENeverMove))
		// If moving allowed, try re-alloc.
		// If we need to extend the heap, and the cell is at the end, try and grow in place
			{
			SCell* pLastFree;
			SCell* pNewCell = (SCell*)DoAlloc(aSize, pLastFree);
			if (!pNewCell && !(iFlags & EFixedSize))
			// if we need to extend the heap to alloc
				{
				if (IsLastCell(pC) || (pNext && IsLastCell(pNext)))
				// if last used cell, try and extend the heap and then the cell
					{
					TInt r = TryToGrowHeap(aSize - old_len, pLastFree);
					if (r==KErrNone)
						{
						r = TryToGrowCell(pC, pPrev, pPrev->next, aSize);
						Unlock();
						__ASSERT_DEBUG(r == KErrNone, HEAP_PANIC(ETHeapCellDidntGrow));
						return aCell;
						}
					}
				else
				// try to grow the chunk heap and Alloc on it
					{
					TInt r = TryToGrowHeap(aSize, pLastFree);
					if (r==KErrNone)
						pNewCell = DoAlloc(aSize, pLastFree);
					}
				}

			if (pNewCell)
			// if we created a new cell, adjust tallies, copy the contents and delete the old cell.
				{
				iCellCount++;
				iTotalAllocSize += (pNewCell->len - EAllocCellSize);

				Unlock();
				TUint8* raw = ((TUint8*) pNewCell);

				memcpy(raw + EAllocCellSize, aCell, old_len - EAllocCellSize);
#ifdef __KERNEL_MODE__
				memclr(raw + old_len, pNewCell->len - old_len);
#endif
				Free(aCell);
				__DEBUG_RESTORE(raw + EAllocCellSize);
				return raw + EAllocCellSize;
				}
			}
		else
		// No moving, but still possible to extend the heap (if the heap is extendable)
			{
			if (!(iFlags & EFixedSize) && (IsLastCell(pC) || (pNext && IsLastCell(pNext))))
				{
				SCell* pLastFree = pNext ? pNext : pPrev;
				TInt r = TryToGrowHeap(aSize - old_len, pLastFree);
				if (r==KErrNone)
					{
					r = TryToGrowCell(pC, pPrev, pPrev->next, aSize);
					Unlock();
					__ASSERT_DEBUG(r==KErrNone, HEAP_PANIC(ETHeapCellDidntGrow));
					return aCell;
					}
				}
			}
		Unlock();
		return NULL;
		}
	if (old_len - aSize >= iMinCell)
		{
		// cell shrinking, remainder big enough to form a new free cell
		SCell* pX = (SCell*)((TUint8*)pC + aSize);	// pointer to new free cell
		pC->len = aSize;			// update cell size
		pX->len = old_len - aSize;	// size of remainder
		iTotalAllocSize -= pX->len;
		DoFree(pX);					// link new free cell into chain, shrink heap if necessary
		}
	Unlock();
	return aCell;
	}



#ifndef __KERNEL_MODE__

EXPORT_C TInt RHeap::Available(TInt& aBiggestBlock) const
/**
Gets the total free space currently available on the heap and the space
available in the largest free block.

The space available represents the total space which can be allocated.

Note that compressing the heap may reduce the total free space available and
the space available in the largest free block.

@param aBiggestBlock On return, contains the space available
                     in the largest free block on the heap.

@return The total free space currently available on the heap.
*/
	{

	TInt total = 0;
	TInt max = 0;
	Lock();
	SCell* pC = iFree.next;
	for (; pC; pC=pC->next)
		{
		TInt l = pC->len - EAllocCellSize;
		if (l > max)
			max = l;
		total += l;
		}
	Unlock();
	aBiggestBlock = max;
	return total;
	}



EXPORT_C TInt RHeap::AllocSize(TInt& aTotalAllocSize) const
/**
Gets the number of cells allocated on this heap, and the total space
allocated to them.

@param aTotalAllocSize On return, contains the total space allocated
                       to the cells.

@return The number of cells allocated on this heap.
*/
	{
	Lock();
	TInt c = iCellCount;
	aTotalAllocSize = iTotalAllocSize;
	Unlock();
	return c;
	}
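
/*
Usage sketch (illustrative only, not part of the original source): querying heap statistics
on an assumed RHeap pointer 'heap'.

@code
	TInt biggest = 0;
	TInt freeSpace = heap->Available(biggest);		// total free space; biggest = largest free block
	TInt allocBytes = 0;
	TInt cellCount = heap->AllocSize(allocBytes);	// number of allocated cells and their total size
@endcode
*/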



EXPORT_C RHeap* UserHeap::FixedHeap(TAny* aBase, TInt aMaxLength, TInt aAlign, TBool aSingleThread)
/**
Creates a fixed length heap at a specified location.

On successful return from this function, aMaxLength bytes are committed by the chunk.
The heap cannot be extended.

@param aBase         A pointer to the location where the heap is to be constructed.
@param aMaxLength    The length of the heap. If the supplied value is less
                     than KMinHeapSize, it is discarded and the value KMinHeapSize
                     is used instead.
@param aAlign        The alignment of heap cells.
@param aSingleThread Indicates whether single threaded or not.

@return A pointer to the new heap, or NULL if the heap could not be created.

@panic USER 56 if aMaxLength is negative.
@panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
*/
//
// Force construction of the fixed memory.
//
	{

	__ASSERT_ALWAYS(aMaxLength>=0, ::Panic(ETHeapMaxLengthNegative));
	if (aMaxLength<KMinHeapSize)
		aMaxLength=KMinHeapSize;
	RHeap* h = new(aBase) RHeap(aMaxLength, aAlign, aSingleThread);
	if (!aSingleThread)
		{
		TInt r = h->iLock.CreateLocal();
		if (r!=KErrNone)
			return NULL;
		h->iHandles = (TInt*)&h->iLock;
		h->iHandleCount = 1;
		}
	return h;
	}
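
/*
Usage sketch (illustrative only, not part of the original source): building a fixed-size,
single-threaded heap in a caller-supplied buffer.  The buffer and its size are assumed example
values; it must be suitably aligned, at least KMinHeapSize bytes long, and must outlive the heap.

@code
	static TUint64 buffer[0x10000 / sizeof(TUint64)];	// 64K, 8-byte aligned
	RHeap* fixedHeap = UserHeap::FixedHeap(buffer, sizeof(buffer), 0, ETrue);
	if (fixedHeap)
		{
		TAny* p = fixedHeap->Alloc(32);
		if (p)
			fixedHeap->Free(p);
		}
@endcode
*/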


/**
Constructor where minimum and maximum length of the heap can be defined.
It defaults the chunk heap to be created to use a new local chunk,
to have a grow by value of KMinHeapGrowBy, to be unaligned, not to be
single threaded and not to have any mode flags set.

@param aMinLength    The minimum length of the heap to be created.
@param aMaxLength    The maximum length to which the heap to be created can grow.
                     If the supplied value is less than KMinHeapSize, then it
                     is discarded and the value KMinHeapSize used instead.
*/
EXPORT_C TChunkHeapCreateInfo::TChunkHeapCreateInfo(TInt aMinLength, TInt aMaxLength) :
	iVersionNumber(EVersion0), iMinLength(aMinLength), iMaxLength(aMaxLength),
	iAlign(0), iGrowBy(1), iSingleThread(EFalse),
	iOffset(0), iPaging(EUnspecified), iMode(0), iName(NULL)
	{
	}


/**
Sets the chunk heap to create a new chunk with the specified name.

This overrides any previous call to TChunkHeapCreateInfo::SetNewChunkHeap() or
TChunkHeapCreateInfo::SetExistingChunkHeap() for this TChunkHeapCreateInfo object.

@param aName	The name to be given to the chunk heap to be created.
				If NULL, the function constructs a local chunk to host the heap.
				If not NULL, a pointer to a descriptor containing the name to be
				assigned to the global chunk hosting the heap.
*/
EXPORT_C void TChunkHeapCreateInfo::SetCreateChunk(const TDesC* aName)
	{
	iName = (TDesC*)aName;
	iChunk.SetHandle(KNullHandle);
	}


/**
Sets the chunk heap to be created to use the chunk specified.

This overrides any previous call to TChunkHeapCreateInfo::SetNewChunkHeap() or
TChunkHeapCreateInfo::SetExistingChunkHeap() for this TChunkHeapCreateInfo object.

@param aChunk	A handle to the chunk to use for the heap.
*/
EXPORT_C void TChunkHeapCreateInfo::SetUseChunk(const RChunk aChunk)
	{
	iName = NULL;
	iChunk = aChunk;
	}


/**
Creates a chunk heap of the type specified by the parameter aCreateInfo.

@param aCreateInfo	A reference to a TChunkHeapCreateInfo object specifying the
					type of chunk heap to create.

@return A pointer to the new heap or NULL if the heap could not be created.

@panic USER 41 if the heap's specified minimum length is greater than the specified maximum length.
@panic USER 55 if the heap's specified minimum length is negative.
@panic USER 172 if the heap's specified alignment is not a power of 2 or is less than the size of a TAny*.
*/
EXPORT_C RHeap* UserHeap::ChunkHeap(const TChunkHeapCreateInfo& aCreateInfo)
	{
	// aCreateInfo must have been configured to use a new chunk or an existing chunk.
	__ASSERT_ALWAYS(!(aCreateInfo.iMode & (TUint32)~EChunkHeapMask), ::Panic(EHeapCreateInvalidMode));
	RHeap* h = NULL;

	if (aCreateInfo.iChunk.Handle() == KNullHandle)
		{// A new chunk is to be created for this heap.
		__ASSERT_ALWAYS(aCreateInfo.iMinLength >= 0, ::Panic(ETHeapMinLengthNegative));
		__ASSERT_ALWAYS(aCreateInfo.iMaxLength >= aCreateInfo.iMinLength, ::Panic(ETHeapCreateMaxLessThanMin));

		TInt maxLength = aCreateInfo.iMaxLength;
		if (maxLength < KMinHeapSize)
			maxLength = KMinHeapSize;

		TChunkCreateInfo chunkInfo;
		chunkInfo.SetNormal(0, maxLength);
		chunkInfo.SetOwner((aCreateInfo.iSingleThread)? EOwnerThread : EOwnerProcess);
		if (aCreateInfo.iName)
			chunkInfo.SetGlobal(*aCreateInfo.iName);
		// Set the paging attributes of the chunk.
		if (aCreateInfo.iPaging == TChunkHeapCreateInfo::EPaged)
			chunkInfo.SetPaging(TChunkCreateInfo::EPaged);
		if (aCreateInfo.iPaging == TChunkHeapCreateInfo::EUnpaged)
			chunkInfo.SetPaging(TChunkCreateInfo::EUnpaged);
		// Create the chunk.
		RChunk chunk;
		if (chunk.Create(chunkInfo) != KErrNone)
			return NULL;
		// Create the heap using the new chunk.
		TUint mode = aCreateInfo.iMode | EChunkHeapDuplicate;	// Must duplicate the handle.
		h = OffsetChunkHeap(chunk, aCreateInfo.iMinLength, aCreateInfo.iOffset,
							aCreateInfo.iGrowBy, maxLength, aCreateInfo.iAlign,
							aCreateInfo.iSingleThread, mode);
		chunk.Close();
		}
	else
		{
		h = OffsetChunkHeap(aCreateInfo.iChunk, aCreateInfo.iMinLength, aCreateInfo.iOffset,
							aCreateInfo.iGrowBy, aCreateInfo.iMaxLength, aCreateInfo.iAlign,
							aCreateInfo.iSingleThread, aCreateInfo.iMode);
		}
	return h;
	}
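
/*
Usage sketch (illustrative only, not part of the original source): creating a chunk heap via
TChunkHeapCreateInfo.  The maximum length 0x100000 is an assumed example value.

@code
	TChunkHeapCreateInfo info(KMinHeapSize, 0x100000);	// minimum length, maximum length
	info.SetCreateChunk(NULL);							// host the heap in a new local chunk
	RHeap* heap = UserHeap::ChunkHeap(info);
	if (!heap)
		{
		// creation failed, e.g. the chunk could not be created
		}
@endcode
*/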


EXPORT_C RHeap* UserHeap::ChunkHeap(const TDesC* aName, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread)
/**
Creates a heap in a local or global chunk.

The chunk hosting the heap can be local or global.

A local chunk is one which is private to the process creating it and is not
intended for access by other user processes.
A global chunk is one which is visible to all processes.

The hosting chunk is local if the pointer aName is NULL; otherwise
the hosting chunk is global and the descriptor *aName is assumed to contain
the name to be assigned to it.

Ownership of the host chunk is vested in the current process.

A minimum and a maximum size for the heap can be specified. On successful
return from this function, the size of the heap is at least aMinLength.
If subsequent requests for allocation of memory from the heap cannot be
satisfied by compressing the heap, the size of the heap is extended in
increments of aGrowBy until the request can be satisfied. Attempts to extend
the heap cause the size of the host chunk to be adjusted.

Note that the size of the heap cannot be adjusted by more than aMaxLength.

@param aName         If NULL, the function constructs a local chunk to host
                     the heap.
                     If not NULL, a pointer to a descriptor containing the name
                     to be assigned to the global chunk hosting the heap.
@param aMinLength    The minimum length of the heap.
@param aMaxLength    The maximum length to which the heap can grow.
                     If the supplied value is less than KMinHeapSize, then it
                     is discarded and the value KMinHeapSize used instead.
@param aGrowBy       The increments to the size of the host chunk. If a value is
                     not explicitly specified, the value KMinHeapGrowBy is taken
                     by default.
@param aAlign        The alignment of heap cells.
@param aSingleThread Indicates whether single threaded or not.

@return A pointer to the new heap or NULL if the heap could not be created.

@panic USER 41 if aMinLength is greater than the supplied value of aMaxLength.
@panic USER 55 if aMinLength is negative.
@panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
*/
//
// Allocate a Chunk of the requested size and force construction.
//
	{
	TChunkHeapCreateInfo createInfo(aMinLength, aMaxLength);
	createInfo.SetCreateChunk(aName);
	createInfo.SetGrowBy(aGrowBy);
	createInfo.SetAlignment(aAlign);
	createInfo.SetSingleThread(aSingleThread);
	return ChunkHeap(createInfo);
	}



EXPORT_C RHeap* UserHeap::ChunkHeap(RChunk aChunk, TInt aMinLength, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode)
/**
Creates a heap in an existing chunk.

This function is intended to be used to create a heap in a user writable code
chunk as created by a call to RChunk::CreateLocalCode().
This type of heap can be used to hold code fragments from a JIT compiler.

The maximum length to which the heap can grow is the same as
the maximum size of the chunk.

@param aChunk        The chunk that will host the heap.
@param aMinLength    The minimum length of the heap.
@param aGrowBy       The increments to the size of the host chunk.
@param aMaxLength    The maximum length to which the heap can grow.
@param aAlign        The alignment of heap cells.
@param aSingleThread Indicates whether single threaded or not.
@param aMode         Flags controlling the heap creation.  This should be set
                     from one or more of the values in TChunkHeapCreateMode.

@return A pointer to the new heap or NULL if the heap could not be created.

@panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
*/
//
// Construct a heap in an already existing chunk
//
	{

	return OffsetChunkHeap(aChunk, aMinLength, 0, aGrowBy, aMaxLength, aAlign, aSingleThread, aMode);
	}



EXPORT_C RHeap* UserHeap::OffsetChunkHeap(RChunk aChunk, TInt aMinLength, TInt aOffset, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode)
/**
Creates a heap in an existing chunk, offset from the beginning of the chunk.

This function is intended to be used to create a heap where a fixed amount of
additional data must be stored at a known location. The additional data can be
placed at the base address of the chunk, allowing it to be located without
depending on the internals of the heap structure.

The maximum length to which the heap can grow is the maximum size of the chunk,
minus the offset.

@param aChunk        The chunk that will host the heap.
@param aMinLength    The minimum length of the heap.
@param aOffset       The offset from the start of the chunk, to the start of the heap.
@param aGrowBy       The increments to the size of the host chunk.
@param aMaxLength    The maximum length to which the heap can grow.
@param aAlign        The alignment of heap cells.
@param aSingleThread Indicates whether single threaded or not.
@param aMode         Flags controlling the heap creation.  This should be set
                     from one or more of the values in TChunkHeapCreateMode.

@return A pointer to the new heap or NULL if the heap could not be created.

@panic USER 172 if aAlign is not a power of 2 or is less than the size of a TAny*.
*/
//
// Construct a heap in an already existing chunk
//
	{

	TInt page_size;
	UserHal::PageSizeInBytes(page_size);
	if (!aAlign)
		aAlign = RHeap::ECellAlignment;
	TInt maxLength = aChunk.MaxSize();
	TInt round_up = Max(aAlign, page_size);
	TInt min_cell = _ALIGN_UP(Max((TInt)RHeap::EAllocCellSize, (TInt)RHeap::EFreeCellSize), aAlign);
	aOffset = _ALIGN_UP(aOffset, 8);
	if (aMaxLength && aMaxLength+aOffset<maxLength)
		maxLength = _ALIGN_UP(aMaxLength+aOffset, round_up);
	__ASSERT_ALWAYS(aMinLength>=0, ::Panic(ETHeapMinLengthNegative));
	__ASSERT_ALWAYS(maxLength>=aMinLength, ::Panic(ETHeapCreateMaxLessThanMin));
	aMinLength = _ALIGN_UP(Max(aMinLength, (TInt)sizeof(RHeap) + min_cell) + aOffset, round_up);
	TInt r=aChunk.Adjust(aMinLength);
	if (r!=KErrNone)
		return NULL;

	RHeap* h = new (aChunk.Base() + aOffset) RHeap(aChunk.Handle(), aOffset, aMinLength, maxLength, aGrowBy, aAlign, aSingleThread);

	TBool duplicateLock = EFalse;
	if (!aSingleThread)
		{
		duplicateLock = aMode & EChunkHeapSwitchTo;
		if(h->iLock.CreateLocal(duplicateLock ? EOwnerThread : EOwnerProcess)!=KErrNone)
			{
			h->iChunkHandle = 0;
			return NULL;
			}
		}

	if (aMode & EChunkHeapSwitchTo)
		User::SwitchHeap(h);

	h->iHandles = &h->iChunkHandle;
	if (!aSingleThread)
		{
		// now change the thread-relative chunk/semaphore handles into process-relative handles
		h->iHandleCount = 2;
		if(duplicateLock)
			{
			RHandleBase s = h->iLock;
			r = h->iLock.Duplicate(RThread());
			s.Close();
			}
		if (r==KErrNone && (aMode & EChunkHeapDuplicate))
			{
			r = ((RChunk*)&h->iChunkHandle)->Duplicate(RThread());
			if (r!=KErrNone)
				h->iLock.Close(), h->iChunkHandle=0;
			}
		}
	else
		{
		h->iHandleCount = 1;
		if (aMode & EChunkHeapDuplicate)
			r = ((RChunk*)&h->iChunkHandle)->Duplicate(RThread(), EOwnerThread);
		}

	// return the heap address
	return (r==KErrNone) ? h : NULL;
	}
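
/*
Usage sketch (illustrative only, not part of the original source): reserving space for
caller-owned data at the base of an existing chunk and building the heap above it.  The
header size, the chunk and the lengths are assumed example values; EChunkHeapDuplicate is
passed so that the heap keeps its own chunk handle, mirroring what ChunkHeap() above does
before closing its local handle.

@code
	const TInt KHeaderSize = 256;				// space reserved at the chunk base
	// 'chunk' is assumed to have been created elsewhere with a suitable maximum size.
	RHeap* heap = UserHeap::OffsetChunkHeap(chunk, KMinHeapSize, KHeaderSize,
											KMinHeapGrowBy, 0, 0, EFalse,
											EChunkHeapDuplicate);
	chunk.Close();								// the heap holds its own duplicated handle
	// Passing 0 for aMaxLength leaves the limit at the chunk's maximum size (see above).
	// chunk.Base()..chunk.Base()+KHeaderSize-1 stays available for the caller's own data;
	// the RHeap object itself is constructed at chunk.Base()+KHeaderSize.
@endcode
*/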
sl@0
  1181
sl@0
  1182
sl@0
  1183
sl@0
  1184
#define UserTestDebugMaskBit(bit) (TBool)(UserSvr::DebugMask(bit>>5) & (1<<(bit&31)))
sl@0
  1185
sl@0
  1186
_LIT(KLitDollarHeap,"$HEAP");
sl@0
  1187
EXPORT_C TInt UserHeap::CreateThreadHeap(SStdEpocThreadCreateInfo& aInfo, RHeap*& aHeap, TInt aAlign, TBool aSingleThread)
sl@0
  1188
/**
sl@0
  1189
@internalComponent
sl@0
  1190
*/
sl@0
  1191
//
sl@0
  1192
// Create a user-side heap
sl@0
  1193
//
sl@0
  1194
	{
sl@0
  1195
	TInt page_size;
sl@0
  1196
	UserHal::PageSizeInBytes(page_size);
sl@0
  1197
	TInt minLength = _ALIGN_UP(aInfo.iHeapInitialSize, page_size);
sl@0
  1198
	TInt maxLength = Max(aInfo.iHeapMaxSize, minLength);
sl@0
  1199
	if (UserTestDebugMaskBit(96)) // 96 == KUSERHEAPTRACE in nk_trace.h
sl@0
  1200
		aInfo.iFlags |= ETraceHeapAllocs;
sl@0
  1201
sl@0
  1202
	// Create the thread's heap chunk.
sl@0
  1203
	RChunk c;
sl@0
  1204
	TChunkCreateInfo createInfo;
sl@0
  1205
	createInfo.SetThreadHeap(0, maxLength, KLitDollarHeap());	// Initialise with no memory committed.
sl@0
  1206
sl@0
  1207
	// Set the paging policy of the heap chunk based on the thread's paging policy.
sl@0
  1208
	TUint pagingflags = aInfo.iFlags & EThreadCreateFlagPagingMask;
sl@0
  1209
	switch (pagingflags)
sl@0
  1210
		{
sl@0
  1211
		case EThreadCreateFlagPaged:
sl@0
  1212
			createInfo.SetPaging(TChunkCreateInfo::EPaged);
sl@0
  1213
			break;
sl@0
  1214
		case EThreadCreateFlagUnpaged:
sl@0
  1215
			createInfo.SetPaging(TChunkCreateInfo::EUnpaged);
sl@0
  1216
			break;
sl@0
  1217
		case EThreadCreateFlagPagingUnspec:
sl@0
  1218
			// Leave the chunk paging policy unspecified so the process's 
sl@0
  1219
			// paging policy is used.
sl@0
  1220
			break;
sl@0
  1221
		}
sl@0
  1222
sl@0
  1223
	TInt r = c.Create(createInfo);
sl@0
  1224
	if (r!=KErrNone)
sl@0
  1225
		return r;
sl@0
  1226
sl@0
  1227
	aHeap = ChunkHeap(c, minLength, page_size, maxLength, aAlign, aSingleThread, EChunkHeapSwitchTo|EChunkHeapDuplicate);
sl@0
  1228
	c.Close();
sl@0
  1229
	if (!aHeap)
sl@0
  1230
		return KErrNoMemory;
sl@0
  1231
	if (aInfo.iFlags & ETraceHeapAllocs)
sl@0
  1232
		{
sl@0
  1233
		aHeap->iFlags |= RHeap::ETraceAllocs;
sl@0
  1234
		BTraceContext8(BTrace::EHeap, BTrace::EHeapCreate,(TUint32)aHeap, RHeap::EAllocCellSize);
sl@0
  1235
		TInt handle = aHeap->ChunkHandle();
sl@0
  1236
		TInt chunkId = ((RHandleBase&)handle).BTraceId();
sl@0
  1237
		BTraceContext8(BTrace::EHeap, BTrace::EHeapChunkCreate, (TUint32)aHeap, chunkId);
sl@0
  1238
		}
sl@0
  1239
	if (aInfo.iFlags & EMonitorHeapMemory)
sl@0
  1240
		aHeap->iFlags |= RHeap::EMonitorMemory;
sl@0
  1241
	return KErrNone;
sl@0
  1242
	}
sl@0
  1243
sl@0
  1244
#endif	// __KERNEL_MODE__

void RHeap::WalkCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen)
	{
	(void)aCell;
	SHeapCellInfo& info = *(SHeapCellInfo*)aPtr;
	switch(aType)
		{
		case EGoodAllocatedCell:
			{
			++info.iTotalAlloc;
			info.iTotalAllocSize += (aLen-EAllocCellSize);
#if defined(_DEBUG)
			RHeap& h = *info.iHeap;
			if ( ((SDebugCell*)aCell)->nestingLevel == h.iNestingLevel )
				{
				if (++info.iLevelAlloc==1)
					info.iStranded = (SDebugCell*)aCell;
#ifdef __KERNEL_MODE__
				if (KDebugNum(KSERVER) || KDebugNum(KTESTFAST))
					{
//				__KTRACE_OPT(KSERVER,Kern::Printf("LEAKED KERNEL HEAP CELL @ %08x : len=%d", aCell, aLen));
					Kern::Printf("LEAKED KERNEL HEAP CELL @ %08x : len=%d", aCell, aLen);
					TLinAddr base = ((TLinAddr)aCell)&~0x0f;
					TLinAddr end = ((TLinAddr)aCell)+(TLinAddr)aLen;
					while(base<end)
						{
						const TUint32* p = (const TUint32*)base;
						Kern::Printf("%08x: %08x %08x %08x %08x", p, p[0], p[1], p[2], p[3]);
						base += 16;
						}
					}
#endif
				}
#endif
			break;
			}
		case EGoodFreeCell:
			++info.iTotalFree;
			break;
		case EBadAllocatedCellSize:
			HEAP_PANIC(ETHeapBadAllocatedCellSize);
		case EBadAllocatedCellAddress:
			HEAP_PANIC(ETHeapBadAllocatedCellAddress);
		case EBadFreeCellAddress:
			HEAP_PANIC(ETHeapBadFreeCellAddress);
		case EBadFreeCellSize:
			HEAP_PANIC(ETHeapBadFreeCellSize);
		default:
			HEAP_PANIC(ETHeapWalkBadCellType);
		}
	}

TInt RHeap::DoCountAllocFree(TInt& aFree)
	{
	SHeapCellInfo info;
	memclr(&info, sizeof(info));
	info.iHeap = this;
	Walk(&WalkCheckCell, &info);
	aFree = info.iTotalFree;
	return info.iTotalAlloc;
	}


UEXPORT_C TInt RHeap::DebugFunction(TInt aFunc, TAny* a1, TAny* a2)
/**
@internalComponent
*/
	{
	TInt r = KErrNone;
	switch(aFunc)
		{
		case RAllocator::ECount:
			r = DoCountAllocFree(*(TInt*)a1);
			break;
		case RAllocator::EMarkStart:
			__DEBUG_ONLY(DoMarkStart());
			break;
		case RAllocator::EMarkEnd:
			__DEBUG_ONLY( r = DoMarkEnd((TInt)a1) );
			break;
		case RAllocator::ECheck:
			r = DoCheckHeap((SCheckInfo*)a1);
			break;
		case RAllocator::ESetFail:
			__DEBUG_ONLY(DoSetAllocFail((TAllocFail)(TInt)a1, (TInt)a2));
			break;
		case RAllocator::ESetBurstFail:
#if defined(_DEBUG)
			{
			SRAllocatorBurstFail* fail = (SRAllocatorBurstFail*) a2;
			DoSetAllocFail((TAllocFail)(TInt)a1, fail->iRate, fail->iBurst);
			}
#endif
			break;

		case RAllocator::ECheckFailure:
			// iRand will be incremented for each EFailNext, EBurstFailNext,
			// EDeterministic and EBurstDeterministic failure.
			r = iRand;
			break;

		case RAllocator::ECopyDebugInfo:
			{
			TInt nestingLevel = ((SDebugCell*)a1)[-1].nestingLevel;
			((SDebugCell*)a2)[-1].nestingLevel = nestingLevel;
			break;
			}
		case RHeap::EWalk:
			Walk((TWalkFunc)a1, a2);
			break;
		default:
			return KErrNotSupported;
		}
	return r;
	}
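/*
Illustrative sketch (not part of the original source): driving the ECount path of
DebugFunction() above from test code. 'heap' stands for whatever RHeap reference
the caller has access to (for example the current thread's heap); obtaining it,
and whether DebugFunction() is reachable from your context, are assumptions here.

	RHeap& heap = ...;	// hypothetical: some RHeap the test owns
	TInt freeCellCount = 0;
	// ECount writes the free-cell count through a1 and returns the number of
	// allocated cells, exactly as DoCountAllocFree() is wired up above.
	TInt allocCellCount = heap.DebugFunction(RAllocator::ECount, &freeCellCount);
*/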




void RHeap::Walk(TWalkFunc aFunc, TAny* aPtr)
//
// Walk the heap calling the info function.
//
	{

	Lock();
	SCell* pC = (SCell*)iBase;		// allocated cells
	SCell* pF = &iFree;				// free cells
	FOREVER
		{
		pF = pF->next;				// next free cell
		if (!pF)
			pF = (SCell*)iTop;		// to make size checking work
		else if ( (TUint8*)pF>=iTop || (pF->next && pF->next<=pF) )
			{
			if (iFlags & ETraceAllocs)
				BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pF+EFreeCellSize, 0);
			// free cell pointer off the end or going backwards
			Unlock();
			(*aFunc)(aPtr, EBadFreeCellAddress, pF, 0);
			return;
			}
		else
			{
			TInt l = pF->len;
			if (l<iMinCell || (l & (iAlign-1)))
				{
				if (iFlags & ETraceAllocs)
					BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pF+EFreeCellSize, l-EFreeCellSize);
				// free cell length invalid
				Unlock();
				(*aFunc)(aPtr, EBadFreeCellSize, pF, l);
				return;
				}
			}
		while (pC!=pF)				// walk allocated cells up to next free cell
			{
			TInt l = pC->len;
			if (l<iMinCell || (l & (iAlign-1)))
				{
				if (iFlags & ETraceAllocs)
					BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pC+EAllocCellSize, l-EAllocCellSize);
				// allocated cell length invalid
				Unlock();
				(*aFunc)(aPtr, EBadAllocatedCellSize, pC, l);
				return;
				}
			(*aFunc)(aPtr, EGoodAllocatedCell, pC, l);
			SCell* pN = __NEXT_CELL(pC);
			if (pN > pF)
				{
				if (iFlags & ETraceAllocs)
					BTraceContext12(BTrace::EHeap, BTrace::EHeapCorruption, (TUint32)this, (TUint32)pC+EAllocCellSize, l-EAllocCellSize);
				// cell overlaps next free cell
				Unlock();
				(*aFunc)(aPtr, EBadAllocatedCellAddress, pC, l);
				return;
				}
			pC = pN;
			}
		if ((TUint8*)pF == iTop)
			break;		// reached end of heap
		pC = __NEXT_CELL(pF);	// step to next allocated cell
		(*aFunc)(aPtr, EGoodFreeCell, pF, pF->len);
		}
	Unlock();
	}
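/*
Illustrative sketch (not part of the original source): a minimal walker callback
with the same shape as WalkCheckCell() above. It only tallies cell counts and
sizes; the struct and function names are invented for the example. Such a
callback can be driven through the RHeap::EWalk case of DebugFunction(), which
casts a1 to TWalkFunc and passes a2 through as the context pointer.

	struct TExampleWalkStats
		{
		TInt iAllocCells;
		TInt iFreeCells;
		TInt iAllocBytes;
		};

	void ExampleWalkFunc(TAny* aPtr, RHeap::TCellType aType, TAny* aCell, TInt aLen)
		{
		(void)aCell;
		TExampleWalkStats& stats = *(TExampleWalkStats*)aPtr;
		if (aType == RHeap::EGoodAllocatedCell)
			{
			++stats.iAllocCells;
			stats.iAllocBytes += aLen - RHeap::EAllocCellSize;
			}
		else if (aType == RHeap::EGoodFreeCell)
			++stats.iFreeCells;
		// Any of the EBad... types means Walk() detected heap corruption.
		}
*/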

TInt RHeap::DoCheckHeap(SCheckInfo* aInfo)
	{
	(void)aInfo;
	SHeapCellInfo info;
	memclr(&info, sizeof(info));
	info.iHeap = this;
	Walk(&WalkCheckCell, &info);
#if defined(_DEBUG)
	if (!aInfo)
		return KErrNone;
	TInt expected = aInfo->iCount;
	TInt actual = aInfo->iAll ? info.iTotalAlloc : info.iLevelAlloc;
	if (actual!=expected && !iTestData)
		{
#ifdef __KERNEL_MODE__
		Kern::Fault("KERN-ALLOC COUNT", (expected<<16)|actual );
#else
		User::Panic(_L("ALLOC COUNT"), (expected<<16)|actual );
#endif
		}
#endif
	return KErrNone;
	}

#ifdef _DEBUG
void RHeap::DoMarkStart()
	{
	if (iNestingLevel==0)
		iAllocCount=0;
	iNestingLevel++;
	}

TUint32 RHeap::DoMarkEnd(TInt aExpected)
	{
	if (iNestingLevel==0)
		return 0;
	SHeapCellInfo info;
	SHeapCellInfo* p = iTestData ? (SHeapCellInfo*)iTestData : &info;
	memclr(p, sizeof(info));
	p->iHeap = this;
	Walk(&WalkCheckCell, p);
	if (p->iLevelAlloc != aExpected && !iTestData)
		return (TUint32)(p->iStranded + 1);
	if (--iNestingLevel == 0)
		iAllocCount = 0;
	return 0;
	}
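/*
Illustrative sketch (not part of the original source): the nesting-level
leak-check flow that DoMarkStart()/DoMarkEnd() implement, driven through the
corresponding DebugFunction() cases. 'heap' is a hypothetical RHeap reference;
reaching DebugFunction() from your context is an assumption.

	heap.DebugFunction(RAllocator::EMarkStart);	// push a nesting level
	TAny* p = heap.Alloc(32);			// in debug builds the cell records the new level
	heap.Free(p);
	// EMarkEnd takes the number of cells expected to remain at this level (0 here);
	// a non-zero return identifies the first stranded cell (see DoMarkEnd() above).
	TInt leaked = heap.DebugFunction(RAllocator::EMarkEnd, (TAny*)0);
*/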

void ResetAllocCellLevels(TAny* aPtr, RHeap::TCellType aType, TAny* aCell, TInt aLen)
	{
	(void)aPtr;
	(void)aLen;
	RHeap::SDebugCell* cell = (RHeap::SDebugCell*)aCell;
	if (aType == RHeap::EGoodAllocatedCell)
		{
		cell->nestingLevel = 0;
		}
	}

void RHeap::DoSetAllocFail(TAllocFail aType, TInt aRate)
	{// Default to a burst mode of 1, as aType may be a burst type.
	DoSetAllocFail(aType, aRate, 1);
	}

// Don't change as the ETHeapBadDebugFailParameter check below and the API
// documentation rely on this being 16 for RHeap.
LOCAL_D const TInt KBurstFailRateShift = 16;
LOCAL_D const TInt KBurstFailRateMask = (1 << KBurstFailRateShift) - 1;
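// Worked example of the packing used below (illustrative, not original source):
// for a burst fail type with aRate==5 and aBurst==3, DoSetAllocFail() stores
// iFailRate = 5 | (3 << KBurstFailRateShift) = 0x00030005, so
// (iFailRate & KBurstFailRateMask) recovers the rate (5) and
// (iFailRate >> KBurstFailRateShift) recovers the burst length (3).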

void RHeap::DoSetAllocFail(TAllocFail aType, TInt aRate, TUint aBurst)
	{
	if (aType==EReset)
		{
		// reset levels of all allocated cells to 0
		// this should prevent subsequent tests failing unnecessarily
		iFailed = EFalse;		// Reset for ECheckFailure relies on this.
		Walk(&ResetAllocCellLevels, NULL);
		// reset heap allocation mark as well
		iNestingLevel=0;
		iAllocCount=0;
		aType=ENone;
		}

	switch (aType)
		{
		case EBurstRandom:
		case EBurstTrueRandom:
		case EBurstDeterministic:
		case EBurstFailNext:
			// If the fail type is a burst type then iFailRate is split in 2:
			// the 16 lsbs are the fail rate and the 16 msbs are the burst length.
			if (TUint(aRate) > (TUint)KMaxTUint16 || aBurst > KMaxTUint16)
				HEAP_PANIC(ETHeapBadDebugFailParameter);

			iFailed = EFalse;
			iFailType = aType;
			iFailRate = (aRate == 0) ? 1 : aRate;
			iFailAllocCount = -iFailRate;
			iFailRate = iFailRate | (aBurst << KBurstFailRateShift);
			break;

		default:
			iFailed = EFalse;
			iFailType = aType;
			iFailRate = (aRate == 0) ? 1 : aRate; // A rate of <1 is meaningless
			iFailAllocCount = 0;
			break;
		}

	// Set up iRand for either:
	//		- random seed value, or
	//		- a count of the number of failures so far.
	iRand = 0;
#ifndef __KERNEL_MODE__
	switch (iFailType)
		{
		case ETrueRandom:
		case EBurstTrueRandom:
			{
			TTime time;
			time.HomeTime();
			TInt64 seed = time.Int64();
			iRand = Math::Rand(seed);
			break;
			}
		case ERandom:
		case EBurstRandom:
			{
			TInt64 seed = 12345;
			iRand = Math::Rand(seed);
			break;
			}
		default:
			break;
		}
#endif
	}
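/*
Illustrative sketch (not part of the original source): configuring burst failure
mode through the RAllocator::ESetBurstFail case of DebugFunction() above. The
SRAllocatorBurstFail fields are those read by that case; how the caller reaches
DebugFunction() (shown here on a hypothetical 'heap' reference) is an assumption.

	RHeap& heap = ...;	// hypothetical: some RHeap under test
	SRAllocatorBurstFail burst;
	burst.iRate = 5;	// start a burst every 5th allocation
	burst.iBurst = 2;	// each burst fails 2 consecutive allocations
	heap.DebugFunction(RAllocator::ESetBurstFail,
	                   (TAny*)(TInt)RAllocator::EBurstDeterministic, &burst);
	// ... exercise the code under test ...
	heap.DebugFunction(RAllocator::ESetFail, (TAny*)(TInt)RAllocator::EReset, 0);
*/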

TBool RHeap::CheckForSimulatedAllocFail()
//
// Check to see if the user has requested simulated alloc failure, and if so
// possibly return ETrue to indicate a failure.
//
	{
	// For burst mode failures iFailRate is shared: the low 16 bits hold the
	// fail rate and the high 16 bits hold the burst length.
	TUint16 rate  = (TUint16)(iFailRate &  KBurstFailRateMask);
	TUint16 burst = (TUint16)(iFailRate >> KBurstFailRateShift);
	TBool r = EFalse;
	switch (iFailType)
		{
#ifndef __KERNEL_MODE__
		case ERandom:
		case ETrueRandom:
			if (++iFailAllocCount>=iFailRate)
				{
				iFailAllocCount=0;
				if (!iFailed) // haven't failed yet after iFailRate allocations so fail now
					return(ETrue);
				iFailed=EFalse;
				}
			else
				{
				if (!iFailed)
					{
					TInt64 seed=iRand;
					iRand=Math::Rand(seed);
					if (iRand%iFailRate==0)
						{
						iFailed=ETrue;
						return(ETrue);
						}
					}
				}
			break;

		case EBurstRandom:
		case EBurstTrueRandom:
			if (++iFailAllocCount < 0)
				{
				// We haven't started failing yet so should we now?
				TInt64 seed = iRand;
				iRand = Math::Rand(seed);
				if (iRand % rate == 0)
					{// Fail now.  Reset iFailAllocCount so we fail burst times
					iFailAllocCount = 0;
					r = ETrue;
					}
				}
			else
				{
				if (iFailAllocCount < burst)
					{// Keep failing for burst times
					r = ETrue;
					}
				else
					{// We've now failed burst times so start again.
					iFailAllocCount = -(rate - 1);
					}
				}
			break;
#endif
		case EDeterministic:
			if (++iFailAllocCount%iFailRate==0)
				{
				r=ETrue;
				iRand++;	// Keep count of how many times we have failed
				}
			break;

		case EBurstDeterministic:
			// This will fail burst number of times, every rate attempts.
			if (++iFailAllocCount >= 0)
				{
				if (iFailAllocCount == burst - 1)
					{// This is the last failure of the burst, so reset the count
					// so that we next fail after another rate attempts.
					iFailAllocCount = -rate;
					}
				r = ETrue;
				iRand++;	// Keep count of how many times we have failed
				}
			break;

		case EFailNext:
			if ((++iFailAllocCount%iFailRate)==0)
				{
				iFailType=ENone;
				r=ETrue;
				iRand++;	// Keep count of how many times we have failed
				}
			break;

		case EBurstFailNext:
			if (++iFailAllocCount >= 0)
				{
				if (iFailAllocCount == burst - 1)
					{// This is the last failure of the burst, so stop failing after it.
					iFailType = ENone;
					}
				r = ETrue;
				iRand++;	// Keep count of how many times we have failed
				}
			break;
		default:
			break;
		}
	return r;
	}
#endif	// ifdef _DEBUG

UEXPORT_C TInt RHeap::Extension_(TUint aExtensionId, TAny*& a0, TAny* a1)
	{
	return RAllocator::Extension_(aExtensionId, a0, a1);
	}

#if defined(__HEAP_MACHINE_CODED__) && !defined(_DEBUG)
GLDEF_C void RHeap_PanicBadAllocatedCellSize()
	{
	HEAP_PANIC(ETHeapBadAllocatedCellSize);
	}

GLDEF_C void RHeap_PanicBadNextCell()
	{
	HEAP_PANIC(ETHeapFreeBadNextCell);
	}

GLDEF_C void RHeap_PanicBadPrevCell()
	{
	HEAP_PANIC(ETHeapFreeBadPrevCell);
	}

GLDEF_C void RHeap_PanicBadCellAddress()
	{
	HEAP_PANIC(ETHeapBadCellAddress);
	}
#endif