os/kernelhwsrv/kernel/eka/common/arm/cheap.cia
author sl
Tue, 10 Jun 2014 14:32:02 +0200
changeset 1 260cb5ec6c19
permissions -rw-r--r--
Update contrib.

// Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\common\arm\cheap.cia
//
//

#include <e32cia.h>
sl@0
    19
#include "../common.h"
sl@0
    20
sl@0
    21
#if defined(__HEAP_MACHINE_CODED__) && !defined(_DEBUG)
sl@0
    22
GLREF_C void RHeap_PanicBadAllocatedCellSize();
sl@0
    23
GLREF_C void RHeap_PanicBadNextCell();
sl@0
    24
GLREF_C void RHeap_PanicBadPrevCell();
sl@0
    25
GLREF_C void RHeap_PanicBadCellAddress();
sl@0
    26
sl@0
    27
IMPORT_D extern const TInt KHeapShrinkHysRatio;
sl@0
    28
sl@0
    29
sl@0
    30
__NAKED__ RHeap::SCell* RHeap::DoAlloc(TInt /*aSize*/, SCell*& /*aLastFree*/)
//
// Allocate a cell.
//
// First-fit scan of the singly linked free list hanging off iFree.
// Register convention on entry (AAPCS): r0 = this, r1 = aSize,
// r2 = &aLastFree (out-parameter, written only on the failure path).
// Returns the allocated cell in r0, or NULL if no free cell is large
// enough; in that case aLastFree is set to the last free cell visited.
//
	{
	asm("stmfd sp!, {r4,r5,lr} ");
	asm("mov r4, r0 ");										// r4=this
	asm("add r3, r0, #%a0" : : "i" _FOFF(RHeap,iFree));		// r3=pP=&iFree
	asm("ldr r0, [r3, #4] ");								// r0=pC=pP->next
	asm("cmp r0, #0 ");
	asm("beq 0f ");								// if no free cells, alloc failed

	// optimised unfolded scanning loop
	// Advances while aSize > cell size. "movhis" sets Z when the list
	// ends (next == NULL), so HI fails and control falls through to 1:
	// with r12 holding the last (too small) cell's size, making the
	// subs there come out LO and route to the failure path.
	asm("2: ");
	asm("ldmia r0, {r12,r14} ");				// r12=pC->size, r14=pC->next
	asm("cmp r1, r12 ");						// compare aSize to size
	asm("movhi r3, r0 ");						// if aSize>size, pP=pC
	asm("movhis r0, r14 ");						// and pC=pC->next (flags updated: Z set at end of list)
#ifndef __CPU_ARMV6 // don't unroll on armv6
	asm("ldmhiia r0, {r12,r14} ");				// r12=pC->size, r14=pC->next
	asm("cmphi r1, r12 ");						// compare aSize to size
	asm("movhi r3, r0 ");						// if aSize>size, pP=pC
	asm("movhis r0, r14 ");						// and pC=pC->next
#endif
	asm("bhi 2b ");								// branch back if scan not finished

	// Found a candidate (or ran off the end). Split the cell if the
	// leftover would be at least iMinCell, otherwise hand out the whole cell.
	asm("1: ");
	asm("subs r5, r12, r1 ");					// r5 = pC->len - aSize
	asm("ldrhs r2, [r4, #%a0]" : : "i" _FOFF(RHeap,iMinCell));		// if big enough, r2=iMinCell
	asm("blo 0f ");								// branch if no free cell was big enough
	asm("cmp r5, r2 ");							// leftover big enough?
	asm("movlo r1, r12 ");						// if not, aSize=pC->len ...
	asm("strlo r14, [r3, #4] ");				// ... and pP->next = pC->next (unlink whole cell)
	asm("addhs r2, r0, r1 ");					// else r2 = pE = address of new free cell ...
	asm("stmhsia r2, {r5, r14} ");				// ... pE->len = pC->len - aSize, pE->next = pC->next ...
	asm("strhs r2, [r3, #4] ");					// ... and pP->next = pE
	asm("str r1, [r0] ");						// pC->len = aSize
	__POPRET("r4,r5,");							// restore and exit, return pC

	// Failure path: r2 is untouched on every route here (the ldrhs above
	// is mutually exclusive with the blo), so it still holds &aLastFree.
	asm("0: ");
	asm("str r3, [r2] ");						// alloc failed - aLastFree=pP
	asm("mov r0, #0 ");							// return NULL
	__POPRET("r4,r5,");
	}
sl@0
    74
sl@0
    75
__NAKED__ void RHeap::DoFree(SCell* /*pC*/)
//
// Free a cell.
//
// Register convention on entry: r0 = this, r1 = pC (cell being freed,
// its len word already valid). Re-inserts pC into the address-ordered
// free list, coalescing with the adjacent free cell on either side,
// panicking if the list is found to be corrupt. If the amalgamated
// cell ends at iTop and is large enough (relative to
// KHeapShrinkHysRatio*(iGrowBy>>8)), tail-calls RHeap::Reduce to give
// memory back; otherwise returns normally.
//
	{
	asm("add r2, r0, #%a0" : : "i" _FOFF(RHeap,iFree));	// r2=pP=&iFree
	asm("ldr r3, [r2, #4] ");					// r3=pE=pP->next
	asm("stmfd sp!, {r4, r5} ");

	// Scan the address-ordered free list for the insertion point:
	// advance until pE == NULL or pC < pE, keeping pP as predecessor.
	asm("1: ");
	asm("cmp r3, #0 ");						// check if pE=NULL
	asm("cmphi r1, r3 ");					// if not, check if pC>pE
	asm("movhi r2, r3 ");					// if so, pP=pE
	asm("ldrhi r3, [r3, #4] ");				// and pE=pE->next
#ifndef __CPU_ARMV6    // don't unroll on armv6
	asm("cmphi r3, #0 ");					// check if pE=NULL
	asm("cmphi r1, r3 ");					// if not, check if pC>pE
	asm("movhi r2, r3 ");					// if so, pP=pE
	asm("ldrhi r3, [r3, #4] ");				// and pE=pE->next
#endif
	asm("bhi 1b ");							// loop if free cell position not found

	// Forward coalesce: merge pC with the following free cell pE if they abut.
	asm("ldr r4, [r1, #0] ");				// r4=pC->len
	asm("cmp r3, #0 ");						// is there a following free cell ?
	asm("streq r3, [r1, #4] ");				// if not, pC->next=NULL
	asm("beq 2f ");							// and skip next section
	asm("add r5, r1, r4 ");					// r5=pN=pC + pC->len (cell after pC)
	asm("cmp r5, r3 ");						// compare pN with pE
	asm("ldmeqia r3, {r5, r12} ");			// if pN==pE, r5=pE->len, r12=pE->next
	asm("bhi " CSM_Z22RHeap_PanicBadNextCellv );	// if pN>pE (pC overlaps pE), panic
	asm("strne r3, [r1, #4] ");				// if pN<pE, pC->next=pE
	asm("addeq r4, r4, r5 ");				// if pN==pE r4 = pC->len + pE->len
	asm("stmeqia r1, {r4,r12} ");			// if pN==pE pC->len+=pE->len, pC->next=pE->next

	// Backward coalesce: merge the predecessor pP into pC if they abut.
	// NOTE(review): when pP is the list head (&iFree) the "gap == pP->len"
	// test is presumed never to match — confirm against RHeap construction.
	asm("2: ");
	asm("ldr r3, [r2, #0] ");				// r3=pP->len
	asm("sub r5, r1, r2 ");					// r5=pC-pP (gap between preceding free cell and this one)
	asm("cmp r5, r3 ");						// compare gap with predecessor length
	asm("ldreq r12, [r1, #4] ");			// if predecessor is adjacent, r12=pC->next
	asm("blo RHeap_PanicBadPrevCell__Fv ");	// if predecessor overlaps, panic
	asm("addeq r4, r4, r3 ");				// if predecessor is adjacent, r4=pC->len + pP->len
	asm("stmeqia r2, {r4,r12} ");			// if predecessor is adjacent, pP->len+=pC->len, pP->next=pC->next
	asm("strne r1, [r2, #4] ");				// else pP->next = pC
	asm("moveq r1, r2 ");					// if predecessor is adjacent, pC=pP (final amalgamated free cell)

	// Shrink check: only worth considering if the final free cell reaches iTop.
	asm("3: ");
	asm("ldr r12, [r0, #%a0]" : : "i" _FOFF(RHeap,iTop));	// r12=iTop
	asm("add r3, r1, r4 ");					// end of amalgamated free cell
	asm("cmp r3, r12 ");					// end of amalgamated free cell = iTop ?
	asm("ldmneia sp!, {r4,r5} ");			// restore registers
	__JUMP(ne,lr);							// if not, finished
	asm("ldr r12, [r0, #%a0]" : : "i" _FOFF(RHeap,iFlags));	// r12=iFlags
	asm("tst r12, #%a0" : : "i" ((TInt)RAllocator::EFixedSize));	// check fixed size flag
	asm("ldmneia sp!, {r4,r5} ");			// restore registers
	__JUMP(ne,lr);							// if set, a fixed-size heap never shrinks - finished
	asm("ldr r2, [r0, #%a0]" : : "i" _FOFF(RHeap,iGrowBy));	// r2=iGrowBy
	asm("mov r3, r2, LSR #8");				// r3=iGrowBy>>8
	asm("ldr r2, const_addr");				// r2=&KHeapShrinkHysRatio (PC-relative literal below)
	asm("ldr r5, [r2]");					// r5=KHeapShrinkHysRatio
	asm("mul r2, r5, r3");					// r2=KHeapShrinkHysRatio*(iGrowBy>>8) - low order bits
	asm("cmp r4, r2");						// compare len(r4) to (iGrowBy>>8)*KHeapShrinkHysRatio(r2)
	asm("ldmia sp!, {r4,r5} ");				// restore registers
	__JUMP(lo,lr);							// if less, finished
	asm("b Reduce__5RHeapPQ25RHeap5SCell ");	// else reduce heap (tail call, GCC98-mangled RHeap::Reduce(SCell*))

	// In-line literal pool entry holding the address of KHeapShrinkHysRatio.
	asm("const_addr:");
	asm(".word %a0" : : "i" ((TInt)&KHeapShrinkHysRatio));
	}
sl@0
   141
#endif
sl@0
   142