1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
1.2 +++ b/os/kernelhwsrv/kerneltest/e32test/buffer/t_tbma.cpp Fri Jun 15 03:10:57 2012 +0200
1.3 @@ -0,0 +1,1256 @@
1.4 +// Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
1.5 +// All rights reserved.
1.6 +// This component and the accompanying materials are made available
1.7 +// under the terms of the License "Eclipse Public License v1.0"
1.8 +// which accompanies this distribution, and is available
1.9 +// at the URL "http://www.eclipse.org/legal/epl-v10.html".
1.10 +//
1.11 +// Initial Contributors:
1.12 +// Nokia Corporation - initial contribution.
1.13 +//
1.14 +// Contributors:
1.15 +//
1.16 +// Description:
1.17 +// e32test\buffer\t_tbma.cpp
1.18 +//
1.19 +//
1.20 +
1.21 +#define __E32TEST_EXTENSION__
1.22 +#include "t_tbma.h"
1.23 +#include <cpudefs.h>
1.24 +#include <e32atomics.h>
1.25 +
1.26 +RTest test(_L("T_TBMA"));
1.27 +
1.28 +
1.29 +/**************************************
1.30 + * class TBmaList
1.31 + **************************************/
1.32 +
1.33 +TBmaList* TBmaList::New(TInt aNumBmas)
1.34 + {
1.35 + TBmaList* pL=new TBmaList;
1.36 + if (pL)
1.37 + {
1.38 + pL->iNumBmas=aNumBmas;
1.39 + pL->iBmaList=(TBitMapAllocator**)User::Alloc(aNumBmas*sizeof(TBitMapAllocator*));
1.40 + if (pL->iBmaList)
1.41 + Mem::FillZ(pL->iBmaList, aNumBmas*sizeof(TBitMapAllocator*));
1.42 + pL->iBaseList=(TInt*)User::Alloc((aNumBmas+1)*sizeof(TInt));
1.43 + if (!pL->iBmaList || !pL->iBaseList)
1.44 + {
1.45 + delete pL;
1.46 + pL=NULL;
1.47 + }
1.48 + }
1.49 + return pL;
1.50 + }
1.51 +
1.52 +TBmaList* TBmaList::New(const TBitMapAllocator& aBma, TInt aNumSplits, VA_LIST aList)
1.53 + {
1.54 + TBmaList* pL=TBmaList::New(aNumSplits+1);
1.55 + if (!pL)
1.56 + return NULL;
1.57 + TInt i;
1.58 + pL->iBaseList[0]=0;
1.59 + for (i=1; i<=aNumSplits; ++i)
1.60 + pL->iBaseList[i]=VA_ARG(aList,TInt);
1.61 + pL->iBaseList[aNumSplits+1]=aBma.iSize;
1.62 + for (i=0; i<=aNumSplits; ++i)
1.63 + {
1.64 + TInt sz=pL->iBaseList[i+1]-pL->iBaseList[i];
1.65 + __ASSERT_ALWAYS(sz>0, TBMA_FAULT());
1.66 + pL->iBmaList[i]=TBitMapAllocator::New(sz,EFalse);
1.67 + if (!pL->iBmaList[i])
1.68 + {
1.69 + delete pL;
1.70 + return NULL;
1.71 + }
1.72 + TInt j;
1.73 + for (j=0; j<sz; ++j)
1.74 + {
1.75 + if (aBma.NotAllocated(j+pL->iBaseList[i],1))
1.76 + pL->iBmaList[i]->Free(j);
1.77 + }
1.78 + }
1.79 + return pL;
1.80 + }
1.81 +
1.82 +TBmaList::TBmaList()
1.83 + {
1.84 + iNumBmas=0;
1.85 + iBmaList=NULL;
1.86 + iBaseList=NULL;
1.87 + }
1.88 +
1.89 +TBmaList::~TBmaList()
1.90 + {
1.91 + TInt i;
1.92 +	for (i=0; iBmaList && i<iNumBmas; ++i)	// iBmaList may be NULL if allocation failed in New()
1.93 + delete iBmaList[i];
1.94 + User::Free(iBmaList);
1.95 + User::Free(iBaseList);
1.96 + }
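+
+/*
+ * The extended allocators below work over the chain of TBitMapAllocator objects
+ * in iBmaList, whose start positions are held in iBaseList. A free run that
+ * reaches the end of one BMA is remembered in 'carry'; if the next BMA is
+ * contiguous with it (its base equals the previous base plus length) the run is
+ * continued across the boundary, otherwise the carry is reset to zero.
+ */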
1.97 +/*
1.98 + * Extended first fit allocator
1.99 + */
1.100 +TInt TBmaList::AllocConsecutiveFF(TInt aLength)
1.101 + {
1.102 + TInt base=KErrNotFound;
1.103 + TInt bmalen=0;
1.104 + TInt carry=0;
1.105 + TBitMapAllocator** ppA=iBmaList; // pointer to list of TBitMapAllocator*
1.106 + TBitMapAllocator** pE=ppA+iNumBmas;
1.107 + TInt* pB=iBaseList;
1.108 + for (; ppA<pE; ++ppA, ++pB)
1.109 + {
1.110 + TBitMapAllocator* pA=*ppA;
1.111 + if (*pB!=base+bmalen)
1.112 + {
1.113 + // this BMA is not contiguous with previous one
1.114 + carry=0;
1.115 + }
1.116 + base=*pB;
1.117 + bmalen=pA->iSize;
1.118 + TInt l;
1.119 + TInt r=pA->AllocAligned(aLength,0,0,EFalse,carry,l);
1.120 + if (r>=0)
1.121 + return base+r-carry;
1.122 + }
1.123 + return KErrNotFound;
1.124 + }
1.125 +
1.126 +/*
1.127 + * Extended best fit allocator
1.128 + */
1.129 +TInt TBmaList::AllocConsecutiveBF(TInt aLength)
1.130 + {
1.131 + TInt bmalen=0;
1.132 + TInt carry=0;
1.133 + TInt minrun=KMaxTInt;
1.134 + TInt minrunpos=KErrNotFound;
1.135 + TBitMapAllocator** ppA=iBmaList; // pointer to list of TBitMapAllocator*
1.136 + TBitMapAllocator** pE=ppA+iNumBmas;
1.137 + TInt* pB=iBaseList;
1.138 + TInt base=*pB;
1.139 + for (; ppA<pE; ++ppA, ++pB)
1.140 + {
1.141 + TBitMapAllocator* pA=*ppA;
1.142 + if (*pB!=base+bmalen)
1.143 + {
1.144 + // this BMA is not contiguous with previous one
1.145 + // check final run of previous BMA
1.146 + if (carry<minrun)
1.147 + {
1.148 + minrun=carry;
1.149 + minrunpos=base+bmalen-carry;
1.150 + }
1.151 + carry=0;
1.152 + }
1.153 + base=*pB;
1.154 + bmalen=pA->iSize;
1.155 + TInt l=KMaxTInt;
1.156 + TInt oldc=carry;
1.157 + TInt r=pA->AllocAligned(aLength,0,0,ETrue,carry,l);
1.158 + if (r>=0)
1.159 + {
1.160 + // check shortest run in this BMA
1.161 + if (l<minrun)
1.162 + {
1.163 + minrun=l;
1.164 + minrunpos=r ? (base+r) : (base-oldc);
1.165 + if (minrun==aLength)
1.166 + break; // exact match so finish
1.167 + }
1.168 + }
1.169 + }
1.170 + // check final run of last BMA
1.171 + if (ppA==pE && carry>=aLength && carry<minrun)
1.172 + minrunpos=base+bmalen-carry;
1.173 + return minrunpos;
1.174 + }
1.175 +
1.176 +/*
1.177 + * Extended first fit aligned allocator
1.178 + */
1.179 +TInt TBmaList::AllocAlignedFF(TInt aLength, TInt aAlign)
1.180 + {
1.181 + TUint32 alignsize=1<<aAlign;
1.182 + TUint32 alignmask=alignsize-1;
1.183 + TInt base=KErrNotFound;
1.184 + TInt bmalen=0;
1.185 + TInt carry=0;
1.186 + TBitMapAllocator** ppA=iBmaList; // pointer to list of TBitMapAllocator*
1.187 + TBitMapAllocator** pE=ppA+iNumBmas;
1.188 + TInt* pB=iBaseList;
1.189 + for (; ppA<pE; ++ppA, ++pB)
1.190 + {
1.191 + TBitMapAllocator* pA=*ppA;
1.192 + if (*pB!=base+bmalen)
1.193 + {
1.194 + // this BMA is not contiguous with previous one
1.195 + carry=0;
1.196 + }
1.197 + base=*pB;
1.198 + bmalen=pA->iSize;
1.199 + TInt l;
1.200 + TInt r=pA->AllocAligned(aLength,aAlign,base,EFalse,carry,l);
1.201 + if (r>=0)
1.202 + return (base+r-carry+alignmask)&~alignmask;
1.203 + }
1.204 + return KErrNotFound;
1.205 + }
1.206 +
1.207 +/*
1.208 + * Extended best fit aligned allocator
1.209 + */
1.210 +TInt TBmaList::AllocAlignedBF(TInt aLength, TInt aAlign)
1.211 + {
1.212 + TInt bmalen=0;
1.213 + TInt carry=0;
1.214 + TInt minrun=KMaxTInt;
1.215 + TInt minrunpos=KErrNotFound;
1.216 + TUint32 alignsize=1<<aAlign;
1.217 + TUint32 alignmask=alignsize-1;
1.218 + TBitMapAllocator** ppA=iBmaList; // pointer to list of TBitMapAllocator*
1.219 + TBitMapAllocator** pE=ppA+iNumBmas;
1.220 + TInt* pB=iBaseList;
1.221 + TInt base=*pB;
1.222 + for (; ppA<pE; ++ppA, ++pB)
1.223 + {
1.224 + TBitMapAllocator* pA=*ppA;
1.225 + if (*pB!=base+bmalen)
1.226 + {
1.227 + // this BMA is not contiguous with previous one
1.228 + // check final run of previous BMA
1.229 + if (carry<minrun)
1.230 + {
1.231 + TInt fpos=base+bmalen-carry;
1.232 + TInt lost=((fpos+base+alignmask)&~alignmask)-base-fpos;
1.233 + if (carry-lost>=aLength)
1.234 + {
1.235 + minrun=carry;
1.236 + minrunpos=fpos;
1.237 + }
1.238 + }
1.239 + carry=0;
1.240 + }
1.241 + base=*pB;
1.242 + bmalen=pA->iSize;
1.243 + TInt l=KMaxTInt;
1.244 + TInt oldc=carry;
1.245 + TInt r=pA->AllocAligned(aLength,aAlign,base,ETrue,carry,l);
1.246 + if (r>=0)
1.247 + {
1.248 + // check shortest run in this BMA
1.249 + if (l<minrun)
1.250 + {
1.251 + minrun=l;
1.252 + minrunpos=r ? (base+r) : (base-oldc);
1.253 + if (minrun==aLength)
1.254 + break; // exact match so finish
1.255 + }
1.256 + }
1.257 + }
1.258 + // check final run of last BMA
1.259 + if (ppA==pE && carry<minrun)
1.260 + {
1.261 + TInt fpos=base+bmalen-carry;
1.262 + TInt lost=((fpos+alignmask)&~alignmask)-fpos;
1.263 + if (carry-lost>=aLength)
1.264 + {
1.265 + minrun=carry;
1.266 + minrunpos=fpos;
1.267 + }
1.268 + }
1.269 + return (minrunpos<0) ? minrunpos : ((minrunpos+alignmask)&~alignmask);
1.270 + }
1.271 +
1.272 +
1.273 +
1.274 +
1.275 +
1.276 +
1.277 +
1.278 +
1.279 +void Display(TBitMapAllocator* aBma)
1.280 + {
1.281 + test.Printf(_L("Free %d FirstCheck %08x Size %d Map %08x\n"),aBma->iAvail,aBma->iCheckFirst,aBma->iSize,aBma->iMap);
1.282 + TInt i;
1.283 + TInt l=0;
1.284 + for (i=0; i<((aBma->iSize+31)>>5); i++)
1.285 + {
1.286 + if (++l==10)
1.287 + {
1.288 + l=0;
1.289 +// test.Getch();
1.290 + }
1.291 + TUint32 x=aBma->iMap[i];
1.292 + TBuf<80> buf;
1.293 + buf.NumFixedWidth(x,EBinary,32);
1.294 + buf.Append(_L("\n"));
1.295 + test.Printf(buf);
1.296 + }
1.297 + test.Getch();
1.298 + }
1.299 +
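+// Consistency check for a TBitMapAllocator: Avail() must equal the number of
+// set (free) bits in the map, and iCheckFirst must point at or before the
+// first map word containing a free bit.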
1.300 +void Check(TBitMapAllocator& a)
1.301 + {
1.302 + TInt l=a.iSize;
1.303 + l=(l+31)>>5;
1.304 + TInt n=0;
1.305 + TInt i;
1.306 + TUint32* first=NULL;
1.307 + for (i=0; i<l; ++i)
1.308 + {
1.309 + TUint32 w=a.iMap[i];
1.310 + if (w && !first)
1.311 + first=a.iMap+i;
1.312 + n+=__e32_bit_count_32(w);
1.313 + }
1.314 + test(a.Avail()==n);
1.315 + test(first?(a.iCheckFirst<=first):(a.iCheckFirst>=a.iMap && a.iCheckFirst<=a.iMap+l));
1.316 + }
1.317 +
1.318 +void TestConstruct(TInt aSize)
1.319 + {
1.320 + test.Printf(_L("TestConstruct %d\n"),aSize);
1.321 + TBitMapAllocator* pA;
1.322 + pA=TBitMapAllocator::New(aSize, EFalse);
1.323 + test(pA!=NULL);
1.324 + test(pA->Avail()==0);
1.325 + Check(*pA);
1.326 + delete pA;
1.327 + pA=TBitMapAllocator::New(aSize, ETrue);
1.328 + test(pA!=NULL);
1.329 + test(pA->Avail()==aSize);
1.330 + Check(*pA);
1.331 + delete pA;
1.332 + }
1.333 +
1.334 +void TestAlloc1(TInt aSize)
1.335 + {
1.336 + test.Printf(_L("TestAlloc1 %d\n"),aSize);
1.337 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, ETrue);
1.338 + test(pA!=NULL);
1.339 + test(pA->Avail()==aSize);
1.340 + Check(*pA);
1.341 + TInt i;
1.342 + for (i=0; i<aSize; ++i)
1.343 + {
1.344 + TInt r=pA->Alloc();
1.345 + test(r==i);
1.346 + test(pA->Avail()==aSize-i-1);
1.347 + test(pA->iCheckFirst==pA->iMap+i/32);
1.348 + Check(*pA);
1.349 + }
1.350 + test(pA->Alloc()<0);
1.351 + delete pA;
1.352 + }
1.353 +
1.354 +void TestFree1(TInt aSize)
1.355 + {
1.356 + test.Printf(_L("TestFree1 %d\n"),aSize);
1.357 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, EFalse);
1.358 + test(pA!=NULL);
1.359 + test(pA->Avail()==0);
1.360 + TInt i;
1.361 + for (i=aSize-1; i>=0; --i)
1.362 + {
1.363 + pA->Free(i);
1.364 + test(pA->Avail()==aSize-i);
1.365 + test(pA->Alloc()==i);
1.366 + pA->Free(i);
1.367 + test(pA->iCheckFirst==pA->iMap+i/32);
1.368 + Check(*pA);
1.369 + }
1.370 + delete pA;
1.371 + }
1.372 +
1.373 +void TestBlockAlloc(TInt aSize)
1.374 + {
1.375 + test.Printf(_L("TestBlockAlloc %d\n"),aSize);
1.376 + const TInt begin[]={0,1,2,7,16,29,31,32,33,63,64,65,83,128};
1.377 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, ETrue);
1.378 + test(pA!=NULL);
1.379 + test(pA->Avail()==aSize);
1.380 + pA->Alloc(0,aSize);
1.381 + test(pA->Avail()==0);
1.382 + Check(*pA);
1.383 + pA->Free(0,aSize);
1.384 + test(pA->Avail()==aSize);
1.385 + Check(*pA);
1.386 + TInt i;
1.387 + for (i=0; i<(TInt)(sizeof(begin)/sizeof(TInt)); ++i)
1.388 + {
1.389 + TInt j=begin[i];
1.390 + if (j>aSize)
1.391 + break;
1.392 + TInt l;
1.393 + for (l=1; l<=aSize-j; ++l)
1.394 + {
1.395 +// test.Printf(_L("j=%d, l=%d, s=%d\n"),j,l,aSize);
1.396 + pA->Alloc(j,l);
1.397 + test(pA->Avail()==aSize-l);
1.398 + test(!pA->NotAllocated(j,l));
1.399 + if (j+l<aSize)
1.400 + test(pA->NotAllocated(j,l+1));
1.401 + if (j>0)
1.402 + test(pA->NotAllocated(j-1,l));
1.403 + TInt r=pA->Alloc();
1.404 + if (j==0)
1.405 + {
1.406 + if (l<aSize)
1.407 + test(r==l);
1.408 + else
1.409 + test(r<0);
1.410 + }
1.411 + else
1.412 + test(r==0);
1.413 + if (r==0)
1.414 + {
1.415 + pA->Free(r);
1.416 + pA->Free(j,l);
1.417 + }
1.418 + else if (r>0)
1.419 + pA->Free(j,l+1);
1.420 + else
1.421 + pA->Free(j,l);
1.422 + test(pA->Avail()==aSize);
1.423 + Check(*pA);
1.424 + }
1.425 + }
1.426 + delete pA;
1.427 + }
1.428 +
1.429 +void TestBlockFree(TInt aSize)
1.430 + {
1.431 + test.Printf(_L("TestBlockFree %d\n"),aSize);
1.432 + const TInt begin[]={0,1,2,7,16,29,31,32,33,63,64,65,83,128};
1.433 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, EFalse);
1.434 + test(pA!=NULL);
1.435 + test(pA->Avail()==0);
1.436 + TInt i;
1.437 + for (i=0; i<(TInt)(sizeof(begin)/sizeof(TInt)); ++i)
1.438 + {
1.439 + TInt j=begin[i];
1.440 + if (j>aSize)
1.441 + break;
1.442 + TInt l;
1.443 + for (l=1; l<=aSize-j; ++l)
1.444 + {
1.445 +// test.Printf(_L("j=%d, l=%d, s=%d\n"),j,l,aSize);
1.446 + pA->Free(j,l);
1.447 + test(pA->Avail()==l);
1.448 + test(!pA->NotFree(j,l));
1.449 + if (j+l<aSize)
1.450 + test(pA->NotFree(j,l+1));
1.451 + if (j>0)
1.452 + test(pA->NotFree(j-1,l));
1.453 + TInt r=pA->Alloc();
1.454 + test(r==j);
1.455 + if (l>1)
1.456 + pA->Alloc(j+1,l-1);
1.457 + test(pA->Avail()==0);
1.458 + Check(*pA);
1.459 + }
1.460 + }
1.461 + delete pA;
1.462 + }
1.463 +
1.464 +void TestNotFree(TInt aSize)
1.465 + {
1.466 + test.Printf(_L("TestNotFree %d\n"),aSize);
1.467 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, ETrue);
1.468 + test(pA!=NULL);
1.469 + test(pA->Avail()==aSize);
1.470 + test(!pA->NotFree(0,aSize));
1.471 + TInt i;
1.472 + for (i=0; i<aSize; ++i)
1.473 + {
1.474 + pA->Alloc(i,1);
1.475 + test(pA->NotFree(0,aSize));
1.476 + TInt j;
1.477 + for (j=1; j*j<=i || j*j+i<=aSize; ++j)
1.478 + {
1.479 + TInt a=Max(i-j*j,0);
1.480 + TInt b=Min(i+j*j,aSize);
1.481 + test(pA->NotFree(a,b-a));
1.482 + }
1.483 + pA->Free(i);
1.484 + test(!pA->NotFree(0,aSize));
1.485 + }
1.486 + const TInt begin[]={0,1,2,7,16,29,31,32,33,63,64,65,83,128};
1.487 + const TInt size[]={2,3,7,16,23,31,32,33,63,64,65,89,128};
1.488 + const TInt* pB=begin;
1.489 + const TInt* pBE=pB+sizeof(begin)/sizeof(TInt);
1.490 + const TInt* pS=size;
1.491 + const TInt* pSE=pS+sizeof(size)/sizeof(TInt);
1.492 + for (; pB<pBE; ++pB)
1.493 + {
1.494 + TInt b=*pB;
1.495 + if (b>=aSize)
1.496 + continue;
1.497 +		for (pS=size; pS<pSE; ++pS)	// restart the size list for each base value
1.498 + {
1.499 + TInt l=*pS;
1.500 + if (b+l>aSize)
1.501 + continue;
1.502 + pA->Alloc(b,l);
1.503 + TInt j;
1.504 + for (j=1; j<aSize; ++j)
1.505 + {
1.506 + if (j<=b)
1.507 + test(!pA->NotFree(0,j));
1.508 + else
1.509 + test(pA->NotFree(0,j));
1.510 + if (aSize-j>=b+l)
1.511 + test(!pA->NotFree(aSize-j,j));
1.512 + else
1.513 + test(pA->NotFree(aSize-j,j));
1.514 + }
1.515 + pA->Free(b,l);
1.516 + }
1.517 + }
1.518 + delete pA;
1.519 + }
1.520 +
1.521 +void TestNotAllocated(TInt aSize)
1.522 + {
1.523 + test.Printf(_L("TestNotAllocated %d\n"),aSize);
1.524 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, EFalse);
1.525 + test(pA!=NULL);
1.526 + test(pA->Avail()==0);
1.527 + test(!pA->NotAllocated(0,aSize));
1.528 + TInt i;
1.529 + for (i=0; i<aSize; ++i)
1.530 + {
1.531 + pA->Free(i);
1.532 + test(pA->NotAllocated(0,aSize));
1.533 + TInt j;
1.534 + for (j=1; j*j<=i || j*j+i<=aSize; ++j)
1.535 + {
1.536 + TInt a=Max(i-j*j,0);
1.537 + TInt b=Min(i+j*j,aSize);
1.538 + test(pA->NotAllocated(a,b-a));
1.539 + }
1.540 + pA->Alloc(i,1);
1.541 + test(!pA->NotAllocated(0,aSize));
1.542 + }
1.543 + const TInt begin[]={0,1,2,7,16,29,31,32,33,63,64,65,83,128};
1.544 + const TInt size[]={2,3,7,16,23,31,32,33,63,64,65,89,128};
1.545 + const TInt* pB=begin;
1.546 + const TInt* pBE=pB+sizeof(begin)/sizeof(TInt);
1.547 + const TInt* pS=size;
1.548 + const TInt* pSE=pS+sizeof(size)/sizeof(TInt);
1.549 + for (; pB<pBE; ++pB)
1.550 + {
1.551 + TInt b=*pB;
1.552 + if (b>=aSize)
1.553 + continue;
1.554 +		for (pS=size; pS<pSE; ++pS)	// restart the size list for each base value
1.555 + {
1.556 + TInt l=*pS;
1.557 + if (b+l>aSize)
1.558 + continue;
1.559 + pA->Free(b,l);
1.560 + TInt j;
1.561 + for (j=1; j<aSize; ++j)
1.562 + {
1.563 + if (j<=b)
1.564 + test(!pA->NotAllocated(0,j));
1.565 + else
1.566 + test(pA->NotAllocated(0,j));
1.567 + if (aSize-j>=b+l)
1.568 + test(!pA->NotAllocated(aSize-j,j));
1.569 + else
1.570 + test(pA->NotAllocated(aSize-j,j));
1.571 + }
1.572 + pA->Alloc(b,l);
1.573 + }
1.574 + }
1.575 + delete pA;
1.576 + }
1.577 +
1.578 +void TestAllocList(TInt aSize)
1.579 + {
1.580 + test.Printf(_L("TestAllocList %d\n"),aSize);
1.581 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, EFalse);
1.582 + test(pA!=NULL);
1.583 + test(pA->Avail()==0);
1.584 + TInt i;
1.585 + TInt list[256];
1.586 + for (i=0; i<aSize; ++i)
1.587 + {
1.588 + pA->Free(i);
1.589 + test(pA->AllocList(128,list)==1);
1.590 + test(list[0]==i);
1.591 + test(pA->Avail()==0);
1.592 + }
1.593 + TInt j;
1.594 + for (i=0; i<aSize-1; ++i)
1.595 + {
1.596 + for (j=i+1; j<aSize; ++j)
1.597 + {
1.598 + pA->Free(i);
1.599 + pA->Free(j);
1.600 + test(pA->AllocList(1,list)==1);
1.601 + test(list[0]==i);
1.602 + test(pA->Avail()==1);
1.603 + pA->Free(i);
1.604 + test(pA->AllocList(128,list)==2);
1.605 + test(list[0]==i && list[1]==j);
1.606 + test(pA->Avail()==0);
1.607 + }
1.608 + }
1.609 + TInt l;
1.610 + for (l=1; l<80; ++l)
1.611 + {
1.612 + if (2*l+1>aSize)
1.613 + break;
1.614 + for (i=l+1; i<=aSize-l; ++i)
1.615 + {
1.616 + pA->Free(0,l);
1.617 + pA->Free(i,l);
1.618 + test(pA->Avail()==2*l);
1.619 + TInt l2;
1.620 + for (l2=Max(l-1,1); l2<=2*l; ++l2)
1.621 + {
1.622 + TInt r=pA->AllocList(l2,list);
1.623 +// test.Printf(_L("l2=%d r=%d\n"),l2,r);
1.624 + test(r==l2);
1.625 + for (j=0; j<Min(l2,l); j++)
1.626 + test(list[j]==j);
1.627 + for (j=l; j<l2; j++)
1.628 + test(list[j]==j-l+i);
1.629 + for (j=0; j<l2; ++j)
1.630 + pA->Free(list[j]);
1.631 + pA->SelectiveFree(0,l);
1.632 + pA->SelectiveFree(i,l);
1.633 + test(pA->Avail()==2*l);
1.634 + }
1.635 + pA->Alloc(0,l);
1.636 + pA->Alloc(i,l);
1.637 + Check(*pA);
1.638 + }
1.639 + }
1.640 + delete pA;
1.641 + }
1.642 +
1.643 +void TestSelectiveFree(TInt aSize)
1.644 + {
1.645 + test.Printf(_L("TestSelectiveFree %d\n"),aSize);
1.646 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, ETrue);
1.647 + test(pA!=NULL);
1.648 + test(pA->Avail()==aSize);
1.649 + TInt i;
1.650 + TInt j;
1.651 + TInt l;
1.652 + for (i=2; i<8; ++i)
1.653 + {
1.654 + for (l=1; l<=aSize; ++l)
1.655 + {
1.656 + new (pA) TBitMapAllocator(aSize, ETrue);
1.657 + for (j=0; j<aSize; j+=i)
1.658 + pA->Alloc(j,1);
1.659 + TInt orig=pA->Avail();
1.660 + test(orig==aSize-(aSize+i-1)/i);
1.661 + pA->SelectiveFree(0,l);
1.662 + TInt freed=pA->Avail()-orig;
1.663 + test(freed==(l+i-1)/i);
1.664 + Check(*pA);
1.665 + }
1.666 + }
1.667 + for (i=0; i<=Min(32,aSize-1); ++i)
1.668 + {
1.669 + for (l=1; l<=aSize-i; ++l)
1.670 + {
1.671 + for (j=1; j<=aSize; ++j)
1.672 + {
1.673 + new (pA) TBitMapAllocator(aSize, ETrue);
1.674 + pA->Alloc(i,l);
1.675 + test(pA->Avail()==aSize-l);
1.676 + pA->SelectiveFree(0,j);
1.677 + test(pA->Avail()==aSize-l+Max(0,Min(i+l,j)-i));
1.678 + test(!pA->NotFree(0,j));
1.679 + if (j>=i && j<i+l)
1.680 + test(pA->NotFree(0,j+1));
1.681 + Check(*pA);
1.682 + }
1.683 + }
1.684 + }
1.685 + delete pA;
1.686 + }
1.687 +
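+// Builds a BMA with the given free map. The variable argument list is read as
+// (start,length) pairs of runs to free, terminated by a negative start value
+// (callers pass -1).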
1.688 +TBitMapAllocator* DoSetupBMA(TInt aSize, VA_LIST aList)
1.689 + {
1.690 + TBitMapAllocator* pA=TBitMapAllocator::New(aSize, EFalse);
1.691 + test(pA!=NULL);
1.692 + test(pA->Avail()==0);
1.693 + TInt totalfree=0;
1.694 + FOREVER
1.695 + {
1.696 + TInt i=VA_ARG(aList,TInt);
1.697 + if (i<0)
1.698 + break;
1.699 + TInt l=VA_ARG(aList,TInt);
1.700 + pA->Free(i,l);
1.701 + totalfree+=l;
1.702 + }
1.703 + test(pA->Avail()==totalfree);
1.704 + return pA;
1.705 + }
1.706 +
1.707 +TBitMapAllocator* SetupBMA(TInt aSize, ...)
1.708 + {
1.709 + VA_LIST list;
1.710 + VA_START(list,aSize);
1.711 + return DoSetupBMA(aSize,list);
1.712 + }
1.713 +
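+// Builds the reference list of free runs from the same (start,length),...,-1
+// argument list used by DoSetupBMA. Each entry packs (length<<8)+index into
+// iLength, so SortUnsigned() (keyed on iLength) orders runs by length while
+// equal-length runs keep their original order; FirstFitPos() and
+// AlignedFirstFitPos() recover the length with iLength>>8.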
1.714 +void PopulateRangeArray(RArray<SRange>& aArray, VA_LIST aList)
1.715 + {
1.716 + aArray.Reset();
1.717 + TInt n=0;
1.718 + FOREVER
1.719 + {
1.720 + SRange rng;
1.721 + rng.iBase=VA_ARG(aList,TInt);
1.722 + if (rng.iBase<0)
1.723 + break;
1.724 + rng.iLength=VA_ARG(aList,TInt);
1.725 + rng.iLength<<=8;
1.726 + rng.iLength+=n;
1.727 + ++n;
1.728 + test(aArray.Append(rng)==KErrNone);
1.729 + }
1.730 + }
1.731 +
1.732 +TInt FirstFitPos(RArray<SRange>& aArray, TInt aLength)
1.733 + {
1.734 + TInt c=aArray.Count();
1.735 + SRange* pR=&aArray[0];
1.736 + SRange* pE=pR+c;
1.737 + for (; pR<pE; ++pR)
1.738 + {
1.739 + TInt l=pR->iLength>>8;
1.740 + if (l>=aLength)
1.741 + {
1.742 +// test.Printf(_L("FFP %d = %d\n"),aLength,pR->iBase);
1.743 + return pR->iBase;
1.744 + }
1.745 + }
1.746 +// test.Printf(_L("FFP %d = -1\n"),aLength);
1.747 + return -1;
1.748 + }
1.749 +
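+// Reference model for aligned first fit (and, with aBestFit set, best fit)
+// over the run list built by PopulateRangeArray; DoAlignedTest compares its
+// result against TBitMapAllocator::AllocAligned.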
1.750 +TInt AlignedFirstFitPos(RArray<SRange>& aArray, TInt aSize, TInt aLength, TInt aAlign, TInt aBase, TInt aOffset=0, TBool aBestFit=EFalse)
1.751 + {
1.752 + TInt alignSize=1<<aAlign;
1.753 + TInt alignMask=alignSize-1;
1.754 + TInt minRun=0;
1.755 + TInt minRunStart=0;
1.756 + TBool runFound = EFalse;
1.757 + TInt c=aArray.Count();
1.758 + SRange* pR=&aArray[0];
1.759 +	// Best fit mode should ignore any final run: TBitMapAllocator always
1.760 +	// ignores the final run in best fit mode and relies on the carry being
1.761 +	// checked by the caller.
1.762 + SRange* pE = pR + c - 1;
1.763 + if (!aBestFit || aSize > pE->iBase + (pE->iLength >> 8))
1.764 + pE++;
1.765 +
1.766 + for (; pR<pE; ++pR)
1.767 + {
1.768 + TInt l=pR->iLength>>8;
1.769 + TInt b=pR->iBase;
1.770 + if (aOffset != 0)
1.771 + {
1.772 + aOffset = ((aOffset + aBase + alignMask) & ~alignMask) - aBase;
1.773 + if (aOffset + aLength - 1 >= b + l)
1.774 + {// The offset is after the end of this region.
1.775 + continue;
1.776 + }
1.777 + l -= (aOffset <= b)? 0 : aOffset - b;
1.778 + b += (aOffset <= b)? 0 : aOffset - b; // Start the search from aOffset
1.779 + }
1.780 + TInt ab=((b+aBase+alignMask)&~alignMask)-aBase;
1.781 + TInt al = l + b - ab;
1.782 + if (al >= aLength)
1.783 + {
1.784 + if (!aBestFit || l == aLength)
1.785 + {
1.786 +// test.Printf(_L("AFFP %d %d %d = %d\n"),aLength,aAlign,aBase,ab);
1.787 + return ab;
1.788 + }
1.789 + if (!runFound || minRun > l)
1.790 + {
1.791 + minRun = l;
1.792 + minRunStart = ab;
1.793 + runFound = ETrue;
1.794 + }
1.795 + }
1.796 + }
1.797 + if (runFound)
1.798 + {
1.799 + return minRunStart;
1.800 + }
1.801 +// test.Printf(_L("AFFP %d %d %d = -1\n"),aLength,aAlign,aBase);
1.802 + return -1;
1.803 + }
1.804 +
1.805 +void DoConsecTest(TInt aSize, ...)
1.806 + {
1.807 + test.Printf(_L("DoConsecTest %d\n"),aSize);
1.808 + VA_LIST list;
1.809 + VA_LIST list2;
1.810 + VA_START(list,aSize);
1.811 + VA_START(list2,aSize);
1.812 + TBitMapAllocator* pA=DoSetupBMA(aSize,list2);
1.813 + RArray<SRange> rangeArray(8,_FOFF(SRange,iLength));
1.814 + PopulateRangeArray(rangeArray, list);
1.815 + TInt n;
1.816 + for (n=1; n<=aSize; ++n)
1.817 + {
1.818 + TInt r1=pA->AllocConsecutive(n,EFalse);
1.819 + TInt r2=FirstFitPos(rangeArray,n);
1.820 +// test.Printf(_L("ALC(%d,0) = %d [%d]\n"),n,r1,r2);
1.821 + test_Equal(r2, r1);
1.822 + }
1.823 + rangeArray.SortUnsigned(); // sort array in ascending order of length
1.824 + for (n=1; n<=aSize; ++n)
1.825 + {
1.826 + TInt r1=pA->AllocConsecutive(n,ETrue);
1.827 + TInt r2=FirstFitPos(rangeArray,n);
1.828 +// test.Printf(_L("ALC(%d,1) = %d [%d]\n"),n,r1,r2);
1.829 + test_Equal(r2, r1);
1.830 + }
1.831 + rangeArray.Close();
1.832 + delete pA;
1.833 + }
1.834 +
1.835 +void DoAlignedTest(TInt aSize, ...)
1.836 + {
1.837 + test.Printf(_L("DoAlignedTest %d\n"),aSize);
1.838 + VA_LIST list;
1.839 + VA_LIST list2;
1.840 + VA_START(list,aSize);
1.841 + VA_START(list2,aSize);
1.842 + TBitMapAllocator* pA=DoSetupBMA(aSize,list2);
1.843 + RArray<SRange> rangeArray(8,_FOFF(SRange,iLength));
1.844 + PopulateRangeArray(rangeArray, list);
1.845 + TInt finalRunLength = 0;
1.846 + SRange& lastRun = rangeArray[rangeArray.Count() - 1];
1.847 + if (lastRun.iBase + (lastRun.iLength>>8) == aSize)
1.848 + {// The last run is at the end of the bma.
1.849 + finalRunLength = lastRun.iLength >> 8;
1.850 + }
1.851 + TInt a;
1.852 + TInt b;
1.853 + TInt n;
1.854 + TUint offset;
1.855 + for (a=0; ((1<<a)<=aSize); ++a)
1.856 + {
1.857 + TInt alignsize=1<<a;
1.858 + TInt alignmask=alignsize-1;
1.859 + for (b=0; b<(1<<a); ++b)
1.860 + {
1.861 +// test.Printf(_L("size %d a=%d b=%d First\n"),aSize,a,b);
1.862 + for (n=1; n<=aSize; ++n)
1.863 + {
1.864 + for (offset = 1; offset < (TUint)aSize; offset <<= 1)
1.865 + {
1.866 + TInt carry = 0;
1.867 + TInt runLength;
1.868 + TInt r1=pA->AllocAligned(n,a,b,EFalse, carry, runLength, offset);
1.869 + TInt r2=AlignedFirstFitPos(rangeArray,aSize, n,a,b, offset);
1.870 + if (r2 < 0 && pA->iSize == pA->iAvail)
1.871 + {// Totally empty bmas return KErrOverflow on failure.
1.872 + r2 = KErrOverflow;
1.873 + }
1.874 +// test.Printf(_L("ALA %d %d %d %d 0 = %d [%d]\n"),n,a,b,offset,r1,r2);
1.875 + test( (r1<0) || ((r1+b)&alignmask)==0 );
1.876 + test( (r1<0) || !pA->NotFree(r1,n));
1.877 + test( (r1<0) || runLength >= n);
1.878 + test_Equal(r2, r1);
1.879 + }
1.880 + }
1.881 + }
1.882 + }
1.883 + for (a=0; ((1<<a)<=aSize); ++a)
1.884 + {
1.885 + TInt alignsize=1<<a;
1.886 + TInt alignmask=alignsize-1;
1.887 + for (b=0; b<(1<<a); ++b)
1.888 + {
1.889 +// test.Printf(_L("size %d a=%d b=%d Best\n"),aSize,a,b);
1.890 + for (n=1; n<=aSize; ++n)
1.891 +			{// also test with offset=0, as that has broken best fit in the past.
1.892 + for (offset = 0; offset < (TUint)aSize; offset <<= 1)
1.893 + {
1.894 + TInt carry = 0;
1.895 + TInt runLength;
1.896 + TInt r1=pA->AllocAligned(n,a,b,ETrue, carry, runLength, offset);
1.897 + TInt r2=AlignedFirstFitPos(rangeArray,aSize, n,a,b, offset, ETrue);
1.898 + if (pA->iSize == pA->iAvail)
1.899 +				{// Totally empty bmas always return KErrOverflow in best fit mode.
1.900 + r2 = KErrOverflow;
1.901 + }
1.902 +// test.Printf(_L("ALA %d %d %d 1 = %d [%d]\n"),n,a,b,r1,r2);
1.903 + test( (r1<0) || ((r1+b)&alignmask)==0 );
1.904 + test( (r1<0) || !pA->NotFree(r1,n));
1.905 + test( (r1<0) || runLength >= n);
1.906 + test_Equal(r2, r1);
1.907 +				// No carry was passed in, so if a run was found the carry should be zero.
1.908 +				// If no run was found, the carry should be set to the length of
1.909 +				// any run at the end of the bma, minus any part lost to the aligned offset.
1.910 + TInt lost = 0;
1.911 + TInt alignOffset = ((offset + b + alignmask) & ~alignmask) - b;
1.912 + if (finalRunLength && offset && lastRun.iBase < alignOffset)
1.913 + {// This search had started past the start of the final run
1.914 + // so the final run length found will be shorter than its
1.915 + // total length.
1.916 + if (alignOffset < aSize)
1.917 + {
1.918 + lost = Min(alignOffset - lastRun.iBase, finalRunLength);
1.919 + }
1.920 +					else // alignOffset starts past the end of the bma.
1.921 + lost = finalRunLength;
1.922 + }
1.923 + test((r1>=0 && carry == 0) || carry == finalRunLength - lost);
1.924 + offset = (offset)? offset : 1;
1.925 + }
1.926 + }
1.927 + }
1.928 + }
1.929 + rangeArray.Close();
1.930 + delete pA;
1.931 + }
1.932 +
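+// Copies a TBitMapAllocator including its bitmap; the size calculation relies
+// on sizeof(TBitMapAllocator) already including the first 32-bit map word,
+// with the remaining (nmapw-1) words following it in the same allocation.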
1.933 +void Clone(TAny* aDest, const TBitMapAllocator* aSrc)
1.934 + {
1.935 + TInt nmapw=(aSrc->iSize+31)>>5;
1.936 + TInt memsz=sizeof(TBitMapAllocator)+(nmapw-1)*sizeof(TUint32);
1.937 + Mem::Move(aDest,aSrc,memsz);
1.938 + }
1.939 +
1.940 +void TestAllocConsecutive()
1.941 + {
1.942 + test.Printf(_L("TestAllocConsecutive\n"));
1.943 + DoConsecTest(256, 0,4 , 20,8 , 38,1 , 58,6 , 65,10, 78,16 , 127,72, 222,19 , 244,12 , -1);
1.944 + DoConsecTest(255, 0,2 , 3,2 , 6,3 , 10,3 , 14,5 , 20,5 , 26,7 , 34,7 , 42,11 , 54,11 , 66,13 , 80,37,
1.945 + 118,19 , 138,23 , 162,47 , 254,1 , -1);
1.946 + DoConsecTest(1023, 0,2 , 32,32 , 65,31 , 99,30 , 144,64 , 256,519 , 776,1, 778,245 , -1);
1.947 +
1.948 + DoAlignedTest(256, 0,4 , 20,8 , 38,1 , 58,6 , 65,10, 78,16 , 127,72, 222,19 , 244,12 , -1);
1.949 + DoAlignedTest(255, 0,2 , 3,2 , 6,3 , 10,3 , 14,5 , 20,5 , 26,7 , 34,7 , 42,11 , 54,11 , 66,13 , 80,37,
1.950 + 118,19 , 138,23 , 162,47 , 254,1 , -1);
1.951 + DoAlignedTest(1023, 0,2 , 32,32 , 65,31 , 99,30 , 144,64 , 256,519 , 776,1, 778,245 , -1);
1.952 + // Test some completely free bmas
1.953 + DoAlignedTest(255, 0,255, -1);
1.954 + DoAlignedTest(256, 0,256, -1);
1.955 + DoAlignedTest(1023, 0,1023, -1);
1.956 + DoAlignedTest(1024, 0,1024, -1);
1.957 + }
1.958 +
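+// Splits aBma at the positions given in the variable argument list and checks
+// that the chained TBmaList allocators return the same results as the single
+// TBitMapAllocator for every consecutive and aligned request.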
1.959 +void DoTestChain(const TBitMapAllocator& aBma, TInt aNumSplits, ...)
1.960 + {
1.961 + test.Printf(_L("DoTestChain %d %d\n"),aBma.iSize,aNumSplits);
1.962 + VA_LIST list;
1.963 + VA_START(list,aNumSplits);
1.964 +
1.965 + TBmaList* pL=TBmaList::New(aBma,aNumSplits,list);
1.966 + test(pL!=NULL);
1.967 +
1.968 + TInt n;
1.969 + for (n=1; n<=aBma.iSize; ++n)
1.970 + {
1.971 + TInt r1=aBma.AllocConsecutive(n,EFalse);
1.972 + TInt r2=pL->AllocConsecutiveFF(n);
1.973 +// test.Printf(_L("CHAIN C FF %d: r1=%d r2=%d\n"),n,r1,r2);
1.974 + test(r1==r2);
1.975 + }
1.976 + for (n=1; n<=aBma.iSize; ++n)
1.977 + {
1.978 + TInt r1=aBma.AllocConsecutive(n,ETrue);
1.979 + TInt r2=pL->AllocConsecutiveBF(n);
1.980 +// test.Printf(_L("CHAIN C BF %d: r1=%d r2=%d\n"),n,r1,r2);
1.981 + test(r1==r2);
1.982 + }
1.983 +
1.984 + TInt a;
1.985 + for (a=0; ((1<<a)<=aBma.iSize); ++a)
1.986 + {
1.987 + for (n=1; n<=aBma.iSize; ++n)
1.988 + {
1.989 + if (n==264 && a==9)
1.990 + {
1.991 + ++n;
1.992 + --n;
1.993 + }
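+			// (the ++n; --n; above is a no-op, presumably kept as a convenient
+			// place to set a debugger breakpoint for this particular case)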
1.994 + TInt r1=aBma.AllocAligned(n,a,0,EFalse);
1.995 + TInt r2=pL->AllocAlignedFF(n,a);
1.996 +// test.Printf(_L("CHAIN A FF %d,%d: r1=%d r2=%d\n"),n,a,r1,r2);
1.997 + test(r1==r2);
1.998 + }
1.999 + }
1.1000 + for (a=0; ((1<<a)<=aBma.iSize); ++a)
1.1001 + {
1.1002 + for (n=1; n<=aBma.iSize; ++n)
1.1003 + {
1.1004 + if (n==240 && a==3)
1.1005 + {
1.1006 + ++n;
1.1007 + --n;
1.1008 + }
1.1009 + TInt r1=aBma.AllocAligned(n,a,0,ETrue);
1.1010 + TInt r2=pL->AllocAlignedBF(n,a);
1.1011 +// test.Printf(_L("CHAIN A BF %d,%d: r1=%d r2=%d\n"),n,a,r1,r2);
1.1012 + test(r1==r2);
1.1013 + }
1.1014 + }
1.1015 +
1.1016 + delete pL;
1.1017 + }
1.1018 +
1.1019 +void TestChain()
1.1020 + {
1.1021 + test.Printf(_L("TestChain\n"));
1.1022 + TBitMapAllocator* pA;
1.1023 + pA=SetupBMA(1023, 0,2 , 32,32 , 65,31 , 99,30 , 144,64 , 256,519 , 776,1, 778,245 , -1);
1.1024 + test(pA!=NULL);
1.1025 + DoTestChain(*pA, 2, 300, 700);
1.1026 + DoTestChain(*pA, 3, 64, 301, 702);
1.1027 + delete pA;
1.1028 + pA=SetupBMA(512, 0,2 , 20,10 , 32,32 , 65,31 , 144,64 , 399,113 , -1);
1.1029 + test(pA!=NULL);
1.1030 + DoTestChain(*pA, 2, 256, 384);
1.1031 + DoTestChain(*pA, 3, 128, 256, 384);
1.1032 + DoTestChain(*pA, 3, 80, 208, 384);
1.1033 + DoTestChain(*pA, 3, 80, 208, 400);
1.1034 + delete pA;
1.1035 + }
1.1036 +
1.1037 +void TestBitOps()
1.1038 + {
1.1039 + test.Next(_L("Bit operations (32 bit)"));
1.1040 + test(__e32_find_ls1_32(0x00000000)==-1);
1.1041 + TInt i, j, k;
1.1042 + TInt count = 0;
1.1043 + for (i=0; i<=31; ++i)
1.1044 + test(__e32_find_ls1_32(1u<<i)==i);
1.1045 + TUint x = 0;
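+	// The recurrence x = 69069*x + 41 used below is a simple linear congruential
+	// generator (the classic 69069 multiplier), providing a deterministic stream
+	// of pseudo-random bit positions and patterns for the tests.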
1.1046 + for (i=0; i<1000; ++i)
1.1047 + {
1.1048 + x = 69069*x + 41;
1.1049 + TInt bit = x&31;
1.1050 +
1.1051 + x = 69069*x + 41;
1.1052 + TUint y = ((x|1)<<bit);
1.1053 +
1.1054 + test(__e32_find_ls1_32(y) == bit);
1.1055 + }
1.1056 +
1.1057 + test(__e32_find_ms1_32(0x00000000)==-1);
1.1058 + for (i=0; i<=31; ++i)
1.1059 + test(__e32_find_ms1_32(1u<<i)==i);
1.1060 + for (i=0; i<1000; ++i)
1.1061 + {
1.1062 + x = 69069*x + 41;
1.1063 + TInt bit = x&31;
1.1064 +
1.1065 + x = 69069*x + 41;
1.1066 + TUint y = ((x|0x80000000u)>>bit);
1.1067 +
1.1068 + test(__e32_find_ms1_32(y) == 31-bit);
1.1069 + }
1.1070 +
1.1071 + test(__e32_bit_count_32(0)==0);
1.1072 + test(__e32_bit_count_32(0xffffffff)==32);
1.1073 + for (i=0; i<32; ++i)
1.1074 + {
1.1075 + TUint32 y = 0xffffffffu << i;
1.1076 + TUint32 z = 0xffffffffu >> i;
1.1077 + test(__e32_bit_count_32(y) == 32-i);
1.1078 + test(__e32_bit_count_32(z) == 32-i);
1.1079 + test(__e32_bit_count_32(~y) == i);
1.1080 + test(__e32_bit_count_32(~z) == i);
1.1081 + }
1.1082 + for (i=0; i<32; ++i)
1.1083 + for (j=0; j<32; ++j)
1.1084 + for (k=0; k<32; ++k)
1.1085 + {
1.1086 + TUint32 b0 = 1u<<i;
1.1087 + TUint32 b1 = 1u<<j;
1.1088 + TUint32 b2 = 1u<<k;
1.1089 + TUint32 y = b0 | b1 | b2;
1.1090 + TInt n;
1.1091 + if (i==j && j==k) n=1;
1.1092 + else if (i==j || j==k || i==k) n=2;
1.1093 + else n=3;
1.1094 + test(__e32_bit_count_32(y) == n);
1.1095 + test(__e32_bit_count_32(~y) == 32-n);
1.1096 + ++count;
1.1097 + }
1.1098 + test.Printf(_L("%d iterations done\n"), count);
1.1099 + for (i=0; i<=31; ++i)
1.1100 + {
1.1101 + test(__e32_bit_count_32(0xaaaaaaaau<<i)==16-(i+1)/2);
1.1102 + test(__e32_bit_count_32(0x55555555u<<i)==16-i/2);
1.1103 + }
1.1104 + test(__e32_bit_count_32(0x33333333u)==16);
1.1105 + test(__e32_bit_count_32(0x88888888u)==8);
1.1106 +
1.1107 + test.Next(_L("Bit operations (64 bit)"));
1.1108 + test(__e32_find_ls1_64(0x00000000)==-1);
1.1109 + for (i=0; i<=63; ++i)
1.1110 + {
1.1111 + TUint64 x = 1u;
1.1112 + x<<=i;
1.1113 + test(__e32_find_ls1_64(x)==i);
1.1114 + }
1.1115 + x = 487;
1.1116 + for (i=0; i<1000; ++i)
1.1117 + {
1.1118 + x = 69069*x + 41;
1.1119 + TInt bit = x&63;
1.1120 +
1.1121 + x = 69069*x + 41;
1.1122 + TUint32 xl = x|1;
1.1123 + x = 69069*x + 41;
1.1124 + TUint32 xh = x;
1.1125 + TUint64 y = MAKE_TUINT64(xh,xl);
1.1126 + y <<= bit;
1.1127 + test(__e32_find_ls1_64(y) == bit);
1.1128 + }
1.1129 +
1.1130 + test(__e32_find_ms1_64(0x00000000)==-1);
1.1131 + for (i=0; i<=63; ++i)
1.1132 + {
1.1133 + TUint64 x = 1u;
1.1134 + x<<=i;
1.1135 + test(__e32_find_ms1_64(x)==i);
1.1136 + }
1.1137 + x = 1039;
1.1138 + for (i=0; i<1000; ++i)
1.1139 + {
1.1140 + x = 69069*x + 41;
1.1141 + TInt bit = x&63;
1.1142 +
1.1143 + x = 69069*x + 41;
1.1144 + TUint32 xl = x;
1.1145 + x = 69069*x + 41;
1.1146 + TUint32 xh = x|0x80000000u;
1.1147 + TUint64 y = MAKE_TUINT64(xh,xl);
1.1148 + y >>= bit;
1.1149 + test(__e32_find_ms1_64(y) == 63-bit);
1.1150 + }
1.1151 +
1.1152 + test(__e32_bit_count_64(0)==0);
1.1153 + test(__e32_bit_count_64(MAKE_TUINT64(0x00000000,0xffffffff))==32);
1.1154 + test(__e32_bit_count_64(MAKE_TUINT64(0xffffffff,0x00000000))==32);
1.1155 + test(__e32_bit_count_64(MAKE_TUINT64(0xffffffff,0xffffffff))==64);
1.1156 + for (i=0; i<64; ++i)
1.1157 + {
1.1158 + TUint64 y = MAKE_TUINT64(0xffffffff,0xffffffff);
1.1159 + TUint64 z = y >> i;
1.1160 + y <<= i;
1.1161 + test(__e32_bit_count_64(y) == 64-i);
1.1162 + test(__e32_bit_count_64(z) == 64-i);
1.1163 + test(__e32_bit_count_64(~y) == i);
1.1164 + test(__e32_bit_count_64(~z) == i);
1.1165 + }
1.1166 + count = 0;
1.1167 + for (i=0; i<64; ++i)
1.1168 + for (j=0; j<64; ++j)
1.1169 + for (k=0; k<64; ++k)
1.1170 + {
1.1171 + TUint64 b0 = 1u;
1.1172 + TUint64 b1 = 1u;
1.1173 + TUint64 b2 = 1u;
1.1174 + b0 <<= i;
1.1175 + b1 <<= j;
1.1176 + b2 <<= k;
1.1177 + TUint64 y = b0 | b1 | b2;
1.1178 + TUint64 z = ~y;
1.1179 + TInt n;
1.1180 + if (i==j && j==k) n=1;
1.1181 + else if (i==j || j==k || i==k) n=2;
1.1182 + else n=3;
1.1183 + test(__e32_bit_count_64(y) == n);
1.1184 + test(__e32_bit_count_64(z) == 64-n);
1.1185 + ++count;
1.1186 + }
1.1187 + test.Printf(_L("%d iterations done\n"), count);
1.1188 + for (i=0; i<64; ++i)
1.1189 + {
1.1190 + TUint64 y = MAKE_TUINT64(0xaaaaaaaa,0xaaaaaaaa);
1.1191 + TUint64 z = ~y;
1.1192 + test(__e32_bit_count_64(y<<i)==32-(i+1)/2);
1.1193 + test(__e32_bit_count_64(z<<i)==32-i/2);
1.1194 + }
1.1195 + test(__e32_bit_count_64(MAKE_TUINT64(0x33333333u,0x33333333u))==32);
1.1196 + test(__e32_bit_count_64(MAKE_TUINT64(0x88888888u,0x88888888u))==16);
1.1197 + }
1.1198 +
1.1199 +GLDEF_C TInt E32Main()
1.1200 + {
1.1201 + test.Title();
1.1202 + __UHEAP_MARK;
1.1203 + test.Start(_L("TBitMapAllocator tests"));
1.1204 +
1.1205 + TestBitOps();
1.1206 +
1.1207 + TestConstruct(3);
1.1208 + TestConstruct(29);
1.1209 + TestConstruct(256);
1.1210 + TestConstruct(487);
1.1211 +
1.1212 + TestAlloc1(3);
1.1213 + TestAlloc1(29);
1.1214 + TestAlloc1(256);
1.1215 + TestAlloc1(181);
1.1216 +
1.1217 + TestFree1(3);
1.1218 + TestFree1(29);
1.1219 + TestFree1(256);
1.1220 + TestFree1(181);
1.1221 +
1.1222 + TestBlockAlloc(3);
1.1223 + TestBlockAlloc(29);
1.1224 + TestBlockAlloc(256);
1.1225 + TestBlockAlloc(179);
1.1226 +
1.1227 + TestBlockFree(3);
1.1228 + TestBlockFree(31);
1.1229 + TestBlockFree(256);
1.1230 + TestBlockFree(149);
1.1231 +
1.1232 + TestNotFree(3);
1.1233 + TestNotFree(31);
1.1234 + TestNotFree(256);
1.1235 + TestNotFree(149);
1.1236 +
1.1237 + TestNotAllocated(3);
1.1238 + TestNotAllocated(31);
1.1239 + TestNotAllocated(256);
1.1240 + TestNotAllocated(149);
1.1241 +
1.1242 + TestAllocList(3);
1.1243 + TestAllocList(31);
1.1244 + TestAllocList(128);
1.1245 + TestAllocList(149);
1.1246 +
1.1247 + TestSelectiveFree(3);
1.1248 + TestSelectiveFree(31);
1.1249 + TestSelectiveFree(128);
1.1250 + TestSelectiveFree(149);
1.1251 +
1.1252 + TestAllocConsecutive();
1.1253 +
1.1254 + TestChain();
1.1255 +
1.1256 + __UHEAP_MARKEND;
1.1257 + test.End();
1.1258 + return 0;
1.1259 + }