diff -r 000000000000 -r bde4ae8d615e os/persistentdata/persistentstorage/dbms/ustor/US_COMP.CPP
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/os/persistentdata/persistentstorage/dbms/ustor/US_COMP.CPP	Fri Jun 15 03:10:57 2012 +0200
@@ -0,0 +1,1259 @@
+// Copyright (c) 1998-2009 Nokia Corporation and/or its subsidiary(-ies).
+// All rights reserved.
+// This component and the accompanying materials are made available
+// under the terms of "Eclipse Public License v1.0"
+// which accompanies this distribution, and is available
+// at the URL "http://www.eclipse.org/legal/epl-v10.html".
+//
+// Initial Contributors:
+// Nokia Corporation - initial contribution.
+//
+// Contributors:
+//
+// Description:
+// Store database compression code
+//
+//
+
+#include "US_STD.H"
+#include <s32mem.h>
+
+#ifdef __WINS__
+#define __EXTRA_DEFLATE
+#endif
+
+// deflation constants
+const TInt KDeflateMinLength=3;
+const TInt KDeflateMaxLength=258;
+const TInt KDeflateMaxDistance=4096;
+const TInt KDeflateDistCodeBase=0x200;
+// huffman coding/decoding
+const TInt KHuffMaxCodeLength=25;
+const TInt KHuffTerminate=1;
+const TUint KBitsEmpty=0x80000000u;
+const TUint KBitsInit=KBitsEmpty>>1;
+const TUint KBitsFull=KBitsEmpty>>8;
+const TUint KBitsEOF=KBitsEmpty>>9;
+const TUint KBitsNext=0x80u;
+// encoding storage
+const TInt KDeflateMetaCodes=26;
+// hashing
+const TUint KDeflateHashMultiplier=0xAC4B9B19u;
+const TInt KDeflateHashShift=24;
+
+class Huffman
+	{
+public:
+	static void EncodingL(TUint32* aEncoding,TInt aCodes);
+	static void Decoding(TUint32* aDecoding,TInt aCodes,TInt aBase=0);
+private:
+	typedef TUint16 THuff;
+	enum {KLeaf=0x8000};
+	struct TNode
+		{
+		THuff iLeft;
+		THuff iRight;
+		};
+	struct TLeaf
+		{
+		TUint iCount;
+		THuff iVal;
+		};
+private:
+	static void Lengths(TUint32* aLengths,const TNode* aNodes,TInt aNode,TInt aLen);
+	static TUint32* SubTree(TUint32* aPtr,const TUint32* aValue,TUint32** aLevel);
+	};
+
+class THuffEncoder
+	{
+public:
+	THuffEncoder(RWriteStream& aStream);
+//
+	void EncodeL(TUint aCode,TInt aLength);
+	void EncodeL(TUint32 aHuffCode);
+	void CompleteL();
+private:
+	enum {EBufSize=0x100};
+private:
+	TUint8 iBuf[EBufSize];
+	RWriteStream& iStream;
+	TUint32 iCode;		// code in production
+	TInt iBits;
+	TUint8* iWrite;
+	};
+
+class HDeflateHash
+	{
+public:
+	inline static HDeflateHash& NewLC(TInt aLinks);
+//
+	inline TInt First(const TUint8* aPtr,TInt aPos);
+	inline TInt Next(TInt aPos,TInt aOffset) const;
+private:
+	inline HDeflateHash();
+	inline static TInt Hash(const TUint8* aPtr);
+private:
+	typedef TUint16 TOffset;
+private:
+	TInt iHash[256];
+	TOffset iOffset[1];	// or more
+	};
+
+class MDeflater
+	{
+public:
+	void DeflateL(const TUint8* aBase,TInt aLength);
+private:
+	const TUint8* DoDeflateL(const TUint8* aBase,const TUint8* aEnd,HDeflateHash& aHash);
+	static TInt Match(const TUint8* aPtr,const TUint8* aEnd,TInt aPos,HDeflateHash& aHash);
+	void SegmentL(TInt aLength,TInt aDistance);
+	virtual void LitLenL(TInt aCode) =0;
+	virtual void OffsetL(TInt aCode) =0;
+	virtual void ExtraL(TInt aLen,TUint aBits) =0;
+	};
+
+class TInflater
+	{
+public:
+	TInflater(const TUint8* aIn,const CDbStoreCompression::TEncoding& aDecoding);
+	TInt Inflate();
+	inline const TUint8* Ptr() const;
+	inline static TInt BufferSize();
+private:
+	const TUint8* iIn;
+	TUint iBits;
+	const TUint8* iRptr;			// partial segment
+	TInt iLen;
+	const TUint32* iLitLenTree;
+	const TUint32* iDistTree;
+	TUint8 iOut[KDeflateMaxDistance];	// circular buffer for distance matches
+	};
+
+NONSHARABLE_CLASS(TDeflateStats) : public MDeflater + { +public: + inline TDeflateStats(CDbStoreCompression::TEncoding& aEncoding); +private: +// from MDeflater + void LitLenL(TInt aCode); + void OffsetL(TInt aCode); + void ExtraL(TInt aLen,TUint aBits); +private: + CDbStoreCompression::TEncoding& iEncoding; + }; + +NONSHARABLE_CLASS(TDeflater) : public MDeflater + { +public: + inline TDeflater(THuffEncoder& aEncoder,const CDbStoreCompression::TEncoding& aEncoding); +private: +// from MDeflater + void LitLenL(TInt aCode); + void OffsetL(TInt aCode); + void ExtraL(TInt aLen,TUint aBits); +private: + THuffEncoder& iEncoder; + const CDbStoreCompression::TEncoding& iEncoding; + }; + +NONSHARABLE_CLASS(HDeflateBuf) : public TBufBuf + { +public: + enum TMode {EAnalysis,EDeflate}; // mirror CDbStoreCompression enum +public: + static HDeflateBuf* NewL(MStreamBuf* aHost,CDbStoreCompression::TEncoding& aEncoding,TMode aMode); +private: + inline HDeflateBuf(MStreamBuf* aHost,CDbStoreCompression::TEncoding& aEncoding,CBufBase* aBuf,TMode aMode); + virtual inline ~HDeflateBuf(); +// from MStreamBuf + void DoSynchL(); + void DoRelease(); +private: + RWriteStream iHost; + CDbStoreCompression::TEncoding& iEncoding; + CBufBase* iBuf; + TMode iMode; + }; + +NONSHARABLE_CLASS(HInflateBuf) : public TBufBuf + { +public: + static HInflateBuf* NewL(MStreamBuf* aHost,const CDbStoreCompression::TEncoding& aEncoding); +private: + inline HInflateBuf(CBufBase* aBuf); + virtual inline ~HInflateBuf(); +// from MStreamBuf + void DoRelease(); +private: + CBufBase* iBuf; + }; + +NONSHARABLE_CLASS(CDbStoreTable::CCompressor) : public CBase, public CCluster::MAlter + { +public: + inline CCompressor(); + ~CCompressor(); + void ProcessL(CDbStoreTable* aTable); +private: + TUint8* AlterRecordL(TUint8* aWPtr,const TUint8* aRPtr,TInt aLength); +private: + CDbStoreTable* iTable; + RDbRow iRow; + }; + +// Class Huffman +// +// This class builds a huffman encoding from a frequency table and builds +// a decoding tree from a code-lengths table +// +// the encoding generated is based on the rule that given two symbols s1 and s2, with +// code length l1 and l2, and huffman codes h1 and h2: +// +// if l1aValue) + { + TUint32* sub1=SubTree(aPtr,aValue,aLevel); // 0-tree first + aPtr=SubTree(sub1,aValue-(aPtr-sub1)-1,aLevel); // 1-tree + TInt branch=(TUint8*)sub1-(TUint8*)aPtr; + *--aPtr=branch; + } + else if (l==aValue) + { + TUint term0=*aValue--; // 0-term + aPtr=SubTree(aPtr,aValue,aLevel); // 1-tree + *--aPtr=term0>>16; + } + else // l>16<<16)|(term0>>16); + } + return aPtr; + } + +// +// Build a huffman encoding table from a symbol frequency table +// aTable contains frequency data on input for aCodes symbols +// aTable contains the huffman encoding on output +// +void Huffman::EncodingL(TUint32* aTable,TInt aCodes) + { +// +// step 1. 
Sort the values into decreasing order of frequency +// + TLeaf* leaves=new(ELeave) TLeaf[aCodes]; + CleanupArrayDeletePushL(leaves); + TInt lCount=0; + TInt ii; + for (ii=0;ii1) + { // don't encode for empty coding: leaves in order now + TInt max=0; + TNode* nodes=new(ELeave) TNode[lCount-1]; + while (--lCount>0) + { + TNode& node=nodes[max]; + node.iLeft=leaves[lCount-1].iVal; + node.iRight=leaves[lCount].iVal; + // re-order the leaves now to reflect new combined frequency + TUint c=leaves[lCount-1].iCount+leaves[lCount].iCount; + TInt jj=lCount; + while (--jj>0) + { + if (leaves[jj-1].iCount>=c) + break; + } + Mem::Move(leaves+jj+1,leaves+jj,sizeof(TLeaf)*(lCount-1-jj)); + // update new leaf + leaves[jj].iCount=c; + leaves[jj].iVal=THuff(max); + max++; + } + Lengths(aTable,nodes,leaves[0].iVal,0); + delete[] nodes; + } + CleanupStack::PopAndDestroy(); // leaves +// +// step 3: Generate the coding based on the code lengths +// + TInt lenCount[KHuffMaxCodeLength]; + Mem::FillZ(lenCount,sizeof(lenCount)); + + for (ii=aCodes;--ii>=0;) + { + TInt len=aTable[ii]-1; + if (len>=0) + ++lenCount[len]; + } + + TUint nextCode[KHuffMaxCodeLength]; + TUint code=0; + for (ii=0;ii=0) + { + TUint len=aDecoding[ii]; + __ASSERT(len<=(TUint)KHuffMaxCodeLength); + if (len) + { + ++counts[len-1]; + ++codes; + } + } +// + TUint32* level[KHuffMaxCodeLength]; + TUint32* lit=aDecoding+codes; + for (ii=0;ii>=16; // 0-terminate at root + else if (codes>1) + { + __DEBUG(TUint32* p=) SubTree(aDecoding+codes-1,aDecoding+codes-1,level); + __ASSERT(p==aDecoding); + } + } + +// Class HDeflateHash + +inline HDeflateHash::HDeflateHash() + {TInt* p=iHash+256;do *--p=-KDeflateMaxDistance-1; while (p>iHash);} + +inline HDeflateHash& HDeflateHash::NewLC(TInt aLinks) + { + __ASSERT(!(KDeflateMaxDistance&(KDeflateMaxDistance-1))); // ensure power of two + return *new(User::AllocLC(_FOFF(HDeflateHash,iOffset[Min(aLinks,KDeflateMaxDistance)]))) HDeflateHash; + } + +inline TInt HDeflateHash::Hash(const TUint8* aPtr) + { + TUint x=aPtr[0]|(aPtr[1]<<8)|(aPtr[2]<<16); + return (x*KDeflateHashMultiplier)>>KDeflateHashShift; + } + +inline TInt HDeflateHash::First(const TUint8* aPtr,TInt aPos) + { + TInt h=Hash(aPtr); + TInt offset=Min(aPos-iHash[h],KDeflateMaxDistance<<1); + iHash[h]=aPos; + iOffset[aPos&(KDeflateMaxDistance-1)]=TOffset(offset); + return offset; + } + +inline TInt HDeflateHash::Next(TInt aPos,TInt aOffset) const + {return aOffset+iOffset[(aPos-aOffset)&(KDeflateMaxDistance-1)];} + + +// Class TDeflater +// +// generic deflation algorithm, can do either statistics and the encoder + +TInt MDeflater::Match(const TUint8* aPtr,const TUint8* aEnd,TInt aPos,HDeflateHash& aHash) + { + TInt offset=aHash.First(aPtr,aPos); + if (offset>KDeflateMaxDistance) + return 0; + TInt match=0; + aEnd=Min(aEnd,aPtr+KDeflateMaxLength); + TUint8 c=*aPtr; + do + { + const TUint8* p=aPtr-offset; + if (p[match>>16]==c) + { // might be a better match + const TUint8* m=aPtr; + for (;;) + { + if (*p++!=*m++) + break; + if (mmatch>>16) + { + match=(l<<16)|offset; + c=m[-1]; + } + } + offset=aHash.Next(aPos,offset); + } while (offset<=KDeflateMaxDistance); + return match; + } + +// +// Apply the deflation algorithm to the data [aBase,aEnd) +// Return a pointer after the last byte that was deflated (which may not be aEnd) +// +const TUint8* MDeflater::DoDeflateL(const TUint8* aBase,const TUint8* aEnd,HDeflateHash& aHash) + { + __ASSERT(aEnd-aBase>KDeflateMinLength); +// + const TUint8* ptr=aBase; +#ifdef __EXTRA_DEFLATE + TInt prev=0; // the previous 
deflation match +#endif + do + { + TInt match=Match(ptr,aEnd,ptr-aBase,aHash); +#ifdef __EXTRA_DEFLATE +// Extra deflation applies two optimisations which double the time taken +// 1. If we have a match at p, then test for a better match at p+1 before using it +// 2. When we have a match, add the hash links for all the data which will be skipped + if (match>>16 < prev>>16) + { // use the previous match--it was better + TInt len=prev>>16; + SegmentL(len,prev-(len<<16)); + // fill in missing hash entries for better compression + const TUint8* e=ptr+len-2; + do + { + ++ptr; + aHash.First(ptr,ptr-aBase); + } while (ptr>16; + SegmentL(len,match-(len<<16)); + ptr+=len; + } +#endif + } while (ptr+KDeflateMinLength-1>16; + SegmentL(len,prev-(len<<16)); + ptr+=len-1; + } +#endif + return ptr; + } + +// +// The generic deflation algorithm +// +void MDeflater::DeflateL(const TUint8* aBase,TInt aLength) + { + const TUint8* end=aBase+aLength; + if (aLength>KDeflateMinLength) + { // deflation kicks in if there is enough data + HDeflateHash& hash=HDeflateHash::NewLC(aLength); + aBase=DoDeflateL(aBase,end,hash); + CleanupStack::PopAndDestroy(); + } + while (aBase=KDeflateMinLength && aLength<=KDeflateMaxLength); + aLength-=KDeflateMinLength; + TInt extralen=0; + TUint len=aLength; + while (len>=8) + { + ++extralen; + len>>=1; + } + __ASSERT((extralen<<2)+len0 && aDistance<=KDeflateMaxDistance); + aDistance--; + extralen=0; + TUint dist=aDistance; + while (dist>=8) + { + ++extralen; + dist>>=1; + } + __ASSERT((extralen<<2)+dist>KHuffMaxCodeLength); + } + +// +// Store aLength bits from the most significant bits of aCode +// +void THuffEncoder::EncodeL(TUint aCode,TInt aLength) + { + TInt bits=iBits; + TUint code=iCode|(aCode>>(bits+8)); + bits+=aLength; + if (bits>=0) + { + TUint8* write=iWrite; + do + { + if (write-EBufSize==iBuf) + { + iStream.WriteL(iBuf,EBufSize); + write=iBuf; + } + *write++=TUint8(code>>24); + code<<=8; + bits-=8; + } while (bits>=0); + iWrite=write; + } + iCode=code; + iBits=bits; + } + +// +// Terminate the huffman coding. 
The longest code is always 1111111111 +// +void THuffEncoder::CompleteL() + { + if (iBits>-8) + EncodeL(0xffffffffu,-iBits); + if (iWrite>iBuf) + iStream.WriteL(iBuf,iWrite-iBuf); + } + +// Class TDeflater +// +// Extends MDeflater to provide huffman encoding of the output + +// +// construct for encoding +// +inline TDeflater::TDeflater(THuffEncoder& aEncoder,const CDbStoreCompression::TEncoding& aEncoding) + :iEncoder(aEncoder),iEncoding(aEncoding) + {} + +void TDeflater::LitLenL(TInt aCode) + { + iEncoder.EncodeL(iEncoding.iLitLen[aCode]); + } + +void TDeflater::OffsetL(TInt aCode) + { + iEncoder.EncodeL(iEncoding.iDistance[aCode]); + } + +void TDeflater::ExtraL(TInt aLen,TUint aBits) + { + iEncoder.EncodeL(aBits,aLen); + } + +// Class TInflater +// +// The inflation algorithm, complete with huffman decoding + +TInflater::TInflater(const TUint8* aIn,const CDbStoreCompression::TEncoding& aEncoding) + :iIn(aIn),iBits(KBitsInit),iLen(0),iLitLenTree(aEncoding.iLitLen),iDistTree(aEncoding.iDistance) + {} + +// +// consume all data lag in the history buffer, then decode to fill up the output buffer +// +TInt TInflater::Inflate() + { +// empty the history buffer into the output + const TUint8* data=iIn; + TUint bits=iBits; + const TUint8* from=iRptr; + TInt len=iLen; + TUint8* out=iOut; + TUint8* const end=out+KDeflateMaxDistance; + const TUint32* node; + if (len) + goto useHistory; +// + if (bits&KBitsEOF) + return 0; +// + node=iLitLenTree; + while (out>17)-CDbStoreCompression::TEncoding::ELiterals; + if (val<0) + { + *out++=TUint8(val); + node=iLitLenTree; + continue; // another literal/length combo + } + if (val==CDbStoreCompression::TEncoding::EEos-CDbStoreCompression::TEncoding::ELiterals) + { // eos marker. we're done + bits=KBitsEOF; + break; + } + // get the extra bits for the code + TInt code=val&0xff; + if (code>=8) + { // xtra bits + TInt xtra=(code>>2)-1; + code-=xtra<<2; + do + { + if ((bits<<=1)&KBitsEmpty) + bits=*data++|KBitsFull; + code<<=1; + if (bits&KBitsNext) + code|=1; + } while (--xtra!=0); + } + if (valPtr(0).Ptr(),iBuf->Size()); + } + else + { + THuffEncoder encoder(iHost); + TDeflater deflater(encoder,iEncoding); + deflater.DeflateL(iBuf->Ptr(0).Ptr(),iBuf->Size()); + encoder.CompleteL(); + iHost.CommitL(); + } + } + +// Class HInflateBuf +// +// Inflate the input stream. This is not a filter, it reads all the input, inflates it and +// keeps it in a memory buffer. 
+
+const TInt KInflateBufSize=0x800;	// 2K
+
+HInflateBuf::HInflateBuf(CBufBase* aBuf)
+	:iBuf(aBuf)
+	{
+	Set(*aBuf,0,ERead);
+	}
+
+inline HInflateBuf::~HInflateBuf()
+	{delete iBuf;}
+
+void HInflateBuf::DoRelease()
+	{
+	delete this;
+	}
+
+HInflateBuf* HInflateBuf::NewL(MStreamBuf* aHost,const CDbStoreCompression::TEncoding& aEncoding)
+	{
+	CBufFlat* host=CBufFlat::NewL(256);
+	CleanupStack::PushL(host);
+	TUint8 buffer[KInflateBufSize];
+	for (;;)
+		{
+		TInt len=aHost->ReadL(buffer,KInflateBufSize);
+		if (len)
+			host->InsertL(host->Size(),buffer,len);
+		if (len<KInflateBufSize)
+			break;
+		}
+	CBufSeg* out=CBufSeg::NewL(256);
+	CleanupStack::PushL(out);
+	TInflater* inflater=new(ELeave) TInflater(host->Ptr(0).Ptr(),aEncoding);
+	CleanupStack::PushL(inflater);
+	for (;;)
+		{
+		TInt len=inflater->Inflate();
+		if (len)
+			out->InsertL(out->Size(),inflater->Ptr(),len);
+		if (len<TInflater::BufferSize())
+			break;
+		}
+	HInflateBuf* buf=new(ELeave) HInflateBuf(out);
+	CleanupStack::PopAndDestroy();	// inflater
+	CleanupStack::Pop();			// out
+	CleanupStack::PopAndDestroy();	// host
+	aHost->Release();				// don't need this anymore
+	return buf;
+	}
+
+// Class CDbStoreTable::CCompressor
+//
+// This class processes an entire table for analysis or compression, using the
+// CDbStoreRecords::AlterL() functionality and its callback to ensure that all clusters
+// and BLOBs are read and written.
+
+inline CDbStoreTable::CCompressor::CCompressor()
+	{}
+
+CDbStoreTable::CCompressor::~CCompressor()
+	{
+	if (iTable)
+		iTable->Close();
+	iRow.Close();
+	}
+
+//
+// Walk through every cluster in the table
+//
+void CDbStoreTable::CCompressor::ProcessL(CDbStoreTable* aTable)
+	{
+	iTable=aTable;
+	CDbStoreRecords& rec=aTable->StoreRecordsL();
+	for (TClusterId cluster=rec.Head();cluster!=KNullClusterId;cluster=rec.AlterL(cluster,*this))
+		;
+	}
+
+//
+// Compress every blob, and transfer the record from aRPtr to aWPtr
+//
+TUint8* CDbStoreTable::CCompressor::AlterRecordL(TUint8* aWPtr,const TUint8* aRPtr,TInt aLength)
+	{
+	if (iTable->Def().Columns().HasLongColumns())
+		{
+		iTable->CopyToRowL(iRow,TPtrC8(aRPtr,aLength));
+		CDbBlobSpace* blobs=iTable->BlobsL();
+		TDbColNo col=1;
+		HDbColumnSet::TIteratorC iter=iTable->Def().Columns().Begin();
+		const HDbColumnSet::TIteratorC end=iTable->Def().Columns().End();
+		do
+			{
+			if (!TDbCol::IsLong(iter->Type()))
+				continue;
+			TDbBlob& blob=CONST_CAST(TDbBlob&,TDbColumnC(iRow,col).Blob());
+			if (blob.IsInline())
+				continue;
+			// do what has to be done...?
+			TUint8* data=(TUint8*)User::AllocLC(blob.Size());
+			blobs->ReadLC(blob.Id(),iter->Type())->ReadL(data,blob.Size());
+			CleanupStack::PopAndDestroy();	// stream buffer
+			// re-write the Blob to compress it
+			blobs->DeleteL(blob.Id());
+			blob.SetId(blobs->CreateL(iter->Type(),data,blob.Size()));
+			CleanupStack::PopAndDestroy();	// data
+			} while (++col,++iter<end);
+		iTable->CopyFromRow(aWPtr,iRow);
+		}
+	else
+		Mem::Copy(aWPtr,aRPtr,aLength);
+	return aWPtr+aLength;
+	}
+
+// Class CDbStoreCompression
+//
+// This class manages the compression for the database, applying filters as appropriate
+// It also defines the externalisation format for the huffman trees
+
+const TInt KDeflationCodes=3*(CDbStoreCompression::TEncoding::ELitLens+CDbStoreCompression::TEncoding::EDistances);
+
+inline CDbStoreCompression::CDbStoreCompression()
+//	:iState(EAnalysis)
+	{}
+
+CDbStoreCompression* CDbStoreCompression::NewL()
+	{
+	return new(ELeave) CDbStoreCompression;
+	}
+
+//
+// Build huffman codings from the frequency tables
+//
+void CDbStoreCompression::EncodeL()
+	{
+	//Check the comments in CDbStoreCompression::InternalizeL().
+ //The implementation there is very similar to this one and is commented why the "array overrun" + //case is impossible. + __ASSERT(iState==EAnalysis); + TUint32* p=iEncoding[0].iLitLen; + TUint32* end=p+KDeflationCodes; + do + { + Huffman::EncodingL(p,TEncoding::ELitLens); + p+=TEncoding::ELitLens; + //coverity[overrun-local] + Huffman::EncodingL(p,TEncoding::EDistances); + //coverity[overrun-local] + p+=TEncoding::EDistances; + } while (p>KHuffMaxCodeLength]; while (p>=KHuffMaxCodeLength; + c1>>=KHuffMaxCodeLength; + aStream.WriteUint8L((c0<<4)|c1); + } while (p>codes); +// write the encoding + THuffEncoder encoder(aStream); + p=base; + do encoder.EncodeL(codes[*p++>>KHuffMaxCodeLength]); while (p>4; + *--p=c&0xf; + } while (p>decode); + Huffman::Decoding(decode,KDeflateMetaCodes); +// decode the encoding + p=iEncoding[0].iLitLen; + TUint32* end=p+KDeflationCodes; + TUint bits=KBitsInit; + do + { + const TUint32* node=decode; + TUint huff; + for (;;) + { + huff=*node++; + if ((bits<<=1)&KBitsEmpty) + bits=aStream.ReadUint8L()|KBitsFull; + if (bits&KBitsNext) + { + if (huff&(KHuffTerminate<<16)) + break; + } + else + { + if (huff&KHuffTerminate) + { + huff<<=16; + break; + } + else + node=PtrAdd(node,huff); + } + } + *p++=huff>>17; + } while (p iter(SchemaL()); + const CDbStoreDef* def; + while ((def=iter++)!=0) + { + CDbStoreTable::CCompressor* comp=new(ELeave) CDbStoreTable::CCompressor; + CleanupStack::PushL(comp); + comp->ProcessL(STATIC_CAST(CDbStoreTable*,TableL(*def))); + CleanupStack::PopAndDestroy(); // comp + } + } + +// +// Compress or decompress the whole database +// +void CDbStoreDatabase::CompressL(TStreamId aStreamId,TZipType aZip) + { + __ASSERT(iStore); + iSchemaId=aStreamId; +// read the databse header for encryption information + RStoreReadStream strm; + strm.OpenLC(Store(),aStreamId); + ReadHeaderL(strm); + CleanupStack::PopAndDestroy(); // strm + InitPagePoolL(); +// + if (iVersion==EDbStoreCompressed) + { + iCompression->Inflate(); + if (aZip==EDeflate) + __LEAVE(KErrArgument); // already compressed + } + else if (aZip==EInflate) + __LEAVE(KErrArgument); // not compressed + else + { // deflate pass #1: analyse the database + CompressionL(); // construct the compression filter + Transaction().DDLBeginLC(); + CompressTablesL(); + iClusterCache->FlushL(); // force through the stats buffer + ReplaceSchemaL(); // force through the stats buffer + CleanupStack::PopAndDestroy(); // rollback after analysis! + iCompression->EncodeL(); + } +// now inflate or deflate the data + Transaction().DDLBeginLC(); + CompressTablesL(); + iVersion=TUint8(aZip==EDeflate ? EDbStoreCompressed : EDbStoreVersion2); + Transaction().DDLCommitL(); + CleanupStack::Pop(); // rollback not required + } + +void CDbStoreDatabase::CompressL(CStreamStore* aStore,TStreamId aStreamId,TZipType aZip) + { + //It looks like there is a potential memory leak in the next 4 lines of code, because: + //1) CDbStoreDatabase* self=NewLC(aStore) - new CDbStoreDatabase object is created on the heap + //2) CDbObject* db=self->InterfaceL() - new CDbObject is created on the heap + //3) CleanupStack::Pop() - the CDbStoreDatabase object from (1) - out from the cleanup stack + //4) db->PushL() - the CDbObject from (2) - in the cleanup stack + //If one of the DDLPrepareL() or CompressL() leaves, looks like the CDbStoreDatabase object from (1) will be leaked. + //This is not a correct guess, becuse: + // - CDbObject* db=self->InterfaceL() - this call creates a new CInterface object + // on the heap. 
The CInterface() constructor will store the "this" pointer
+	//   passed from the InterfaceL() call (so the "self" pointer)
+	//   into its "iDatabase" private data member.
+	//   In that case the returned CDbObject, of CInterface type, will have
+	//   its "iDatabase" data member initialized.
+	//   The inheritance tree is:
+	//   CBase <-- CDbObject <-- CDbDatabase <-- CInterface
+	// - db->PushL() - this call puts CDbObject::Destroy() on the cleanup stack.
+	//   The "db" object, whose real type is CInterface with its "iDatabase" data member
+	//   initialized with "self", is on the cleanup stack.
+	// - If one of the next "L" functions leaves, then CDbObject::Destroy() will be executed.
+	// - CDbObject::Destroy() will call CInterface::~CInterface() - CBase is at the root of the inheritance tree, with
+	//   a virtual ~CBase() destructor.
+	// - The CInterface::~CInterface() implementation calls Database().Close(), i.e. CDbStoreDatabase::Close() is called on
+	//   the "self" pointer.
+	// - CDbStoreDatabase::Close() will call "delete this" when its reference count reaches 0.
+	//   The reference counter is increased by 1 in the InterfaceL() chain of calls.
+	// -------- Conclusion ---------
+	// No memory leak will occur in the next lines of code. Coverity errors - disabled.
+	CDbStoreDatabase* self=NewLC(aStore);
+	//coverity[alloc_fn]
+	//coverity[var_assign]
+	CDbObject* db=self->InterfaceL();	// a reference to the database is required
+	CleanupStack::Pop();				// self
+	//coverity[noescape]
+	db->PushL();
+	//coverity[noescape]
+	self->Transaction().DDLPrepareL(*db);
+	self->CompressL(aStreamId,aZip);
+	CleanupStack::PopAndDestroy();		// db
+	//coverity[leaked_storage]
+	}
+
+// Class RDbStoreDatabase
+
+EXPORT_C void RDbStoreDatabase::CompressL(CStreamStore& aStore,TStreamId aId)
+	{
+	CDbStoreDatabase::CompressL(&aStore,aId,CDbStoreDatabase::EDeflate);
+	}
+
+EXPORT_C void RDbStoreDatabase::DecompressL(CStreamStore& aStore,TStreamId aId)
+	{
+	CDbStoreDatabase::CompressL(&aStore,aId,CDbStoreDatabase::EInflate);
+	}
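
For readers following the Huffman half of the file: Huffman::EncodingL builds optimal code lengths (steps 1 and 2) and then, in step 3, turns the lengths alone into codes, relying on the canonical-code rule stated in the Huffman class comment (shorter codes sort before longer ones when compared most-significant-bit first). Below is a minimal, self-contained sketch of that length-to-code step in portable C++. It is an illustration, not the DBMS implementation: standard integer types replace TUint32, CanonicalCodes and maxLen are hypothetical names, and the codes come back right-aligned rather than MSB-aligned with the length packed into the low bits as US_COMP.CPP stores them.

#include <cstdint>
#include <cstdio>
#include <vector>

// Assign canonical Huffman codes from a table of code lengths (0 = symbol unused).
// Shorter codes get numerically smaller values, matching the property described
// in the Huffman class comment. Codes are returned right-aligned in a uint32_t.
std::vector<uint32_t> CanonicalCodes(const std::vector<int>& lengths, int maxLen)
	{
	// Count how many symbols use each length.
	std::vector<int> lenCount(maxLen + 1, 0);
	for (int len : lengths)
		if (len > 0)
			++lenCount[len];
	// First code of each length: shift past all shorter codes.
	std::vector<uint32_t> nextCode(maxLen + 1, 0);
	uint32_t code = 0;
	for (int len = 1; len <= maxLen; ++len)
		{
		code = (code + lenCount[len - 1]) << 1;
		nextCode[len] = code;
		}
	// Hand out codes in symbol order within each length.
	std::vector<uint32_t> codes(lengths.size(), 0);
	for (size_t sym = 0; sym < lengths.size(); ++sym)
		{
		int len = lengths[sym];
		if (len > 0)
			codes[sym] = nextCode[len]++;
		}
	return codes;
	}

int main()
	{
	// Lengths for 4 symbols: 1, 2, 3 and 3 bits.
	std::vector<int> lengths = {1, 2, 3, 3};
	std::vector<uint32_t> codes = CanonicalCodes(lengths, 3);
	for (size_t i = 0; i < codes.size(); ++i)
		std::printf("symbol %zu: length %d code 0x%x\n", i, lengths[i], (unsigned)codes[i]);
	// Expected codes: 0b0, 0b10, 0b110, 0b111 - a valid prefix code.
	return 0;
	}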
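
MDeflater::SegmentL and TInflater::Inflate share one more convention worth spelling out: a match length or distance is halved until it drops below 8, the number of halvings selects a group of four codes ((extralen<<2)+top), and the bits discarded by the halving travel verbatim after the huffman code (the decoder's "TInt xtra=(code>>2)-1" path). The round-trip sketch below illustrates that split under the same assumptions as the previous example: portable C++, hypothetical Split/Join helpers; the real code first subtracts KDeflateMinLength from lengths and 1 from distances, biases the symbols into the literal/length and distance alphabets, and writes the extra bits MSB-aligned through THuffEncoder.

#include <cassert>
#include <cstdint>
#include <cstdio>

// A value split into a huffman-coded symbol plus verbatim extra bits.
struct TSplit
	{
	int iCode;		// symbol to huffman-encode; values 0..7 carry no extra bits
	int iExtraBits;	// number of verbatim bits that follow the symbol
	uint32_t iExtra;	// the extra bits (low-aligned in this sketch)
	};

TSplit Split(uint32_t aValue)
	{
	int extra = 0;
	uint32_t top = aValue;
	while (top >= 8)		// halve until the value fits in 3 bits
		{
		++extra;
		top >>= 1;
		}
	TSplit s;
	s.iCode = (extra << 2) + static_cast<int>(top);
	s.iExtraBits = extra;
	s.iExtra = aValue - (top << extra);	// the low bits dropped by the halving loop
	return s;
	}

uint32_t Join(int aCode, uint32_t aExtra)
	{
	if (aCode < 8)
		return static_cast<uint32_t>(aCode);	// small values have no extra bits
	int extra = (aCode >> 2) - 1;				// same recovery as TInflater::Inflate
	uint32_t top = static_cast<uint32_t>(aCode - (extra << 2));
	return (top << extra) | aExtra;
	}

int main()
	{
	for (uint32_t v = 0; v < 300; ++v)
		{
		TSplit s = Split(v);
		assert(Join(s.iCode, s.iExtra) == v);	// the split round-trips exactly
		}
	std::printf("255 -> code %d with %d extra bits\n", Split(255).iCode, Split(255).iExtraBits);
	return 0;
	}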