// Copyright (c) 1995-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// f32\sfile\sf_lepoc.cpp
//
//

#include "sf_std.h"

#include
#include
#include
#include
#include
#include
#include
#include
#define INCLUDE_E32IMAGEHEADER_IMPLEMENTATION
#include "sf_ldr.h"
#include
#include "sf_image.h"
#include
#include
#include "sf_cache.h"

#include "sf_pgcompr.h"

_LIT(KLitFinderInconsistent, "LDR-FINDER-INC");
_LIT(KLitSysBinError, "LDR-SYS\\BIN ERR");
_LIT8(KSysBin,":\\sys\\bin\\");

#ifdef _DEBUG

enum TLdrEpocPanic
	{
	EFuaiNoFixupTable = 0x10,
	EBcbmNotCodePaged = 0x20,
	ELfiCodePagingNotSupported = 0x30,
	EFprUnexpectedFixup = 0x40,
	};

static void Panic(TLdrEpocPanic aPanic)
	{
	_LIT(KPanicCat, "LDR-PNC");
	User::Panic(KPanicCat, aPanic);
	}

extern TRequestStatus* ProcessDestructStatPtr;
extern TBool ProcessCreated;

#endif

extern void DumpImageHeader(const E32ImageHeader*);
extern TDriveCacheHeader* gDriveFileNamesCache[];

TBuf8 gLoadeePath;
TUint NextCodeSegId;

const TInt KMaxHeaderSize = sizeof(E32ImageHeaderV) + 65536/8;


#ifdef __X86__
extern TInt UseFloppy;
#endif



// -------- demand paging --------

/** Page size as a power of two. */
const TUint32 KPageSizeShift = 12;
/** Page size, as defined for code relocations. This same page size is used for demand paging. */
const TUint32 KPageSize = 1<<KPageSizeShift;
/** Mask giving the offset of an address within a page. */
const TUint32 KPageOffsetMask = KPageSize-1;

/** Round a byte count up to a whole number of pages. */
static TInt SizeToPageCount(TUint32 aSize)
	{
	return (aSize + KPageOffsetMask) >> KPageSizeShift;
	}


/**
Allocate a block which indexes the relocations by page. This can be used for demand paging.

@param aSection Pointer to relocation section to process.
@param aAreaSize Size in bytes of area described by reloc section.
@param aLoadAddress Address of relocation section in memory
@param aProcessedBlock On success (return == KErrNone) this is set to the processed
	relocation section which is allocated on the current thread's heap.
	The caller takes ownership. The contents are undefined on failure.
@return KErrNoMemory if could not allocate memory for processed block
	and auxiliary structures; KErrNone otherwise.
*/
TInt E32Image::AllocateRelocationData(E32RelocSection* aSection, TUint32 aAreaSize, TUint32 aLoadAddress, TUint32*& aProcessedBlock)
	{
	__IF_DEBUG(Printf("AllocateRelocationData"));

	TUint32 sectionSize = aSection->iSize;
	TUint32 numRelocs = aSection->iNumberOfRelocs;
	TInt pageCount = SizeToPageCount(aAreaSize);

	// The file format documentation (SOSI ch10) does not guarantee that each page has
	// relocation information, or that the pages are listed in order, so store them in
	// page order here.
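	// The processed block built below is laid out as an index of (pageCount + 1)
	// TUint32 byte-offsets into the block itself (one per page, plus a sentinel),
	// followed by the packed TUint16 relocation entries for each page in
	// ascending page order.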
sl@0: sl@0: TUint8** subBlocks = (TUint8**)User::AllocZ(sizeof(TUint8*)*pageCount); sl@0: if(subBlocks == 0) sl@0: return KErrNoMemory; sl@0: sl@0: const TUint8* subBlockPtr = (TUint8*)(aSection+1); sl@0: while(sectionSize > 0) sl@0: { sl@0: TUint32 pageOffset = *(TUint32*)(subBlockPtr); sl@0: TUint32 subBlockSize = *(TUint32*)(subBlockPtr+4); sl@0: sl@0: subBlocks[pageOffset >> KPageSizeShift] = (TUint8*)subBlockPtr; sl@0: sl@0: sectionSize -= subBlockSize; sl@0: subBlockPtr += subBlockSize; // move to next sub-block sl@0: } sl@0: sl@0: // now have each relocation page in memory, build lookup table sl@0: TUint32 indexSize = (pageCount + 1) * sizeof(TUint32); // include sentinel sl@0: TUint32 totalRelocations = numRelocs; sl@0: iCodeRelocTableSize = indexSize + totalRelocations * sizeof(TUint16); sl@0: TUint8* table = (TUint8*) User::Alloc(iCodeRelocTableSize); sl@0: sl@0: if(table == 0) sl@0: { sl@0: User::Free(subBlocks); sl@0: return KErrNoMemory; sl@0: } sl@0: sl@0: // where sub-block positions are written to in the table sl@0: TUint32* destSubBlock = (TUint32*)table; sl@0: // where entries are written to in the table sl@0: TUint16* destEntry = (TUint16*)(table + indexSize); sl@0: sl@0: TInt i; sl@0: for(i = 0; i < pageCount; ++i) sl@0: { sl@0: *destSubBlock++ = TUint32(destEntry) - TUint32(table); sl@0: sl@0: // see if a relocation page was defined for this page sl@0: const TUint8* subBlock = subBlocks[i]; sl@0: if(subBlock == 0) sl@0: continue; sl@0: sl@0: // get number of entries in this sub-block, including padding sl@0: TUint32 sbEntryCount; sl@0: TUint32 pageOffset = *(TUint32*)subBlock; // offset of page from start of section sl@0: sbEntryCount = *(TUint32*)(subBlock + 4); // sub-block size sl@0: sbEntryCount -= 8; // exclude sub-block header sl@0: sbEntryCount /= 2; // each entry is two bytes sl@0: const TUint16* srcEntry = (TUint16*)(subBlock + 8); sl@0: sl@0: while(sbEntryCount--) sl@0: { sl@0: TUint16 entry = *srcEntry++; sl@0: if(entry==0) // ignore null padding values sl@0: continue; sl@0: sl@0: // Replace inferred fixup type with actual fixup type sl@0: TUint type = entry & 0xf000; sl@0: if(type==KInferredRelocType) sl@0: { sl@0: TUint32* ptr = (TUint32*)(aLoadAddress + pageOffset + (entry & 0x0fff)); sl@0: TUint32 word = *ptr; sl@0: type = (TUint(word - iHeader->iCodeBase) < TUint(iHeader->iCodeSize)) ? KTextRelocType : KDataRelocType; sl@0: entry = (entry & 0x0fff) | type; sl@0: } sl@0: sl@0: *destEntry++ = entry; sl@0: } sl@0: } sl@0: sl@0: // sentinel entry marks the byte following last sub-block in table sl@0: // This gives the size of the last processed sub-block. 
sl@0: *destSubBlock = TUint32(destEntry) - TUint32(table); sl@0: sl@0: aProcessedBlock = (TUint32*) table; sl@0: User::Free(subBlocks); sl@0: sl@0: #ifdef _DEBUG sl@0: __IF_DEBUG(Printf("processed reloc table (size=%d,pageCount=%d)", iCodeRelocTableSize, pageCount)); sl@0: sl@0: // dump the import fixup table if loader tracing enabled sl@0: const TUint16* table16 = (const TUint16*)table; sl@0: const TInt halfWordsInTable = iCodeRelocTableSize / 2; sl@0: for(i = 0; i < halfWordsInTable; i += 4) sl@0: { sl@0: __IF_DEBUG(Printf( sl@0: "reloc %04x: %04x %04x %04x %04x", sl@0: i * 2, table16[i+0], table16[i+1], table16[i+2], table16[i+3])); sl@0: } sl@0: #endif sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /******************************************************************************* sl@0: * These functions run in supervisor mode since they require access to the sl@0: * chunks of the newly-created process or DLL while they are still in the sl@0: * home section. sl@0: ******************************************************************************/ sl@0: sl@0: /** sl@0: Vector which ::ExecuteInSupervisorMode invokes. sl@0: */ sl@0: TInt (*ExecuteInSupervisorModeVector)(TSupervisorFunction, TAny*); sl@0: sl@0: /** sl@0: Executute aFunction in supervisor mode (if the memory model requires this.) sl@0: */ sl@0: TInt ExecuteInSupervisorMode(TSupervisorFunction aFunction, TAny* aParameter) sl@0: { sl@0: return(*ExecuteInSupervisorModeVector)(aFunction, aParameter); sl@0: } sl@0: sl@0: /** sl@0: Implementation of ::ExecuteInSupervisorMode which actually executes the sl@0: function in user mode. sl@0: */ sl@0: TInt UserModeExecuteInSupervisorMode(TSupervisorFunction aFunction, TAny* aParameter) sl@0: { sl@0: return (*aFunction)(aParameter); sl@0: } sl@0: sl@0: /** sl@0: Decide whether any Loader code actually needs to execute in supervisor mode sl@0: and set ::ExecuteInSupervisorModeVector so that invocations of ::ExecuteInSupervisorMode sl@0: call the appropriate function. sl@0: */ sl@0: void InitExecuteInSupervisorMode() sl@0: { sl@0: // work out if we need to really 'execute in supervisor mode'... sl@0: TUint32 memModelAttrs = (TUint32)UserSvr::HalFunction(EHalGroupKernel, EKernelHalMemModelInfo, NULL, NULL); sl@0: TUint32 memModel = memModelAttrs & EMemModelTypeMask; sl@0: if(memModel==EMemModelTypeFlexible) sl@0: { sl@0: // we can do everything user side... sl@0: ExecuteInSupervisorModeVector = UserModeExecuteInSupervisorMode; sl@0: gExecutesInSupervisorMode = EFalse; sl@0: } sl@0: else sl@0: { sl@0: // we need to go kernel side... sl@0: ExecuteInSupervisorModeVector = UserSvr::ExecuteInSupervisorMode; sl@0: gExecutesInSupervisorMode = ETrue; sl@0: } sl@0: } sl@0: sl@0: sl@0: /** sl@0: Arguments for svRelocateSection. sl@0: sl@0: The relocation information (at iRelocsBuf) has list sub blocks, each referring to a 4kB sl@0: page within the section. See E32RelocBlock. sl@0: */ sl@0: struct SRelocateSectionInfo sl@0: { sl@0: E32Image* iImage; ///< The executable being relocated. sl@0: TUint8* iRelocsBuf; ///< Pointer to relocation info. sl@0: TUint32 iNumRelocs; ///< Total number of relocations to apply. sl@0: TUint32 iLoadAddress; ///< Virtual address where section is currently located in memory. sl@0: }; sl@0: sl@0: /** sl@0: Apply relocations to a code or data section. sl@0: sl@0: @param aPtr Pointer to SRelocateSectionInfo. 
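Each relocation adjusts the 32-bit word it refers to by the image's code delta or
data delta, according to the relocation type (the same information that
AllocateRelocationData preprocesses for demand-paged code).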
sl@0: */ sl@0: TInt svRelocateSection(TAny* aPtr) sl@0: { sl@0: SRelocateSectionInfo& info=*(SRelocateSectionInfo*)aPtr; sl@0: sl@0: E32Image& img = *(E32Image*)info.iImage; sl@0: TUint8* relocs = info.iRelocsBuf; sl@0: TUint32 numRelocs = info.iNumRelocs; sl@0: TUint32 loadAddress = info.iLoadAddress; sl@0: sl@0: TUint32 codeStart = img.iHeader->iCodeBase; sl@0: TUint32 codeFinish = codeStart+img.iHeader->iCodeSize; sl@0: TUint32 codeDelta = img.iCodeDelta; sl@0: TUint32 dataDelta = img.iDataDelta; sl@0: sl@0: while(numRelocs>0) sl@0: { sl@0: TUint32 pageAddress = ((TUint32*)relocs)[0]; sl@0: TUint32 pageSize = ((TUint32*)relocs)[1]; sl@0: TUint8* relocsEnd = relocs+pageSize; sl@0: relocs += 8; sl@0: sl@0: while(relocs=codeStart && relocAddriExportDirLoad; sl@0: TInt i=pI->iExportDirCount; sl@0: TUint32 codeBase=pI->iCodeRunAddress; sl@0: while (i-->0) sl@0: *destExport+++=codeBase; sl@0: return 0; sl@0: } sl@0: sl@0: sl@0: struct SFixupImportAddressesInfo sl@0: { sl@0: TUint32* iIat; sl@0: TUint32* iExportDir; sl@0: TUint32 iExportDirEntryDelta; sl@0: TInt iNumImports; sl@0: E32Image* iExporter; sl@0: /** sl@0: For demand paging, this points to the buffer which is populated sl@0: so each page can be fixed up as it is loaded in. sl@0: */ sl@0: TUint64* iFixup64; sl@0: // For ElfDerived... sl@0: TUint32 iCodeLoadAddress; sl@0: TUint32* iImportOffsetList; sl@0: }; sl@0: sl@0: sl@0: /** sl@0: Fix up the import address table, used for 'PE derived' executables. sl@0: @param aPtr Pointer to function arguments (SFixupImportAddressesInfo structure). sl@0: SFixupImportAddressesInfo::iIat is updated by this function. sl@0: */ sl@0: TInt svFixupImportAddresses(TAny* aPtr) sl@0: { sl@0: SFixupImportAddressesInfo& info = *(SFixupImportAddressesInfo*)aPtr; sl@0: sl@0: TUint32 maxOrdinal = (TUint32)info.iExporter->iExportDirCount; sl@0: TUint32 absentOrdinal = (TUint32)info.iExporter->iFileEntryPoint; sl@0: sl@0: TUint32* exp_dir = info.iExportDir - KOrdinalBase; // address of 0th ordinal sl@0: TUint32 exp_delta = info.iExportDirEntryDelta; sl@0: sl@0: TUint32* iat = info.iIat; sl@0: TUint32* iatE = iat+info.iNumImports; sl@0: for(; iatmaxOrdinal) sl@0: return KErrNotSupported; sl@0: sl@0: TUint32 writeValue; sl@0: if(imp==0 && !(info.iExporter->iAttr&ECodeSegAttNmdExpData)) sl@0: { sl@0: // attempt to import ordinal zero (symbol name data) from an executable sl@0: // which doesn't export this information, use NULL for imported value in this case... sl@0: writeValue = NULL; sl@0: } sl@0: else sl@0: { sl@0: // get imported value from exporter... sl@0: TUint32 exp_addr = exp_dir[imp]; sl@0: if(exp_addr==0 || exp_addr==absentOrdinal) sl@0: return KErrNotSupported; sl@0: writeValue = exp_addr + exp_delta; sl@0: } sl@0: sl@0: // if not code paging then directly fix up the import... sl@0: if (info.iFixup64 == 0) sl@0: *iat = writeValue; sl@0: else sl@0: // ...otherwise defer until the page is fixed up sl@0: { sl@0: TUint64 iat64 = reinterpret_cast(iat); sl@0: *info.iFixup64++ = (iat64 << 32) | writeValue; sl@0: } sl@0: } sl@0: sl@0: info.iIat = iat; sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Fix up the import addresses, used for 'elf derived' executables. sl@0: @param aPtr Pointer to function arguments (SFixupImportAddressesInfo structure). 
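If code paging is in use, SFixupImportAddressesInfo::iFixup64 is non-NULL and the
fixups are not applied directly: each one is recorded as a 64-bit entry holding the
address to patch in the upper 32 bits and the value to write in the lower 32 bits,
to be applied when the containing page is paged in.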
sl@0: */ sl@0: TInt svElfDerivedFixupImportAddresses(TAny* aPtr) sl@0: { sl@0: SFixupImportAddressesInfo& info = *(SFixupImportAddressesInfo*)aPtr; sl@0: TUint32 maxOrdinal = (TUint32)info.iExporter->iExportDirCount; sl@0: TUint32 absentOrdinal = (TUint32)info.iExporter->iFileEntryPoint; sl@0: sl@0: TUint32* exp_dir = info.iExportDir - KOrdinalBase; // address of 0th ordinal sl@0: TUint32 exp_delta = info.iExportDirEntryDelta; sl@0: TUint32 code = info.iCodeLoadAddress; sl@0: sl@0: TUint32* iol = info.iImportOffsetList; sl@0: TUint32* iolE = iol+info.iNumImports; sl@0: for(; iol> 16; sl@0: if(imp>maxOrdinal) sl@0: return KErrNotSupported; sl@0: sl@0: TUint32 writeValue; sl@0: if(imp==0 && !(info.iExporter->iAttr&ECodeSegAttNmdExpData)) sl@0: { sl@0: // attempt to import ordinal zero (symbol name data) from an executable sl@0: // which doesn't export this information, use NULL for imported value in this case... sl@0: writeValue = NULL; sl@0: } sl@0: else sl@0: { sl@0: // get imported value from exporter... sl@0: TUint32 exp_addr = exp_dir[imp]; sl@0: if(exp_addr==0 || exp_addr==absentOrdinal) sl@0: return KErrNotSupported; sl@0: writeValue = exp_addr + exp_delta + offset; sl@0: } sl@0: sl@0: // if not code paging then directly fix up the import... sl@0: if (info.iFixup64 == 0) sl@0: *impPtr = writeValue; sl@0: // ...otherwise defer until the page is fixed up sl@0: else sl@0: { sl@0: TUint64 impPtr64 = reinterpret_cast(impPtr); sl@0: *info.iFixup64++ = (impPtr64 << 32) | writeValue; sl@0: } sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Wrapper for memory copy arguments. sl@0: */ sl@0: struct SCopyDataInfo sl@0: { sl@0: TAny* iDest; sl@0: const TAny* iSource; sl@0: TInt iNumberOfBytes; sl@0: }; sl@0: sl@0: sl@0: /** sl@0: Copies word aligned memory. sl@0: @param aPtr Pointer to function arguments (SCopyDataInfo structure). sl@0: */ sl@0: TInt svWordCopy(TAny* aPtr) sl@0: { sl@0: SCopyDataInfo& info=*(SCopyDataInfo*)aPtr; sl@0: return (TInt) Mem::Move(info.iDest, info.iSource, info.iNumberOfBytes); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Copies memory. sl@0: @param aPtr Pointer to function arguments (SCopyDataInfo structure). sl@0: */ sl@0: TInt svMemCopy(TAny* aPtr) sl@0: { sl@0: SCopyDataInfo& info=*(SCopyDataInfo*)aPtr; sl@0: return (TInt) Mem::Copy(info.iDest, info.iSource, info.iNumberOfBytes); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Argument for svElfDerivedGetImportInfo. sl@0: */ sl@0: struct SGetImportDataInfo sl@0: { sl@0: TInt iCount; // number to extract sl@0: TUint32* iDest; // destination address for data sl@0: TUint32 iCodeLoadAddress; // address where code has been loaded sl@0: TUint32* iImportOffsetList; // pointer to list of import offsets in E32ImportBlock sl@0: }; sl@0: sl@0: /** sl@0: Extract import ordinals/data sl@0: @param aPtr Pointer to function arguments (SGetImportDataInfo structure). 
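The values extracted are the 32-bit words found at the import locations given by
SGetImportDataInfo::iImportOffsetList, relative to iCodeLoadAddress; for ELF-derived
images these words encode the import's ordinal (compare
svElfDerivedFixupImportAddresses, which consumes the same encoding).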
sl@0: */ sl@0: TInt svElfDerivedGetImportInfo(TAny* aPtr) sl@0: { sl@0: SGetImportDataInfo& info = *(SGetImportDataInfo*)aPtr; sl@0: TInt count = info.iCount; sl@0: TUint32* dest = info.iDest; sl@0: TUint32 code = info.iCodeLoadAddress; sl@0: TUint32* iol = info.iImportOffsetList; sl@0: sl@0: TUint32* iolEnd = iol+count; sl@0: while(iol KMaxProcessName) sl@0: return KErrBadName; sl@0: aReq.iFileNameInfo.GetName(iRootName, TFileNameInfo::EIncludeBaseExt); sl@0: return KErrNone; sl@0: } sl@0: sl@0: void RImageFinder::Close() sl@0: { sl@0: iNew.Close(); sl@0: } sl@0: sl@0: _LIT8(KDefaultPathSysBin, "sys\\bin"); sl@0: _LIT8(KDefaultPathSysBin2, "?:\\sys\\bin"); sl@0: _LIT8(KDefaultExePath, "sys\\bin;system\\bin;system\\programs;system\\libs"); sl@0: _LIT8(KDefaultDllPath, "sys\\bin;system\\bin;system\\libs"); sl@0: _LIT8(KDefaultExePath2, "?:\\sys\\bin;?:\\system\\bin;?:\\system\\programs;?:\\system\\libs"); sl@0: _LIT8(KDefaultDllPath2, "?:\\sys\\bin;?:\\system\\bin;?:\\system\\libs"); sl@0: sl@0: TInt RImageFinder::Search() sl@0: { sl@0: __LDRTRACE(iReq->Dump(">RImageFinder::Search")); sl@0: TBool exe = (iReq->iRequestedUids[0] == KExecutableImageUid); sl@0: const TFileNameInfo& fi = iReq->iFileNameInfo; sl@0: TInt r = KErrNone; sl@0: if (fi.PathLen()) sl@0: { sl@0: // path specified, so only look there sl@0: TPtrC8 drive_and_path(fi.DriveAndPath()); sl@0: r = Search(&drive_and_path, 0); sl@0: } sl@0: else sl@0: { sl@0: TInt drv = -1; sl@0: if (fi.DriveLen()) sl@0: { sl@0: // drive specified sl@0: drv = (*iReq->iFileName)[0]; sl@0: } sl@0: // if a search path is specified look there sl@0: if (iReq->iPath) sl@0: r = Search(iReq->iPath, drv); sl@0: if (r == KErrNoMemory) // ignore other errors as they are a potential denial of service sl@0: { sl@0: __LDRTRACE(Dump("Path %S Drive %02x", aPath, aDrive)); sl@0: TInt ppos = 0; sl@0: TInt plen = aPath->Length(); sl@0: while (ppos < plen) sl@0: { sl@0: TPtrC8 remain(aPath->Mid(ppos)); sl@0: TInt pel = remain.Locate(';'); sl@0: if (pel < 0) sl@0: { sl@0: pel = remain.Length(); sl@0: ppos += pel; sl@0: } sl@0: else sl@0: { sl@0: ppos += pel + 1; sl@0: } sl@0: if (pel == 0) sl@0: continue; sl@0: TBool alldrives = EFalse; sl@0: if (pel<2 || remain[1]!=':') sl@0: alldrives = ETrue; sl@0: else if (remain[0]!='?') sl@0: aDrive = remain[0]; sl@0: TInt drive = EDriveY; sl@0: if (!alldrives && RFs::CharToDrive(TChar(aDrive), drive)!=KErrNone) sl@0: continue; sl@0: iCurrentDrive = (TUint8)drive; sl@0: TInt startpos = alldrives ? 
0 : 2; sl@0: iCurrentPath.Set(remain.Mid(startpos, pel - startpos)); sl@0: do { sl@0: TInt r; sl@0: #ifdef __X86__ sl@0: if (alldrives && iCurrentDrive<=EDriveB && iCurrentDrive!=UseFloppy) sl@0: goto bypass_drive; sl@0: #endif sl@0: r = SearchSingleDir(); sl@0: if (r == KErrNoMemory) // ignore other errors as they are a potential denial of service sl@0: { sl@0: __IF_DEBUG(Printf("OOM!")); sl@0: return r; sl@0: } sl@0: #ifdef __X86__ sl@0: bypass_drive: sl@0: #endif sl@0: if (!iCurrentDrive--) sl@0: iCurrentDrive = EDriveZ; sl@0: } while(alldrives && iCurrentDrive != EDriveY); sl@0: } sl@0: __IF_DEBUG(Printf("iImporter is never NULL sl@0: // Also gExeAttr must be set up sl@0: TInt RImageFinder::SearchExisting(const RImageArray& aArray) sl@0: { sl@0: __IF_DEBUG(Printf(">RImageFinder::SearchExisting")); sl@0: TUint required_abi = gExeAttr & ECodeSegAttABIMask; sl@0: TInt first, last, i; sl@0: aArray.Find(iRootName, first, last); sl@0: for (i=first; iiUids, iReq->iRequestedUids) != KErrNone) sl@0: continue; sl@0: if (iReq->CheckSecInfo(e->iS) != KErrNone) sl@0: continue; sl@0: TInt action = DetailedCompareVersions(e->iModuleVersion, iReq->iRequestedVersion, iCurrentVersion, EFalse); sl@0: if (action == EAction_Skip) sl@0: continue; sl@0: if (action == EAction_CheckImports || action == EAction_CheckLastImport) sl@0: { sl@0: // Never optimistically link to something with a different ABI sl@0: if ((e->iAttr & ECodeSegAttABIMask) != required_abi) sl@0: continue; sl@0: TInt r = CheckRequiredImports(iReq->iImporter, e, action); sl@0: if (r != KErrNone) sl@0: { sl@0: if (r != KErrNotSupported) sl@0: return r; sl@0: continue; sl@0: } sl@0: } sl@0: iExisting = e; sl@0: iCurrentVersion = e->iModuleVersion; sl@0: } sl@0: __IF_DEBUG(Printf("RImageFinder::Try %S%S", &aDriveAndPath, &aRootName)); sl@0: __IF_DEBUG(Printf(">MA:%08x MV:%08x RV:%08x CV:%08x", aInfo.iAttr, aInfo.iModuleVersion, iReq->iRequestedVersion, iCurrentVersion)); sl@0: ++iNameMatches; sl@0: if (iFindExact) sl@0: { sl@0: if ( ((aInfo.iAttr & ECodeSegAttExpVer) && aInfo.iModuleVersion==iReq->iRequestedVersion) sl@0: || (!(aInfo.iAttr & ECodeSegAttExpVer) && iReq->iRequestedVersion==KModuleVersionWild) sl@0: ) sl@0: { sl@0: __IF_DEBUG(Printf("iRequestedUids; sl@0: TBool dll_wanted = (uid[0] == KDynamicLibraryUidValue); sl@0: if (CheckUids(*(TUidType*)aInfo.iUid, iReq->iRequestedUids) != KErrNone) sl@0: { sl@0: ++iUidFail; sl@0: __IF_DEBUG(Printf("CheckSecInfo(aInfo.iS) != KErrNone) sl@0: { sl@0: ++iCapFail; sl@0: __IF_DEBUG(Printf("iRequestedVersion, iCurrentVersion, !iReq->iImporter); sl@0: if (action == EAction_Skip) sl@0: { sl@0: if (DetailedCompareVersions(aInfo.iModuleVersion, iReq->iRequestedVersion) == EVersion_MajorSmaller) sl@0: ++iMajorVersionFail; sl@0: __IF_DEBUG(Printf("iImporter, aInfo, action)!=KErrNone) sl@0: { sl@0: __IF_DEBUG(Printf("iImporter && dll_wanted && abi_mismatch) sl@0: { sl@0: // Dynamically loading a DLL - ABI must match loading process sl@0: __IF_DEBUG(Printf("iDriveAtt; sl@0: else sl@0: { sl@0: TDriveInfo driveInfo; sl@0: if ((r=gTheLoaderFs.Drive(driveInfo,driveNumber)) != KErrNone) sl@0: { sl@0: __IF_DEBUG(Printf("iRequestedVersion, iCurrentVersion)); sl@0: __IF_DEBUG(Printf("RImageFinder::CompareHashL")); sl@0: sl@0: TInt extraFlag = 0; sl@0: TBuf8 fileName; sl@0: TFileNameInfo fni = iReq->iFileNameInfo; sl@0: if (aInfo.iAttr & ECodeSegAttExpVer) sl@0: { sl@0: fni.iVersion = aInfo.iModuleVersion; sl@0: extraFlag = TFileNameInfo::EForceVer; sl@0: } sl@0: sl@0: TFileName hashname(KSysHash); sl@0: 
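	// KSysHash names the hash-file directory on the system drive (conventionally
	// "?:\sys\hash\" on Symbian OS; the literal is defined elsewhere, so this is
	// an assumption). The '?' drive placeholder is replaced with the real system
	// drive letter below.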
hashname[0] = (TUint8) RFs::GetSystemDriveChar(); sl@0: fileName.SetLength(0); sl@0: fni.GetName(fileName, TFileNameInfo::EIncludeBaseExt | extraFlag); sl@0: hashname.Append(fileName.Expand()); sl@0: sl@0: RFile fHash; sl@0: CleanupClosePushL(fHash); sl@0: sl@0: __IF_DEBUG(Printf("RImageFinder::CompareHashL opening hash file %S ", &hashname)); sl@0: User::LeaveIfError(fHash.Open(gTheLoaderFs,hashname,EFileRead|EFileReadDirectIO)); sl@0: sl@0: TBuf8 installhash; sl@0: User::LeaveIfError(fHash.Read(installhash)); sl@0: CleanupStack::PopAndDestroy(1); sl@0: sl@0: // if we get this far, we have loaded a valid hash, so calculate the file's hash sl@0: sl@0: CSHA1* hasher=CSHA1::NewL(); sl@0: CleanupStack::PushL(hasher); sl@0: sl@0: fileName.Copy(aDriveAndPath); sl@0: fni.GetName(fileName, TFileNameInfo::EIncludeBaseExt | extraFlag); sl@0: sl@0: CleanupClosePushL(aInfo.iFile); sl@0: TBool b = aInfo.FileOpened(); sl@0: if(!b) sl@0: { sl@0: __IF_DEBUG(Printf("RImageFinder::CompareHashL opening the file %S", &fileName)); sl@0: User::LeaveIfError(aInfo.iFile.Open(gTheLoaderFs, fileName.Expand(), EFileRead|EFileReadDirectIO)); sl@0: } sl@0: sl@0: __IF_DEBUG(Printf("RImageFinder::CompareHashL calculate hash")); sl@0: TInt size; sl@0: User::LeaveIfError(aInfo.iFile.Size(size)); sl@0: aInfo.iFileData = (TUint8*)gFileDataAllocator.Alloc(size); sl@0: if (aInfo.iFileData) sl@0: aInfo.iFileSize = size; sl@0: else sl@0: User::Leave(KErrNoMemory); sl@0: TPtr8 filedata(aInfo.iFileData, size); sl@0: User::LeaveIfError(aInfo.iFile.Read(0, filedata, size)); sl@0: if (filedata.Length() != size) sl@0: User::Leave(KErrCorrupt); sl@0: CleanupStack::PopAndDestroy(1); //the file handle only->aInfo.iFile.Close(); sl@0: hasher->Update(filedata); sl@0: sl@0: TBuf8 hash; sl@0: hash=hasher->Final(); sl@0: sl@0: sl@0: __IF_DEBUG(Printf("RImageFinder::CompareHashL comparing hashes...")); sl@0: if(0 != hash.Compare(installhash)) sl@0: User::Leave(KErrPermissionDenied); sl@0: CleanupStack::PopAndDestroy(1); sl@0: sl@0: // if we get this far the hash has passed and the file has been closed sl@0: // but some of the RImageInfo parameters will've been initialised by the cache sl@0: // and may be lies if we're being attacked, so compare them to be sure sl@0: sl@0: // if we already had the header, throw it away: it's from untrusted data sl@0: if (aInfo.iHeader) sl@0: { sl@0: delete aInfo.iHeader; sl@0: aInfo.iHeader = NULL; sl@0: } sl@0: sl@0: // make the header and validate the cached parameters against it sl@0: User::LeaveIfError(E32ImageHeader::New(aInfo.iHeader, aInfo.iFileData, aInfo.iFileSize)); sl@0: sl@0: SSecurityInfo secinfo; sl@0: aInfo.iHeader->GetSecurityInfo(secinfo); sl@0: TUint32 attr = (aInfo.iHeader->iFlags & ECodeSegAttFixed) | aInfo.iHeader->ABI(); sl@0: if(aInfo.iHeader->iFlags&KImageNmdExpData) sl@0: attr |= ECodeSegAttNmdExpData; sl@0: if (Mem::Compare((TUint8*)aInfo.iUid, sizeof(aInfo.iUid), (TUint8*)&aInfo.iHeader->iUid1, sizeof(aInfo.iUid)) sl@0: || aInfo.iModuleVersion != aInfo.iHeader->ModuleVersion() sl@0: || Mem::Compare((TUint8*)&aInfo.iS, sizeof(aInfo.iS), (TUint8*)&secinfo, sizeof(secinfo)) sl@0: || (aInfo.iAttr & ~ECodeSegAttExpVer) != attr) sl@0: User::Leave(KErrPermissionDenied); sl@0: sl@0: __IF_DEBUG(Printf("(8, 2*256) sl@0: { sl@0: } sl@0: sl@0: TInt RImageArray::Add(E32Image* aImage) sl@0: { sl@0: return InsertInOrderAllowRepeats(aImage, &E32Image::Order); sl@0: } sl@0: sl@0: void RImageArray::Find(const TDesC8& aRootName, TInt& aFirst, TInt& aLast) const sl@0: { sl@0: TCodeSegCreateInfo name; 
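	// Build a temporary search key whose root-name fields match aRootName, so that
	// SpecificFindInOrder (using E32Image::Order) can return the first and last
	// array positions holding images with that root name.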
sl@0: name.iFileName.Copy(aRootName); sl@0: name.iRootNameOffset = 0; sl@0: name.iRootNameLength = aRootName.Length(); sl@0: aFirst = SpecificFindInOrder((const E32Image*)&name, &E32Image::Order, EArrayFindMode_First); sl@0: aLast = aFirst; sl@0: if (aFirst >= 0) sl@0: aLast = SpecificFindInOrder((const E32Image*)&name, &E32Image::Order, EArrayFindMode_Last); sl@0: } sl@0: sl@0: E32Image* RImageArray::Find(const TRomImageHeader* a) const sl@0: { sl@0: TInt c = Count(); sl@0: if (!c) sl@0: return NULL; sl@0: E32Image* const * ee = &(*this)[0]; sl@0: E32Image* const * eE = ee + c; sl@0: for (; eeiRomImageHeader != a; ++ee) {} sl@0: return (eeClient((RThread&)aReq.iClientThread); sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: iClientHandle=aReq.iClientThread.Handle(); sl@0: sl@0: if(iStackSize < aReq.iMinStackSize) sl@0: iStackSize=aReq.iMinStackSize; // If the process required larger stack than the default. sl@0: sl@0: //initialise to zero sl@0: #ifdef _DEBUG sl@0: iDestructStat = ProcessDestructStatPtr; sl@0: #endif sl@0: iDebugAttributes = 0; sl@0: if (iRomImageHeader) sl@0: { sl@0: if (iRomImageHeader->iFlags & KRomImageDebuggable) sl@0: iDebugAttributes |= EDebugAllowed; sl@0: } sl@0: else if (iHeader) sl@0: { sl@0: if (iHeader->iFlags & KImageDebuggable) sl@0: iDebugAttributes |= EDebugAllowed; sl@0: } sl@0: sl@0: // Get the data paging flags and pass to the kernel. sl@0: __ASSERT_COMPILE(EDataPagingUnspecified == 0); sl@0: if (iRomImageHeader) sl@0: { sl@0: TUint dataPaging = iRomImageHeader->iFlags & KRomImageDataPagingMask; sl@0: if (dataPaging == KRomImageDataPagingMask) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: if (dataPaging == KRomImageFlagDataPaged) sl@0: iFlags |= EDataPaged; sl@0: if (dataPaging == KRomImageFlagDataUnpaged) sl@0: iFlags |= EDataUnpaged; sl@0: } sl@0: else if (iHeader) sl@0: { sl@0: TUint dataPaging = iHeader->iFlags & KImageDataPagingMask; sl@0: if (dataPaging == KImageDataPagingMask) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: if (dataPaging == KImageDataPaged) sl@0: iFlags |= EDataPaged; sl@0: if (dataPaging == KImageDataUnpaged) sl@0: iFlags |= EDataUnpaged; sl@0: } sl@0: sl@0: r=E32Loader::ProcessCreate(*this, aReq.iCmd); sl@0: __IF_DEBUG(Printf("Done E32Loader::ProcessCreate %d",r)); sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: #ifdef _DEBUG sl@0: ProcessCreated = ETrue; sl@0: #endif sl@0: iClientProcessHandle=iProcessHandle; sl@0: if (!iAlreadyLoaded) sl@0: { sl@0: gExeCodeSeg=iHandle; // implicitly linked DLLs must load into the new process sl@0: gExeAttr=iAttr; sl@0: if (!iRomImageHeader) sl@0: r=LoadToRam(); sl@0: if (r==KErrNone) sl@0: r=ProcessImports(); // this sets up gLoadeePath sl@0: } sl@0: // transfers ownership of clamp handle to codeseg; nulls handle if successful sl@0: if (r==KErrNone) sl@0: { sl@0: r=E32Loader::ProcessLoaded(*this); sl@0: if ((r==KErrNone) && iUseCodePaging) sl@0: { sl@0: iFileClamp.iCookie[0]=0;// null handle to indicate sl@0: iFileClamp.iCookie[1]=0;// transfer of ownership of clamp handle to proc's codeseg sl@0: } sl@0: } sl@0: __IF_DEBUG(Printf("Done E32Image::LoadProcess %d",r)); sl@0: return r; sl@0: } sl@0: sl@0: // Load a code segment, plus all imports if main loadee sl@0: TInt E32Image::LoadCodeSeg(const RLdrReq& aReq) sl@0: { sl@0: __LDRTRACE(aReq.Dump(">E32Image::LoadCodeSeg")); sl@0: sl@0: #ifdef __X86__ sl@0: if (iMain==this && iClientProcessHandle) sl@0: { sl@0: RProcess p; sl@0: p.SetHandle(iClientProcessHandle); sl@0: TFileName f(p.FileName()); sl@0: if (f.Length()>=2 && f[1]==':') 
sl@0: { sl@0: TInt d = f[0]; sl@0: if (d=='a' || d=='A') sl@0: UseFloppy = EDriveA; sl@0: else if (d=='b' || d=='B') sl@0: UseFloppy = EDriveB; sl@0: } sl@0: } sl@0: #endif sl@0: sl@0: RImageFinder finder; sl@0: TInt r = finder.Set(aReq); sl@0: if (r == KErrNone) sl@0: r = finder.Search(); sl@0: if (r!=KErrNone) sl@0: { sl@0: finder.Close(); sl@0: return r; sl@0: } sl@0: return DoLoadCodeSeg(aReq, finder); sl@0: } sl@0: sl@0: // Load a code segment, plus all imports if main loadee sl@0: TInt E32Image::DoLoadCodeSeg(const RLdrReq& aReq, RImageFinder& aFinder) sl@0: { sl@0: __LDRTRACE(aReq.Dump(">E32Image::DoLoadCodeSeg")); sl@0: sl@0: TInt r = Construct(aFinder); // needs to find it if it's already loaded sl@0: aFinder.Close(); sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: __IF_DEBUG(Printf("epv=%x, fep=%x, codesize=%x, textsize=%x, uid3=%x",iEntryPtVeneer,iFileEntryPoint,iCodeSize,iTextSize,iUids[2])); sl@0: __IF_DEBUG(Printf("attr=%08x, gExeAttr=%08x",iAttr,gExeAttr)); sl@0: sl@0: // If EXE and not main loadee, EXE code segment must be the same as the client process or newly loaded process sl@0: if (gExeCodeSeg && !iIsDll && iMain!=this && iHandle!=gExeCodeSeg) sl@0: return KErrNotSupported; sl@0: sl@0: // If DLL and main loadee, ABI must match the process sl@0: if (iIsDll && iMain==this && (iAttr & ECodeSegAttABIMask)!=(gExeAttr & ECodeSegAttABIMask) ) sl@0: return KErrNotSupported; sl@0: sl@0: // code segment already loaded sl@0: if (iAlreadyLoaded || (iMain!=this && AlwaysLoaded()) ) sl@0: return KErrNone; sl@0: sl@0: __IF_DEBUG(Printf("CodeSeg create")); sl@0: r=E32Loader::CodeSegCreate(*this); sl@0: if (r!=KErrNone) sl@0: return r; sl@0: sl@0: iCloseCodeSeg=iHandle; // so new code segment is removed if the load fails sl@0: if (!iRomImageHeader) sl@0: r=LoadToRam(); sl@0: if (r==KErrNone) sl@0: { sl@0: iCloseCodeSeg=NULL; sl@0: if (iMain==this) sl@0: { sl@0: r=ProcessImports(); // this sets up gLoadeePath sl@0: // transfers ownership of clamp handle to codeseg; nulls handle if successful sl@0: if (r==KErrNone) sl@0: { sl@0: r=E32Loader::CodeSegLoaded(*this); sl@0: if ((r==KErrNone) && iUseCodePaging) sl@0: { sl@0: iFileClamp.iCookie[0]=0;// null handle to indicate sl@0: iFileClamp.iCookie[1]=0;// transfer of ownership of clamp handle to codeseg sl@0: } sl@0: } sl@0: } sl@0: } sl@0: sl@0: __IF_DEBUG(Printf("iNumberOfEntries : 0; sl@0: iExportDir = a.iExportDir; sl@0: iExportDirCount = a.iExportDirCount; sl@0: iCodeLoadAddress = (TUint32)&a; sl@0: iDataRunAddress = a.iDataBssLinearBase; // for fixed processes sl@0: iHeapSizeMin = a.iHeapSizeMin; sl@0: iHeapSizeMax = a.iHeapSizeMax; sl@0: iStackSize = a.iStackSize; sl@0: iPriority = a.iPriority; sl@0: iIsDll = (a.iFlags & KImageDll)!=0; sl@0: if(iExportDirCount) sl@0: iExportDirLoad = iExportDir; sl@0: sl@0: // setup attributes... 
sl@0: iAttr &= ~(ECodeSegAttKernel|ECodeSegAttGlobal|ECodeSegAttFixed|ECodeSegAttABIMask|ECodeSegAttNmdExpData); sl@0: if(a.iFlags&KRomImageFlagsKernelMask) sl@0: iAttr |= ECodeSegAttKernel; sl@0: else sl@0: iAttr |= ECodeSegAttGlobal; sl@0: if(a.iFlags&KRomImageFlagFixedAddressExe) sl@0: iAttr |= ECodeSegAttFixed; sl@0: iAttr |= (a.iFlags & KRomImageABIMask); sl@0: if(a.iFlags&KRomImageNmdExpData) sl@0: iAttr |= ECodeSegAttNmdExpData; sl@0: if(a.iFlags&KRomImageSMPSafe) sl@0: iAttr |= ECodeSegAttSMPSafe; sl@0: sl@0: iExceptionDescriptor = a.iExceptionDescriptor; sl@0: } sl@0: sl@0: sl@0: TBool E32Image::AlwaysLoaded() sl@0: { sl@0: // If loaded from ROM and EXE or DLL with no static data or extension or variant, don't need code segment sl@0: TBool r=EFalse; sl@0: __IF_DEBUG(Printf(">E32Image::AlwaysLoaded %08x",iRomImageHeader)); sl@0: if (iRomImageHeader) sl@0: { sl@0: if (iIsDll && (iRomImageHeader->iFlags & KRomImageFlagDataPresent)==0) sl@0: r=ETrue; sl@0: } sl@0: __IF_DEBUG(Printf(" fn = _S8("z:\\"); sl@0: TFileNameInfo fni; sl@0: TPtr8 path_and_name(((TText8*)fn.Ptr())+3, 0, KMaxFileName-3); sl@0: const TRomDir& rootdir = *(const TRomDir*)UserSvr::RomRootDirectoryAddress(); sl@0: if (!TraverseDirs(rootdir, iRomImageHeader, path_and_name)) sl@0: *(const TAny**)1=iRomImageHeader; // DIE! sl@0: fn.SetLength(path_and_name.Length()+3); sl@0: fni.Set(fn, 0); sl@0: iFileName.Zero(); sl@0: fni.GetName(iFileName, TFileNameInfo::EIncludeDrivePathBaseExt); sl@0: if (fni.VerLen()) sl@0: iAttr |= ECodeSegAttExpVer; sl@0: iRootNameOffset = fni.iBasePos; sl@0: iRootNameLength = fni.BaseLen() + fni.ExtLen(); sl@0: iExtOffset = iFileName.Length() - fni.ExtLen(); sl@0: __IF_DEBUG(Printf("GetRomFileName(%08x)->%S,%d,%d,%d Attr %08x",iRomImageHeader,&iFileName,iRootNameOffset,iRootNameLength,iExtOffset,iAttr)); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Starting from aDir, search for XIP executable specified by aHdr. sl@0: If found, return true and set aName to file path and name, (will cause descriptor panics if max size of aName isn't big enough.) sl@0: If not found, return false. sl@0: */ sl@0: TBool E32Image::TraverseDirs(const TRomDir& aDir, const TRomImageHeader* aHdr, TDes8& aName) sl@0: { sl@0: const TRomEntry* pE=&aDir.iEntry; sl@0: const TRomEntry* pEnd=(const TRomEntry*)((TUint8*)pE+aDir.iSize); sl@0: while(pEiAtt & KEntryAttXIP) && (pE->iAddressLin==(TLinAddr)aHdr) ) sl@0: { sl@0: // ROM XIP file found sl@0: aName.Copy(TPtrC16((const TText*)pE->iName, pE->iNameLength)); sl@0: return ETrue; sl@0: } sl@0: if (pE->iAtt & KEntryAttDir) sl@0: { sl@0: // subdirectory found sl@0: const TRomDir& subdir = *(const TRomDir*)pE->iAddressLin; sl@0: TText8* p = (TText8*)aName.Ptr(); sl@0: TInt m = aName.MaxLength(); sl@0: TInt nl = pE->iNameLength; sl@0: TPtr8 ptr(p+nl+1, 0, m-nl-1); sl@0: if (TraverseDirs(subdir, aHdr, ptr)) sl@0: { sl@0: // match found in subdirectory sl@0: aName.SetLength(ptr.Length()+nl+1); sl@0: const TText* s = (const TText*)pE->iName; sl@0: p[nl]='\\'; sl@0: while (nl--) sl@0: *p++ = (TText8)*s++; sl@0: return ETrue; sl@0: } sl@0: } sl@0: TInt entry_size = KRomEntrySize + pE->iNameLength*sizeof(TText); sl@0: entry_size = (entry_size+sizeof(TInt)-1)&~(sizeof(TInt)-1); sl@0: pE=(const TRomEntry*)((TUint8*)pE+entry_size); sl@0: } sl@0: return EFalse; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read data from a file. 
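A short read (fewer bytes than requested) is treated as a corrupt image and
reported as KErrCorrupt; other errors from RFile::Read are returned unchanged.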
sl@0: */ sl@0: TInt FileRead(RFile& aFile, TUint8* aDest, TInt aSize) sl@0: { sl@0: TPtr8 p(aDest,aSize,aSize); sl@0: TInt r = aFile.Read(p,aSize); sl@0: if(r==KErrNone && p.Size()!=aSize) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Construct a new image header by reading a file. File must not be XIP. sl@0: */ sl@0: TInt E32ImageHeader::New(E32ImageHeader*& aHdr, RFile& aFile) sl@0: { sl@0: aHdr = NULL; sl@0: sl@0: TInt fileSize; sl@0: TInt r = aFile.Size(fileSize); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: E32ImageHeaderV tempHeader; sl@0: r = FileRead(aFile, (TUint8*)&tempHeader, sizeof(tempHeader)); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: TUint headerSize = tempHeader.TotalSize(); sl@0: if(headerSizeTUint(KMaxHeaderSize)) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: sl@0: E32ImageHeaderV* header = (E32ImageHeaderV*)User::Alloc(headerSize); sl@0: if(!header) sl@0: return KErrNoMemory; sl@0: sl@0: wordmove(header, &tempHeader, sizeof(tempHeader)); sl@0: if(headerSize>sizeof(tempHeader)) sl@0: r = FileRead(aFile, ((TUint8*)header)+sizeof(tempHeader), headerSize-sizeof(tempHeader)); sl@0: sl@0: if(r==KErrNone) sl@0: r = header->ValidateAndAdjust(fileSize); sl@0: sl@0: if(r==KErrNone) sl@0: aHdr = header; sl@0: else sl@0: delete header; sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Construct a new image header using data from the supplied buffer. sl@0: */ sl@0: TInt E32ImageHeader::New(E32ImageHeader*& aHdr, TUint8* aFileData, TUint32 aFileSize) sl@0: { sl@0: aHdr = NULL; sl@0: sl@0: E32ImageHeaderV& tempHeader = *(E32ImageHeaderV*)aFileData; sl@0: sl@0: if(aFileSizeTUint(KMaxHeaderSize)) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: if(headerSize>aFileSize) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: sl@0: E32ImageHeaderV* header = (E32ImageHeaderV*)User::Alloc(headerSize); sl@0: if(!header) sl@0: return KErrNoMemory; sl@0: sl@0: wordmove(header, &tempHeader, headerSize); sl@0: sl@0: TInt r = header->ValidateAndAdjust(aFileSize); sl@0: if(r==KErrNone) sl@0: aHdr = header; sl@0: else sl@0: delete header; sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Validate header, then adjust: sl@0: - iUncompressedSize to contain size of data even when file is not compressed. sl@0: - Platform security capability to include all disabled capabilities and exclude invalid ones. sl@0: sl@0: @param aFileSize Total size of the file containing the image data. sl@0: */ sl@0: TInt E32ImageHeaderV::ValidateAndAdjust(TUint32 aFileSize) sl@0: { sl@0: // check header is valid... sl@0: TUint32 uncompressedSize; sl@0: TInt r = ValidateHeader(aFileSize,uncompressedSize); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: // set size of data when uncompressed... sl@0: iUncompressedSize = uncompressedSize; sl@0: sl@0: // override capabilities in image to conform to system wide configuration... sl@0: for(TInt i=0; iE32Image::Construct", 0)); sl@0: __ASSERT_ALWAYS(aFinder.iNewValid, User::Panic(KLitFinderInconsistent, 0)); sl@0: sl@0: // fallback security check to ensure we don't try and load an executable from an insecure location... sl@0: if(PlatSec::ConfigSetting(PlatSec::EPlatSecEnforceSysBin)) sl@0: { sl@0: __ASSERT_ALWAYS(aFinder.iNewFileName.Length()>=11, User::Panic(KLitSysBinError, 0)); sl@0: __ASSERT_ALWAYS(KSysBin().CompareF(TPtrC8(aFinder.iNewFileName.Ptr()+1,10))==0, User::Panic(KLitSysBinError, 1)); sl@0: } sl@0: sl@0: TInt r = KErrNone; sl@0: sl@0: // setup file name info... 
sl@0: iFileName.Copy(aFinder.iNewFileName); sl@0: TFileNameInfo fi; sl@0: fi.Set(iFileName, 0); sl@0: iRootNameOffset = fi.iBasePos; sl@0: iRootNameLength = fi.iLen - fi.iBasePos; sl@0: iExtOffset = fi.iExtPos; sl@0: sl@0: // setup version... sl@0: iAttr |= aFinder.iNew.iAttr & ECodeSegAttExpVer; sl@0: iModuleVersion = aFinder.iNew.iModuleVersion; sl@0: sl@0: if(aFinder.iNew.iRomImageHeader) sl@0: { sl@0: // we're 'loading' an XIP executable from ROM... sl@0: Construct(*aFinder.iNew.iRomImageHeader); sl@0: if(!AlwaysLoaded() || iMain==this) sl@0: r = CheckRomXIPAlreadyLoaded(); sl@0: return r; sl@0: } sl@0: sl@0: // setup more image info... sl@0: iAttr |= aFinder.iNew.iAttr & (ECodeSegAttFixed|ECodeSegAttABIMask|ECodeSegAttNmdExpData); sl@0: iUids = *(const TUidType*)&aFinder.iNew.iUid; sl@0: iIsDll = !(iUids[0].iUid == KExecutableImageUidValue); sl@0: iS = aFinder.iNew.iS; sl@0: sl@0: // check if executable has already been loaded... sl@0: r = CheckAlreadyLoaded(); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: // if we are going to need to load it... sl@0: if(!iAlreadyLoaded || !iIsDll) sl@0: { sl@0: if (aFinder.iNew.iNeedHashCheck) sl@0: { sl@0: // we need to check the file hash; the check in RImageFinder::Try sl@0: // was skipped based on the cache. If it fails here, though, someone sl@0: // is tampering with us and we can just fail the load. sl@0: TRAP(r,aFinder.CompareHashL(aFinder.iNew, fi.DriveAndPath())); sl@0: if (r != KErrNone) sl@0: return r; sl@0: } sl@0: sl@0: if(aFinder.iNew.iFileData) sl@0: { sl@0: // take ownership of the file data aFinder has already read in... sl@0: iFileData = aFinder.iNew.iFileData; sl@0: aFinder.iNew.iFileData = NULL; sl@0: iFileSize = aFinder.iNew.iFileSize; sl@0: } sl@0: else if(aFinder.iNew.FileOpened()) sl@0: { sl@0: // take ownership of the file handle that aFinder has already opened... sl@0: iFile = aFinder.iNew.iFile; sl@0: memclr(&aFinder.iNew.iFile, sizeof(RFile)); sl@0: } sl@0: else sl@0: { sl@0: // no resource obtained from aFinder, so create a file handle for ourselves... sl@0: r = OpenFile(); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: } sl@0: sl@0: // take ownership of header... sl@0: iHeader = aFinder.iNew.iHeader; sl@0: aFinder.iNew.iHeader = NULL; sl@0: sl@0: // if there wast't a header, then create one now... sl@0: if(!iHeader) sl@0: { sl@0: if(iFileData) sl@0: r = E32ImageHeader::New(iHeader, iFileData, iFileSize); sl@0: else sl@0: r = E32ImageHeader::New(iHeader, iFile); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: } sl@0: sl@0: // setup info needed for process creation... sl@0: iHeapSizeMin = iHeader->iHeapSizeMin; sl@0: iHeapSizeMax = iHeader->iHeapSizeMax; sl@0: iStackSize = iHeader->iStackSize; sl@0: iPriority = iHeader->ProcessPriority(); sl@0: } sl@0: sl@0: // if already loaded... sl@0: if(iAlreadyLoaded) sl@0: return KErrNone; // nothing more to do sl@0: sl@0: // setup info needed to load an executable... 
sl@0: iDepCount = iHeader->iDllRefTableCount; sl@0: iExportDirCount = iHeader->iExportDirCount; sl@0: iExportDir = iHeader->iExportDirOffset-iHeader->iCodeOffset; sl@0: iTextSize = iHeader->iTextSize; sl@0: iCodeSize = iHeader->iCodeSize; sl@0: __IF_DEBUG(Printf("Code + const %x",iCodeSize)); sl@0: iDataSize = iHeader->iDataSize; sl@0: __IF_DEBUG(Printf("Data %x",iDataSize)); sl@0: iBssSize = iHeader->iBssSize; sl@0: __IF_DEBUG(Printf("Bss %x",iBssSize)); sl@0: iTotalDataSize = iDataSize+iBssSize; sl@0: sl@0: iFileEntryPoint = iHeader->iEntryPoint; // just an offset at this stage sl@0: iEntryPtVeneer = 0; sl@0: iExceptionDescriptor = iHeader->ExceptionDescriptor(); sl@0: if(iHeader->iExportDirOffset) sl@0: iExportDirLoad = iExportDir; // only set this if not already loaded sl@0: sl@0: // initialise the SMP safe flag from the image header sl@0: // this will get cleared during ProcessImports if any import is not SMP safe sl@0: if(iHeader->iFlags & KImageSMPSafe) sl@0: iAttr |= ECodeSegAttSMPSafe; sl@0: else sl@0: { sl@0: __IF_DEBUG(Printf("%S is not marked SMP safe", &iFileName)); sl@0: iAttr &= ~ECodeSegAttSMPSafe; sl@0: } sl@0: sl@0: // check if executable is to be demand paged... sl@0: r = ShouldBeCodePaged(iUseCodePaging); sl@0: __IF_DEBUG(Printf("ShouldBeCodePaged r=%d,iUseCodePaging=%d", r, iUseCodePaging)); sl@0: if(iUseCodePaging==EFalse || r!=KErrNone) sl@0: return r; sl@0: sl@0: // image needs demand paging, create the additional information needed for this... sl@0: sl@0: // read compression info... sl@0: iCompressionType = iHeader->iCompressionType; sl@0: r = LoadCompressionData(); sl@0: if(r==KErrNotSupported) sl@0: { sl@0: // Compression type not supported, so just load executable as normal, (without paging)... sl@0: iUseCodePaging = EFalse; sl@0: return KErrNone; sl@0: } sl@0: else if (r!=KErrNone) sl@0: return r; sl@0: sl@0: // clamp file so it doesn't get modified whilst it is being demand paged... sl@0: r = iFileClamp.Clamp(iFile); sl@0: // The clamp API will return KErrNotSupported if the media is removable: sl@0: // this implies that paging is not possible but the binary can still be loaded sl@0: if (r != KErrNone) sl@0: { sl@0: iUseCodePaging = EFalse; sl@0: return r == KErrNotSupported ? KErrNone : r; sl@0: } sl@0: sl@0: // get blockmap data which indicates location of media where file contents are stored... sl@0: r = BuildCodeBlockMap(); sl@0: __IF_DEBUG(Printf("BuildCodeBlockMap r=%d", r)); sl@0: if(r==KErrNotSupported) sl@0: { sl@0: // media doesn't support demand paging, so just load executable as normal, (without paging)... 
sl@0: iUseCodePaging = EFalse; sl@0: iFileClamp.Close(gTheLoaderFs); sl@0: r = KErrNone; sl@0: } sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::CheckRomXIPAlreadyLoaded() sl@0: { sl@0: __IF_DEBUG(Printf("ROM XIP %08x CheckAlreadyLoaded",iRomImageHeader)); sl@0: TFindCodeSeg find; sl@0: find.iRomImgHdr=iRomImageHeader; sl@0: E32Loader::CodeSegDeferDeletes(); sl@0: TAny* h=NULL; sl@0: TInt r=KErrNone; sl@0: E32Loader::CodeSegNext(h, find); sl@0: if (h) sl@0: { sl@0: iHandle=h; sl@0: r=E32Loader::CodeSegOpen(h, iClientProcessHandle); sl@0: if (r==KErrNone) sl@0: E32Loader::CodeSegInfo(iHandle, *this); sl@0: } sl@0: E32Loader::CodeSegEndDeferDeletes(); sl@0: if (iHandle && r==KErrNone) sl@0: { sl@0: iAlreadyLoaded=ETrue; sl@0: __IF_DEBUG(Printf("ROM XIP %08x already loaded", iHandle)); sl@0: } sl@0: __IF_DEBUG(Printf("ROM XIP CheckAlreadyLoaded returns %d",r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read the E32Image file into its code and data chunks, relocating them sl@0: as necessary. sl@0: Create a dll reference table from the names of dlls referenced. sl@0: Fix up the import address table and the export table for real addresses. sl@0: */ sl@0: TInt E32Image::LoadToRam() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadToRam %S",&iFileName)); sl@0: sl@0: // offset of data after code which will be erad into iRestOfFileData... sl@0: iConversionOffset = iHeader->iCodeOffset + iHeader->iCodeSize; sl@0: sl@0: // calculate sizes... sl@0: TUint totalSize = ((E32ImageHeaderV*)iHeader)->iUncompressedSize; sl@0: TUint remainder = totalSize-iConversionOffset; sl@0: if(remainder>totalSize) sl@0: RETURN_FAILURE(KErrCorrupt); // Fuzzer can't trigger this because header validation prevents it sl@0: sl@0: iRestOfFileData = (TUint8*)User::Alloc(remainder); sl@0: if(!iRestOfFileData) sl@0: return KErrNoMemory; sl@0: iRestOfFileSize = remainder; sl@0: sl@0: TInt r = LoadFile(); // Read everything in sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: __IF_DEBUG(Printf("iHeader->iCodeRelocOffset %d",iHeader->iCodeRelocOffset)); sl@0: r = ((E32ImageHeaderV*)iHeader)->ValidateRelocations(iRestOfFileData,iRestOfFileSize,iHeader->iCodeRelocOffset,iHeader->iCodeSize,iCodeRelocSection); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: __IF_DEBUG(Printf("iHeader->iDataRelocOffset %d",iHeader->iDataRelocOffset)); sl@0: r = ((E32ImageHeaderV*)iHeader)->ValidateRelocations(iRestOfFileData,iRestOfFileSize,iHeader->iDataRelocOffset,iHeader->iDataSize,iDataRelocSection); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: iCodeDelta = iCodeRunAddress-iHeader->iCodeBase; sl@0: iDataDelta = iDataRunAddress-iHeader->iDataBase; sl@0: sl@0: if(r==KErrNone) sl@0: r = RelocateCode(); sl@0: if(r==KErrNone) sl@0: r = LoadAndRelocateData(); sl@0: if(r==KErrNone) sl@0: r = ReadImportData(); sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::ShouldBeCodePaged(TBool& aPage) sl@0: /** sl@0: Determine whether this binary should be paged. Some of this sl@0: function is unimplemented because it requires the media pageable sl@0: attribute sl@0: sl@0: @param aPage On success, this variable is set to sl@0: whether the binary should be paged. Its sl@0: value is undefined if the return code is sl@0: not KErrNone. sl@0: @return Symbian OS error code. sl@0: sl@0: See S3.1.3.2 of PREQ1110 Design Sketch. sl@0: */ sl@0: { sl@0: aPage = EFalse; sl@0: sl@0: // kernel and global dlls can't be paged... sl@0: if(iAttr&(ECodeSegAttKernel|ECodeSegAttGlobal)) sl@0: return KErrNone; sl@0: sl@0: // 1. 
// if paging policy is NOPAGING then executable is unpaged
	TUint32 policy = E32Loader::PagingPolicy();

	__IF_DEBUG(Printf("sbcp,policy=0x%x", policy));
	if (policy == EKernelConfigCodePagingPolicyNoPaging)
		return KErrNone;

	// 2. if executable is on media without Pageable Media Attribute then unpaged
	// 3. if executable is on removable media then unpaged
	// both superseded by the BlockMap API

	// 3a. if executable has already been loaded into RAM for tamperproofing then
	// it can't be paged
	if (iFileData != NULL)
		return KErrNone;

	// 4. if not compressed with bytepair or uncompressed then unpaged
	__IF_DEBUG(Printf("sbcp,iHeader=0x%08x", iHeader));
	TUint32 comp = iHeader->CompressionType();
	__IF_DEBUG(Printf("sbcp,comp=0x%x", comp));
	if (comp != KUidCompressionBytePair && comp != KFormatNotCompressed)
		return KErrNone;

	aPage = ETrue;

	// 5. if policy is ALWAYSPAGE then page
	if (policy == EKernelConfigCodePagingPolicyAlwaysPage)
		return KErrNone;

	// 6.
	TUint KPagedMask = (KImageCodePaged | KImageCodeUnpaged);
	TUint pagedFlags = iHeader->iFlags & KPagedMask;
	__IF_DEBUG(Printf("sbcp,iHeader->iFlags=0x%x,pagedFlags=0x%x", iHeader->iFlags, pagedFlags));

	// if KImageCodePaged and KImageCodeUnpaged flags present then corrupt
	if (pagedFlags == KPagedMask)
		RETURN_FAILURE(KErrCorrupt);

	// if KImageCodePaged set in executable then page
	if (pagedFlags == KImageCodePaged)
		return KErrNone;

	// if KImageCodeUnpaged set in executable then do not page
	if (pagedFlags == KImageCodeUnpaged)
		{
		aPage = EFalse;
		return KErrNone;
		}

	// 7. otherwise (neither paged nor unpaged set) use paging policy

	// policy must be EKernelConfigCodePagingPolicyDefaultUnpaged or EKernelConfigCodePagingPolicyDefaultPaged
	aPage = (policy == EKernelConfigCodePagingPolicyDefaultPaged);
	return KErrNone;
	}

TInt E32Image::BuildCodeBlockMap()
/**
Use the block map API to build an array of TBlockMapInfo
objects which the kernel can use to page in code as required.

@return Symbian OS error code. KErrNotSupported means the
	Block Map functionality does not support paging from
	the binary's location.
*/
	{
	__IF_DEBUG(Printf("BuildCodeBlockMap,iCodeStartInFile=%d,iCodeLengthInFile=%d", iCodeStartInFile, iCodeLengthInFile));

	__ASSERT_DEBUG(iUseCodePaging, Panic(EBcbmNotCodePaged));

	// do nothing if no code section
	if (iCodeLengthInFile == 0)
		return KErrNone;

	// RFile::BlockMap populates an instance of this object. Need to
	// retain information such as granularity which applies to all entries.
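	// The loop below calls RFile::BlockMap() repeatedly over the code section's
	// extent in the file, appending each returned batch of TBlockMapEntryBase
	// entries to a growing heap buffer, until the API reports KErrCompletion to
	// indicate the whole range has been mapped.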
	SBlockMapInfo bmi;

	TInt curEntriesSize = 0;
	TUint8* entries8 = 0;		// points to heap cell containing TBlockMapEntryBase array

	TInt64 bmPos = 0;
	TInt64 bmEnd = iCodeStartInFile + iCodeLengthInFile;
	TInt r;
	do
		{
		__IF_DEBUG(Printf("lfbpu:BlockMap,in,bmPos=%ld,bmEnd=%ld", bmPos, bmEnd));
		r = iFile.BlockMap(bmi, bmPos, bmEnd, EBlockMapUsagePaging);	// updates bmPos to end of mapped range
		__IF_DEBUG(
			Printf("lfbpu:BlockMap,out,r=%d,bmPos=%ld,bmEnd=%ld,maplen=%d(%d)",
			r, bmPos, bmEnd, bmi.iMap.Length(), bmi.iMap.Length() / sizeof(TBlockMapEntryBase)));
		__IF_DEBUG(
			Printf("lfbpu:BlockMap,out,iBlockGranularity=%u,iBlockStartOffset=%u,iStartBlockAddress=%ld,iLocalDriveNumber=%d",
			bmi.iBlockGranularity, bmi.iBlockStartOffset, bmi.iStartBlockAddress, bmi.iLocalDriveNumber));
		if (r != KErrNone && r != KErrCompletion)
			break;

		// Copy info the first time round as this gets overwritten on subsequent passes
		if (curEntriesSize == 0)
			iCodeBlockMapCommon = bmi;	// slices the SBlockMapCommon subclass data

		// grow the buffer which contains the entries
		TInt newEntriesSize = bmi.iMap.Length();
		TInt newArraySize = curEntriesSize + newEntriesSize;
		TUint8* newEntries8 = (TUint8*) User::ReAlloc(entries8, newArraySize);
		if (newEntries8 == 0)
			{
			r = KErrNoMemory;
			break;
			}
		entries8 = newEntries8;

#ifdef _DEBUG
		// dump the newly-returned block entries
		for (TInt i = 0; i < newEntriesSize; i += sizeof(TBlockMapEntryBase))
			{
			const TBlockMapEntryBase& bme = *reinterpret_cast<const TBlockMapEntryBase*>(bmi.iMap.Ptr() + i);
			__IF_DEBUG(Printf("lfbpu:bme,iNumberOfBlocks=%d,iStartBlock=%d", bme.iNumberOfBlocks, bme.iStartBlock));
			}
#endif

		// append the new entries to the array.
sl@0: Mem::Copy(entries8 + curEntriesSize, bmi.iMap.Ptr(), newEntriesSize); sl@0: curEntriesSize = newArraySize; sl@0: } while (r != KErrCompletion); sl@0: sl@0: // r == KErrCompletion when mapped code section range sl@0: if (r != KErrCompletion) sl@0: { sl@0: User::Free(entries8); sl@0: return r; sl@0: } sl@0: sl@0: #ifdef _DEBUG sl@0: // dump the block map table sl@0: __IF_DEBUG(Printf("lfbpu:endbme,r=%d,curEntriesSize=%d", r, curEntriesSize)); sl@0: for (TInt i = 0; i < curEntriesSize; i += 8) sl@0: { sl@0: __IF_DEBUG(Printf( sl@0: "entries[0x%08x], %02x %02x %02x %02x %02x %02x %02x %02x", sl@0: entries8[i+0], entries8[i+1], entries8[i+2], entries8[i+3], sl@0: entries8[i+4], entries8[i+5], entries8[i+6], entries8[i+7])); sl@0: } sl@0: #endif sl@0: sl@0: iCodeBlockMapEntries = reinterpret_cast(entries8); sl@0: iCodeBlockMapEntriesSize = curEntriesSize; sl@0: sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Get the compression data relevant to demand paging sl@0: */ sl@0: TInt E32Image::LoadCompressionData() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadCompressionData %S 0x%08x",&iFileName,iHeader->CompressionType())); sl@0: sl@0: TUint compression = iHeader->CompressionType(); sl@0: sl@0: TInt r = KErrNone; sl@0: if(compression==KFormatNotCompressed) sl@0: { sl@0: r = LoadCompressionDataNoCompress(); sl@0: } sl@0: else if(compression==KUidCompressionBytePair) sl@0: { sl@0: TRAP(r,LoadCompressionDataBytePairUnpakL()); sl@0: } sl@0: else sl@0: { sl@0: r = KErrNotSupported; sl@0: } sl@0: sl@0: __IF_DEBUG(Printf("E32Image::LoadCompressionData exiting %S r=%d",&iFileName,r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::LoadCompressionDataNoCompress() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadCompressionDataNoCompress %S",&iFileName)); sl@0: if (iHeader->iCodeSize) sl@0: { sl@0: iCodeStartInFile = iHeader->iCodeOffset; sl@0: iCodeLengthInFile = iCodeSize; sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: void E32Image::LoadCompressionDataBytePairUnpakL() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadCompressionDataBytePairUnpakL %S",&iFileName)); sl@0: sl@0: if (iFileData) sl@0: User::Leave(KErrNotSupported); // if the file data has been loaded into RAM we can't page it! sl@0: sl@0: TInt pos = iHeader->TotalSize(); sl@0: User::LeaveIfError(iFile.Seek(ESeekStart,pos)); // Start at beginning of compressed data sl@0: sl@0: CBytePairReader* reader = CBytePairFileReader::NewLC(iFile); sl@0: sl@0: if (iHeader->iCodeSize) sl@0: { sl@0: __IF_DEBUG(Printf("Code & const size %x",iCodeSize)); sl@0: __IF_DEBUG(Printf("Code & const offset %x",iHeader->iCodeOffset)); sl@0: __IF_DEBUG(Printf("Code & const dest %x",iCodeLoadAddress)); sl@0: sl@0: TInt pageCount; sl@0: reader->GetPageOffsetsL(pos, pageCount, iCodePageOffsets); sl@0: sl@0: #ifdef _DEBUG sl@0: for (TInt i = 0; i <= pageCount; ++i) sl@0: { sl@0: __IF_DEBUG(Printf("lfbpu:raw iCodePageOffsets[%d] = %d", i, iCodePageOffsets[i])); sl@0: } sl@0: #endif sl@0: sl@0: // record the code start position in the file and its compressed length sl@0: // so BuildCodeBlockMap can construct a block map for the kernel if this sl@0: // file is demand paged. sl@0: iCodeStartInFile = iCodePageOffsets[0]; sl@0: iCodeLengthInFile = iCodePageOffsets[pageCount] - iCodePageOffsets[0]; sl@0: } sl@0: sl@0: CleanupStack::PopAndDestroy(reader); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read all image data into memory, decompressing it using the method indicated in the image header.. 
sl@0: If code isn't being demand paged the code part is read into #iCodeLoadAddress. sl@0: The rest of the file data after the code part is read into #iRestOfFileData. sl@0: */ sl@0: TInt E32Image::LoadFile() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadFile %S 0x%08x",&iFileName,iHeader->CompressionType())); sl@0: sl@0: TUint compression = iHeader->CompressionType(); sl@0: sl@0: TInt r=KErrNone; sl@0: if(compression==KFormatNotCompressed) sl@0: { sl@0: r = LoadFileNoCompress(); sl@0: CHECK_FAILURE(r); // Fuzzer can't trigger this because it only happens on file i/o error sl@0: } sl@0: else if(compression==KUidCompressionDeflate) sl@0: { sl@0: TRAP(r,LoadFileInflateL()); sl@0: CHECK_FAILURE(r); sl@0: } sl@0: else if(compression==KUidCompressionBytePair) sl@0: { sl@0: TRAP(r,LoadFileBytePairUnpakL()); sl@0: CHECK_FAILURE(r); sl@0: } sl@0: else sl@0: { sl@0: r = KErrNotSupported; sl@0: CHECK_FAILURE(r); // Fuzzer can't trigger this because header validation ensures compression type is OK sl@0: } sl@0: sl@0: // we're done with the file contents now, free up memory before resolving imports sl@0: if(iFileData) sl@0: { sl@0: gFileDataAllocator.Free(iFileData); sl@0: iFileData=NULL; sl@0: } sl@0: sl@0: __IF_DEBUG(Printf("E32Image::LoadFile exiting %S r=%d",&iFileName,r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read data from the image's file (or the preloaded data at #iFileData if present). sl@0: */ sl@0: TInt E32Image::Read(TUint aPos, TUint8* aDest, TUint aSize, TBool aSvPerms) sl@0: { sl@0: TPtr8 p(aDest,aSize,aSize); sl@0: if(iFileData) sl@0: { sl@0: // get data from pre-loaded image data... sl@0: if(aPos+aSize>iFileSize) sl@0: RETURN_FAILURE(KErrCorrupt); // Fuzzer can't trigger this because earlier validation prevents sizes being wrong sl@0: if (aSvPerms) sl@0: WordCopy(aDest,iFileData+aPos,aSize); sl@0: else sl@0: p.Copy(iFileData+aPos,aSize); sl@0: } sl@0: else sl@0: { sl@0: // get data from file... sl@0: TInt r = iFile.Read(aPos,p,aSize); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: } sl@0: sl@0: // check we got the amount of data requested... sl@0: if(TUint(p.Length())!=aSize) sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::Read() Expected:%d, read:%d", aSize, p.Length() )); sl@0: RETURN_FAILURE(KErrCorrupt); // Fuzzer can't trigger this because requires file length to change during load sl@0: } sl@0: sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read all image data into memory. sl@0: If code isn't being demand paged the code part is read into #iCodeLoadAddress. sl@0: The rest of the file data after the code part is read into #iRestOfFileData. sl@0: */ sl@0: TInt E32Image::LoadFileNoCompress() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadFileNoCompress exiting %S",&iFileName)); sl@0: TInt r = KErrNone; sl@0: sl@0: if(iHeader->iCodeSize && !iUseCodePaging) sl@0: { sl@0: __IF_DEBUG(Printf("Code & const size %x",iCodeSize)); sl@0: __IF_DEBUG(Printf("Code & const offset %x",iHeader->iCodeOffset)); sl@0: __IF_DEBUG(Printf("Code & const dest %x",iCodeLoadAddress)); sl@0: r = Read(iHeader->iCodeOffset, (TText8*)iCodeLoadAddress, iCodeSize, ETrue); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: } sl@0: sl@0: if(iRestOfFileSize) sl@0: r = Read(iConversionOffset, iRestOfFileData, iRestOfFileSize); sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: void FileCleanup(TAny* aPtr) sl@0: { sl@0: TFileInput* f=(TFileInput*)aPtr; sl@0: f->Cancel(); sl@0: delete f; sl@0: } sl@0: sl@0: /** sl@0: Read all image data into memory, decompressing it using the Inflate method. 
sl@0: If code isn't being demand paged the code part is read into #iCodeLoadAddress. sl@0: The rest of the file data after the code part is read into #iRestOfFileData. sl@0: */ sl@0: void E32Image::LoadFileInflateL() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadFileInflateL %S",&iFileName)); sl@0: __ASSERT_DEBUG(!iUseCodePaging, Panic(ELfiCodePagingNotSupported)); sl@0: sl@0: TInt pos = iHeader->TotalSize(); sl@0: TBitInput* file; sl@0: if(iFileData) sl@0: { sl@0: if(pos < 0) sl@0: User::Leave(KErrArgument); sl@0: file = new (ELeave) TBitInput(iFileData, iFileSize*8, pos*8); sl@0: CleanupStack::PushL(file); sl@0: } sl@0: else sl@0: { sl@0: User::LeaveIfError(iFile.Seek(ESeekStart,pos)); // Start at beginning of compressed data sl@0: file = new (ELeave) TFileInput(iFile); sl@0: CleanupStack::PushL(TCleanupItem(&FileCleanup,file)); sl@0: } sl@0: sl@0: CInflater* inflater=CInflater::NewLC(*file); sl@0: sl@0: if(iHeader->iCodeSize) sl@0: { sl@0: __IF_DEBUG(Printf("Code & const size %x",iCodeSize)); sl@0: __IF_DEBUG(Printf("Code & const offset %x",iHeader->iCodeOffset)); sl@0: __IF_DEBUG(Printf("Code & const dest %x",iCodeLoadAddress)); sl@0: sl@0: TInt count = inflater->ReadL((TUint8*)iCodeLoadAddress,iCodeSize,&WordCopy); sl@0: if(count!=iCodeSize) sl@0: User::Leave(KErrCorrupt); sl@0: } sl@0: sl@0: if(iRestOfFileSize) sl@0: { sl@0: TUint32 count = inflater->ReadL(iRestOfFileData,iRestOfFileSize,&Mem::Copy); sl@0: if(count!=iRestOfFileSize) sl@0: User::Leave(KErrCorrupt); sl@0: } sl@0: sl@0: CleanupStack::PopAndDestroy(2,file); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Read all image data into memory, decompressing it using the BytePair method. sl@0: If code isn't being demand paged the code part is read into #iCodeLoadAddress. sl@0: The rest of the file data after the code part is read into #iRestOfFileData. sl@0: */ sl@0: void E32Image::LoadFileBytePairUnpakL() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadFileBytePairUnpak %S",&iFileName)); sl@0: sl@0: // code starts after header sl@0: TInt pos = iHeader->TotalSize(); sl@0: sl@0: CBytePairReader* reader; sl@0: if(iFileData) sl@0: reader = CBytePairReader::NewLC(iFileData+pos, iFileSize-pos); sl@0: else sl@0: { sl@0: iFile.Seek(ESeekStart, pos); sl@0: reader = CBytePairFileReader::NewLC(iFile); sl@0: } sl@0: sl@0: TBool codeLoaded = false; sl@0: if(iHeader->iCodeSize && !iUseCodePaging) sl@0: { sl@0: __IF_DEBUG(Printf("Code & const size %x",iCodeSize)); sl@0: __IF_DEBUG(Printf("Code & const offset %x",iHeader->iCodeOffset)); sl@0: __IF_DEBUG(Printf("Code & const dest %x",iCodeLoadAddress)); sl@0: sl@0: TUint32 bytes = reader->DecompressPagesL((TUint8*)iCodeLoadAddress,iCodeSize,&WordCopy); sl@0: sl@0: __IF_DEBUG(Printf("bytes:%x",bytes)); sl@0: if((TInt)bytes!=iCodeSize) sl@0: User::Leave(KErrCorrupt); sl@0: sl@0: codeLoaded = true; sl@0: } sl@0: sl@0: if(iRestOfFileSize) sl@0: { sl@0: if(!codeLoaded) sl@0: { sl@0: // skip past code part of file... 
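sl@0: // The seek below assumes the byte-pair code part is laid out as an index
sl@0: // table header, then one 16-bit compressed-size entry per code page, then
sl@0: // the compressed page data itself (iCodeLengthInFile bytes). For example,
sl@0: // a 4-page code section would be skipped with
sl@0: // KIndexTableHeaderSize + 4*sizeof(TUint16) + iCodeLengthInFile bytes.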
sl@0: TInt pageCount = (iCodeSize + KPageOffsetMask) >> KPageSizeShift; sl@0: sl@0: TInt pos = KIndexTableHeaderSize sl@0: + pageCount * sizeof(TUint16) sl@0: + iCodeLengthInFile; sl@0: sl@0: __IF_DEBUG(Printf("lfpbu:pos=%x", pos)); sl@0: reader->SeekForwardL(pos); sl@0: } sl@0: sl@0: __IF_DEBUG(Printf(" iRestOfFileSize==%x, iRestOfFileData==%x", iRestOfFileSize, iRestOfFileData)); sl@0: sl@0: TUint32 bytes = reader->DecompressPagesL(iRestOfFileData,iRestOfFileSize,NULL); sl@0: __IF_DEBUG(Printf("bytes:%x",bytes)); sl@0: if(bytes!=iRestOfFileSize) sl@0: User::Leave(KErrCorrupt); sl@0: } sl@0: sl@0: CleanupStack::PopAndDestroy(reader); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Relocate code. sl@0: */ sl@0: TInt E32Image::RelocateCode() sl@0: { sl@0: if(iHeader->iExportDirOffset) sl@0: iExportDirLoad += iCodeLoadAddress; // only for RAM modules which are not already loaded sl@0: sl@0: __IF_DEBUG(Printf("**EntryPointVeneer %08x FileEntryPoint %08x",iEntryPtVeneer,iFileEntryPoint)); sl@0: __IF_DEBUG(Printf("**ExportDir load@%08x run@%08x",iExportDirLoad,iExportDir)); sl@0: TInt r = KErrNone; sl@0: if(iHeader->iCodeRelocOffset) sl@0: { sl@0: __IF_DEBUG(Printf("Relocate code & const")); sl@0: sl@0: if(!iUseCodePaging) sl@0: r = RelocateSection(iCodeRelocSection, iCodeLoadAddress); sl@0: else sl@0: { sl@0: r = AllocateRelocationData(iCodeRelocSection, iHeader->iCodeSize, iCodeLoadAddress, iCodeRelocTable); sl@0: iExportDirEntryDelta = iCodeDelta; // so exports get relocated sl@0: } sl@0: } sl@0: sl@0: if(r==KErrNone) sl@0: r = RelocateExports(); sl@0: sl@0: if(r==KErrNone) sl@0: { sl@0: // put a unique ID into the third word after the entry point sl@0: sl@0: // address for ID... sl@0: TLinAddr csid_addr = iFileEntryPoint+KCodeSegIdOffset-iCodeRunAddress+iCodeLoadAddress; sl@0: __IF_DEBUG(Printf("csid_addr %08x", csid_addr)); sl@0: sl@0: // get existing ID... sl@0: TUint x; sl@0: WordCopy(&x, (const TAny*)csid_addr, sizeof(x)); sl@0: if(x==0) sl@0: { sl@0: // generate next ID... sl@0: if(++NextCodeSegId == 0xffffffffu) sl@0: Fault(ELdrCsIdWrap); sl@0: __IF_DEBUG(Printf("NextCSID %08x", NextCodeSegId)); sl@0: // store ID... sl@0: if(!iUseCodePaging) sl@0: WordCopy((TAny*)csid_addr, &NextCodeSegId, sizeof(NextCodeSegId)); sl@0: else sl@0: { sl@0: // demand paged code needs modifying when paged in, so add ID as a new 'fixup'... sl@0: TUint64* fixup = ExpandFixups(1); sl@0: if(!fixup) sl@0: r = KErrNoMemory; sl@0: else sl@0: *fixup = MAKE_TUINT64(csid_addr,NextCodeSegId); sl@0: } sl@0: } sl@0: } sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Copy the data section from buffer #iRestOfFileData to the memory allocated at #iDataLoadAddress. sl@0: Then relocate this data ready for use at the executables run addresses. sl@0: */ sl@0: TInt E32Image::LoadAndRelocateData() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadAndRelocateData %S",&iFileName)); sl@0: if(!iHeader->iDataOffset) sl@0: return KErrNone; // do data section sl@0: sl@0: // copy data... sl@0: __IF_DEBUG(Printf("Read Data: size %x->%08x",iDataSize,iDataLoadAddress)); sl@0: TUint32 bufferOffset=iHeader->iDataOffset-iConversionOffset; sl@0: TUint8* source=iRestOfFileData+bufferOffset; sl@0: MemCopy((TText8*)iDataLoadAddress,source,iDataSize); sl@0: sl@0: // relocate data... 
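sl@0: // The relocations are applied in place to the copy at iDataLoadAddress so
sl@0: // that its contents hold the values required at the executable's run
sl@0: // addresses (see RelocateSection below).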
sl@0: __IF_DEBUG(Printf("Relocate data section")); sl@0: __IF_DEBUG(Printf("iDataRelocOffset %08x",iHeader->iDataRelocOffset)); sl@0: TInt r = KErrNone; sl@0: if(iHeader->iDataRelocOffset) sl@0: r = RelocateSection(iDataRelocSection, iDataLoadAddress); sl@0: sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Copies data from aDestination to aSource by running in supervisor mode. sl@0: aDest, aSource & aNumberOfBytes must be word aligned. sl@0: */ sl@0: TUint8* E32Image::WordCopy(TAny* aDestination, const TAny* aSource, TInt aNumberOfBytes) sl@0: { sl@0: aNumberOfBytes &= ~3; // Avoid panics for corrupt data which is not word size sl@0: SCopyDataInfo info = {aDestination,aSource, aNumberOfBytes}; sl@0: return (TUint8*) ExecuteInSupervisorMode(&svWordCopy, &info); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Copies data from aDestination to aSource by running in supervisor mode. sl@0: */ sl@0: TUint8* E32Image::MemCopy(TAny* aDestination, const TAny* aSource, TInt aNumberOfBytes) sl@0: { sl@0: SCopyDataInfo info={aDestination,aSource, aNumberOfBytes}; sl@0: return (TUint8*) ExecuteInSupervisorMode(&svMemCopy, &info); sl@0: } sl@0: sl@0: sl@0: /** sl@0: Relocate a section, applying relocations for run addresses to values currently at their load addresses. sl@0: */ sl@0: TInt E32Image::RelocateSection(E32RelocSection* aSection, TUint32 aLoadAddress) sl@0: { sl@0: if(!aSection) sl@0: return KErrNone; sl@0: sl@0: __IF_DEBUG(Printf("Relocate: NRelocs:%08x LoadAddr:%08x", aSection->iNumberOfRelocs, aLoadAddress)); sl@0: sl@0: SRelocateSectionInfo info={this, (TUint8*)(aSection+1), aSection->iNumberOfRelocs, aLoadAddress}; sl@0: sl@0: // call function in supervisor mode to relocate the section sl@0: TInt r = ExecuteInSupervisorMode(&svRelocateSection, &info); sl@0: sl@0: __IF_DEBUG(Printf("Relocate returning %d",r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Relocate the export directory for the code's run address sl@0: */ sl@0: TInt E32Image::RelocateExports() sl@0: { sl@0: // This only has to be done for PE-derived images, ELF marks all sl@0: // export table entries as 'relocations' so this job has already been done. sl@0: TUint impfmt = iHeader->ImportFormat(); sl@0: if (impfmt == KImageImpFmt_ELF) sl@0: return KErrNone; sl@0: sl@0: __IF_DEBUG(Printf("E32Image::RelocateExports %S",&iFileName)); sl@0: sl@0: if(iHeader->iExportDirOffset) sl@0: { sl@0: // call function in supervisor mode to fix up export directory sl@0: ExecuteInSupervisorMode(&svRelocateExports, this); sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: Validate import section data structures in iRestOfFileData. sl@0: Set iImportData to point to point to start of this. sl@0: Allocate memory (iCurrentImportList) which is big enough to store imports for a single dependency. 
sl@0: */ sl@0: TInt E32Image::ReadImportData() sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::ReadImportData %S",&iFileName)); sl@0: sl@0: if(!iHeader->iImportOffset) sl@0: return KErrNone; sl@0: sl@0: TUint biggestImportCount; sl@0: TInt r = ((E32ImageHeaderV*)iHeader)->ValidateImports(iRestOfFileData,iRestOfFileSize,biggestImportCount); sl@0: if(r!=KErrNone) sl@0: return r; sl@0: sl@0: iImportData = (TUint32*)(iRestOfFileData+iHeader->iImportOffset-iConversionOffset); sl@0: iCurrentImportList = (TUint32*)User::Alloc(biggestImportCount * sizeof(TUint32)); sl@0: __IF_DEBUG(Printf("E32Image::ReadImportData - alloc %d current import slots at %08x", biggestImportCount, iCurrentImportList)); sl@0: if(!iCurrentImportList) sl@0: return KErrNoMemory; sl@0: sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: void E32Image::SortCurrentImportList() sl@0: { sl@0: if (!iCurrentImportListSorted) sl@0: { sl@0: RArray array((TUint*)iCurrentImportList, iCurrentImportCount); sl@0: array.Sort(); sl@0: iCurrentImportListSorted = (TUint8)ETrue; sl@0: } sl@0: } sl@0: sl@0: sl@0: TInt CheckRomExports(const TRomImageHeader* aR, const E32Image* aI) sl@0: { sl@0: __IF_DEBUG(Printf("CheckRomExports")); sl@0: if (aR->iExportDirCount == 0) sl@0: return aI->iCurrentImportCount ? KErrNotSupported : KErrNone; sl@0: const TUint32* xd = (const TUint32*)aR->iExportDir; sl@0: const TUint32* p = aI->iCurrentImportList; sl@0: const TUint32* pE = p + aI->iCurrentImportCount; sl@0: for (; piCurrentImportCount ? KErrNotSupported : KErrNone; sl@0: if (aEDT == KImageHdr_ExpD_NoHoles) sl@0: return KErrNone; // nothing missing sl@0: sl@0: const TUint32* p = aI->iCurrentImportList; sl@0: const TUint32* pE = p + aI->iCurrentImportCount; sl@0: sl@0: if (aEDT == KImageHdr_ExpD_FullBitmap) sl@0: { sl@0: for (; p>3] & (1u<<(x&7))) ) sl@0: return KErrNotSupported; sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: if (aEDT != KImageHdr_ExpD_SparseBitmap8) sl@0: return KErrNotSupported; // don't know what this is sl@0: aI->SortCurrentImportList(); // sort imports to increasing order sl@0: TUint32 memsz = (aEDC + 7) >> 3; // size of complete bitmap sl@0: TUint32 mbs = (memsz + 7) >> 3; // size of meta-bitmap sl@0: const TUint8* mptr = aED; sl@0: const TUint8* gptr = mptr + mbs; sl@0: const TUint8* mptrE = mptr + mbs; sl@0: TUint xlim = 64; sl@0: for (; mptr>=1, ++g) sl@0: if (m&1) sl@0: *g = *gptr++; sl@0: g = (TUint8*)g32; sl@0: for (; p>3] & (1u<<(ix&7))) ) sl@0: return KErrNotSupported; sl@0: } sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: TInt CheckRequiredImports(E32Image* aImporter, E32Image* aExporter, TInt aAction) sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::CheckRequiredImports (existing) %d", aAction)); sl@0: TInt last = aImporter->LastCurrentImport(); sl@0: if (last > aExporter->iExportDirCount) sl@0: return KErrNotSupported; sl@0: if (aAction == EAction_CheckLastImport) sl@0: return KErrNone; sl@0: if (aExporter->iRomImageHeader) sl@0: return CheckRomExports(aExporter->iRomImageHeader, aImporter); sl@0: if (aExporter->iHeader) sl@0: { sl@0: E32ImageHeaderV* v = (E32ImageHeaderV*)aExporter->iHeader; sl@0: return CheckRamExports(v->iExportDescType, v->iExportDesc, v->iExportDirCount, aImporter); sl@0: } sl@0: TInt r = aExporter->ReadExportDirLoad(); sl@0: if (r != KErrNone) sl@0: return r; // could fail with OOM sl@0: TBool hasNmdExp = (aExporter->iAttr & ECodeSegAttNmdExpData); sl@0: const TUint32* p = aImporter->iCurrentImportList; sl@0: const TUint32* pE = p + aImporter->iCurrentImportCount; sl@0: const TUint32* pX = (const 
TUint32*)aExporter->iExportDirLoad - 1; sl@0: TUint32 xep = aExporter->iFileEntryPoint; sl@0: for (; pLastCurrentImport(); sl@0: if (last > aExporter.iExportDirCount) sl@0: return KErrNotSupported; sl@0: if (aAction == EAction_CheckLastImport) sl@0: return KErrNone; sl@0: if (aExporter.iRomImageHeader) sl@0: return CheckRomExports(aExporter.iRomImageHeader, aImporter); sl@0: return CheckRamExports(aExporter.iExportDescType, aExporter.iExportDesc, aExporter.iExportDirCount, aImporter); sl@0: } sl@0: sl@0: sl@0: TInt E32Image::GetCurrentImportList(const E32ImportBlock* a) sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::GetCurrentImportList(E32ImportBlock* a:%08X)", a)); sl@0: TInt r; sl@0: TInt n = a->iNumberOfImports; sl@0: iCurrentImportCount = n; sl@0: iCurrentImportListSorted = (TUint8)EFalse; sl@0: __IF_DEBUG(Printf("iCurrentImportCount:%d, iCurrentImportListSorted:%d)", iCurrentImportCount, iCurrentImportListSorted)); sl@0: __IF_DEBUG(Printf("iHeader->ImportFormat() == KImageImpFmt_ELF:%d", (iHeader->ImportFormat() == KImageImpFmt_ELF) )); sl@0: sl@0: if (iHeader->ImportFormat() == KImageImpFmt_ELF) sl@0: { sl@0: SGetImportDataInfo info; sl@0: info.iCount = n; sl@0: info.iDest = iCurrentImportList; sl@0: info.iCodeLoadAddress = iCodeLoadAddress; sl@0: info.iImportOffsetList = (TUint32*)a->Imports(); sl@0: r = ExecuteInSupervisorMode(&svElfDerivedGetImportInfo, &info); sl@0: } sl@0: else sl@0: { sl@0: TUint32* iat = (TUint32*)(iCodeLoadAddress + iTextSize); sl@0: WordCopy(iCurrentImportList, iat + iNextImportPos, n * sizeof(TUint32)); sl@0: r = KErrNone; sl@0: } sl@0: iNextImportPos += n; sl@0: __IF_DEBUG(Printf("End of E32Image::GetCurrentImportList:%d)", r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::LastCurrentImport() sl@0: { sl@0: TUint32 last = 0; sl@0: if (iCurrentImportListSorted) sl@0: last = iCurrentImportList[iCurrentImportCount - 1]; sl@0: else sl@0: { sl@0: const TUint32* p = iCurrentImportList; sl@0: const TUint32* pE = p + iCurrentImportCount; sl@0: for (; p last) last = *p; sl@0: } sl@0: __IF_DEBUG(Printf("E32Image::LastCurrentImport = %d", last)); sl@0: return last; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::ProcessImports() sl@0: // sl@0: // This function is only ever called on the exe/dll which is loaded from sl@0: // the RProcess/RLibrary load. sl@0: // It reads this DLL/EXE's imports section and builds up a table of dlls referenced. sl@0: // It never goes recursive. 
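sl@0: // The work is split into LoadDlls() (locate and load every dependency),
sl@0: // FixupDlls() (resolve the import addresses) and FinaliseDlls() (mark each
sl@0: // code segment as loaded), followed by CleanupDlls() to release the
sl@0: // temporary state held for the fix-up.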
sl@0: // sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::ProcessImports %S",&iFileName)); sl@0: __IF_DEBUG(Printf("DepCount=%d",iDepCount)); sl@0: sl@0: if (iDepCount==0 || AlwaysLoaded()) sl@0: return KErrNone; // no imports sl@0: sl@0: TFileNameInfo fi; sl@0: fi.Set(iFileName, 0); sl@0: gLoadeePath.Zero(); sl@0: fi.GetName(gLoadeePath, TFileNameInfo::EIncludeDrivePath); sl@0: if (PlatSec::ConfigSetting(PlatSec::EPlatSecEnforceSysBin) sl@0: && gLoadeePath.Length()==11 sl@0: && KSysBin().CompareF(TPtrC8(gLoadeePath.Ptr()+1,10))==0) sl@0: { sl@0: // Main loadee is in the default path, so unset this in order to sl@0: // search normally for dependents sl@0: gLoadeePath.Zero(); sl@0: } sl@0: #ifdef __X86__ sl@0: if (gLoadeePath.Length()>=2 && gLoadeePath[1]==':') sl@0: { sl@0: TInt d = gLoadeePath[0]; sl@0: if (d=='a' || d=='A') sl@0: UseFloppy = EDriveA; sl@0: else if (d=='b' || d=='B') sl@0: UseFloppy = EDriveB; sl@0: } sl@0: #endif sl@0: RImageArray array; sl@0: TInt r = array.Add(this); sl@0: if (r==KErrNone) sl@0: r = LoadDlls(array); sl@0: if (r==KErrNone) sl@0: r = FixupDlls(array); sl@0: if (r==KErrNone) sl@0: r = FinaliseDlls(array); sl@0: CleanupDlls(array); sl@0: array.Close(); sl@0: sl@0: __IF_DEBUG(Printf("E32Image::ProcessImports returns %d",r)); sl@0: return r; sl@0: } sl@0: sl@0: void E32Image::CleanupDlls(RImageArray& aArray) sl@0: // sl@0: // Free the space used in fixing up the dlls. sl@0: // Don't free the entry corresponding to the main loadee. sl@0: // sl@0: { sl@0: sl@0: __IF_DEBUG(Printf("CleanupDlls")); sl@0: TInt n = aArray.Count(); sl@0: TInt i; sl@0: for (i=0; iiAlreadyLoaded) sl@0: { sl@0: // transfers ownership of clamp handle to codeseg; nulls handle if successful sl@0: if(!e->AlwaysLoaded()) sl@0: r = E32Loader::CodeSegLoaded(*e); sl@0: if(r==KErrNone && e->iUseCodePaging) sl@0: { sl@0: e->iFileClamp.iCookie[0]=0;// null handle to indicate sl@0: e->iFileClamp.iCookie[1]=0;// transfer of ownership of clamp handle to codeseg sl@0: } sl@0: } sl@0: } sl@0: __IF_DEBUG(Printf("E32Image::FinaliseDlls returns %d",r)); sl@0: return r; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::LoadDlls(RImageArray& aArray) sl@0: // sl@0: // Build a matrix of all DLLs referenced by the one we're loading, and sl@0: // ensure they're all loaded. 
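sl@0: // While walking the import blocks this routine also caches the exporter's
sl@0: // E32Image pointer in each block's iOffsetOfDllName field, so that
sl@0: // FixupDlls() can reach the exporter without repeating the search.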
sl@0: // sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::LoadDlls")); sl@0: TInt r=KErrNone; sl@0: E32ImportSection* importSection=(E32ImportSection *)iImportData; sl@0: E32ImportBlock* block; sl@0: if(importSection) sl@0: block=(E32ImportBlock*)(importSection+1); sl@0: else sl@0: block=NULL; sl@0: const TRomImageHeader* const * pR=NULL; sl@0: if (iRomImageHeader) sl@0: pR=iRomImageHeader->iDllRefTable->iEntry; sl@0: iNextImportPos = 0; sl@0: sl@0: // For each module referenced by this module sl@0: for (TInt i=0; i rootname; sl@0: req.iFileName = (HBufC8*)&rootname; sl@0: sl@0: if (pR) sl@0: { sl@0: // Processing imports for ROM XIP module sl@0: rih = *pR++; sl@0: __IF_DEBUG(Printf("Importing from ROM XIP %08x", rih)); sl@0: e = aArray.Find(rih); sl@0: } sl@0: else sl@0: { sl@0: // Processing imports for RAM module sl@0: __IF_DEBUG(Printf("Import block address %08x",block)); sl@0: TPtrC8 dllname = (const TText8*)((TUint32)iImportData + block->iOffsetOfDllName); sl@0: if (dllname.Length() > KMaxKernelName) sl@0: { sl@0: __IF_DEBUG(Printf("Import DLL name too big: %S",&dllname)); sl@0: RETURN_FAILURE(KErrNotSupported); sl@0: } sl@0: TFileNameInfo fni; sl@0: r = fni.Set(dllname, TFileNameInfo::EAllowUid); sl@0: if (r!=KErrNone) sl@0: RETURN_FAILURE(KErrCorrupt); sl@0: fni.GetName(rootname, TFileNameInfo::EIncludeBaseExt); sl@0: TUint32* uid=(TUint32*)&req.iRequestedUids; sl@0: uid[2] = fni.Uid(); sl@0: req.iRequestedVersion = fni.Version(); sl@0: if (gLoadeePath.Length() > 0) sl@0: req.iPath = (HBufC8*)&gLoadeePath; sl@0: req.iPlatSecCaps = iS.iCaps; sl@0: req.iFileNameInfo.Set(rootname, 0); sl@0: req.iImporter = this; sl@0: r = GetCurrentImportList(block); // get list of required exports from this exporter sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: TUint impfmt = iHeader->ImportFormat(); sl@0: block = (E32ImportBlock*)block->NextBlock(impfmt); sl@0: sl@0: r = finder.Set(req); sl@0: if (r == KErrNone) sl@0: r = finder.SearchExisting(aArray); // see what we've already got sl@0: if (r == KErrNone) sl@0: { sl@0: TBool search = ETrue; sl@0: if (finder.iExisting) sl@0: { sl@0: // Found an existing DLL - check for an exact version match sl@0: if (DetailedCompareVersions(finder.iCurrentVersion, finder.iReq->iRequestedVersion) <= EVersion_Exact) sl@0: search = EFalse; // if exact match, don't need to continue search sl@0: } sl@0: if (search) sl@0: r = finder.Search(); // see what else is available sl@0: } sl@0: if (r!=KErrNone) sl@0: { sl@0: finder.Close(); sl@0: return r; sl@0: } sl@0: if (finder.iExisting) sl@0: e = finder.iExisting; // already have the required module sl@0: } sl@0: sl@0: // If it's already in the array, go on to the next module sl@0: if (e) sl@0: { sl@0: __IF_DEBUG(Printf("Already there")); sl@0: } sl@0: else sl@0: { sl@0: // Not already in the array sl@0: __IF_DEBUG(Printf("Not in array, add it")); sl@0: e = new E32Image; sl@0: if (!e) sl@0: { sl@0: finder.Close(); sl@0: return KErrNoMemory; sl@0: } sl@0: e->iMain = iMain; sl@0: e->iClientProcessHandle = iMain->iClientProcessHandle; sl@0: if (iMain->iAttr & ECodeSegAttKernel) sl@0: e->iAttr |= ECodeSegAttKernel; sl@0: if (rih) sl@0: { sl@0: // loading a specified ROM XIP DLL sl@0: r = e->DoLoadCodeSeg(*rih); sl@0: } sl@0: else sl@0: { sl@0: // loading a DLL by name sl@0: r = e->DoLoadCodeSeg(req, finder); // also closes 'finder' sl@0: __IF_DEBUG(Printf("%S DoLoadCodeSeg returned %d",req.iFileName,r)); sl@0: } sl@0: sl@0: // Add the new entry to the array sl@0: if (r==KErrNone) sl@0: { sl@0: __IF_DEBUG(Printf("Add to 
the array")); sl@0: r = aArray.Add(e); sl@0: } sl@0: if (r!=KErrNone) sl@0: { sl@0: delete e; sl@0: return r; sl@0: } sl@0: sl@0: // Now go nice and recursive, and call LoadDlls on this latest dll, if it sl@0: // imports anything sl@0: // This recursive horror *will* terminate because it is only called sl@0: // on "new" dlls sl@0: if (e->iDepCount && !e->iAlreadyLoaded && e->iIsDll) sl@0: { sl@0: __IF_DEBUG(Printf("****Go recursive****")); sl@0: r = e->LoadDlls(aArray); sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: } sl@0: sl@0: } sl@0: sl@0: // If we added an SMP unsafe dependent, this image is SMP unsafe. sl@0: // This is done after recursing into LoadDlls, so a single unsafe sl@0: // dependent anywhere down the tree will poison everything above it. sl@0: // This isn't sufficient to deal with cycles, though, so the kernel sl@0: // also has to update the flag in DCodeSeg::FinaliseRecursiveFlags. sl@0: // It has to be done here first because the kernel doesn't know sl@0: // about XIP DLLs that don't have a codeseg created. sl@0: if (!(e->iAttr & ECodeSegAttSMPSafe)) sl@0: { sl@0: __IF_DEBUG(Printf("%S is not SMP safe because it loads %S", &iFileName, &e->iFileName)); sl@0: iAttr &= ~ECodeSegAttSMPSafe; sl@0: } sl@0: sl@0: // If exporter is an EXE it must be the same as the client process or newly created process sl@0: __IF_DEBUG(Printf("Check EXE->EXE")); sl@0: if (gExeCodeSeg && !e->iIsDll && e->iHandle!=gExeCodeSeg) sl@0: return KErrNotSupported; sl@0: sl@0: // A globally-visible module may only link to other globally visible modules sl@0: __IF_DEBUG(Printf("Check Global Attribute")); sl@0: if ( (iAttr&ECodeSegAttGlobal) && !(e->iAttr&ECodeSegAttGlobal) ) sl@0: return KErrNotSupported; sl@0: sl@0: // A ram-loaded globally-visible module may only link to ROM XIP modules with no static data sl@0: __IF_DEBUG(Printf("Check RAM Global")); sl@0: if ( (iAttr&ECodeSegAttGlobal) && !iRomImageHeader && e->iHandle) sl@0: return KErrNotSupported; sl@0: sl@0: if (thisBlock) sl@0: thisBlock->iOffsetOfDllName=(TUint32)e; // For easy access when fixing up imports sl@0: if (e->iHandle) sl@0: { sl@0: // Record the dependence of this on e sl@0: r=E32Loader::CodeSegAddDependency(iHandle, e->iHandle); sl@0: if (r!=KErrNone) sl@0: { sl@0: return r; sl@0: } sl@0: } sl@0: } sl@0: __IF_DEBUG(Printf("E32Image::LoadDlls OK")); sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::ReadExportDirLoad() sl@0: { sl@0: // Get the exporter's export directory sl@0: __IF_DEBUG(Printf("ReadExportDirLoad exp_dir=%08x", iExportDirLoad)); sl@0: if (!iExportDirLoad) sl@0: { sl@0: // already loaded nonglobal DLL - must read the export directory sl@0: if (iExportDirCount==0 && !(iAttr&ECodeSegAttNmdExpData)) sl@0: return KErrGeneral; // DLL has no exports, something must be wrong sl@0: iCopyOfExportDir = (TUint32*)User::Alloc((iExportDirCount+1) * sizeof(TUint32)); sl@0: if (!iCopyOfExportDir) sl@0: return KErrNoMemory; sl@0: __IF_DEBUG(Printf("Reading %d exports", iExportDirCount)); sl@0: E32Loader::ReadExportDir(iHandle, iCopyOfExportDir); sl@0: iExportDirLoad = (TUint32)(iCopyOfExportDir+1); sl@0: } sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: TInt E32Image::FixupDlls(RImageArray& aArray) sl@0: // sl@0: // Go through the array, fixing up the files sl@0: // sl@0: { sl@0: __IF_DEBUG(Printf("E32Image::FixupDlls")); sl@0: sl@0: // For each E32Image file in the array sl@0: TInt i; sl@0: TInt c = aArray.Count(); sl@0: sl@0: for (i=0; iiFileName)); sl@0: sl@0: const E32ImportSection* importSection = 
(const E32ImportSection*)imp->iImportData; sl@0: if (!importSection) sl@0: { sl@0: __IF_DEBUG(Printf("Has no imports to fixup")); sl@0: continue; // No imports, skip this dll (true of ALL ROM dlls) sl@0: } sl@0: sl@0: const E32ImportBlock* block = (const E32ImportBlock*)(importSection + 1); sl@0: sl@0: SFixupImportAddressesInfo info; sl@0: info.iIat = (TUint32*)(imp->iCodeLoadAddress + imp->iTextSize); sl@0: info.iCodeLoadAddress = imp->iCodeLoadAddress; sl@0: sl@0: // fix up imports from each dependent DLL, building a table of all the imports for the binary sl@0: TInt depCount = imp->iDepCount; sl@0: while (depCount--) sl@0: { sl@0: // declare variables at start of loop body to prevent 'crosses initialization' errors sl@0: TUint impfmt; sl@0: sl@0: // E32Image::LoadDlls() will have set iOffsetOfDllName of the sl@0: // import block to point to the E32Image object of the exporter sl@0: // it's importing sl@0: E32Image* exp = (E32Image*)(block->iOffsetOfDllName); // LoadDlls() set this to exporter sl@0: sl@0: // Get the exporter's export directory sl@0: r = exp->ReadExportDirLoad(); sl@0: if (r != KErrNone) sl@0: return r; sl@0: info.iExportDir = (TUint32*)exp->iExportDirLoad; sl@0: info.iExportDirEntryDelta = exp->iExportDirEntryDelta; sl@0: info.iNumImports = block->iNumberOfImports; sl@0: info.iExporter = exp; sl@0: sl@0: // if demand paging, expand the import fixup buffer for this next exporting DLL sl@0: if (! imp->iUseCodePaging) sl@0: info.iFixup64 = 0; sl@0: else sl@0: { sl@0: info.iFixup64 = imp->ExpandFixups(block->iNumberOfImports); sl@0: if (!info.iFixup64) sl@0: return KErrNoMemory; sl@0: } sl@0: sl@0: // call function in supervisor mode to fix up the import addresses. sl@0: impfmt = imp->iHeader->ImportFormat(); sl@0: if (impfmt == KImageImpFmt_ELF) sl@0: { sl@0: info.iImportOffsetList = (TUint32*)(block+1); sl@0: r = ExecuteInSupervisorMode(&svElfDerivedFixupImportAddresses, &info); sl@0: } sl@0: else sl@0: r = ExecuteInSupervisorMode(&svFixupImportAddresses, &info); sl@0: sl@0: if (r != KErrNone) sl@0: { sl@0: __IF_DEBUG(Printf("svFixupImportAddresses returns %d", r)); sl@0: return r; sl@0: } sl@0: sl@0: // Next import block... sl@0: block = block->NextBlock(impfmt); sl@0: } // while (depCount--) sl@0: sl@0: if (imp->iUseCodePaging && imp->iFixupCount > 0) sl@0: { sl@0: // convert the pairs to an import fixup tab which can be used when sl@0: // the code is paged. sl@0: r = imp->BuildImportFixupTable(); sl@0: if (r != KErrNone) sl@0: return r; sl@0: } sl@0: } sl@0: sl@0: __IF_DEBUG(Printf("E32Image::FixupDlls OK")); sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: /** sl@0: This function is defined because RArray does not natively support sl@0: sorting 64-bit integers. sl@0: sl@0: It is used by FixupDlls to order the import fixup locations in the image sl@0: so they can be organized by page. sl@0: sl@0: @param aLeft 64-bit unsigned integer to compare against aRight. sl@0: @param aRight 64-bit unsigned integer to compare against aLeft. sl@0: @return -1 if aLeft < aRight; 0 if aLeft == aRight; and sl@0: +1 if aLeft > aRight. This conforms to the behavior sl@0: which is expected from a function used by TLinearOrder. 
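sl@0: For example, BuildImportFixupTable() below sorts the 64-bit (address,value)
sl@0: fixups with it, roughly as in this sketch:
sl@0: @code
sl@0: RArray<TUint64> fixups(sizeof(TUint64), iFixups, iFixupCount);
sl@0: fixups.Sort(TLinearOrder<TUint64>(Uint64LinearOrderFunc));
sl@0: @endcode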
sl@0: */
sl@0: static TInt Uint64LinearOrderFunc(const TUint64& aLeft, const TUint64& aRight)
sl@0: {
sl@0: if (aLeft < aRight)
sl@0: return -1;
sl@0: else if (aLeft > aRight)
sl@0: return 1;
sl@0: else
sl@0: return 0;
sl@0: }
sl@0:
sl@0:
sl@0: TUint64* E32Image::ExpandFixups(TInt aNumFixups)
sl@0: {
sl@0: __IF_DEBUG(Printf("ExpandFixups,%d+%d", iFixupCount,aNumFixups));
sl@0: TInt newCount = iFixupCount+aNumFixups;
sl@0: TUint64* fixups = (TUint64*) User::ReAlloc(iFixups, sizeof(TUint64) * newCount);
sl@0: if(!fixups)
sl@0: return 0;
sl@0: TUint64* newFixups = fixups+iFixupCount;
sl@0: iFixupCount = newCount;
sl@0: iFixups = fixups;
sl@0: return newFixups;
sl@0: }
sl@0:
sl@0:
sl@0: /**
sl@0: Helper function for FixupDlls. Takes the set of
sl@0: 64-bit fixups, and organizes them into pages.
sl@0:
sl@0: Each page is stored as fXXX YYYY ZZZZ where YYYY ZZZZ is written
sl@0: to the word at offset XXX. (See PREQ1110 Design Sketch v1.0 S3.1.1.2.3.2.)
sl@0:
sl@0: On success iImportFixupTableSize is set to the table size in bytes,
sl@0: and iImportFixupTable is a cell containing the table.
sl@0:
sl@0: @return Symbian OS error code.
sl@0: */
sl@0: TInt E32Image::BuildImportFixupTable()
sl@0: {
sl@0: __IF_DEBUG(Printf(">BuildImportFixupTable,0x%08x,%d", iFixups, iFixupCount));
sl@0:
sl@0: // sort the array in address order, to organize by page
sl@0: RArray<TUint64> fixup64ToSort(sizeof(TUint64), iFixups, iFixupCount);
sl@0: // SortUnsigned doesn't work on TUint64
sl@0: fixup64ToSort.Sort(TLinearOrder<TUint64>(Uint64LinearOrderFunc));
sl@0:
sl@0: // now have <address,value>
pairs, organize into pages. sl@0: // Each page is stored as fXXX YYYY ZZZZ where YYYY ZZZZ is written sl@0: // to the word at offset XXX. (See PREQ1110 Design Sketch v1.0 S3.1.1.2.3.2.) sl@0: sl@0: TUint32 pageCount = SizeToPageCount(iCodeSize); sl@0: iImportFixupTableSize = (pageCount+1) * sizeof(TUint32) + iFixupCount * 3 * sizeof(TUint16); sl@0: iImportFixupTable = (TUint32*) User::Alloc(iImportFixupTableSize); sl@0: __IF_DEBUG(Printf("iImportFixupTable=0x%08x", iImportFixupTable)); sl@0: if (iImportFixupTable == 0) sl@0: return KErrNoMemory; sl@0: sl@0: // byte offsets of pages into the table are written as 32-bit words at sl@0: // the start of the table sl@0: sl@0: TUint32 lastPage = 0; sl@0: // byte index of first 48-bit entry in the table, after sentinel index sl@0: iImportFixupTable[0] = (pageCount + 1) * sizeof(TUint32);; sl@0: sl@0: // location to which 48-bit imports are written sl@0: TUint16* importOffset = (TUint16*)(iImportFixupTable + pageCount + 1); sl@0: sl@0: // location from where 64-bit pairs are read sl@0: const TUint64* avEnd = iFixups + iFixupCount; sl@0: sl@0: for (const TUint64* avPtr = iFixups; avPtr < avEnd; ++avPtr) sl@0: { sl@0: TUint64 addr_val = *avPtr; sl@0: TUint32 addr = I64HIGH(addr_val) - iCodeLoadAddress; sl@0: TUint32 page = addr >> 12; sl@0: if (page > lastPage) sl@0: { sl@0: // calculate new start index for current page sl@0: TUint32 newStart = TUint32(importOffset) - TUint32(iImportFixupTable); sl@0: sl@0: __IF_DEBUG(Printf("page=%d, lastPage=%d, newStart=0x%08x", page, lastPage, newStart)); sl@0: sl@0: // mark intermediate pages as zero-length, starting and ending at sl@0: // current offset sl@0: while (++lastPage <= page) sl@0: iImportFixupTable[lastPage] = newStart; sl@0: --lastPage; sl@0: } sl@0: sl@0: TUint16 offsetIntoPage; sl@0: offsetIntoPage = (addr & KPageOffsetMask); sl@0: *importOffset++ = offsetIntoPage; sl@0: sl@0: TUint32 val = I64LOW(addr_val); sl@0: *importOffset++ = val; // low halfword stored first (YYYY) sl@0: *importOffset++ = val >> 16; // high halfword stored second (ZZZZ) sl@0: } sl@0: sl@0: // sentinel value marks end of table sl@0: while (++lastPage <= pageCount) sl@0: iImportFixupTable[lastPage] = iImportFixupTableSize; sl@0: sl@0: __IF_DEBUG(Printf("processed table (size=%d,pageCount=%d)", iImportFixupTableSize, pageCount)); sl@0: sl@0: #ifdef _DEBUG sl@0: // dump the import fixup table if loader tracing enabled sl@0: const TUint16* table16 = (const TUint16*)iImportFixupTable; sl@0: const TInt halfWordsInTable = iImportFixupTableSize / 2; sl@0: for (TInt i = 0; i < halfWordsInTable; i += 4) sl@0: { sl@0: __IF_DEBUG(Printf( sl@0: "%04x: %04x %04x %04x %04x", sl@0: i * 2, table16[i+0], table16[i+1], table16[i+2], table16[i+3])); sl@0: } sl@0: #endif sl@0: sl@0: User::Free(iFixups); sl@0: iFixups = 0; sl@0: return KErrNone; sl@0: } sl@0: sl@0: sl@0: TInt GetModuleInfo(RLdrReq& aReq) sl@0: // sl@0: // Read capabilities from file found sl@0: // sl@0: { sl@0: __IF_DEBUG(Printf("ReadModuleInfo %S",aReq.iFileName)); sl@0: TFileNameInfo& fi = aReq.iFileNameInfo; sl@0: RImageFinder finder; sl@0: TInt r = finder.Set(aReq); sl@0: if (r == KErrNone) sl@0: { sl@0: finder.iFindExact = ETrue; sl@0: sl@0: r = KErrNotSupported; sl@0: sl@0: // must specify a fully qualified name sl@0: if (fi.DriveLen() && fi.PathLen()) sl@0: { sl@0: if (fi.VerLen()) sl@0: aReq.iRequestedVersion = fi.iVersion; sl@0: else sl@0: aReq.iRequestedVersion = KModuleVersionWild; sl@0: r = finder.Search(); sl@0: if (r == KErrNone) sl@0: { sl@0: RLibrary::TInfo 
ret_info; sl@0: memclr(&ret_info,sizeof(ret_info)); sl@0: ret_info.iModuleVersion = finder.iNew.iModuleVersion; sl@0: ret_info.iUids = *(const TUidType*)finder.iNew.iUid; sl@0: *(SSecurityInfo*)&ret_info.iSecurityInfo = finder.iNew.iS; sl@0: TPckgC ret_pckg(ret_info); sl@0: r = aReq.iMsg->Write(2, ret_pckg); sl@0: } sl@0: } sl@0: } sl@0: finder.Close(); sl@0: return r; sl@0: } sl@0: sl@0: TInt GetInfoFromHeader(const RLoaderMsg& aMsg) sl@0: { sl@0: TInt r; sl@0: sl@0: // Get size of header supplied by client sl@0: TInt size; sl@0: size = aMsg.GetDesLength(0); sl@0: if(size<0) sl@0: return size; sl@0: if(size>RLibrary::KRequiredImageHeaderSize) sl@0: size = RLibrary::KRequiredImageHeaderSize; sl@0: if((TUint)sizeTotalSize()>size) sl@0: r = KErrUnderflow; sl@0: else sl@0: { sl@0: TUint32 uncompressedSize; sl@0: r = header->ValidateHeader(-1,uncompressedSize); sl@0: } sl@0: if(r==KErrNone) sl@0: { sl@0: // Get info sl@0: RLibrary::TInfoV2 ret_info; sl@0: memclr(&ret_info,sizeof(ret_info)); sl@0: ret_info.iModuleVersion = header->ModuleVersion(); sl@0: ret_info.iUids = (TUidType&)header->iUid1; sl@0: header->GetSecurityInfo((SSecurityInfo&)ret_info.iSecurityInfo); sl@0: ret_info.iHardwareFloatingPoint = (header->iFlags & KImageHWFloatMask) >> KImageHWFloatShift; sl@0: sl@0: ret_info.iDebugAttributes = 0; // default sl@0: if (header->iFlags & KImageDebuggable) sl@0: ret_info.iDebugAttributes |= RLibrary::TInfoV2::EDebugAllowed; sl@0: sl@0: TPckg ret_pckg(ret_info); sl@0: TInt max = aMsg.GetDesMaxLength(1); sl@0: if (ret_pckg.Length() > max) sl@0: ret_pckg.SetLength(max); sl@0: r = aMsg.Write(1, ret_pckg); sl@0: } sl@0: } sl@0: sl@0: delete[] data; sl@0: return r; sl@0: } sl@0: sl@0: #if defined(_DEBUG) || defined(_DEBUG_RELEASE) sl@0: void memory_dump(const TAny* a, TUint l) sl@0: { sl@0: TBuf8<80> buf; sl@0: const TUint8* s = (const TUint8*)a; sl@0: TInt n=0; sl@0: while (l) sl@0: { sl@0: buf.Append(' '); sl@0: buf.AppendNumFixedWidth(*s++, EHex, 2); sl@0: --l; sl@0: ++n; sl@0: if (l==0 || n==16) sl@0: { sl@0: RDebug::Printf((const char*)buf.PtrZ()); sl@0: buf.Zero(); sl@0: n=0; sl@0: } sl@0: } sl@0: } sl@0: sl@0: void RImageFinder::Dump(const char* aTitle, TInt aR) sl@0: { sl@0: RDebug::Printf(aTitle); sl@0: RDebug::Printf("r=%d",aR); sl@0: if (iExisting) sl@0: { sl@0: RDebug::Printf("Existing image found"); sl@0: RDebug::Printf("Filename=%S Attr=%08x", &iExisting->iFileName, iExisting->iAttr); sl@0: RDebug::Printf("SID %08x Caps %08x %08x", iExisting->iS.iSecureId, iExisting->iS.iCaps[1], iExisting->iS.iCaps[0]); sl@0: const TUint32* uid = (const TUint32*)&iExisting->iUids; sl@0: RDebug::Printf("UIDs %08x %08x %08x VER %08x", uid[0], uid[1], uid[2], iExisting->iModuleVersion); sl@0: RDebug::Printf("Rom %08x", iExisting->iRomImageHeader); sl@0: } sl@0: else if (iNewValid) sl@0: { sl@0: RDebug::Printf("New image found"); sl@0: RDebug::Printf("Filename=%S Attr=%08x", &iNewFileName, iNew.iAttr); sl@0: RDebug::Printf("SID %08x Caps %08x %08x", iNew.iS.iSecureId, iNew.iS.iCaps[1], iNew.iS.iCaps[0]); sl@0: const TUint32* uid = (const TUint32*)iNew.iUid; sl@0: RDebug::Printf("UIDs %08x %08x %08x VER %08x", uid[0], uid[1], uid[2], iNew.iModuleVersion); sl@0: RDebug::Printf("Rom %08x", iNew.iRomImageHeader); sl@0: } sl@0: else sl@0: { sl@0: RDebug::Printf("No suitable image found"); sl@0: RDebug::Printf("#NM=%d #UidFail=%d #CapFail=%d #MajVFail=%d #ImpFail=%d", iNameMatches, iUidFail, iCapFail, iMajorVersionFail, iImportFail); sl@0: } sl@0: } sl@0: sl@0: void DumpImageHeader(const E32ImageHeader* a) 
sl@0: { sl@0: RDebug::Printf("E32ImageHeader at %08x :", a); sl@0: TUint abi = a->ABI(); sl@0: TUint hdrfmt = a->HeaderFormat(); sl@0: TUint impfmt = a->ImportFormat(); sl@0: TUint eptfmt = a->EntryPointFormat(); sl@0: RDebug::Printf("Header format %d", hdrfmt>>KImageHdrFmtShift); sl@0: RDebug::Printf("Import format %d", impfmt>>KImageImpFmtShift); sl@0: RDebug::Printf("EntryPoint format %d", eptfmt>>KImageEptShift); sl@0: RDebug::Printf("ABI %d", abi>>KImageABIShift); sl@0: RDebug::Printf("UIDs %08x %08x %08x (%08x)", a->iUid1, a->iUid2, a->iUid3, a->iUidChecksum); sl@0: RDebug::Printf("Header CRC %08x", a->iHeaderCrc); sl@0: RDebug::Printf("Signature %08x", a->iSignature); sl@0: RDebug::Printf("CPU %08x", (TUint)a->CpuIdentifier()); sl@0: RDebug::Printf("ModuleVersion %08x", a->ModuleVersion()); sl@0: RDebug::Printf("Compression Type %08x", a->CompressionType()); sl@0: RDebug::Printf("Tools Version %d.%02d(%d)", a->iToolsVersion.iMajor, a->iToolsVersion.iMinor, a->iToolsVersion.iBuild); sl@0: RDebug::Printf("Flags %08x", a->iFlags); sl@0: RDebug::Printf("Code Size %08x", a->iCodeSize); sl@0: RDebug::Printf("Text Size %08x", a->iTextSize); sl@0: RDebug::Printf("Data Size %08x", a->iDataSize); sl@0: RDebug::Printf("BSS Size %08x", a->iBssSize); sl@0: RDebug::Printf("Stack Size %08x", a->iStackSize); sl@0: RDebug::Printf("HeapSizeMin %08x", a->iHeapSizeMin); sl@0: RDebug::Printf("HeapSizeMax %08x", a->iHeapSizeMax); sl@0: RDebug::Printf("iEntryPoint %08x", a->iEntryPoint); sl@0: RDebug::Printf("iCodeBase %08x", a->iCodeBase); sl@0: RDebug::Printf("iDataBase %08x", a->iDataBase); sl@0: RDebug::Printf("DLL Ref Table Count %d", a->iDllRefTableCount); sl@0: RDebug::Printf("Export Dir Count %d", a->iExportDirCount); sl@0: RDebug::Printf("Code Offset %08x", a->iCodeOffset); sl@0: RDebug::Printf("Data Offset %08x", a->iDataOffset); sl@0: RDebug::Printf("Code Reloc Offset %08x", a->iCodeRelocOffset); sl@0: RDebug::Printf("Data Reloc Offset %08x", a->iDataRelocOffset); sl@0: RDebug::Printf("Import Offset %08x", a->iImportOffset); sl@0: RDebug::Printf("Export Dir Offset %08x", a->iExportDirOffset); sl@0: RDebug::Printf("Priority %d", (TUint)a->ProcessPriority()); sl@0: // KImageHdrFmt_J sl@0: RDebug::Printf("iUncompressedSize %08x", ((E32ImageHeaderComp*)a)->iUncompressedSize); sl@0: // KImageHdrFmt_V sl@0: E32ImageHeaderV* v = (E32ImageHeaderV*)a; sl@0: RDebug::Printf("SID %08x VID %08x CAP %08x %08x", v->iS.iSecureId, v->iS.iVendorId, v->iS.iCaps[1], v->iS.iCaps[0]); sl@0: RDebug::Printf("iExportDescType %02x", v->iExportDescType); sl@0: RDebug::Printf("iExportDescSize %04x", v->iExportDescSize); sl@0: if (v->iExportDescSize) sl@0: memory_dump(v->iExportDesc, v->iExportDescSize); sl@0: } sl@0: #endif sl@0: