From 1c5d30e757f3cf191b2f96d472bb022d4ce24f56 Mon Sep 17 00:00:00 2001
From: Rama Krishnan Raghupathy
Date: Wed, 31 Aug 2016 16:55:28 -0700
Subject: [PATCH] Implement FixupPrecode for Arm64

---
 src/inc/jithelpers.h        |  2 +-
 src/vm/arm64/asmconstants.h | 13 +++++
 src/vm/arm64/asmhelpers.asm | 20 +++++++-
 src/vm/arm64/cgencpu.h      | 60 ++++++++++++++++------
 src/vm/arm64/stubs.cpp      | 99 ++++++++++++++++++++++++++++++++++---
 src/vm/precode.cpp          |  2 +-
 6 files changed, 170 insertions(+), 26 deletions(-)

diff --git a/src/inc/jithelpers.h b/src/inc/jithelpers.h
index 7441774c9643..8a719927e5f5 100644
--- a/src/inc/jithelpers.h
+++ b/src/inc/jithelpers.h
@@ -315,7 +315,7 @@
     JITHELPER(CORINFO_HELP_EE_PRESTUB, ThePreStub, CORINFO_HELP_SIG_NO_ALIGN_STUB)
 
-#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) || defined(_TARGET_ARM_)
+#if defined(HAS_FIXUP_PRECODE)
     JITHELPER(CORINFO_HELP_EE_PRECODE_FIXUP, PrecodeFixupThunk, CORINFO_HELP_SIG_NO_ALIGN_STUB)
 #else
     JITHELPER(CORINFO_HELP_EE_PRECODE_FIXUP, NULL, CORINFO_HELP_SIG_NO_ALIGN_STUB)
diff --git a/src/vm/arm64/asmconstants.h b/src/vm/arm64/asmconstants.h
index 87751d74928c..b0300ca32492 100644
--- a/src/vm/arm64/asmconstants.h
+++ b/src/vm/arm64/asmconstants.h
@@ -147,6 +147,19 @@ ASMCONSTANTS_C_ASSERT(CONTEXT_Pc == offsetof(T_CONTEXT,Pc))
 ASMCONSTANTS_C_ASSERT(SIZEOF__FaultingExceptionFrame == sizeof(FaultingExceptionFrame));
 ASMCONSTANTS_C_ASSERT(FaultingExceptionFrame__m_fFilterExecuted == offsetof(FaultingExceptionFrame, m_fFilterExecuted));
 
+#define SIZEOF__FixupPrecode 24
+#define Offset_PrecodeChunkIndex 15
+#define Offset_MethodDescChunkIndex 14
+#define MethodDesc_ALIGNMENT_SHIFT 3
+#define FixupPrecode_ALIGNMENT_SHIFT_1 3
+#define FixupPrecode_ALIGNMENT_SHIFT_2 4
+
+ASMCONSTANTS_C_ASSERT(SIZEOF__FixupPrecode == sizeof(FixupPrecode));
+ASMCONSTANTS_C_ASSERT(Offset_PrecodeChunkIndex == offsetof(FixupPrecode, m_PrecodeChunkIndex));
+ASMCONSTANTS_C_ASSERT(Offset_MethodDescChunkIndex == offsetof(FixupPrecode, m_MethodDescChunkIndex));
+ASMCONSTANTS_C_ASSERT(MethodDesc_ALIGNMENT_SHIFT == MethodDesc::ALIGNMENT_SHIFT);
+ASMCONSTANTS_C_ASSERT((1<<FixupPrecode_ALIGNMENT_SHIFT_1) + (1<<FixupPrecode_ALIGNMENT_SHIFT_2) == SIZEOF__FixupPrecode);
diff --git a/src/vm/arm64/cgencpu.h b/src/vm/arm64/cgencpu.h
--- a/src/vm/arm64/cgencpu.h
+++ b/src/vm/arm64/cgencpu.h
         return dac_cast<TADDR>(this) + (m_PrecodeChunkIndex + 1) * sizeof(FixupPrecode);
     }
 
     TADDR GetMethodDesc();
 
     PCODE GetTarget()
     {
-        _ASSERTE(!"ARM64:NYI");
-        return NULL;
+        LIMITED_METHOD_DAC_CONTRACT;
+        return m_pTarget;
     }
 
     BOOL SetTargetInterlocked(TADDR target, TADDR expected)
     {
-        _ASSERTE(!"ARM64:NYI");
-        return NULL;
+        CONTRACTL
+        {
+            THROWS;
+            GC_TRIGGERS;
+        }
+        CONTRACTL_END;
+
+        EnsureWritableExecutablePages(&m_pTarget);
+        return (TADDR)InterlockedCompareExchange64(
+            (LONGLONG*)&m_pTarget, (TADDR)target, (TADDR)expected) == expected;
     }
 
     static BOOL IsFixupPrecodeByASM(PCODE addr)
     {
-        _ASSERTE(!"ARM64:NYI");
-        return NULL;
+        PTR_DWORD pInstr = dac_cast<PTR_DWORD>(PCODEToPINSTR(addr));
+        return
+            (pInstr[0] == 0x1000000C) &&
+            (pInstr[1] == 0x5800006B) &&
+            (pInstr[2] == 0xD61F0160);
     }
 
 #ifdef FEATURE_PREJIT
diff --git a/src/vm/arm64/stubs.cpp b/src/vm/arm64/stubs.cpp
index e95bb0d06b5d..9c9b6a8a6816 100644
--- a/src/vm/arm64/stubs.cpp
+++ b/src/vm/arm64/stubs.cpp
@@ -494,14 +494,22 @@ void HelperMethodFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
 
 TADDR FixupPrecode::GetMethodDesc()
 {
-    _ASSERTE(!"ARM64:NYI");
-    return NULL;
+    LIMITED_METHOD_DAC_CONTRACT;
+
+    // This lookup is also manually inlined in PrecodeFixupThunk assembly code
+    TADDR base = *PTR_TADDR(GetBase());
+    if (base == NULL)
+        return NULL;
+    return base + (m_MethodDescChunkIndex * MethodDesc::ALIGNMENT);
 }
 
 #ifdef DACCESS_COMPILE
 void FixupPrecode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
 {
-    _ASSERTE(!"ARM64:NYI");
+    SUPPORTS_DAC;
+    DacEnumMemoryRegion(dac_cast<TADDR>(this), sizeof(FixupPrecode));
+
+    DacEnumMemoryRegion(GetBase(), sizeof(TADDR));
 }
 #endif // DACCESS_COMPILE
@@ -574,19 +582,78 @@ void NDirectImportPrecode::Fixup(DataImage *image)
 
 void FixupPrecode::Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator, int iMethodDescChunkIndex /*=0*/, int iPrecodeChunkIndex /*=0*/)
 {
-    _ASSERTE(!"ARM64:NYI");
+    WRAPPER_NO_CONTRACT;
+
+    InitCommon();
+
+    // Initialize chunk indices only if they are not initialized yet. This is necessary to make MethodDesc::Reset work.
+    if (m_PrecodeChunkIndex == 0)
+    {
+        _ASSERTE(FitsInU1(iPrecodeChunkIndex));
+        m_PrecodeChunkIndex = static_cast<BYTE>(iPrecodeChunkIndex);
+    }
+
+    if (iMethodDescChunkIndex != -1)
+    {
+        if (m_MethodDescChunkIndex == 0)
+        {
+            _ASSERTE(FitsInU1(iMethodDescChunkIndex));
+            m_MethodDescChunkIndex = static_cast<BYTE>(iMethodDescChunkIndex);
+        }
+
+        if (*(void**)GetBase() == NULL)
+            *(void**)GetBase() = (BYTE*)pMD - (iMethodDescChunkIndex * MethodDesc::ALIGNMENT);
+    }
+
+    _ASSERTE(GetMethodDesc() == (TADDR)pMD);
+
+    if (pLoaderAllocator != NULL)
+    {
+        m_pTarget = GetEEFuncEntryPoint(PrecodeFixupThunk);
+    }
 }
 
 #ifdef FEATURE_NATIVE_IMAGE_GENERATION
 // Partial initialization. Used to save regrouped chunks.
 void FixupPrecode::InitForSave(int iPrecodeChunkIndex)
 {
-    _ASSERTE(!"ARM64:NYI");
+    STANDARD_VM_CONTRACT;
+
+    InitCommon();
+
+    _ASSERTE(FitsInU1(iPrecodeChunkIndex));
+    m_PrecodeChunkIndex = static_cast<BYTE>(iPrecodeChunkIndex);
+    // The rest is initialized in code:FixupPrecode::Fixup
 }
 
 void FixupPrecode::Fixup(DataImage *image, MethodDesc * pMD)
 {
-    _ASSERTE(!"ARM64:NYI");
+    STANDARD_VM_CONTRACT;
+
+    // Note that GetMethodDesc() does not return the correct value because of
+    // regrouping of MethodDescs into hot and cold blocks. That's why the caller
+    // has to supply the actual MethodDesc
+
+    SSIZE_T mdChunkOffset;
+    ZapNode * pMDChunkNode = image->GetNodeForStructure(pMD, &mdChunkOffset);
+    ZapNode * pHelperThunk = image->GetHelperThunk(CORINFO_HELP_EE_PRECODE_FIXUP);
+
+    image->FixupFieldToNode(this, offsetof(FixupPrecode, m_pTarget), pHelperThunk);
+
+    // Set the actual chunk index
+    FixupPrecode * pNewPrecode = (FixupPrecode *)image->GetImagePointer(this);
+
+    size_t mdOffset = mdChunkOffset - sizeof(MethodDescChunk);
+    size_t chunkIndex = mdOffset / MethodDesc::ALIGNMENT;
+    _ASSERTE(FitsInU1(chunkIndex));
+    pNewPrecode->m_MethodDescChunkIndex = (BYTE)chunkIndex;
+
+    // Fixup the base of MethodDescChunk
+    if (m_PrecodeChunkIndex == 0)
+    {
+        image->FixupFieldToNode(this, (BYTE *)GetBase() - (BYTE *)this,
+            pMDChunkNode, sizeof(MethodDescChunk));
+    }
 }
 #endif // FEATURE_NATIVE_IMAGE_GENERATION
@@ -618,7 +685,20 @@ BOOL DoesSlotCallPrestub(PCODE pCode)
 {
     PTR_DWORD pInstr = dac_cast<PTR_DWORD>(PCODEToPINSTR(pCode));
 
-    // ARM64TODO: Check for FixupPrecode
+    //FixupPrecode
+#if defined(HAS_FIXUP_PRECODE)
+    if (FixupPrecode::IsFixupPrecodeByASM(pCode))
+    {
+        PCODE pTarget = dac_cast<PTR_FixupPrecode>(pInstr)->m_pTarget;
+
+        if (isJump(pTarget))
+        {
+            pTarget = decodeJump(pTarget);
+        }
+
+        return pTarget == (TADDR)PrecodeFixupThunk;
+    }
+#endif
 
     // StubPrecode
     if (pInstr[0] == 0x10000089 && // adr x9, #16
@@ -627,7 +707,10 @@ BOOL DoesSlotCallPrestub(PCODE pCode)
     {
         PCODE pTarget = dac_cast<PTR_StubPrecode>(pInstr)->m_pTarget;
 
-        // ARM64TODO: implement for NGen case
+        if (isJump(pTarget))
+        {
+            pTarget = decodeJump(pTarget);
+        }
 
         return pTarget == GetPreStubEntryPoint();
     }
diff --git a/src/vm/precode.cpp b/src/vm/precode.cpp
index 551ef1358e9b..1934eb10b435 100644
--- a/src/vm/precode.cpp
+++ b/src/vm/precode.cpp
@@ -433,7 +433,7 @@ BOOL Precode::SetTargetInterlocked(PCODE target)
     // SetTargetInterlocked does not modify code on ARM so the flush instruction cache is
     // not necessary.
     //
-#if !defined(_TARGET_ARM_)
+#if !defined(_TARGET_ARM_) && !defined(_TARGET_ARM64_)
     if (ret) {
         FlushInstructionCache(GetCurrentProcess(),this,SizeOf());
     }
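
Note (illustration, not part of the patch): the chunk-index arithmetic that FixupPrecode::GetMethodDesc() performs above, and that the PrecodeFixupThunk assembly re-implements inline, can be sketched in portable C++. This is a minimal sketch assuming a 64-bit target; the names Precode24, ChunkBaseSlot, MethodDescOf, and kMethodDescAlignment are hypothetical stand-ins, not the runtime's types. The layout assumed is the one encoded by the asmconstants.h values: a 24-byte precode with the two byte-sized indices at offsets 14 and 15, precodes laid out back to back in a chunk, and the pointer-sized slot immediately after the chunk holding the address of the chunk's first MethodDesc (the slot that FixupPrecode::Init writes through GetBase()).

// Illustration only -- simplified stand-ins, not the runtime's real types.
#include <cstddef>
#include <cstdint>

constexpr std::size_t kMethodDescAlignment = 8;  // 1 << MethodDesc_ALIGNMENT_SHIFT

struct Precode24                                 // mirrors SIZEOF__FixupPrecode == 24
{
    std::uint32_t  code[3];              // instruction words checked by IsFixupPrecodeByASM
    std::uint8_t   padding[2];
    std::uint8_t   methodDescChunkIndex; // offset 14 == Offset_MethodDescChunkIndex
    std::uint8_t   precodeChunkIndex;    // offset 15 == Offset_PrecodeChunkIndex
    std::uintptr_t target;               // m_pTarget, patched by SetTargetInterlocked
};
static_assert(sizeof(Precode24) == 24, "assumes a 64-bit target");
static_assert(offsetof(Precode24, methodDescChunkIndex) == 14, "");
static_assert(offsetof(Precode24, precodeChunkIndex) == 15, "");

// Equivalent of GetBase(): step over the remaining precodes in this chunk to
// the slot that stores the address of the chunk's first MethodDesc.
inline std::uintptr_t ChunkBaseSlot(const Precode24* p)
{
    return reinterpret_cast<std::uintptr_t>(p)
         + (p->precodeChunkIndex + 1) * sizeof(Precode24);
}

// Equivalent of GetMethodDesc(): load the base slot, then index into the
// MethodDescChunk by methodDescChunkIndex * alignment (roughly what the
// PrecodeFixupThunk assembly computes before handing off to the prestub).
inline std::uintptr_t MethodDescOf(const Precode24* p)
{
    std::uintptr_t base = *reinterpret_cast<const std::uintptr_t*>(ChunkBaseSlot(p));
    return base ? base + p->methodDescChunkIndex * kMethodDescAlignment : 0;
}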