// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// precode.h
//

//
// Stub that runs before the actual native code

#ifndef __PRECODE_H__
#define __PRECODE_H__

typedef DPTR(class Precode) PTR_Precode;

#ifndef PRECODE_ALIGNMENT
#define PRECODE_ALIGNMENT sizeof(void*)
#endif

enum PrecodeType {
    PRECODE_INVALID         = InvalidPrecode::Type,
    PRECODE_STUB            = StubPrecode::Type,
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    PRECODE_NDIRECT_IMPORT  = NDirectImportPrecode::Type,
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    PRECODE_FIXUP           = FixupPrecode::Type,
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    PRECODE_THISPTR_RETBUF  = ThisPtrRetBufPrecode::Type,
#endif // HAS_THISPTR_RETBUF_PRECODE
};
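
// Which precode kinds exist depends on the target: the HAS_* defines above
// select them, and each Type value is supplied by the corresponding
// target-specific precode struct (StubPrecode, FixupPrecode, ...).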

// For more details see file:../../doc/BookOfTheRuntime/ClassLoader/MethodDescDesign.doc
class Precode {
#ifdef DACCESS_COMPILE
    friend class NativeImageDumper;
#endif

    // Raw bytes of the precode. Target-specific views of these bytes are
    // obtained via the As*Precode() casts below.
    BYTE m_data[SIZEOF_PRECODE_BASE];

    StubPrecode* AsStubPrecode()
    {
        LIMITED_METHOD_CONTRACT;
        SUPPORTS_DAC;

        return dac_cast<PTR_StubPrecode>(this);
    }

#ifdef HAS_NDIRECT_IMPORT_PRECODE
public:
    // Fake precodes have to be exposed
    NDirectImportPrecode* AsNDirectImportPrecode()
    {
        LIMITED_METHOD_CONTRACT;
        SUPPORTS_DAC;

        return dac_cast<PTR_NDirectImportPrecode>(this);
    }

private:
#endif // HAS_NDIRECT_IMPORT_PRECODE

#ifdef HAS_FIXUP_PRECODE
    FixupPrecode* AsFixupPrecode()
    {
        LIMITED_METHOD_CONTRACT;
        SUPPORTS_DAC;

        return dac_cast<PTR_FixupPrecode>(this);
    }
#endif // HAS_FIXUP_PRECODE

#ifdef HAS_THISPTR_RETBUF_PRECODE
    ThisPtrRetBufPrecode* AsThisPtrRetBufPrecode()
    {
        LIMITED_METHOD_CONTRACT;
        SUPPORTS_DAC;
        return dac_cast<PTR_ThisPtrRetBufPrecode>(this);
    }
#endif // HAS_THISPTR_RETBUF_PRECODE

    TADDR GetStart()
    {
        SUPPORTS_DAC;
        LIMITED_METHOD_CONTRACT;
        return dac_cast<TADDR>(this);
    }

    static void UnexpectedPrecodeType(const char * originator, PrecodeType precodeType)
    {
        SUPPORTS_DAC;
#ifdef DACCESS_COMPILE
        DacError(E_UNEXPECTED);
#else
#ifdef _PREFIX_
        // We only use __UNREACHABLE here since otherwise it would be a hint
        // for the compiler to fold this case with the other cases in a switch
        // statement. However, we would rather have this case be a separate
        // code path so that we will get a clean crash sooner.
        __UNREACHABLE("Unexpected precode type");
#endif
        CONSISTENCY_CHECK_MSGF(false, ("%s: Unexpected precode type: 0x%02x.", originator, precodeType));
#endif
    }

public:
    PrecodeType GetType()
    {
        LIMITED_METHOD_CONTRACT;
        SUPPORTS_DAC;

#ifdef OFFSETOF_PRECODE_TYPE

        BYTE type = m_data[OFFSETOF_PRECODE_TYPE];
#ifdef _TARGET_X86_
        if (type == X86_INSTR_MOV_RM_R)
            type = m_data[OFFSETOF_PRECODE_TYPE_MOV_RM_R];
#endif // _TARGET_X86_

#ifdef _TARGET_AMD64_
        if (type == (X86_INSTR_MOV_R10_IMM64 & 0xFF))
            type = m_data[OFFSETOF_PRECODE_TYPE_MOV_R10];
        else if ((type == (X86_INSTR_CALL_REL32 & 0xFF)) || (type == (X86_INSTR_JMP_REL32 & 0xFF)))
            type = m_data[OFFSETOF_PRECODE_TYPE_CALL_OR_JMP];
#endif // _TARGET_AMD64_

#if defined(HAS_FIXUP_PRECODE) && (defined(_TARGET_X86_) || defined(_TARGET_AMD64_))
        if (type == FixupPrecode::TypePrestub)
            type = FixupPrecode::Type;
#endif

#ifdef _TARGET_ARM_
        static_assert_no_msg(offsetof(StubPrecode, m_pTarget) == offsetof(NDirectImportPrecode, m_pMethodDesc));
        // If the target of the precode does not have the Thumb bit set, it must be an NDirectImportPrecode.
        if (type == StubPrecode::Type && ((AsStubPrecode()->m_pTarget & THUMB_CODE) == 0))
            type = NDirectImportPrecode::Type;
#endif

        return (PrecodeType)type;

#else // OFFSETOF_PRECODE_TYPE
        return PRECODE_STUB;
#endif // OFFSETOF_PRECODE_TYPE
    }
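
    // Illustrative only: callers typically dispatch on the decoded type much
    // as the runtime's own switch statements do; 'pPrecode' is hypothetical.
    //
    //   PrecodeType t = pPrecode->GetType();
    //   switch (t)
    //   {
    //   case PRECODE_STUB:
    //       // regular stub precode
    //       break;
    //   default:
    //       UnexpectedPrecodeType("ExampleCaller", t);
    //       break;
    //   }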

    static BOOL IsValidType(PrecodeType t);

    static int AlignOf(PrecodeType t)
    {
        SUPPORTS_DAC;
        int align = PRECODE_ALIGNMENT;

#if defined(_TARGET_X86_) && defined(HAS_FIXUP_PRECODE)
        // Fixup precodes have to be aligned to allow atomic patching
        if (t == PRECODE_FIXUP)
            align = 8;
#endif // _TARGET_X86_ && HAS_FIXUP_PRECODE

#if defined(_TARGET_ARM_) && defined(HAS_COMPACT_ENTRYPOINTS)
        // Precodes have to be aligned to allow the fast compact entry point check
        _ASSERTE(align >= sizeof(void*));
#endif // _TARGET_ARM_ && HAS_COMPACT_ENTRYPOINTS

        return align;
    }
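
    // Note: SizeOf and AlignOf combine in the usual way; for example,
    // SizeOfTemporaryEntryPoint below is just ALIGN_UP(SizeOf(t), AlignOf(t)).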

    static SIZE_T SizeOf(PrecodeType t);

    SIZE_T SizeOf()
    {
        WRAPPER_NO_CONTRACT;
        return SizeOf(GetType());
    }

    // Note: This is the immediate target of the precode. It does not follow the jump stub if there is one.
    PCODE GetTarget();

    BOOL IsPointingTo(PCODE target, PCODE addr)
    {
        WRAPPER_NO_CONTRACT;
        SUPPORTS_DAC;

#ifdef CROSSGEN_COMPILE
        // Crossgen does not create jump stubs on AMD64, so always return FALSE
        // here to avoid non-deterministic behavior.
        return FALSE;
#else // CROSSGEN_COMPILE
        if (target == addr)
            return TRUE;

#ifdef _TARGET_AMD64_
        // Handle jump stubs
        if (isJumpRel64(target)) {
            target = decodeJump64(target);
            if (target == addr)
                return TRUE;
        }
#endif // _TARGET_AMD64_

        return FALSE;
#endif // CROSSGEN_COMPILE
    }

    BOOL IsPointingToNativeCode(PCODE pNativeCode)
    {
        WRAPPER_NO_CONTRACT;
        SUPPORTS_DAC;

        return IsPointingTo(GetTarget(), pNativeCode);
    }

    BOOL IsPointingToPrestub(PCODE target);

    BOOL IsPointingToPrestub()
    {
        WRAPPER_NO_CONTRACT;
        return IsPointingToPrestub(GetTarget());
    }

    PCODE GetEntryPoint()
    {
        LIMITED_METHOD_CONTRACT;
        return dac_cast<TADDR>(this) + GetEntryPointOffset();
    }

    static SIZE_T GetEntryPointOffset()
    {
        LIMITED_METHOD_CONTRACT;
#ifdef _TARGET_ARM_
        return THUMB_CODE;
#else
        return 0;
#endif
    }
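
    // Illustrative only: on ARM the entry point differs from the precode start
    // by the Thumb bit, so converting an entry point back to the instruction
    // address goes through PCODEToPINSTR:
    //
    //   PCODE entry = pPrecode->GetEntryPoint();
    //   TADDR instr = PCODEToPINSTR(entry);   // == pPrecode->GetStart()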

    MethodDesc * GetMethodDesc(BOOL fSpeculative = FALSE);
    BOOL IsCorrectMethodDesc(MethodDesc * pMD);

    static Precode* Allocate(PrecodeType t, MethodDesc* pMD,
        LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker);
    void Init(PrecodeType t, MethodDesc* pMD, LoaderAllocator *pLoaderAllocator);

#ifndef DACCESS_COMPILE
    void ResetTargetInterlocked();
    BOOL SetTargetInterlocked(PCODE target, BOOL fOnlyRedirectFromPrestub = TRUE);

    // Reset the precode to point to the prestub
    void Reset();
#endif // DACCESS_COMPILE

    static Precode* GetPrecodeFromEntryPoint(PCODE addr, BOOL fSpeculative = FALSE)
    {
        LIMITED_METHOD_DAC_CONTRACT;

#ifdef DACCESS_COMPILE
        // Always use speculative checks with DAC
        fSpeculative = TRUE;
#endif

        TADDR pInstr = PCODEToPINSTR(addr);

        // Always do the consistency check in debug builds
        if (fSpeculative INDEBUG(|| TRUE))
        {
            if (!IS_ALIGNED(pInstr, PRECODE_ALIGNMENT) || !IsValidType(PTR_Precode(pInstr)->GetType()))
            {
                if (fSpeculative) return NULL;
                _ASSERTE(!"Precode::GetPrecodeFromEntryPoint: Unexpected code in precode");
            }
        }

        Precode* pPrecode = PTR_Precode(pInstr);

        if (!fSpeculative)
        {
            g_IBCLogger.LogMethodPrecodeAccess(pPrecode->GetMethodDesc());
        }

        return pPrecode;
    }
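
    // Illustrative only: a speculative lookup tolerates addresses that are not
    // precodes by returning NULL instead of asserting:
    //
    //   Precode* pPrecode = Precode::GetPrecodeFromEntryPoint(addr, TRUE /* fSpeculative */);
    //   if (pPrecode != NULL)
    //   {
    //       MethodDesc* pMD = pPrecode->GetMethodDesc(TRUE /* fSpeculative */);
    //       ...
    //   }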

    // If addr is a patched fixup precode, returns the address it points to.
    // Otherwise returns NULL.
    static PCODE TryToSkipFixupPrecode(PCODE addr);

    //
    // Precode as temporary entrypoint
    //

    static SIZE_T SizeOfTemporaryEntryPoint(PrecodeType t)
    {
        LIMITED_METHOD_DAC_CONTRACT;
#ifdef HAS_FIXUP_PRECODE_CHUNKS
        // Chunk-allocated fixup precodes are sized via the chunk helpers
        // (GetOffsetOfBase/GetOffset at the end of this file), not here.
        _ASSERTE(t != PRECODE_FIXUP);
#endif
        return ALIGN_UP(SizeOf(t), AlignOf(t));
    }

    static Precode * GetPrecodeForTemporaryEntryPoint(TADDR temporaryEntryPoints, int index);

    static SIZE_T SizeOfTemporaryEntryPoints(PrecodeType t, bool preallocateJumpStubs, int count);
    static SIZE_T SizeOfTemporaryEntryPoints(TADDR temporaryEntryPoints, int count);

    static TADDR AllocateTemporaryEntryPoints(MethodDescChunk* pChunk,
        LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker);

#ifdef FEATURE_PREJIT
    //
    // NGEN stuff
    //

    void Save(DataImage *image);
    void Fixup(DataImage *image, MethodDesc * pMD);

    BOOL IsPrebound(DataImage *image);

    // Helper class for saving precodes in chunks
    class SaveChunk
    {
#ifdef HAS_FIXUP_PRECODE_CHUNKS
        // Array of methods to be saved in the method desc chunk
        InlineSArray<MethodDesc *, 20> m_rgPendingChunk;
#endif // HAS_FIXUP_PRECODE_CHUNKS

    public:
        void Save(DataImage * image, MethodDesc * pMD);
        void Flush(DataImage * image);
    };
#endif // FEATURE_PREJIT

#ifdef DACCESS_COMPILE
    void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
#endif

#ifdef HAS_FIXUP_PRECODE_CHUNKS
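    // Fixup precodes are allocated in chunks. As the helpers below imply, the
    // precodes within a chunk are laid out in reverse index order, with a
    // shared base stored immediately after the last precode (GetOffsetOfBase
    // returns its offset).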
    static DWORD GetOffsetOfBase(PrecodeType t, DWORD count)
    {
        assert(t == PRECODE_FIXUP);
        return (DWORD)(count * sizeof(FixupPrecode));
    }

    static DWORD GetOffset(PrecodeType t, DWORD index, DWORD count)
    {
        assert(t == PRECODE_FIXUP);
        assert(index < count);
        return (DWORD)((count - index - 1) * sizeof(FixupPrecode));
    }
#endif
};

#endif // __PRECODE_H__