| 1 | // Licensed to the .NET Foundation under one or more agreements. |
| 2 | // The .NET Foundation licenses this file to you under the MIT license. |
| 3 | // See the LICENSE file in the project root for more information. |
| 4 | // =========================================================================== |
| 5 | // File: Prestub.cpp |
| 6 | // |
| 7 | |
| 8 | // =========================================================================== |
| 9 | // This file contains the implementation for creating and using prestubs |
| 10 | // =========================================================================== |
| 11 | // |
| 12 | |
| 13 | |
| 14 | #include "common.h" |
| 15 | #include "vars.hpp" |
| 16 | #include "eeconfig.h" |
| 17 | #include "dllimport.h" |
| 18 | #include "comdelegate.h" |
| 19 | #include "dbginterface.h" |
| 20 | #include "stubgen.h" |
| 21 | #include "eventtrace.h" |
| 22 | #include "array.h" |
| 23 | #include "compile.h" |
| 24 | #include "ecall.h" |
| 25 | #include "virtualcallstub.h" |
| 26 | |
| 27 | #ifdef FEATURE_PREJIT |
| 28 | #include "compile.h" |
| 29 | #endif |
| 30 | |
| 31 | #ifdef FEATURE_INTERPRETER |
| 32 | #include "interpreter.h" |
| 33 | #endif |
| 34 | |
| 35 | #ifdef FEATURE_COMINTEROP |
| 36 | #include "clrtocomcall.h" |
| 37 | #endif |
| 38 | |
| 39 | #include "mdaassistants.h" |
| 40 | |
| 41 | #ifdef FEATURE_STACK_SAMPLING |
| 42 | #include "stacksampler.h" |
| 43 | #endif |
| 44 | |
| 45 | #ifdef FEATURE_PERFMAP |
| 46 | #include "perfmap.h" |
| 47 | #endif |
| 48 | |
| 49 | #ifdef FEATURE_TIERED_COMPILATION |
| 50 | #include "callcounter.h" |
| 51 | #endif |
| 52 | |
| 53 | #if defined(FEATURE_GDBJIT) |
| 54 | #include "gdbjit.h" |
| 55 | #endif // FEATURE_GDBJIT |
| 56 | |
| 57 | #ifndef DACCESS_COMPILE |
| 58 | |
| 59 | #if defined(FEATURE_JIT_PITCHING) |
| 60 | EXTERN_C void CheckStacksAndPitch(); |
| 61 | EXTERN_C void SavePitchingCandidate(MethodDesc* pMD, ULONG sizeOfCode); |
| 62 | EXTERN_C void DeleteFromPitchingCandidate(MethodDesc* pMD); |
| 63 | EXTERN_C void MarkMethodNotPitchingCandidate(MethodDesc* pMD); |
| 64 | #endif |
| 65 | |
| 66 | EXTERN_C void STDCALL ThePreStubPatch(); |
| 67 | |
| 68 | #if defined(HAVE_GCCOVER) |
| 69 | CrstStatic MethodDesc::m_GCCoverCrst; |
| 70 | |
| 71 | void MethodDesc::Init() |
| 72 | { |
| 73 | m_GCCoverCrst.Init(CrstGCCover); |
| 74 | } |
| 75 | |
| 76 | #endif |
| 77 | |
| 78 | //========================================================================== |
| 79 | |
| 80 | PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BOOL fFullBackPatch) |
| 81 | { |
| 82 | CONTRACTL |
| 83 | { |
| 84 | STANDARD_VM_CHECK; |
| 85 | PRECONDITION(!ContainsGenericVariables()); |
| 86 | PRECONDITION(HasStableEntryPoint()); |
| 87 | PRECONDITION(pMT == GetMethodTable()); |
| 88 | } |
| 89 | CONTRACTL_END; |
| 90 | PCODE pTarget = GetStableEntryPoint(); |
| 91 | |
| 92 | if (!HasTemporaryEntryPoint()) |
| 93 | return pTarget; |
| 94 | |
| 95 | PCODE pExpected = GetTemporaryEntryPoint(); |
| 96 | |
| 97 | if (pExpected == pTarget) |
| 98 | return pTarget; |
| 99 | |
| 100 | // True interface methods are never backpatched |
| 101 | if (pMT->IsInterface() && !IsStatic()) |
| 102 | return pTarget; |
| 103 | |
| 104 | if (fFullBackPatch) |
| 105 | { |
| 106 | FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate(); |
| 107 | if (pFuncPtrStubs != NULL) |
| 108 | { |
| 109 | Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this); |
| 110 | if (pFuncPtrPrecode != NULL) |
| 111 | { |
| 112 | // If there is a funcptr precode to patch, we are done for this round. |
| 113 | if (pFuncPtrPrecode->SetTargetInterlocked(pTarget)) |
| 114 | return pTarget; |
| 115 | } |
| 116 | } |
| 117 | |
| 118 | #ifndef HAS_COMPACT_ENTRYPOINTS |
| 119 | // Patch the fake entrypoint if necessary |
| 120 | Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget); |
#endif // !HAS_COMPACT_ENTRYPOINTS
| 122 | } |
| 123 | |
| 124 | if (HasNonVtableSlot()) |
| 125 | return pTarget; |
| 126 | |
| 127 | BOOL fBackpatched = FALSE; |
| 128 | |
| 129 | #define BACKPATCH(pPatchedMT) \ |
| 130 | do \ |
| 131 | { \ |
| 132 | if (pPatchedMT->GetSlot(dwSlot) == pExpected) \ |
| 133 | { \ |
| 134 | pPatchedMT->SetSlot(dwSlot, pTarget); \ |
| 135 | fBackpatched = TRUE; \ |
| 136 | } \ |
| 137 | } \ |
| 138 | while(0) |
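
// As an illustration (sketch only), BACKPATCH(pDispatchingMT) expands to:
//
//   if (pDispatchingMT->GetSlot(dwSlot) == pExpected)
//   {
//       pDispatchingMT->SetSlot(dwSlot, pTarget);
//       fBackpatched = TRUE;
//   }
//
// i.e. a slot is patched only while it still holds the temporary entrypoint;
// slots that already point elsewhere are left alone.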
| 139 | |
| 140 | // The owning slot has been updated already, so there is no need to backpatch it |
| 141 | _ASSERTE(pMT->GetSlot(GetSlot()) == pTarget); |
| 142 | |
| 143 | if (pDispatchingMT != NULL && pDispatchingMT != pMT) |
| 144 | { |
| 145 | DWORD dwSlot = GetSlot(); |
| 146 | |
| 147 | BACKPATCH(pDispatchingMT); |
| 148 | |
| 149 | if (fFullBackPatch) |
| 150 | { |
| 151 | // |
| 152 | // Backpatch the MethodTable that code:MethodTable::GetRestoredSlot() reads the value from. |
| 153 | // VSD reads the slot value using code:MethodTable::GetRestoredSlot(), and so we need to make sure |
| 154 | // that it returns the stable entrypoint eventually to avoid going through the slow path all the time. |
| 155 | // |
| 156 | MethodTable * pRestoredSlotMT = pDispatchingMT->GetRestoredSlotMT(dwSlot); |
| 157 | |
| 158 | BACKPATCH(pRestoredSlotMT); |
| 159 | } |
| 160 | } |
| 161 | |
| 162 | if (IsMethodImpl()) |
| 163 | { |
| 164 | MethodImpl::Iterator it(this); |
| 165 | while (it.IsValid()) |
| 166 | { |
| 167 | DWORD dwSlot = it.GetSlot(); |
| 168 | |
| 169 | BACKPATCH(pMT); |
| 170 | |
| 171 | if (pDispatchingMT != NULL) |
| 172 | { |
| 173 | BACKPATCH(pDispatchingMT); |
| 174 | } |
| 175 | |
| 176 | it.Next(); |
| 177 | } |
| 178 | } |
| 179 | |
| 180 | if (fFullBackPatch && !fBackpatched && IsDuplicate()) |
| 181 | { |
| 182 | // If this is a duplicate, let's scan the rest of the VTable hunting for other hits. |
| 183 | unsigned numSlots = pMT->GetNumVirtuals(); |
| 184 | for (DWORD dwSlot=0; dwSlot<numSlots; dwSlot++) |
| 185 | { |
| 186 | BACKPATCH(pMT); |
| 187 | |
| 188 | if (pDispatchingMT != NULL) |
| 189 | { |
| 190 | BACKPATCH(pDispatchingMT); |
| 191 | } |
| 192 | } |
| 193 | } |
| 194 | |
| 195 | #undef BACKPATCH |
| 196 | |
| 197 | return pTarget; |
| 198 | } |
| 199 | |
// <TODO> FIX IN BETA 2
//
// g_pNotificationTable is only modified by the DAC, and therefore the
// optimizer can assume that it will always be its default value. This has
// been seen (on IA64 free builds) to eliminate the code in
// DACNotifyCompilationFinished such that DAC notifications are no longer sent.
//
// TODO: fix this in Beta 2
// The RIGHT fix is to make g_pNotificationTable volatile, but currently
// we don't have DAC macros to do that. Additionally, there are a number
// of other places where we should look at DAC definitions to determine
// whether they should also be declared volatile.
//
// For now we just turn off optimization for these functions.
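//
// For reference, the volatile fix would look roughly like this (a purely
// illustrative, hypothetical declaration; the DAC macros do not currently
// support it):
//
//   extern JITNotification* volatile g_pNotificationTable;
//
// which would force the optimizer to re-read the pointer on every access.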
| 214 | #ifdef _MSC_VER |
| 215 | #pragma optimize("", off) |
| 216 | #endif |
| 217 | |
| 218 | void DACNotifyCompilationFinished(MethodDesc *methodDesc, PCODE pCode) |
| 219 | { |
| 220 | CONTRACTL |
| 221 | { |
| 222 | NOTHROW; |
| 223 | GC_NOTRIGGER; |
| 224 | SO_INTOLERANT; |
| 225 | MODE_PREEMPTIVE; |
| 226 | } |
| 227 | CONTRACTL_END; |
| 228 | |
| 229 | // Is the list active? |
| 230 | JITNotifications jn(g_pNotificationTable); |
| 231 | if (jn.IsActive()) |
| 232 | { |
| 233 | // Get Module and mdToken |
| 234 | mdToken t = methodDesc->GetMemberDef(); |
| 235 | Module *modulePtr = methodDesc->GetModule(); |
| 236 | |
| 237 | _ASSERTE(modulePtr); |
| 238 | |
| 239 | // Are we listed? |
| 240 | USHORT jnt = jn.Requested((TADDR) modulePtr, t); |
| 241 | if (jnt & CLRDATA_METHNOTIFY_GENERATED) |
| 242 | { |
            // If so, send the JIT notification
| 244 | DACNotify::DoJITNotification(methodDesc, (TADDR)pCode); |
| 245 | } |
| 246 | } |
| 247 | } |
| 248 | |
| 249 | #ifdef _MSC_VER |
| 250 | #pragma optimize("", on) |
| 251 | #endif |
| 252 | // </TODO> |
| 253 | |
| 254 | PCODE MethodDesc::PrepareInitialCode() |
| 255 | { |
| 256 | STANDARD_VM_CONTRACT; |
| 257 | PrepareCodeConfig config(NativeCodeVersion(this), TRUE, TRUE); |
| 258 | PCODE pCode = PrepareCode(&config); |
| 259 | |
| 260 | #if defined(FEATURE_GDBJIT) && defined(FEATURE_PAL) && !defined(CROSSGEN_COMPILE) |
| 261 | NotifyGdb::MethodPrepared(this); |
| 262 | #endif |
| 263 | |
| 264 | return pCode; |
| 265 | } |
| 266 | |
| 267 | PCODE MethodDesc::PrepareCode(NativeCodeVersion codeVersion) |
| 268 | { |
| 269 | STANDARD_VM_CONTRACT; |
| 270 | |
| 271 | #ifdef FEATURE_CODE_VERSIONING |
| 272 | if (codeVersion.IsDefaultVersion()) |
| 273 | { |
| 274 | #endif |
| 275 | // fast path |
| 276 | PrepareCodeConfig config(codeVersion, TRUE, TRUE); |
| 277 | return PrepareCode(&config); |
| 278 | #ifdef FEATURE_CODE_VERSIONING |
| 279 | } |
| 280 | else |
| 281 | { |
| 282 | // a bit slower path (+1 usec?) |
| 283 | VersionedPrepareCodeConfig config; |
| 284 | { |
| 285 | CodeVersionManager::TableLockHolder lock(GetCodeVersionManager()); |
| 286 | config = VersionedPrepareCodeConfig(codeVersion); |
| 287 | } |
| 288 | config.FinishConfiguration(); |
| 289 | return PrepareCode(&config); |
| 290 | } |
| 291 | #endif |
| 292 | |
| 293 | } |
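
// Illustrative call pattern (hypothetical caller; the default code version
// takes the fast path above):
//
//   PCODE pCode = pMD->PrepareCode(NativeCodeVersion(pMD));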
| 294 | |
| 295 | PCODE MethodDesc::PrepareCode(PrepareCodeConfig* pConfig) |
| 296 | { |
| 297 | STANDARD_VM_CONTRACT; |
| 298 | |
| 299 | // If other kinds of code need multi-versioning we could add more cases here, |
| 300 | // but for now generation of all other code/stubs occurs in other code paths |
| 301 | _ASSERTE(IsIL() || IsNoMetadata()); |
| 302 | return PrepareILBasedCode(pConfig); |
| 303 | } |
| 304 | |
| 305 | PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig) |
| 306 | { |
| 307 | STANDARD_VM_CONTRACT; |
| 308 | PCODE pCode = NULL; |
| 309 | |
| 310 | if (pConfig->MayUsePrecompiledCode()) |
| 311 | { |
| 312 | pCode = GetPrecompiledCode(pConfig); |
| 313 | } |
| 314 | if (pCode == NULL) |
| 315 | { |
| 316 | LOG((LF_CLASSLOADER, LL_INFO1000000, |
| 317 | " In PrepareILBasedCode, calling JitCompileCode\n" )); |
| 318 | pCode = JitCompileCode(pConfig); |
| 319 | } |
| 320 | |
| 321 | // Mark the code as hot in case the method ends up in the native image |
| 322 | g_IBCLogger.LogMethodCodeAccess(this); |
| 323 | |
| 324 | return pCode; |
| 325 | } |
| 326 | |
| 327 | PCODE MethodDesc::GetPrecompiledCode(PrepareCodeConfig* pConfig) |
| 328 | { |
| 329 | STANDARD_VM_CONTRACT; |
| 330 | PCODE pCode = NULL; |
| 331 | |
| 332 | #ifdef FEATURE_PREJIT |
| 333 | pCode = GetPrecompiledNgenCode(pConfig); |
| 334 | #endif |
| 335 | |
| 336 | #ifdef FEATURE_READYTORUN |
| 337 | if (pCode == NULL) |
| 338 | { |
| 339 | pCode = GetPrecompiledR2RCode(pConfig); |
| 340 | if (pCode != NULL) |
| 341 | { |
| 342 | pConfig->SetNativeCode(pCode, &pCode); |
| 343 | } |
| 344 | } |
| 345 | #endif // FEATURE_READYTORUN |
| 346 | |
| 347 | return pCode; |
| 348 | } |
| 349 | |
| 350 | PCODE MethodDesc::GetPrecompiledNgenCode(PrepareCodeConfig* pConfig) |
| 351 | { |
| 352 | STANDARD_VM_CONTRACT; |
| 353 | PCODE pCode = NULL; |
| 354 | |
| 355 | #ifdef FEATURE_PREJIT |
| 356 | pCode = GetPreImplementedCode(); |
| 357 | |
| 358 | #ifdef PROFILING_SUPPORTED |
| 359 | |
    // The pre-existing cache search callbacks aren't implemented as you might expect.
    // Instead of sending a cache search started notification for all methods, we only
    // send it when we already know a pre-compiled version of the method exists. In the
    // NGEN case we also don't send callbacks unless the method triggers the prestub,
    // which excludes a lot of methods. From the profiler's perspective this technique
    // is only reliable/predictable when using profiler-instrumented NGEN images (which
    // virtually no profilers use). As-is, the callback only gives the profiler an
    // opportunity to say whether or not it wants to use the NGen'ed code.
    //
    // Despite those oddities I am leaving this behavior as-is during refactoring
    // because trying to improve it probably offers little value vs. the potential for
    // compat issues and the added complexity of reasoning about how the API behavior
    // changed across runtime releases.
| 372 | if (pCode != NULL) |
| 373 | { |
| 374 | BOOL fShouldSearchCache = TRUE; |
| 375 | { |
| 376 | BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches()); |
| 377 | g_profControlBlock.pProfInterface->JITCachedFunctionSearchStarted((FunctionID)this, &fShouldSearchCache); |
| 378 | END_PIN_PROFILER(); |
| 379 | } |
| 380 | |
| 381 | if (!fShouldSearchCache) |
| 382 | { |
| 383 | SetNativeCodeInterlocked(NULL, pCode); |
| 384 | _ASSERTE(!IsPreImplemented()); |
| 385 | pConfig->SetProfilerRejectedPrecompiledCode(); |
| 386 | pCode = NULL; |
| 387 | } |
| 388 | } |
| 389 | #endif // PROFILING_SUPPORTED |
| 390 | |
| 391 | if (pCode != NULL) |
| 392 | { |
| 393 | LOG((LF_ZAP, LL_INFO10000, |
| 394 | "ZAP: Using code" FMT_ADDR "for %s.%s sig=\"%s\" (token %x).\n" , |
| 395 | DBG_ADDR(pCode), |
| 396 | m_pszDebugClassName, |
| 397 | m_pszDebugMethodName, |
| 398 | m_pszDebugMethodSignature, |
| 399 | GetMemberDef())); |
| 400 | |
| 401 | TADDR pFixupList = GetFixupList(); |
| 402 | if (pFixupList != NULL) |
| 403 | { |
| 404 | Module *pZapModule = GetZapModule(); |
| 405 | _ASSERTE(pZapModule != NULL); |
| 406 | if (!pZapModule->FixupDelayList(pFixupList)) |
| 407 | { |
| 408 | _ASSERTE(!"FixupDelayList failed" ); |
| 409 | ThrowHR(COR_E_BADIMAGEFORMAT); |
| 410 | } |
| 411 | } |
| 412 | |
| 413 | #ifdef HAVE_GCCOVER |
| 414 | if (GCStress<cfg_instr_ngen>::IsEnabled()) |
| 415 | SetupGcCoverage(this, (BYTE*)pCode); |
| 416 | #endif // HAVE_GCCOVER |
| 417 | |
| 418 | #ifdef PROFILING_SUPPORTED |
| 419 | /* |
| 420 | * This notifies the profiler that a search to find a |
| 421 | * cached jitted function has been made. |
| 422 | */ |
| 423 | { |
| 424 | BEGIN_PIN_PROFILER(CORProfilerTrackCacheSearches()); |
| 425 | g_profControlBlock.pProfInterface-> |
| 426 | JITCachedFunctionSearchFinished((FunctionID)this, COR_PRF_CACHED_FUNCTION_FOUND); |
| 427 | END_PIN_PROFILER(); |
| 428 | } |
| 429 | #endif // PROFILING_SUPPORTED |
| 430 | |
| 431 | } |
| 432 | #endif // FEATURE_PREJIT |
| 433 | |
| 434 | return pCode; |
| 435 | } |
| 436 | |
| 437 | |
| 438 | PCODE MethodDesc::GetPrecompiledR2RCode(PrepareCodeConfig* pConfig) |
| 439 | { |
| 440 | STANDARD_VM_CONTRACT; |
| 441 | |
| 442 | PCODE pCode = NULL; |
| 443 | #ifdef FEATURE_READYTORUN |
| 444 | Module * pModule = GetModule(); |
| 445 | if (pModule->IsReadyToRun()) |
| 446 | { |
| 447 | pCode = pModule->GetReadyToRunInfo()->GetEntryPoint(this, pConfig, TRUE /* fFixups */); |
| 448 | } |
| 449 | #endif |
| 450 | return pCode; |
| 451 | } |
| 452 | |
| 453 | PCODE MethodDesc::GetMulticoreJitCode() |
| 454 | { |
| 455 | STANDARD_VM_CONTRACT; |
| 456 | |
| 457 | PCODE pCode = NULL; |
| 458 | #ifdef FEATURE_MULTICOREJIT |
    // Quick check, before calling the expensive out-of-line function, on
    // whether this method's domain has code JITted by the background thread
| 460 | MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager(); |
| 461 | if (mcJitManager.GetMulticoreJitCodeStorage().GetRemainingMethodCount() > 0) |
| 462 | { |
| 463 | if (MulticoreJitManager::IsMethodSupported(this)) |
| 464 | { |
| 465 | pCode = mcJitManager.RequestMethodCode(this); // Query multi-core JIT manager for compiled code |
| 466 | } |
| 467 | } |
| 468 | #endif |
| 469 | return pCode; |
| 470 | } |
| 471 | |
| 472 | COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyMetadataILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pDecoderMemory) |
| 473 | { |
| 474 | STANDARD_VM_CONTRACT; |
| 475 | |
| 476 | _ASSERTE(!IsNoMetadata()); |
| 477 | |
| 478 | COR_ILMETHOD_DECODER* pHeader = NULL; |
| 479 | COR_ILMETHOD* ilHeader = pConfig->GetILHeader(); |
| 480 | if (ilHeader == NULL) |
| 481 | { |
| 482 | #ifdef FEATURE_COMINTEROP |
        // Abstract methods can be called through WinRT derivation if the deriving type
        // is not implemented in managed code and the call comes through the CCW to the
        // abstract method. Throw a sensible exception in that case.
| 486 | if (GetMethodTable()->IsExportedToWinRT() && IsAbstract()) |
| 487 | { |
| 488 | COMPlusThrowHR(E_NOTIMPL); |
| 489 | } |
| 490 | #endif // FEATURE_COMINTEROP |
| 491 | |
| 492 | COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL); |
| 493 | } |
| 494 | |
| 495 | COR_ILMETHOD_DECODER::DecoderStatus status = COR_ILMETHOD_DECODER::FORMAT_ERROR; |
| 496 | { |
| 497 | // Decoder ctor can AV on a malformed method header |
| 498 | AVInRuntimeImplOkayHolder AVOkay; |
| 499 | pHeader = new (pDecoderMemory) COR_ILMETHOD_DECODER(ilHeader, GetMDImport(), &status); |
| 500 | } |
| 501 | |
| 502 | if (status == COR_ILMETHOD_DECODER::FORMAT_ERROR) |
| 503 | { |
| 504 | COMPlusThrowHR(COR_E_BADIMAGEFORMAT, BFA_BAD_IL); |
| 505 | } |
| 506 | |
| 507 | return pHeader; |
| 508 | } |
| 509 | |
| 510 | COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyNoMetadataILHeader() |
| 511 | { |
| 512 | STANDARD_VM_CONTRACT; |
| 513 | |
| 514 | if (IsILStub()) |
| 515 | { |
| 516 | ILStubResolver* pResolver = AsDynamicMethodDesc()->GetILStubResolver(); |
| 517 | return pResolver->GetILHeader(); |
| 518 | } |
| 519 | else |
| 520 | { |
| 521 | return NULL; |
| 522 | } |
| 523 | |
| 524 | // NoMetadata currently doesn't verify the IL. I'm not sure if that was |
| 525 | // a deliberate decision in the past or not, but I've left the behavior |
| 526 | // as-is during refactoring. |
| 527 | } |
| 528 | |
| 529 | COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pIlDecoderMemory) |
| 530 | { |
| 531 | STANDARD_VM_CONTRACT; |
| 532 | _ASSERTE(IsIL() || IsNoMetadata()); |
| 533 | |
| 534 | if (IsNoMetadata()) |
| 535 | { |
        // The NoMetadata version already has a decoder to use; it doesn't need the
        // stack-allocated one
| 537 | return GetAndVerifyNoMetadataILHeader(); |
| 538 | } |
| 539 | else |
| 540 | { |
| 541 | return GetAndVerifyMetadataILHeader(pConfig, pIlDecoderMemory); |
| 542 | } |
| 543 | } |
| 544 | |
| 545 | // ******************************************************************** |
| 546 | // README!! |
| 547 | // ******************************************************************** |
| 548 | |
// JitCompileCode is the thread-safe way to invoke the JIT compiler.
// If multiple threads get in here for the same config, ALL of them
// MUST return the SAME value for pCode.
//
// This function maintains a deadlock-aware list of methods being JITted,
// which prevents us from trying to JIT the same method more than once.
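//
// Roughly, the protocol is (an illustrative outline of the body below):
//
//   1. Take the per-domain JIT list lock and find or create the
//      JitListLockEntry for this code version.
//   2. Release the list lock, then acquire the per-entry lock in a
//      deadlock-aware way (a would-be deadlock lets us proceed and JIT
//      anyway).
//   3. Re-check for cancellation and for multicore-JIT-produced code
//      before actually invoking the JIT.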
| 555 | |
| 556 | PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig) |
| 557 | { |
| 558 | STANDARD_VM_CONTRACT; |
| 559 | |
| 560 | LOG((LF_JIT, LL_INFO1000000, |
| 561 | "JitCompileCode(" FMT_ADDR ", %s) for %s:%s\n" , |
| 562 | DBG_ADDR(this), |
| 563 | IsILStub() ? " TRUE" : "FALSE" , |
| 564 | GetMethodTable()->GetDebugClassName(), |
| 565 | m_pszDebugMethodName)); |
| 566 | |
| 567 | #if defined(FEATURE_JIT_PITCHING) |
| 568 | CheckStacksAndPitch(); |
| 569 | #endif |
| 570 | |
| 571 | PCODE pCode = NULL; |
| 572 | { |
| 573 | // Enter the global lock which protects the list of all functions being JITd |
| 574 | JitListLock::LockHolder pJitLock(GetDomain()->GetJitLock()); |
| 575 | |
| 576 | // It is possible that another thread stepped in before we entered the global lock for the first time. |
| 577 | if ((pCode = pConfig->IsJitCancellationRequested())) |
| 578 | { |
| 579 | return pCode; |
| 580 | } |
| 581 | |
        const char *description = "jit lock";
| 583 | INDEBUG(description = m_pszDebugMethodName;) |
| 584 | ReleaseHolder<JitListLockEntry> pEntry(JitListLockEntry::Find( |
| 585 | pJitLock, pConfig->GetCodeVersion(), description)); |
| 586 | |
| 587 | // We have an entry now, we can release the global lock |
| 588 | pJitLock.Release(); |
| 589 | |
| 590 | // Take the entry lock |
| 591 | { |
| 592 | JitListLockEntry::LockHolder pEntryLock(pEntry, FALSE); |
| 593 | |
| 594 | if (pEntryLock.DeadlockAwareAcquire()) |
| 595 | { |
| 596 | if (pEntry->m_hrResultCode == S_FALSE) |
| 597 | { |
| 598 | // Nobody has jitted the method yet |
| 599 | } |
| 600 | else |
| 601 | { |
                    // We came in to JIT, but someone beat us to it, so return
                    // the already-jitted method!
| 604 | |
| 605 | // We can just fall through because we will notice below that |
| 606 | // the method has code. |
| 607 | |
| 608 | // @todo: Note that we may have a failed HRESULT here - |
| 609 | // we might want to return an early error rather than |
| 610 | // repeatedly failing the jit. |
| 611 | } |
| 612 | } |
| 613 | else |
| 614 | { |
| 615 | // Taking this lock would cause a deadlock (presumably because we |
| 616 | // are involved in a class constructor circular dependency.) For |
| 617 | // instance, another thread may be waiting to run the class constructor |
| 618 | // that we are jitting, but is currently jitting this function. |
| 619 | // |
| 620 | // To remedy this, we want to go ahead and do the jitting anyway. |
| 621 | // The other threads contending for the lock will then notice that |
| 622 | // the jit finished while they were running class constructors, and abort their |
| 623 | // current jit effort. |
| 624 | // |
| 625 | // We don't have to do anything special right here since we |
| 626 | // can check HasNativeCode() to detect this case later. |
| 627 | // |
| 628 | // Note that at this point we don't have the lock, but that's OK because the |
| 629 | // thread which does have the lock is blocked waiting for us. |
| 630 | } |
| 631 | |
| 632 | // It is possible that another thread stepped in before we entered the lock. |
| 633 | if ((pCode = pConfig->IsJitCancellationRequested())) |
| 634 | { |
| 635 | return pCode; |
| 636 | } |
| 637 | |
| 638 | pCode = GetMulticoreJitCode(); |
| 639 | if (pCode != NULL) |
| 640 | { |
| 641 | pConfig->SetNativeCode(pCode, &pCode); |
| 642 | pEntry->m_hrResultCode = S_OK; |
| 643 | return pCode; |
| 644 | } |
| 645 | else |
| 646 | { |
| 647 | return JitCompileCodeLockedEventWrapper(pConfig, pEntryLock); |
| 648 | } |
| 649 | } |
| 650 | } |
| 651 | } |
| 652 | |
| 653 | PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry) |
| 654 | { |
| 655 | STANDARD_VM_CONTRACT; |
| 656 | |
| 657 | PCODE pCode = NULL; |
| 658 | ULONG sizeOfCode = 0; |
| 659 | CORJIT_FLAGS flags; |
| 660 | |
| 661 | #ifdef MDA_SUPPORTED |
| 662 | MdaJitCompilationStart* pProbe = MDA_GET_ASSISTANT(JitCompilationStart); |
| 663 | if (pProbe) |
| 664 | pProbe->NowCompiling(this); |
| 665 | #endif // MDA_SUPPORTED |
| 666 | |
| 667 | #ifdef PROFILING_SUPPORTED |
| 668 | { |
| 669 | BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo()); |
| 670 | // For methods with non-zero rejit id we send ReJITCompilationStarted, otherwise |
| 671 | // JITCompilationStarted. It isn't clear if this is the ideal policy for these |
| 672 | // notifications yet. |
| 673 | ReJITID rejitId = pConfig->GetCodeVersion().GetILCodeVersionId(); |
| 674 | if (rejitId != 0) |
| 675 | { |
| 676 | g_profControlBlock.pProfInterface->ReJITCompilationStarted((FunctionID)this, |
| 677 | rejitId, |
| 678 | TRUE); |
| 679 | } |
| 680 | else |
| 681 | // If profiling, need to give a chance for a tool to examine and modify |
| 682 | // the IL before it gets to the JIT. This allows one to add probe calls for |
| 683 | // things like code coverage, performance, or whatever. |
| 684 | { |
| 685 | if (!IsNoMetadata()) |
| 686 | { |
| 687 | g_profControlBlock.pProfInterface->JITCompilationStarted((FunctionID)this, TRUE); |
| 688 | |
| 689 | } |
| 690 | else |
| 691 | { |
| 692 | unsigned int ilSize, unused; |
| 693 | CorInfoOptions corOptions; |
| 694 | LPCBYTE ilHeaderPointer = this->AsDynamicMethodDesc()->GetResolver()->GetCodeInfo(&ilSize, &unused, &corOptions, &unused); |
| 695 | |
| 696 | g_profControlBlock.pProfInterface->DynamicMethodJITCompilationStarted((FunctionID)this, TRUE, ilHeaderPointer, ilSize); |
| 697 | } |
| 698 | } |
| 699 | END_PIN_PROFILER(); |
| 700 | } |
| 701 | #endif // PROFILING_SUPPORTED |
| 702 | |
| 703 | if (!ETW_TRACING_CATEGORY_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER_Context, |
| 704 | TRACE_LEVEL_VERBOSE, |
| 705 | CLR_JIT_KEYWORD)) |
| 706 | { |
| 707 | pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags); |
| 708 | } |
| 709 | else |
| 710 | { |
| 711 | SString namespaceOrClassName, methodName, methodSignature; |
| 712 | |
        // Methods that may be interpreted defer this notification until it is certain
        // we are jitting and not interpreting; see CompileMethodWithEtwWrapper.
        // Some further refactoring could consolidate the notification to always
        // occur at the point where the interpreter does it, but it might be even
        // better to fix the issues that cause us to avoid generating jit
        // notifications for interpreted methods in the first place. The interpreter
        // does generate a small stub of native code, but no native-IL mapping.
| 720 | #ifndef FEATURE_INTERPRETER |
| 721 | ETW::MethodLog::MethodJitting(this, |
| 722 | &namespaceOrClassName, |
| 723 | &methodName, |
| 724 | &methodSignature); |
| 725 | #endif |
| 726 | |
| 727 | pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags); |
| 728 | |
        // Interpreted methods skip this notification
| 730 | #ifdef FEATURE_INTERPRETER |
| 731 | if (Interpreter::InterpretationStubToMethodInfo(pCode) == NULL) |
| 732 | #endif |
| 733 | { |
| 734 | // Fire an ETW event to mark the end of JIT'ing |
| 735 | ETW::MethodLog::MethodJitted(this, |
| 736 | &namespaceOrClassName, |
| 737 | &methodName, |
| 738 | &methodSignature, |
| 739 | pCode, |
| 740 | pConfig->GetCodeVersion().GetVersionId(), |
| 741 | pConfig->ProfilerRejectedPrecompiledCode(), |
| 742 | pConfig->ReadyToRunRejectedPrecompiledCode()); |
| 743 | } |
| 744 | |
| 745 | } |
| 746 | |
| 747 | #ifdef FEATURE_STACK_SAMPLING |
| 748 | StackSampler::RecordJittingInfo(this, flags); |
| 749 | #endif // FEATURE_STACK_SAMPLING |
| 750 | |
| 751 | #ifdef PROFILING_SUPPORTED |
| 752 | { |
| 753 | BEGIN_PIN_PROFILER(CORProfilerTrackJITInfo()); |
| 754 | // For methods with non-zero rejit id we send ReJITCompilationFinished, otherwise |
| 755 | // JITCompilationFinished. It isn't clear if this is the ideal policy for these |
| 756 | // notifications yet. |
| 757 | ReJITID rejitId = pConfig->GetCodeVersion().GetILCodeVersionId(); |
| 758 | if (rejitId != 0) |
| 759 | { |
| 760 | |
| 761 | g_profControlBlock.pProfInterface->ReJITCompilationFinished((FunctionID)this, |
| 762 | rejitId, |
| 763 | S_OK, |
| 764 | TRUE); |
| 765 | } |
| 766 | else |
| 767 | // Notify the profiler that JIT completed. |
| 768 | // Must do this after the address has been set. |
| 769 | // @ToDo: Why must we set the address before notifying the profiler ?? |
| 770 | { |
| 771 | if (!IsNoMetadata()) |
| 772 | { |
| 773 | g_profControlBlock.pProfInterface-> |
| 774 | JITCompilationFinished((FunctionID)this, |
| 775 | pEntry->m_hrResultCode, |
| 776 | TRUE); |
| 777 | } |
| 778 | else |
| 779 | { |
| 780 | g_profControlBlock.pProfInterface->DynamicMethodJITCompilationFinished((FunctionID)this, pEntry->m_hrResultCode, TRUE); |
| 781 | } |
| 782 | } |
| 783 | END_PIN_PROFILER(); |
| 784 | } |
| 785 | #endif // PROFILING_SUPPORTED |
| 786 | |
| 787 | #ifdef FEATURE_INTERPRETER |
| 788 | bool isJittedMethod = (Interpreter::InterpretationStubToMethodInfo(pCode) == NULL); |
| 789 | #endif |
| 790 | |
    // Interpreted methods skip this notification
| 792 | #ifdef FEATURE_INTERPRETER |
| 793 | if (isJittedMethod) |
| 794 | #endif |
| 795 | { |
| 796 | #ifdef FEATURE_PERFMAP |
| 797 | // Save the JIT'd method information so that perf can resolve JIT'd call frames. |
| 798 | PerfMap::LogJITCompiledMethod(this, pCode, sizeOfCode); |
| 799 | #endif |
| 800 | } |
| 801 | |
| 802 | |
| 803 | #ifdef FEATURE_MULTICOREJIT |
| 804 | // Non-initial code versions and multicore jit initial compilation all skip this |
| 805 | if (pConfig->NeedsMulticoreJitNotification()) |
| 806 | { |
| 807 | MulticoreJitManager & mcJitManager = GetAppDomain()->GetMulticoreJitManager(); |
| 808 | if (mcJitManager.IsRecorderActive()) |
| 809 | { |
| 810 | if (MulticoreJitManager::IsMethodSupported(this)) |
| 811 | { |
| 812 | mcJitManager.RecordMethodJit(this); // Tell multi-core JIT manager to record method on successful JITting |
| 813 | } |
| 814 | } |
| 815 | } |
| 816 | #endif |
| 817 | |
| 818 | #ifdef FEATURE_INTERPRETER |
| 819 | if (isJittedMethod) |
| 820 | #endif |
| 821 | { |
| 822 | // The notification will only occur if someone has registered for this method. |
| 823 | DACNotifyCompilationFinished(this, pCode); |
| 824 | } |
| 825 | |
| 826 | return pCode; |
| 827 | } |
| 828 | |
| 829 | PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags) |
| 830 | { |
| 831 | STANDARD_VM_CONTRACT; |
| 832 | |
| 833 | PCODE pCode = NULL; |
| 834 | |
| 835 | // The profiler may have changed the code on the callback. Need to |
| 836 | // pick up the new code. |
| 837 | COR_ILMETHOD_DECODER ilDecoderTemp; |
| 838 | COR_ILMETHOD_DECODER *pilHeader = GetAndVerifyILHeader(pConfig, &ilDecoderTemp); |
| 839 | *pFlags = pConfig->GetJitCompilationFlags(); |
| 840 | PCODE pOtherCode = NULL; |
| 841 | EX_TRY |
| 842 | { |
| 843 | pCode = UnsafeJitFunction(this, pilHeader, *pFlags, pSizeOfCode); |
| 844 | } |
| 845 | EX_CATCH |
| 846 | { |
| 847 | // If the current thread threw an exception, but a competing thread |
| 848 | // somehow succeeded at JITting the same function (e.g., out of memory |
| 849 | // encountered on current thread but not competing thread), then go ahead |
| 850 | // and swallow this current thread's exception, since we somehow managed |
| 851 | // to successfully JIT the code on the other thread. |
| 852 | // |
| 853 | // Note that if a deadlock cycle is broken, that does not result in an |
| 854 | // exception--the thread would just pass through the lock and JIT the |
| 855 | // function in competition with the other thread (with the winner of the |
| 856 | // race decided later on when we do SetNativeCodeInterlocked). This |
| 857 | // try/catch is purely to deal with the (unusual) case where a competing |
| 858 | // thread succeeded where we aborted. |
| 859 | |
| 860 | if (!(pOtherCode = pConfig->IsJitCancellationRequested())) |
| 861 | { |
| 862 | pEntry->m_hrResultCode = E_FAIL; |
| 863 | EX_RETHROW; |
| 864 | } |
| 865 | } |
| 866 | EX_END_CATCH(RethrowTerminalExceptions) |
| 867 | |
| 868 | if (pOtherCode != NULL) |
| 869 | { |
        // Somebody finished jitting recursively while we were jitting the method.
        // Just use their method & leak the one we finished. (Normally we hope
        // not to finish our JIT in this case, as we will abort early if we notice
        // a reentrant jit has occurred. But we may not catch every place, so we
        // do a definitive final check here.)
| 875 | return pOtherCode; |
| 876 | } |
| 877 | |
| 878 | _ASSERTE(pCode != NULL); |
| 879 | |
| 880 | #ifdef HAVE_GCCOVER |
| 881 | // Instrument for coverage before trying to publish this version |
| 882 | // of the code as the native code, to avoid other threads seeing |
| 883 | // partially instrumented methods. |
| 884 | if (GCStress<cfg_instr_jit>::IsEnabled()) |
| 885 | { |
| 886 | // Do the instrumentation and publish atomically, so that the |
| 887 | // instrumentation data always matches the published code. |
| 888 | CrstHolder gcCoverLock(&m_GCCoverCrst); |
| 889 | |
| 890 | // Make sure no other thread has stepped in before us. |
| 891 | if ((pOtherCode = pConfig->IsJitCancellationRequested())) |
| 892 | { |
| 893 | return pOtherCode; |
| 894 | } |
| 895 | |
| 896 | SetupGcCoverage(this, (BYTE*)pCode); |
| 897 | |
| 898 | // This thread should always win the publishing race |
| 899 | // since we're under a lock. |
| 900 | if (!pConfig->SetNativeCode(pCode, &pOtherCode)) |
| 901 | { |
| 902 | _ASSERTE(!"GC Cover native code publish failed" ); |
| 903 | } |
| 904 | } |
| 905 | else |
| 906 | #endif // HAVE_GCCOVER |
| 907 | |
    // Aside from rejit, performing a SetNativeCodeInterlocked at this point
    // generally ensures that there is only one winning version of the native
    // code. This also avoids races with the profiler overriding NGen'ed code
    // (see the matching SetNativeCodeInterlocked done after
    // JITCachedFunctionSearchStarted)
| 913 | if (!pConfig->SetNativeCode(pCode, &pOtherCode)) |
| 914 | { |
| 915 | // Another thread beat us to publishing its copy of the JITted code. |
| 916 | return pOtherCode; |
| 917 | } |
| 918 | |
| 919 | #if defined(FEATURE_JIT_PITCHING) |
| 920 | SavePitchingCandidate(this, *pSizeOfCode); |
| 921 | #endif |
| 922 | |
| 923 | // We succeeded in jitting the code, and our jitted code is the one that's going to run now. |
| 924 | pEntry->m_hrResultCode = S_OK; |
| 925 | |
| 926 | return pCode; |
| 927 | } |
| 928 | |
| 929 | |
| 930 | |
| 931 | PrepareCodeConfig::PrepareCodeConfig() {} |
| 932 | |
| 933 | PrepareCodeConfig::PrepareCodeConfig(NativeCodeVersion codeVersion, BOOL needsMulticoreJitNotification, BOOL mayUsePrecompiledCode) : |
| 934 | m_pMethodDesc(codeVersion.GetMethodDesc()), |
| 935 | m_nativeCodeVersion(codeVersion), |
| 936 | m_needsMulticoreJitNotification(needsMulticoreJitNotification), |
| 937 | m_mayUsePrecompiledCode(mayUsePrecompiledCode), |
| 938 | m_ProfilerRejectedPrecompiledCode(FALSE), |
| 939 | m_ReadyToRunRejectedPrecompiledCode(FALSE) |
| 940 | {} |
| 941 | |
| 942 | MethodDesc* PrepareCodeConfig::GetMethodDesc() |
| 943 | { |
| 944 | LIMITED_METHOD_CONTRACT; |
| 945 | return m_pMethodDesc; |
| 946 | } |
| 947 | |
| 948 | PCODE PrepareCodeConfig::IsJitCancellationRequested() |
| 949 | { |
| 950 | LIMITED_METHOD_CONTRACT; |
| 951 | return m_pMethodDesc->GetNativeCode(); |
| 952 | } |
| 953 | |
| 954 | BOOL PrepareCodeConfig::NeedsMulticoreJitNotification() |
| 955 | { |
| 956 | LIMITED_METHOD_CONTRACT; |
| 957 | return m_needsMulticoreJitNotification; |
| 958 | } |
| 959 | |
| 960 | BOOL PrepareCodeConfig::ProfilerRejectedPrecompiledCode() |
| 961 | { |
| 962 | LIMITED_METHOD_CONTRACT; |
| 963 | return m_ProfilerRejectedPrecompiledCode; |
| 964 | } |
| 965 | |
| 966 | void PrepareCodeConfig::SetProfilerRejectedPrecompiledCode() |
| 967 | { |
| 968 | LIMITED_METHOD_CONTRACT; |
| 969 | m_ProfilerRejectedPrecompiledCode = TRUE; |
| 970 | } |
| 971 | |
| 972 | BOOL PrepareCodeConfig::ReadyToRunRejectedPrecompiledCode() |
| 973 | { |
| 974 | LIMITED_METHOD_CONTRACT; |
| 975 | return m_ReadyToRunRejectedPrecompiledCode; |
| 976 | } |
| 977 | |
| 978 | void PrepareCodeConfig::SetReadyToRunRejectedPrecompiledCode() |
| 979 | { |
| 980 | LIMITED_METHOD_CONTRACT; |
| 981 | m_ReadyToRunRejectedPrecompiledCode = TRUE; |
| 982 | } |
| 983 | |
| 984 | NativeCodeVersion PrepareCodeConfig::GetCodeVersion() |
| 985 | { |
| 986 | LIMITED_METHOD_CONTRACT; |
| 987 | return m_nativeCodeVersion; |
| 988 | } |
| 989 | |
| 990 | BOOL PrepareCodeConfig::SetNativeCode(PCODE pCode, PCODE * ppAlternateCodeToUse) |
| 991 | { |
| 992 | LIMITED_METHOD_CONTRACT; |
| 993 | |
| 994 | // If this function had already been requested for rejit (before its original |
| 995 | // code was jitted), then give the CodeVersionManager a chance to jump-stamp the |
| 996 | // code we just compiled so the first thread entering the function will jump |
| 997 | // to the prestub and trigger the rejit. Note that the PublishMethodHolder takes |
| 998 | // a lock to avoid a particular kind of rejit race. See |
| 999 | // code:CodeVersionManager::PublishMethodHolder::PublishMethodHolder#PublishCode for |
| 1000 | // details on the rejit race. |
| 1001 | // |
| 1002 | if (m_pMethodDesc->IsVersionableWithJumpStamp()) |
| 1003 | { |
| 1004 | PublishMethodHolder publishWorker(GetMethodDesc(), pCode); |
| 1005 | if (m_pMethodDesc->SetNativeCodeInterlocked(pCode, NULL)) |
| 1006 | { |
| 1007 | return TRUE; |
| 1008 | } |
| 1009 | } |
| 1010 | else |
| 1011 | { |
| 1012 | if (m_pMethodDesc->SetNativeCodeInterlocked(pCode, NULL)) |
| 1013 | { |
| 1014 | return TRUE; |
| 1015 | } |
| 1016 | } |
| 1017 | |
| 1018 | *ppAlternateCodeToUse = m_pMethodDesc->GetNativeCode(); |
| 1019 | return FALSE; |
| 1020 | } |
| 1021 | |
| 1022 | COR_ILMETHOD* PrepareCodeConfig::GetILHeader() |
| 1023 | { |
| 1024 | STANDARD_VM_CONTRACT; |
| 1025 | return m_pMethodDesc->GetILHeader(TRUE); |
| 1026 | } |
| 1027 | |
| 1028 | CORJIT_FLAGS PrepareCodeConfig::GetJitCompilationFlags() |
| 1029 | { |
| 1030 | STANDARD_VM_CONTRACT; |
| 1031 | |
| 1032 | CORJIT_FLAGS flags; |
| 1033 | if (m_pMethodDesc->IsILStub()) |
| 1034 | { |
| 1035 | ILStubResolver* pResolver = m_pMethodDesc->AsDynamicMethodDesc()->GetILStubResolver(); |
| 1036 | flags = pResolver->GetJitFlags(); |
| 1037 | } |
| 1038 | #ifdef FEATURE_TIERED_COMPILATION |
| 1039 | flags.Add(TieredCompilationManager::GetJitFlags(m_nativeCodeVersion)); |
| 1040 | #endif |
| 1041 | return flags; |
| 1042 | } |
| 1043 | |
| 1044 | BOOL PrepareCodeConfig::MayUsePrecompiledCode() |
| 1045 | { |
| 1046 | LIMITED_METHOD_CONTRACT; |
| 1047 | return m_mayUsePrecompiledCode; |
| 1048 | } |
| 1049 | |
| 1050 | #ifdef FEATURE_CODE_VERSIONING |
| 1051 | VersionedPrepareCodeConfig::VersionedPrepareCodeConfig() {} |
| 1052 | |
| 1053 | VersionedPrepareCodeConfig::VersionedPrepareCodeConfig(NativeCodeVersion codeVersion) : |
| 1054 | PrepareCodeConfig(codeVersion, TRUE, FALSE) |
| 1055 | { |
| 1056 | LIMITED_METHOD_CONTRACT; |
| 1057 | |
| 1058 | _ASSERTE(!m_nativeCodeVersion.IsDefaultVersion()); |
| 1059 | _ASSERTE(m_pMethodDesc->GetCodeVersionManager()->LockOwnedByCurrentThread()); |
| 1060 | m_ilCodeVersion = m_nativeCodeVersion.GetILCodeVersion(); |
| 1061 | } |
| 1062 | |
| 1063 | HRESULT VersionedPrepareCodeConfig::FinishConfiguration() |
| 1064 | { |
| 1065 | STANDARD_VM_CONTRACT; |
| 1066 | |
| 1067 | _ASSERTE(!GetMethodDesc()->GetCodeVersionManager()->LockOwnedByCurrentThread()); |
| 1068 | |
| 1069 | // Any code build stages that do just in time configuration should |
| 1070 | // be configured now |
| 1071 | #ifdef FEATURE_REJIT |
| 1072 | if (m_ilCodeVersion.GetRejitState() != ILCodeVersion::kStateActive) |
| 1073 | { |
| 1074 | ReJitManager::ConfigureILCodeVersion(m_ilCodeVersion); |
| 1075 | } |
| 1076 | _ASSERTE(m_ilCodeVersion.GetRejitState() == ILCodeVersion::kStateActive); |
| 1077 | #endif |
| 1078 | |
| 1079 | return S_OK; |
| 1080 | } |
| 1081 | |
| 1082 | PCODE VersionedPrepareCodeConfig::IsJitCancellationRequested() |
| 1083 | { |
| 1084 | LIMITED_METHOD_CONTRACT; |
| 1085 | return m_nativeCodeVersion.GetNativeCode(); |
| 1086 | } |
| 1087 | |
| 1088 | BOOL VersionedPrepareCodeConfig::SetNativeCode(PCODE pCode, PCODE * ppAlternateCodeToUse) |
| 1089 | { |
| 1090 | LIMITED_METHOD_CONTRACT; |
| 1091 | |
    // This isn't the default version, so a jump stamp is never needed
| 1093 | _ASSERTE(!m_nativeCodeVersion.IsDefaultVersion()); |
| 1094 | if (m_nativeCodeVersion.SetNativeCodeInterlocked(pCode, NULL)) |
| 1095 | { |
| 1096 | return TRUE; |
| 1097 | } |
| 1098 | else |
| 1099 | { |
| 1100 | *ppAlternateCodeToUse = m_nativeCodeVersion.GetNativeCode(); |
| 1101 | return FALSE; |
| 1102 | } |
| 1103 | } |
| 1104 | |
| 1105 | COR_ILMETHOD* VersionedPrepareCodeConfig::GetILHeader() |
| 1106 | { |
| 1107 | STANDARD_VM_CONTRACT; |
| 1108 | return m_ilCodeVersion.GetIL(); |
| 1109 | } |
| 1110 | |
| 1111 | CORJIT_FLAGS VersionedPrepareCodeConfig::GetJitCompilationFlags() |
| 1112 | { |
| 1113 | STANDARD_VM_CONTRACT; |
| 1114 | CORJIT_FLAGS flags; |
| 1115 | |
| 1116 | #ifdef FEATURE_REJIT |
| 1117 | DWORD profilerFlags = m_ilCodeVersion.GetJitFlags(); |
| 1118 | flags.Add(ReJitManager::JitFlagsFromProfCodegenFlags(profilerFlags)); |
| 1119 | #endif |
| 1120 | |
| 1121 | #ifdef FEATURE_TIERED_COMPILATION |
| 1122 | flags.Add(TieredCompilationManager::GetJitFlags(m_nativeCodeVersion)); |
| 1123 | #endif |
| 1124 | |
| 1125 | return flags; |
| 1126 | } |
| 1127 | |
| 1128 | #endif //FEATURE_CODE_VERSIONING |
| 1129 | |
| 1130 | #ifdef FEATURE_STUBS_AS_IL |
| 1131 | |
// CreateInstantiatingILStubTargetSig:
// This method creates the signature of the target of the IL stub
// for instantiating and unboxing stubs, when/where we need to introduce a
// generic context. Since the generic context is a hidden parameter, we create
// a signature that looks non-generic but has one additional parameter right
// after the this pointer.
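//
// For example (purely illustrative): a target with signature
//
//     instance int32 M(string s)
//
// gets a stub target signature equivalent to
//
//     instance int32 M(native int ctx, string s)    // non-x86: context first
//     instance int32 M(string s, native int ctx)    // x86: context last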
| 1137 | void CreateInstantiatingILStubTargetSig(MethodDesc *pBaseMD, |
| 1138 | SigTypeContext &typeContext, |
| 1139 | SigBuilder *stubSigBuilder) |
| 1140 | { |
| 1141 | STANDARD_VM_CONTRACT; |
| 1142 | |
| 1143 | MetaSig msig(pBaseMD); |
| 1144 | BYTE callingConvention = IMAGE_CEE_CS_CALLCONV_DEFAULT; |
| 1145 | if (msig.HasThis()) |
| 1146 | callingConvention |= IMAGE_CEE_CS_CALLCONV_HASTHIS; |
| 1147 | // CallingConvention |
| 1148 | stubSigBuilder->AppendByte(callingConvention); |
| 1149 | |
| 1150 | // ParamCount |
| 1151 | stubSigBuilder->AppendData(msig.NumFixedArgs() + 1); // +1 is for context param |
| 1152 | |
| 1153 | // Return type |
| 1154 | SigPointer pReturn = msig.GetReturnProps(); |
| 1155 | pReturn.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder, FALSE); |
| 1156 | |
| 1157 | #ifndef _TARGET_X86_ |
| 1158 | // The hidden context parameter |
| 1159 | stubSigBuilder->AppendElementType(ELEMENT_TYPE_I); |
| 1160 | #endif // !_TARGET_X86_ |
| 1161 | |
| 1162 | // Copy rest of the arguments |
| 1163 | msig.NextArg(); |
| 1164 | SigPointer pArgs = msig.GetArgProps(); |
| 1165 | for (unsigned i = 0; i < msig.NumFixedArgs(); i++) |
| 1166 | { |
| 1167 | pArgs.ConvertToInternalExactlyOne(msig.GetModule(), &typeContext, stubSigBuilder); |
| 1168 | } |
| 1169 | |
| 1170 | #ifdef _TARGET_X86_ |
| 1171 | // The hidden context parameter |
| 1172 | stubSigBuilder->AppendElementType(ELEMENT_TYPE_I); |
| 1173 | #endif // _TARGET_X86_ |
| 1174 | } |
| 1175 | |
| 1176 | Stub * CreateUnboxingILStubForSharedGenericValueTypeMethods(MethodDesc* pTargetMD) |
| 1177 | { |
| 1178 | |
| 1179 | CONTRACT(Stub*) |
| 1180 | { |
| 1181 | THROWS; |
| 1182 | GC_TRIGGERS; |
| 1183 | POSTCONDITION(CheckPointer(RETVAL)); |
| 1184 | } |
| 1185 | CONTRACT_END; |
| 1186 | |
| 1187 | SigTypeContext typeContext(pTargetMD); |
| 1188 | |
| 1189 | MetaSig msig(pTargetMD); |
| 1190 | |
| 1191 | _ASSERTE(msig.HasThis()); |
| 1192 | |
| 1193 | ILStubLinker sl(pTargetMD->GetModule(), |
| 1194 | pTargetMD->GetSignature(), |
| 1195 | &typeContext, |
| 1196 | pTargetMD, |
| 1197 | TRUE, // fTargetHasThis |
| 1198 | TRUE, // fStubHasThis |
| 1199 | FALSE // fIsNDirectStub |
| 1200 | ); |
| 1201 | |
| 1202 | ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch); |
| 1203 | |
| 1204 | // 1. Build the new signature |
| 1205 | SigBuilder stubSigBuilder; |
| 1206 | CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder); |
| 1207 | |
| 1208 | // 2. Emit the method body |
| 1209 | mdToken tokRawData = pCode->GetToken(MscorlibBinder::GetField(FIELD__RAW_DATA__DATA)); |
| 1210 | |
| 1211 | // 2.1 Push the thisptr |
| 1212 | // We need to skip over the MethodTable* |
| 1213 | // The trick below will do that. |
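    // (RawData::Data is the first field of the boxed payload, so taking its
    //  address yields 'this' + sizeof(MethodTable*), i.e. the start of the
    //  unboxed value; an illustrative summary of the layout assumption.)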
| 1214 | pCode->EmitLoadThis(); |
| 1215 | pCode->EmitLDFLDA(tokRawData); |
| 1216 | |
| 1217 | #if defined(_TARGET_X86_) |
| 1218 | // 2.2 Push the rest of the arguments for x86 |
| 1219 | for (unsigned i = 0; i < msig.NumFixedArgs();i++) |
| 1220 | { |
| 1221 | pCode->EmitLDARG(i); |
| 1222 | } |
| 1223 | #endif |
| 1224 | |
| 1225 | // 2.3 Push the hidden context param |
| 1226 | // The context is going to be captured from the thisptr |
| 1227 | pCode->EmitLoadThis(); |
| 1228 | pCode->EmitLDFLDA(tokRawData); |
| 1229 | pCode->EmitLDC(Object::GetOffsetOfFirstField()); |
| 1230 | pCode->EmitSUB(); |
| 1231 | pCode->EmitLDIND_I(); |
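    // Net effect (illustrative): (this + FirstFieldOffset) - FirstFieldOffset
    // points back at the object header, and the indirect load fetches the
    // MethodTable*, which serves as the hidden generic context for the shared
    // valuetype code.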
| 1232 | |
| 1233 | #if !defined(_TARGET_X86_) |
    // 2.4 Push the rest of the arguments for non-x86
| 1235 | for (unsigned i = 0; i < msig.NumFixedArgs();i++) |
| 1236 | { |
| 1237 | pCode->EmitLDARG(i); |
| 1238 | } |
| 1239 | #endif |
| 1240 | |
| 1241 | // 2.5 Push the target address |
| 1242 | pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY)); |
| 1243 | |
| 1244 | // 2.6 Do the calli |
| 1245 | pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1); |
| 1246 | pCode->EmitRET(); |
| 1247 | |
| 1248 | PCCOR_SIGNATURE pSig; |
| 1249 | DWORD cbSig; |
| 1250 | pTargetMD->GetSig(&pSig,&cbSig); |
| 1251 | PTR_Module pLoaderModule = pTargetMD->GetLoaderModule(); |
| 1252 | MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(), |
| 1253 | pLoaderModule->GetILStubCache()->GetOrCreateStubMethodTable(pLoaderModule), |
| 1254 | ILSTUB_UNBOXINGILSTUB, |
| 1255 | pTargetMD->GetModule(), |
| 1256 | pSig, cbSig, |
| 1257 | &typeContext, |
| 1258 | &sl); |
| 1259 | |
| 1260 | ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver(); |
| 1261 | |
| 1262 | DWORD cbTargetSig = 0; |
| 1263 | PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig); |
| 1264 | pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig); |
| 1265 | pResolver->SetStubTargetMethodDesc(pTargetMD); |
| 1266 | |
| 1267 | RETURN Stub::NewStub(JitILStub(pStubMD)); |
| 1268 | |
| 1269 | } |
| 1270 | |
| 1271 | Stub * CreateInstantiatingILStub(MethodDesc* pTargetMD, void* pHiddenArg) |
| 1272 | { |
| 1273 | |
| 1274 | CONTRACT(Stub*) |
| 1275 | { |
| 1276 | THROWS; |
| 1277 | GC_TRIGGERS; |
| 1278 | PRECONDITION(CheckPointer(pHiddenArg)); |
| 1279 | POSTCONDITION(CheckPointer(RETVAL)); |
| 1280 | } |
| 1281 | CONTRACT_END; |
| 1282 | |
| 1283 | SigTypeContext typeContext; |
| 1284 | MethodTable* pStubMT; |
| 1285 | if (pTargetMD->HasMethodInstantiation()) |
| 1286 | { |
| 1287 | // The pHiddenArg shall be a MethodDesc* |
| 1288 | MethodDesc* pMD = static_cast<MethodDesc *>(pHiddenArg); |
| 1289 | SigTypeContext::InitTypeContext(pMD, &typeContext); |
| 1290 | pStubMT = pMD->GetMethodTable(); |
| 1291 | } |
| 1292 | else |
| 1293 | { |
| 1294 | // The pHiddenArg shall be a MethodTable* |
| 1295 | SigTypeContext::InitTypeContext(TypeHandle::FromPtr(pHiddenArg), &typeContext); |
| 1296 | pStubMT = static_cast<MethodTable *>(pHiddenArg); |
| 1297 | } |
| 1298 | |
| 1299 | MetaSig msig(pTargetMD); |
| 1300 | |
| 1301 | ILStubLinker sl(pTargetMD->GetModule(), |
| 1302 | pTargetMD->GetSignature(), |
| 1303 | &typeContext, |
| 1304 | pTargetMD, |
| 1305 | msig.HasThis(), // fTargetHasThis |
| 1306 | msig.HasThis(), // fStubHasThis |
| 1307 | FALSE // fIsNDirectStub |
| 1308 | ); |
| 1309 | |
| 1310 | ILCodeStream *pCode = sl.NewCodeStream(ILStubLinker::kDispatch); |
| 1311 | |
| 1312 | // 1. Build the new signature |
| 1313 | SigBuilder stubSigBuilder; |
| 1314 | CreateInstantiatingILStubTargetSig(pTargetMD, typeContext, &stubSigBuilder); |
| 1315 | |
| 1316 | // 2. Emit the method body |
| 1317 | if (msig.HasThis()) |
| 1318 | { |
| 1319 | // 2.1 Push the thisptr |
| 1320 | pCode->EmitLoadThis(); |
| 1321 | } |
| 1322 | |
| 1323 | #if defined(_TARGET_X86_) |
| 1324 | // 2.2 Push the rest of the arguments for x86 |
| 1325 | for (unsigned i = 0; i < msig.NumFixedArgs();i++) |
| 1326 | { |
| 1327 | pCode->EmitLDARG(i); |
| 1328 | } |
| 1329 | #endif // _TARGET_X86_ |
| 1330 | |
| 1331 | // 2.3 Push the hidden context param |
| 1332 | // InstantiatingStub |
| 1333 | pCode->EmitLDC((TADDR)pHiddenArg); |
| 1334 | |
| 1335 | #if !defined(_TARGET_X86_) |
    // 2.4 Push the rest of the arguments for non-x86
| 1337 | for (unsigned i = 0; i < msig.NumFixedArgs();i++) |
| 1338 | { |
| 1339 | pCode->EmitLDARG(i); |
| 1340 | } |
| 1341 | #endif // !_TARGET_X86_ |
| 1342 | |
| 1343 | // 2.5 Push the target address |
| 1344 | pCode->EmitLDC((TADDR)pTargetMD->GetMultiCallableAddrOfCode(CORINFO_ACCESS_ANY)); |
| 1345 | |
| 1346 | // 2.6 Do the calli |
| 1347 | pCode->EmitCALLI(TOKEN_ILSTUB_TARGET_SIG, msig.NumFixedArgs() + 1, msig.IsReturnTypeVoid() ? 0 : 1); |
| 1348 | pCode->EmitRET(); |
| 1349 | |
| 1350 | PCCOR_SIGNATURE pSig; |
| 1351 | DWORD cbSig; |
| 1352 | pTargetMD->GetSig(&pSig,&cbSig); |
| 1353 | PTR_Module pLoaderModule = pTargetMD->GetLoaderModule(); |
| 1354 | MethodDesc * pStubMD = ILStubCache::CreateAndLinkNewILStubMethodDesc(pTargetMD->GetLoaderAllocator(), |
| 1355 | pStubMT, |
| 1356 | ILSTUB_INSTANTIATINGSTUB, |
| 1357 | pTargetMD->GetModule(), |
| 1358 | pSig, cbSig, |
| 1359 | &typeContext, |
| 1360 | &sl); |
| 1361 | |
| 1362 | ILStubResolver *pResolver = pStubMD->AsDynamicMethodDesc()->GetILStubResolver(); |
| 1363 | |
| 1364 | DWORD cbTargetSig = 0; |
| 1365 | PCCOR_SIGNATURE pTargetSig = (PCCOR_SIGNATURE) stubSigBuilder.GetSignature(&cbTargetSig); |
| 1366 | pResolver->SetStubTargetMethodSig(pTargetSig, cbTargetSig); |
| 1367 | pResolver->SetStubTargetMethodDesc(pTargetMD); |
| 1368 | |
| 1369 | RETURN Stub::NewStub(JitILStub(pStubMD)); |
| 1370 | } |
| 1371 | #endif |
| 1372 | |
/* Make a stub for a value class method that expects a boxed "this" pointer */
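// Illustrative example: for 'struct S : I { int M(); }', a virtual call on a
// boxed S lands on the unboxing stub, which adjusts the boxed 'this' past the
// MethodTable* and forwards to the unboxed S.M implementation.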
| 1374 | Stub * MakeUnboxingStubWorker(MethodDesc *pMD) |
| 1375 | { |
| 1376 | CONTRACT(Stub*) |
| 1377 | { |
| 1378 | THROWS; |
| 1379 | GC_TRIGGERS; |
| 1380 | POSTCONDITION(CheckPointer(RETVAL)); |
| 1381 | } |
| 1382 | CONTRACT_END; |
| 1383 | |
| 1384 | Stub *pstub = NULL; |
| 1385 | |
| 1386 | _ASSERTE (pMD->GetMethodTable()->IsValueType()); |
| 1387 | _ASSERTE(!pMD->ContainsGenericVariables()); |
| 1388 | MethodDesc *pUnboxedMD = pMD->GetWrappedMethodDesc(); |
| 1389 | |
| 1390 | _ASSERTE(pUnboxedMD != NULL && pUnboxedMD != pMD); |
| 1391 | |
| 1392 | #ifdef FEATURE_STUBS_AS_IL |
| 1393 | if (pUnboxedMD->RequiresInstMethodTableArg()) |
| 1394 | { |
| 1395 | pstub = CreateUnboxingILStubForSharedGenericValueTypeMethods(pUnboxedMD); |
| 1396 | } |
| 1397 | else |
| 1398 | #endif |
| 1399 | { |
| 1400 | CPUSTUBLINKER sl; |
| 1401 | sl.EmitUnboxMethodStub(pUnboxedMD); |
| 1402 | pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap()); |
| 1403 | } |
| 1404 | RETURN pstub; |
| 1405 | } |
| 1406 | |
| 1407 | #if defined(FEATURE_SHARE_GENERIC_CODE) |
| 1408 | Stub * MakeInstantiatingStubWorker(MethodDesc *pMD) |
| 1409 | { |
| 1410 | CONTRACT(Stub*) |
| 1411 | { |
| 1412 | THROWS; |
| 1413 | GC_TRIGGERS; |
| 1414 | PRECONDITION(pMD->IsInstantiatingStub()); |
| 1415 | PRECONDITION(!pMD->RequiresInstArg()); |
| 1416 | PRECONDITION(!pMD->IsSharedByGenericMethodInstantiations()); |
| 1417 | POSTCONDITION(CheckPointer(RETVAL)); |
| 1418 | } |
| 1419 | CONTRACT_END; |
| 1420 | |
    // Note: this should be kept idempotent, in the sense that if multiple
    // threads get in here for the same pMD it should not matter whose
    // result finally gets used.
| 1424 | |
| 1425 | MethodDesc *pSharedMD = NULL; |
| 1426 | void* extraArg = NULL; |
| 1427 | |
| 1428 | // It's an instantiated generic method |
| 1429 | // Fetch the shared code associated with this instantiation |
| 1430 | pSharedMD = pMD->GetWrappedMethodDesc(); |
| 1431 | _ASSERTE(pSharedMD != NULL && pSharedMD != pMD); |
| 1432 | |
| 1433 | if (pMD->HasMethodInstantiation()) |
| 1434 | { |
| 1435 | extraArg = pMD; |
| 1436 | } |
| 1437 | else |
| 1438 | { |
| 1439 | // It's a per-instantiation static method |
| 1440 | extraArg = pMD->GetMethodTable(); |
| 1441 | } |
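    // (Summary: the hidden argument mirrors what the shared code expects as
    // its instantiation argument: the instantiated MethodDesc* for generic
    // methods, or the MethodTable* for per-instantiation statics.)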
| 1442 | Stub *pstub = NULL; |
| 1443 | |
| 1444 | #ifdef FEATURE_STUBS_AS_IL |
| 1445 | pstub = CreateInstantiatingILStub(pSharedMD, extraArg); |
| 1446 | #else |
| 1447 | CPUSTUBLINKER sl; |
| 1448 | _ASSERTE(pSharedMD != NULL && pSharedMD != pMD); |
| 1449 | sl.EmitInstantiatingMethodStub(pSharedMD, extraArg); |
| 1450 | |
| 1451 | pstub = sl.Link(pMD->GetLoaderAllocator()->GetStubHeap()); |
| 1452 | #endif |
| 1453 | |
| 1454 | RETURN pstub; |
| 1455 | } |
| 1456 | #endif // defined(FEATURE_SHARE_GENERIC_CODE) |
| 1457 | |
| 1458 | #if defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_) |
| 1459 | |
| 1460 | extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode) |
| 1461 | { |
| 1462 | _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET); |
| 1463 | |
| 1464 | pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE); |
| 1465 | |
| 1466 | _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode)); |
| 1467 | |
| 1468 | return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE); |
| 1469 | } |
| 1470 | |
| 1471 | #endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_) |
| 1472 | |
| 1473 | //============================================================================= |
| 1474 | // This function generates the real code for a method and installs it into |
| 1475 | // the methoddesc. Usually ***BUT NOT ALWAYS***, this function runs only once |
| 1476 | // per methoddesc. In addition to installing the new code, this function |
| 1477 | // returns a pointer to the new code for the prestub's convenience. |
| 1478 | //============================================================================= |
| 1479 | extern "C" PCODE STDCALL PreStubWorker(TransitionBlock * pTransitionBlock, MethodDesc * pMD) |
| 1480 | { |
| 1481 | PCODE pbRetVal = NULL; |
| 1482 | |
| 1483 | BEGIN_PRESERVE_LAST_ERROR; |
| 1484 | |
| 1485 | STATIC_CONTRACT_THROWS; |
| 1486 | STATIC_CONTRACT_GC_TRIGGERS; |
| 1487 | STATIC_CONTRACT_MODE_COOPERATIVE; |
| 1488 | STATIC_CONTRACT_ENTRY_POINT; |
| 1489 | |
| 1490 | MAKE_CURRENT_THREAD_AVAILABLE(); |
| 1491 | |
| 1492 | #ifdef _DEBUG |
| 1493 | Thread::ObjectRefFlush(CURRENT_THREAD); |
| 1494 | #endif |
| 1495 | |
| 1496 | FrameWithCookie<PrestubMethodFrame> frame(pTransitionBlock, pMD); |
| 1497 | PrestubMethodFrame * pPFrame = &frame; |
| 1498 | |
| 1499 | pPFrame->Push(CURRENT_THREAD); |
| 1500 | |
| 1501 | INSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 1502 | INSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 1503 | |
| 1504 | ETWOnStartup (PrestubWorker_V1,PrestubWorkerEnd_V1); |
| 1505 | |
    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");
| 1507 | |
| 1508 | // Running the PreStubWorker on a method causes us to access its MethodTable |
| 1509 | g_IBCLogger.LogMethodDescAccess(pMD); |
| 1510 | |
| 1511 | // Make sure the method table is restored, and method instantiation if present |
| 1512 | pMD->CheckRestore(); |
| 1513 | |
| 1514 | CONSISTENCY_CHECK(GetAppDomain()->CheckCanExecuteManagedCode(pMD)); |
| 1515 | |
| 1516 | // Note this is redundant with the above check but we do it anyway for safety |
| 1517 | // |
| 1518 | // This has been disabled so we have a better chance of catching these. Note that this check is |
| 1519 | // NOT sufficient for domain neutral and ngen cases. |
| 1520 | // |
| 1521 | // pMD->EnsureActive(); |
| 1522 | |
| 1523 | MethodTable *pDispatchingMT = NULL; |
| 1524 | |
| 1525 | if (pMD->IsVtableMethod()) |
| 1526 | { |
| 1527 | OBJECTREF curobj = pPFrame->GetThis(); |
| 1528 | |
| 1529 | if (curobj != NULL) // Check for virtual function called non-virtually on a NULL object |
| 1530 | { |
| 1531 | pDispatchingMT = curobj->GetMethodTable(); |
| 1532 | |
| 1533 | #ifdef FEATURE_ICASTABLE |
| 1534 | if (pDispatchingMT->IsICastable()) |
| 1535 | { |
| 1536 | MethodTable *pMDMT = pMD->GetMethodTable(); |
| 1537 | TypeHandle objectType(pDispatchingMT); |
| 1538 | TypeHandle methodType(pMDMT); |
| 1539 | |
| 1540 | GCStress<cfg_any>::MaybeTrigger(); |
| 1541 | INDEBUG(curobj = NULL); // curobj is unprotected and CanCastTo() can trigger GC |
| 1542 | if (!objectType.CanCastTo(methodType)) |
| 1543 | { |
                // Apparently ICastable magic was involved when we chose this method to
                // be called; that's why we'd better stick to the MethodTable it belongs
                // to. Otherwise DoPrestub() will fail, being unable to find an
                // implementation for pMD in pDispatchingMT.
| 1547 | |
| 1548 | pDispatchingMT = pMDMT; |
| 1549 | } |
| 1550 | } |
| 1551 | #endif // FEATURE_ICASTABLE |
| 1552 | |
            // For value types, the only virtual methods are interface implementations.
            // Thus pDispatchingMT == pMT because there is no inheritance in value
            // types. Note that the BoxedEntryPointStubs are shared between all
            // sharable generic instantiations, so the == test is on canonical
            // method tables.
| 1558 | #ifdef _DEBUG |
| 1559 | MethodTable *pMDMT = pMD->GetMethodTable(); // put this here to see what the MT is in debug mode |
| 1560 | _ASSERTE(!pMD->GetMethodTable()->IsValueType() || |
| 1561 | (pMD->IsUnboxingStub() && (pDispatchingMT->GetCanonicalMethodTable() == pMDMT->GetCanonicalMethodTable()))); |
| 1562 | #endif // _DEBUG |
| 1563 | } |
| 1564 | } |
| 1565 | |
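| | // DoPrestub may JIT code, create stubs, and take loader locks, so switch the |
| | // thread to preemptive GC mode for the duration of that potentially long work. |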
| 1566 | GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD); |
| 1567 | pbRetVal = pMD->DoPrestub(pDispatchingMT); |
| 1568 | |
| 1569 | UNINSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 1570 | UNINSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 1571 | |
| 1572 | { |
| 1573 | HardwareExceptionHolder |
| 1574 | |
| 1575 | // Give debugger opportunity to stop here |
| 1576 | ThePreStubPatch(); |
| 1577 | } |
| 1578 | |
| 1579 | pPFrame->Pop(CURRENT_THREAD); |
| 1580 | |
| 1581 | POSTCONDITION(pbRetVal != NULL); |
| 1582 | |
| 1583 | END_PRESERVE_LAST_ERROR; |
| 1584 | |
| 1585 | return pbRetVal; |
| 1586 | } |
| 1587 | |
| 1588 | #ifdef _DEBUG |
| 1589 | // |
| 1590 | // These are two functions for testing purposes only, in debug builds only. They can be used by setting |
| 1591 | // InjectFatalError to 3. They ensure that we really can restore the guard page for SEH try/catch clauses. |
| 1592 | // |
| 1593 | // @todo: Do we use this for anything anymore? |
| 1594 | // |
| 1595 | static void TestSEHGuardPageRestoreOverflow() |
| 1596 | { |
| 1597 | } |
| 1598 | |
| 1599 | static void TestSEHGuardPageRestore() |
| 1600 | { |
| 1601 | PAL_TRY(void *, unused, NULL) |
| 1602 | { |
| 1603 | TestSEHGuardPageRestoreOverflow(); |
| 1604 | } |
| 1605 | PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER) |
| 1606 | { |
| 1607 | _ASSERTE(!"Got first overflow."); |
| 1608 | } |
| 1609 | PAL_ENDTRY; |
| 1610 | |
| 1611 | PAL_TRY(void *, unused, NULL) |
| 1612 | { |
| 1613 | TestSEHGuardPageRestoreOverflow(); |
| 1614 | } |
| 1615 | PAL_EXCEPT(EXCEPTION_EXECUTE_HANDLER) |
| 1616 | { |
| 1617 | // If you get two asserts, then it works! |
| 1618 | _ASSERTE(!"Got second overflow."); |
| 1619 | } |
| 1620 | PAL_ENDTRY; |
| 1621 | } |
| 1622 | #endif // _DEBUG |
| 1623 | |
| 1624 | // Separated out the body of PreStubWorker for the case where we don't have a frame. |
| 1625 | // |
| 1626 | // Note that pDispatchingMT may not actually be the MT that is indirected through. |
| 1627 | // If a virtual method is called non-virtually, pMT will be used to indirect through instead. |
| 1628 | // |
| 1629 | // This returns a pointer to the stable entrypoint for the jitted method. Typically, this |
| 1630 | // is the same as the pointer to the top of the JITted code of the method. However, in |
| 1631 | // the case of methods that require stubs to be executed first (e.g., remoted methods |
| 1632 | // that require remoting stubs to be executed first), this stable entrypoint would be a |
| 1633 | // pointer to the stub, and not a pointer directly to the JITted code. |
| 1634 | PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT) |
| 1635 | { |
| 1636 | CONTRACT(PCODE) |
| 1637 | { |
| 1638 | STANDARD_VM_CHECK; |
| 1639 | POSTCONDITION(RETVAL != NULL); |
| 1640 | } |
| 1641 | CONTRACT_END; |
| 1642 | |
| 1643 | Stub *pStub = NULL; |
| 1644 | PCODE pCode = NULL; |
| 1645 | |
| 1646 | Thread *pThread = GetThread(); |
| 1647 | |
| 1648 | MethodTable *pMT = GetMethodTable(); |
| 1649 | |
| 1650 | // Running a prestub on a method causes us to access its MethodTable |
| 1651 | g_IBCLogger.LogMethodDescAccess(this); |
| 1652 | |
| 1653 | if (ContainsGenericVariables()) |
| 1654 | { |
| 1655 | COMPlusThrow(kInvalidOperationException, IDS_EE_CODEEXECUTION_CONTAINSGENERICVAR); |
| 1656 | } |
| 1657 | |
| 1658 | /************************** DEBUG CHECKS *************************/ |
| 1659 | /*----------------------------------------------------------------- |
| 1660 | // Halt if needed, GC stress, check the sharing count etc. |
| 1661 | */ |
| 1662 | |
| 1663 | #ifdef _DEBUG |
| 1664 | static unsigned ctr = 0; |
| 1665 | ctr++; |
| 1666 | |
| 1667 | if (g_pConfig->ShouldPrestubHalt(this)) |
| 1668 | { |
| 1669 | _ASSERTE(!"PreStubHalt"); |
| 1670 | } |
| 1671 | |
| 1672 | LOG((LF_CLASSLOADER, LL_INFO10000, "In PreStubWorker for %s::%s\n", |
| 1673 | m_pszDebugClassName, m_pszDebugMethodName)); |
| 1674 | |
| 1675 | // This is a nice place to test out having some fatal EE errors. We do this only in a checked build, and only |
| 1676 | // under the InjectFatalError key. |
| 1677 | if (g_pConfig->InjectFatalError() == 1) |
| 1678 | { |
| 1679 | EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE); |
| 1680 | } |
| 1681 | else if (g_pConfig->InjectFatalError() == 2) |
| 1682 | { |
| 1683 | EEPOLICY_HANDLE_FATAL_ERROR(COR_E_STACKOVERFLOW); |
| 1684 | } |
| 1685 | else if (g_pConfig->InjectFatalError() == 3) |
| 1686 | { |
| 1687 | TestSEHGuardPageRestore(); |
| 1688 | } |
| 1689 | |
| 1690 | // Useful to test GC with the prestub on the call stack |
| 1691 | if (g_pConfig->ShouldPrestubGC(this)) |
| 1692 | { |
| 1693 | GCX_COOP(); |
| 1694 | GCHeapUtilities::GetGCHeap()->GarbageCollect(-1); |
| 1695 | } |
| 1696 | #endif // _DEBUG |
| 1697 | |
| 1698 | STRESS_LOG1(LF_CLASSLOADER, LL_INFO10000, "Prestubworker: method %pM\n", this); |
| 1699 | |
| 1700 | |
| 1701 | GCStress<cfg_any, EeconfigFastGcSPolicy, CoopGcModePolicy>::MaybeTrigger(); |
| 1702 | |
| 1703 | |
| 1704 | #ifdef FEATURE_COMINTEROP |
| 1705 | /************************** INTEROP *************************/ |
| 1706 | /*----------------------------------------------------------------- |
| 1707 | // Some method descriptors are COMPLUS-to-COM call descriptors. |
| 1708 | // They are not your everyday method descriptors; for example, |
| 1709 | // they don't have IL or code. |
| 1710 | */ |
| 1711 | if (IsComPlusCall() || IsGenericComPlusCall()) |
| 1712 | { |
| 1713 | pCode = GetStubForInteropMethod(this); |
| 1714 | |
| 1715 | GetPrecode()->SetTargetInterlocked(pCode); |
| 1716 | |
| 1717 | RETURN GetStableEntryPoint(); |
| 1718 | } |
| 1719 | #endif // FEATURE_COMINTEROP |
| 1720 | |
| 1721 | // workaround: This is to handle a punted work item dealing with a skipped module constructor |
| 1722 | // due to appdomain unload. Basically, shared code was JITted in domain A, and this |
| 1723 | // caused a link to another shared module with a module cctor, which was skipped |
| 1724 | // or aborted in another appdomain to which we were trying to propagate the activation. |
| 1725 | // |
| 1726 | // Note that this is not a fix; it just minimizes the window in which the |
| 1727 | // issue can occur. |
| 1728 | if (pThread->IsAbortRequested()) |
| 1729 | { |
| 1730 | pThread->HandleThreadAbort(); |
| 1731 | } |
| 1732 | |
| 1733 | /*************************** CALL COUNTER ***********************/ |
| 1734 | // If we are counting calls for tiered compilation, leave the prestub |
| 1735 | // in place so that we can continue intercepting method invocations. |
| 1736 | // Only when the TieredCompilationManager has received enough call notifications |
| 1737 | // for this method do we back-patch the entry point. |
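| | // Illustrative sketch only; the real policy lives in CallCounter and |
| | // TieredCompilationManager (the names below are hypothetical): |
| | //   if (callsSeenSoFar < promotionThreshold) |
| | //       fCanBackpatchPrestub = FALSE;   // keep routing calls through the prestub |
| | //   else |
| | //       ;                               // allow backpatch; Tier1 promotion proceeds |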
| 1738 | BOOL fCanBackpatchPrestub = TRUE; |
| 1739 | #ifdef FEATURE_TIERED_COMPILATION |
| 1740 | BOOL fNeedsCallCounting = FALSE; |
| 1741 | TieredCompilationManager* pTieredCompilationManager = nullptr; |
| 1742 | if (IsEligibleForTieredCompilation() && TieredCompilationManager::RequiresCallCounting(this)) |
| 1743 | { |
| 1744 | pTieredCompilationManager = GetAppDomain()->GetTieredCompilationManager(); |
| 1745 | CallCounter * pCallCounter = GetCallCounter(); |
| 1746 | BOOL fWasPromotedToTier1 = FALSE; |
| 1747 | pCallCounter->OnMethodCalled(this, pTieredCompilationManager, &fCanBackpatchPrestub, &fWasPromotedToTier1); |
| 1748 | fNeedsCallCounting = !fWasPromotedToTier1; |
| 1749 | } |
| 1750 | #endif |
| 1751 | |
| 1752 | /*************************** VERSIONABLE CODE *********************/ |
| 1753 | |
| 1754 | BOOL fIsPointingToPrestub = IsPointingToPrestub(); |
| 1755 | #ifdef FEATURE_CODE_VERSIONING |
| 1756 | if (IsVersionableWithPrecode() || |
| 1757 | (!fIsPointingToPrestub && IsVersionableWithJumpStamp())) |
| 1758 | { |
| 1759 | pCode = GetCodeVersionManager()->PublishVersionableCodeIfNecessary(this, fCanBackpatchPrestub); |
| 1760 | |
| 1761 | #ifdef FEATURE_TIERED_COMPILATION |
| 1762 | if (pTieredCompilationManager != nullptr && fNeedsCallCounting && fCanBackpatchPrestub && pCode != NULL) |
| 1763 | { |
| 1764 | pTieredCompilationManager->OnMethodCallCountingStoppedWithoutTier1Promotion(this); |
| 1765 | } |
| 1766 | #endif |
| 1767 | |
| 1768 | fIsPointingToPrestub = IsPointingToPrestub(); |
| 1769 | } |
| 1770 | #endif |
| 1771 | |
| 1772 | /************************** BACKPATCHING *************************/ |
| 1773 | // See if the addr of code has changed from the pre-stub |
| 1774 | if (!fIsPointingToPrestub) |
| 1775 | { |
| 1776 | LOG((LF_CLASSLOADER, LL_INFO10000, |
| 1777 | " In PreStubWorker, method already jitted, backpatching call point\n")); |
| 1778 | #if defined(FEATURE_JIT_PITCHING) |
| 1779 | MarkMethodNotPitchingCandidate(this); |
| 1780 | #endif |
| 1781 | RETURN DoBackpatch(pMT, pDispatchingMT, TRUE); |
| 1782 | } |
| 1783 | |
| 1784 | if (pCode) |
| 1785 | { |
| 1786 | // The only reason we are still pointing to the prestub is that the call counter |
| 1787 | // prevented backpatching, or this thread lost the race with another thread in |
| 1788 | // updating the entry point. We should still short-circuit and return the code |
| 1789 | // without backpatching. |
| 1790 | RETURN pCode; |
| 1791 | } |
| 1792 | |
| 1793 | /************************** CODE CREATION *************************/ |
| 1794 | if (IsUnboxingStub()) |
| 1795 | { |
| 1796 | pStub = MakeUnboxingStubWorker(this); |
| 1797 | } |
| 1798 | #if defined(FEATURE_SHARE_GENERIC_CODE) |
| 1799 | else if (IsInstantiatingStub()) |
| 1800 | { |
| 1801 | pStub = MakeInstantiatingStubWorker(this); |
| 1802 | } |
| 1803 | #endif // defined(FEATURE_SHARE_GENERIC_CODE) |
| 1804 | else if (IsIL() || IsNoMetadata()) |
| 1805 | { |
| 1806 | if (!IsNativeCodeStableAfterInit()) |
| 1807 | { |
| 1808 | GetOrCreatePrecode(); |
| 1809 | } |
| 1810 | pCode = PrepareInitialCode(); |
| 1811 | } // end else if (IsIL() || IsNoMetadata()) |
| 1812 | else if (IsNDirect()) |
| 1813 | { |
| 1814 | pCode = GetStubForInteropMethod(this); |
| 1815 | GetOrCreatePrecode(); |
| 1816 | } |
| 1817 | else if (IsFCall()) |
| 1818 | { |
| 1819 | // Get the fcall implementation |
| 1820 | BOOL fSharedOrDynamicFCallImpl; |
| 1821 | pCode = ECall::GetFCallImpl(this, &fSharedOrDynamicFCallImpl); |
| 1822 | |
| 1823 | if (fSharedOrDynamicFCallImpl) |
| 1824 | { |
| 1825 | // Fake ctors share one implementation that has to be wrapped by prestub |
| 1826 | GetOrCreatePrecode(); |
| 1827 | } |
| 1828 | } |
| 1829 | else if (IsArray()) |
| 1830 | { |
| 1831 | pStub = GenerateArrayOpStub((ArrayMethodDesc*)this); |
| 1832 | } |
| 1833 | else if (IsEEImpl()) |
| 1834 | { |
| 1835 | _ASSERTE(GetMethodTable()->IsDelegate()); |
| 1836 | pCode = COMDelegate::GetInvokeMethodStub((EEImplMethodDesc*)this); |
| 1837 | GetOrCreatePrecode(); |
| 1838 | } |
| 1839 | else |
| 1840 | { |
| 1841 | // This is a method type we don't handle yet |
| 1842 | _ASSERTE(!"Unknown Method Type"); |
| 1843 | } |
| 1844 | |
| 1845 | /************************** POSTJIT *************************/ |
| 1846 | _ASSERTE(pCode == NULL || GetNativeCode() == NULL || pCode == GetNativeCode()); |
| 1847 | |
| 1848 | // At this point we must have either a pointer to managed code or to a stub. All of the above code |
| 1849 | // should have thrown an exception if it couldn't make a stub. |
| 1850 | _ASSERTE((pStub != NULL) ^ (pCode != NULL)); |
| 1851 | |
| 1852 | /************************** SECURITY *************************/ |
| 1853 | |
| 1854 | // Let's check to see if we need declarative security on this stub. If we have |
| 1855 | // security checks on this method or class, then we need to add an intermediate |
| 1856 | // stub that performs declarative checks prior to calling the real stub. |
| 1857 | // Record whether security needs to intercept this call (this also depends on whether we plan to use stubs for declarative security). |
| 1858 | |
| 1859 | |
| 1860 | _ASSERTE((pStub != NULL) ^ (pCode != NULL)); |
| 1861 | |
| 1862 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
| 1863 | // |
| 1864 | // We are seeing a memory reordering race around fixups (see DDB 193514 and related bugs). We get into a |
| 1865 | // situation where the patched precode is visible to other threads, but the resolved fixups |
| 1866 | // are not. IT SHOULD NEVER HAPPEN according to our current understanding of the x86/x64 memory model |
| 1867 | // (see the email thread attached to the bug for details). |
| 1868 | // |
| 1869 | // We suspect that there may be a bug in the hardware, or that the hardware may have shortcuts that |
| 1870 | // may be causing grief. We will try to avoid the race by executing an extra memory barrier. |
| 1871 | // |
| 1872 | MemoryBarrier(); |
| 1873 | #endif |
| 1874 | |
| 1875 | if (pCode != NULL) |
| 1876 | { |
| 1877 | if (HasPrecode()) |
| 1878 | GetPrecode()->SetTargetInterlocked(pCode); |
| 1879 | else |
| 1880 | if (!HasStableEntryPoint()) |
| 1881 | { |
| 1882 | SetStableEntryPointInterlocked(pCode); |
| 1883 | } |
| 1884 | } |
| 1885 | else |
| 1886 | { |
| 1887 | if (!GetOrCreatePrecode()->SetTargetInterlocked(pStub->GetEntryPoint())) |
| 1888 | { |
| 1889 | pStub->DecRef(); |
| 1890 | } |
| 1891 | else |
| 1892 | if (pStub->HasExternalEntryPoint()) |
| 1893 | { |
| 1894 | // If the Stub wraps code that is outside of the Stub allocation, then we |
| 1895 | // need to free the Stub allocation now. |
| 1896 | pStub->DecRef(); |
| 1897 | } |
| 1898 | } |
| 1899 | |
| 1900 | _ASSERTE(!IsPointingToPrestub()); |
| 1901 | _ASSERTE(HasStableEntryPoint()); |
| 1902 | |
| 1903 | RETURN DoBackpatch(pMT, pDispatchingMT, FALSE); |
| 1904 | } |
| 1905 | |
| 1906 | #endif // !DACCESS_COMPILE |
| 1907 | |
| 1908 | //========================================================================== |
| 1909 | // The following code manages the PreStub. All method stubs initially |
| 1910 | // use the prestub. |
| 1911 | //========================================================================== |
| 1912 | |
| 1913 | #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL) |
| 1914 | static PCODE g_UMThunkPreStub; |
| 1915 | #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL |
| 1916 | |
| 1917 | #ifndef DACCESS_COMPILE |
| 1918 | |
| 1919 | void ThePreStubManager::Init(void) |
| 1920 | { |
| 1921 | STANDARD_VM_CONTRACT; |
| 1922 | |
| 1923 | // |
| 1924 | // Add the prestub manager |
| 1925 | // |
| 1926 | |
| 1927 | StubManager::AddStubManager(new ThePreStubManager()); |
| 1928 | } |
| 1929 | |
| 1930 | //----------------------------------------------------------- |
| 1931 | // Initialize the prestub. |
| 1932 | //----------------------------------------------------------- |
| 1933 | void InitPreStubManager(void) |
| 1934 | { |
| 1935 | STANDARD_VM_CONTRACT; |
| 1936 | |
| 1937 | if (NingenEnabled()) |
| 1938 | { |
| 1939 | return; |
| 1940 | } |
| 1941 | |
| 1942 | #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL) |
| 1943 | g_UMThunkPreStub = GenerateUMThunkPrestub()->GetEntryPoint(); |
| 1944 | #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL |
| 1945 | |
| 1946 | ThePreStubManager::Init(); |
| 1947 | } |
| 1948 | |
| 1949 | PCODE TheUMThunkPreStub() |
| 1950 | { |
| 1951 | LIMITED_METHOD_CONTRACT; |
| 1952 | |
| 1953 | #if defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL) |
| 1954 | return g_UMThunkPreStub; |
| 1955 | #else // _TARGET_X86_ && !FEATURE_STUBS_AS_IL |
| 1956 | return GetEEFuncEntryPoint(TheUMEntryPrestub); |
| 1957 | #endif // _TARGET_X86_ && !FEATURE_STUBS_AS_IL |
| 1958 | } |
| 1959 | |
| 1960 | PCODE TheVarargNDirectStub(BOOL hasRetBuffArg) |
| 1961 | { |
| 1962 | LIMITED_METHOD_CONTRACT; |
| 1963 | |
| 1964 | #if !defined(_TARGET_X86_) && !defined(_TARGET_ARM64_) |
| 1965 | if (hasRetBuffArg) |
| 1966 | { |
| 1967 | return GetEEFuncEntryPoint(VarargPInvokeStub_RetBuffArg); |
| 1968 | } |
| 1969 | else |
| 1970 | #endif |
| 1971 | { |
| 1972 | return GetEEFuncEntryPoint(VarargPInvokeStub); |
| 1973 | } |
| 1974 | } |
| 1975 | |
| 1976 | static PCODE PatchNonVirtualExternalMethod(MethodDesc * pMD, PCODE pCode, PTR_CORCOMPILE_IMPORT_SECTION pImportSection, TADDR pIndirection) |
| 1977 | { |
| 1978 | STANDARD_VM_CONTRACT; |
| 1979 | |
| 1980 | // |
| 1981 | // Skip the fixup precode jump for better perf. Since we have the MethodDesc available, we can use a cheaper |
| 1982 | // method than code:Precode::TryToSkipFixupPrecode. |
| 1983 | // |
| 1984 | #ifdef HAS_FIXUP_PRECODE |
| 1985 | if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_FIXUP |
| 1986 | && pMD->IsNativeCodeStableAfterInit()) |
| 1987 | { |
| 1988 | PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode(); |
| 1989 | if (pDirectTarget != NULL) |
| 1990 | pCode = pDirectTarget; |
| 1991 | } |
| 1992 | #endif //HAS_FIXUP_PRECODE |
| 1993 | |
| 1994 | if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE) |
| 1995 | { |
| 1996 | CORCOMPILE_EXTERNAL_METHOD_THUNK * pThunk = (CORCOMPILE_EXTERNAL_METHOD_THUNK *)pIndirection; |
| 1997 | |
| 1998 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
| 1999 | INT64 oldValue = *(INT64*)pThunk; |
| 2000 | BYTE* pOldValue = (BYTE*)&oldValue; |
| 2001 | |
| 2002 | if (pOldValue[0] == X86_INSTR_CALL_REL32) |
| 2003 | { |
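| | // The thunk starts out as a "call _ExternalMethodFixupStub" (0xE8 + rel32). Rewrite |
| | // it in place to a "jmp" (0xE9 + rel32) to the real target so later calls bypass the |
| | // fixup. The whole 8-byte slot is swapped with a single interlocked compare-exchange, |
| | // so racing threads observe either the old call or the new jmp, never a torn instruction. |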
| 2004 | INT64 newValue = oldValue; |
| 2005 | BYTE* pNewValue = (BYTE*)&newValue; |
| 2006 | pNewValue[0] = X86_INSTR_JMP_REL32; |
| 2007 | |
| 2008 | *(INT32 *)(pNewValue+1) = rel32UsingJumpStub((INT32*)(&pThunk->callJmp[1]), pCode, pMD, NULL); |
| 2009 | |
| 2010 | _ASSERTE(IS_ALIGNED((size_t)pThunk, sizeof(INT64))); |
| 2011 | EnsureWritableExecutablePages(pThunk, sizeof(INT64)); |
| 2012 | FastInterlockCompareExchangeLong((INT64*)pThunk, newValue, oldValue); |
| 2013 | |
| 2014 | FlushInstructionCache(GetCurrentProcess(), pThunk, 8); |
| 2015 | } |
| 2016 | #elif defined(_TARGET_ARM_) || defined(_TARGET_ARM64_) |
| 2017 | // Patch up the thunk to point to the actual implementation of the cross-module external method |
| 2018 | EnsureWritableExecutablePages(&pThunk->m_pTarget); |
| 2019 | pThunk->m_pTarget = pCode; |
| 2020 | |
| 2021 | #if defined(_TARGET_ARM_) |
| 2022 | // ThumbBit must be set on the target address |
| 2023 | _ASSERTE(pCode & THUMB_CODE); |
| 2024 | #endif |
| 2025 | #else |
| 2026 | PORTABILITY_ASSERT("ExternalMethodFixupWorker"); |
| 2027 | #endif |
| 2028 | } |
| 2029 | else |
| 2030 | { |
| 2031 | *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode; |
| 2032 | } |
| 2033 | |
| 2034 | return pCode; |
| 2035 | } |
| 2036 | |
| 2037 | //========================================================================================== |
| 2038 | // In NGen images, calls to external methods start out pointing to jump thunks. |
| 2039 | // These jump thunks initially point to the assembly code _ExternalMethodFixupStub, |
| 2040 | // which transfers control to ExternalMethodFixupWorker. That worker patches the jump |
| 2041 | // thunk to point to the actual cross-module address for the method body. |
| 2042 | // Some methods also have one-time prestubs; for those we defer the patching until |
| 2043 | // we have the final stable method entry point. |
| 2044 | // |
| 2045 | EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBlock, TADDR pIndirection, DWORD sectionIndex, Module * pModule) |
| 2046 | { |
| 2047 | STATIC_CONTRACT_THROWS; |
| 2048 | STATIC_CONTRACT_GC_TRIGGERS; |
| 2049 | STATIC_CONTRACT_MODE_COOPERATIVE; |
| 2050 | STATIC_CONTRACT_ENTRY_POINT; |
| 2051 | |
| 2052 | // We must save (and restore) the Last Error code before we call anything |
| 2053 | // that could overwrite it. Any callsite that leads to TlsGetValue will |
| 2054 | // potentially overwrite the Last Error code. |
| 2055 | |
| 2056 | // |
| 2057 | // In Dev10 bug 837293 we were overwriting the Last Error code on the first |
| 2058 | // call to a PInvoke method. This occurred when we were running a |
| 2059 | // (precompiled) PInvoke IL stub implemented in the ngen image. |
| 2060 | // |
| 2061 | // In this IL stub implementation we call the native method kernel32!GetFileAttributes, |
| 2062 | // and then we immediately try to save the Last Error code by calling the |
| 2063 | // mscorlib method System.StubHelpers.StubHelpers.SetLastError(). |
| 2064 | // |
| 2065 | // However when we are coming from a precompiled IL Stub in an ngen image |
| 2066 | // we must use an ExternalMethodFixup to find the target address of |
| 2067 | // System.StubHelpers.StubHelpers.SetLastError() and this was overwriting |
| 2068 | // the value of the Last Error before it could be retrieved and saved. |
| 2069 | // |
| 2070 | |
| 2071 | PCODE pCode = NULL; |
| 2072 | |
| 2073 | BEGIN_PRESERVE_LAST_ERROR; |
| 2074 | |
| 2075 | MAKE_CURRENT_THREAD_AVAILABLE(); |
| 2076 | |
| 2077 | #ifdef _DEBUG |
| 2078 | Thread::ObjectRefFlush(CURRENT_THREAD); |
| 2079 | #endif |
| 2080 | |
| 2081 | FrameWithCookie<ExternalMethodFrame> frame(pTransitionBlock); |
| 2082 | ExternalMethodFrame * pEMFrame = &frame; |
| 2083 | |
| 2084 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
| 2085 | // Decode indirection cell from callsite if it is not present |
| 2086 | if (pIndirection == NULL) |
| 2087 | { |
| 2088 | // Assume that the callsite is call [xxxxxxxx] |
| 2089 | PCODE retAddr = pEMFrame->GetReturnAddress(); |
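| | // On x86, "call [addr32]" stores the absolute address of the indirection cell in |
| | // the 4 bytes preceding the return address; on amd64 the same form is RIP-relative, |
| | // so those 4 bytes hold a displacement to be added to the return address. |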
| 2090 | #ifdef _TARGET_X86_ |
| 2091 | pIndirection = *(((TADDR *)retAddr) - 1); |
| 2092 | #else |
| 2093 | pIndirection = *(((INT32 *)retAddr) - 1) + retAddr; |
| 2094 | #endif |
| 2095 | } |
| 2096 | #endif |
| 2097 | |
| 2098 | // FUTURE: Consider always passing in module and section index to avoid the lookups |
| 2099 | if (pModule == NULL) |
| 2100 | { |
| 2101 | pModule = ExecutionManager::FindZapModule(pIndirection); |
| 2102 | sectionIndex = (DWORD)-1; |
| 2103 | } |
| 2104 | _ASSERTE(pModule != NULL); |
| 2105 | |
| 2106 | pEMFrame->SetCallSite(pModule, pIndirection); |
| 2107 | |
| 2108 | pEMFrame->Push(CURRENT_THREAD); // Push the new ExternalMethodFrame onto the frame stack |
| 2109 | |
| 2110 | INSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 2111 | INSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 2112 | |
| 2113 | bool fVirtual = false; |
| 2114 | MethodDesc * pMD = NULL; |
| 2115 | MethodTable * pMT = NULL; |
| 2116 | DWORD slot = 0; |
| 2117 | |
| 2118 | { |
| 2119 | GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD); |
| 2120 | |
| 2121 | PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage(); |
| 2122 | |
| 2123 | RVA rva = pNativeImage->GetDataRva(pIndirection); |
| 2124 | |
| 2125 | PTR_CORCOMPILE_IMPORT_SECTION pImportSection; |
| 2126 | if (sectionIndex != (DWORD)-1) |
| 2127 | { |
| 2128 | pImportSection = pModule->GetImportSectionFromIndex(sectionIndex); |
| 2129 | _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva)); |
| 2130 | } |
| 2131 | else |
| 2132 | { |
| 2133 | pImportSection = pModule->GetImportSectionForRVA(rva); |
| 2134 | } |
| 2135 | _ASSERTE(pImportSection != NULL); |
| 2136 | |
| 2137 | COUNT_T index; |
| 2138 | if (pImportSection->Flags & CORCOMPILE_IMPORT_FLAGS_CODE) |
| 2139 | { |
| 2140 | _ASSERTE(pImportSection->EntrySize == sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK)); |
| 2141 | index = (rva - pImportSection->Section.VirtualAddress) / sizeof(CORCOMPILE_EXTERNAL_METHOD_THUNK); |
| 2142 | } |
| 2143 | else |
| 2144 | { |
| 2145 | _ASSERTE(pImportSection->EntrySize == sizeof(TADDR)); |
| 2146 | index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR); |
| 2147 | } |
| 2148 | |
| 2149 | PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures)); |
| 2150 | |
| 2151 | PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]); |
| 2152 | |
| 2153 | BYTE kind = *pBlob++; |
| 2154 | |
| 2155 | Module * pInfoModule = pModule; |
| 2156 | if (kind & ENCODE_MODULE_OVERRIDE) |
| 2157 | { |
| 2158 | DWORD moduleIndex = CorSigUncompressData(pBlob); |
| 2159 | pInfoModule = pModule->GetModuleFromIndex(moduleIndex); |
| 2160 | kind &= ~ENCODE_MODULE_OVERRIDE; |
| 2161 | } |
| 2162 | |
| 2163 | TypeHandle th; |
| 2164 | switch (kind) |
| 2165 | { |
| 2166 | case ENCODE_METHOD_ENTRY: |
| 2167 | { |
| 2168 | pMD = ZapSig::DecodeMethod(pModule, |
| 2169 | pInfoModule, |
| 2170 | pBlob); |
| 2171 | |
| 2172 | if (pModule->IsReadyToRun()) |
| 2173 | { |
| 2174 | // We do not emit activation fixups for version resilient references. Activate the target explicitly. |
| 2175 | pMD->EnsureActive(); |
| 2176 | } |
| 2177 | |
| 2178 | break; |
| 2179 | } |
| 2180 | |
| 2181 | case ENCODE_METHOD_ENTRY_DEF_TOKEN: |
| 2182 | { |
| 2183 | mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef); |
| 2184 | pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE); |
| 2185 | |
| 2186 | pMD->PrepareForUseAsADependencyOfANativeImage(); |
| 2187 | |
| 2188 | if (pModule->IsReadyToRun()) |
| 2189 | { |
| 2190 | // We do not emit activation fixups for version resilient references. Activate the target explicitly. |
| 2191 | pMD->EnsureActive(); |
| 2192 | } |
| 2193 | |
| 2194 | break; |
| 2195 | } |
| 2196 | |
| 2197 | case ENCODE_METHOD_ENTRY_REF_TOKEN: |
| 2198 | { |
| 2199 | SigTypeContext typeContext; |
| 2200 | mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef); |
| 2201 | FieldDesc * pFD = NULL; |
| 2202 | |
| 2203 | MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th); |
| 2204 | _ASSERTE(pMD != NULL); |
| 2205 | |
| 2206 | pMD->PrepareForUseAsADependencyOfANativeImage(); |
| 2207 | |
| 2208 | if (pModule->IsReadyToRun()) |
| 2209 | { |
| 2210 | // We do not emit activation fixups for version resilient references. Activate the target explicitly. |
| 2211 | pMD->EnsureActive(); |
| 2212 | } |
| 2213 | else |
| 2214 | { |
| 2215 | #ifdef FEATURE_WINMD_RESILIENT |
| 2216 | // We do not emit activation fixups for version resilient references. Activate the target explicitly. |
| 2217 | pMD->EnsureActive(); |
| 2218 | #endif |
| 2219 | } |
| 2220 | |
| 2221 | break; |
| 2222 | } |
| 2223 | |
| 2224 | case ENCODE_VIRTUAL_ENTRY: |
| 2225 | { |
| 2226 | pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th); |
| 2227 | |
| 2228 | VirtualEntry: |
| 2229 | pMD->PrepareForUseAsADependencyOfANativeImage(); |
| 2230 | |
| 2231 | if (pMD->IsVtableMethod()) |
| 2232 | { |
| 2233 | slot = pMD->GetSlot(); |
| 2234 | pMT = th.IsNull() ? pMD->GetMethodTable() : th.GetMethodTable(); |
| 2235 | |
| 2236 | fVirtual = true; |
| 2237 | } |
| 2238 | else |
| 2239 | if (pModule->IsReadyToRun()) |
| 2240 | { |
| 2241 | // We do not emit activation fixups for version resilient references. Activate the target explicitly. |
| 2242 | pMD->EnsureActive(); |
| 2243 | } |
| 2244 | break; |
| 2245 | } |
| 2246 | |
| 2247 | case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN: |
| 2248 | { |
| 2249 | mdToken MethodDef = TokenFromRid(CorSigUncompressData(pBlob), mdtMethodDef); |
| 2250 | pMD = MemberLoader::GetMethodDescFromMethodDef(pInfoModule, MethodDef, FALSE); |
| 2251 | |
| 2252 | goto VirtualEntry; |
| 2253 | } |
| 2254 | |
| 2255 | case ENCODE_VIRTUAL_ENTRY_REF_TOKEN: |
| 2256 | { |
| 2257 | mdToken MemberRef = TokenFromRid(CorSigUncompressData(pBlob), mdtMemberRef); |
| 2258 | |
| 2259 | FieldDesc * pFD = NULL; |
| 2260 | |
| 2261 | SigTypeContext typeContext; |
| 2262 | MemberLoader::GetDescFromMemberRef(pInfoModule, MemberRef, &pMD, &pFD, &typeContext, FALSE /* strict metadata checks */, &th, TRUE /* actual type required */); |
| 2263 | _ASSERTE(pMD != NULL); |
| 2264 | |
| 2265 | goto VirtualEntry; |
| 2266 | } |
| 2267 | |
| 2268 | case ENCODE_VIRTUAL_ENTRY_SLOT: |
| 2269 | { |
| 2270 | slot = CorSigUncompressData(pBlob); |
| 2271 | pMT = ZapSig::DecodeType(pModule, pInfoModule, pBlob).GetMethodTable(); |
| 2272 | |
| 2273 | fVirtual = true; |
| 2274 | break; |
| 2275 | } |
| 2276 | |
| 2277 | default: |
| 2278 | _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND"); |
| 2279 | ThrowHR(COR_E_BADIMAGEFORMAT); |
| 2280 | } |
| 2281 | |
| 2282 | if (fVirtual) |
| 2283 | { |
| 2284 | GCX_COOP_THREAD_EXISTS(CURRENT_THREAD); |
| 2285 | |
| 2286 | // Get the stub manager for this module |
| 2287 | VirtualCallStubManager *pMgr = pModule->GetLoaderAllocator()->GetVirtualCallStubManager(); |
| 2288 | |
| 2289 | OBJECTREF *protectedObj = pEMFrame->GetThisPtr(); |
| 2290 | _ASSERTE(protectedObj != NULL); |
| 2291 | if (*protectedObj == NULL) { |
| 2292 | COMPlusThrow(kNullReferenceException); |
| 2293 | } |
| 2294 | |
| 2295 | DispatchToken token; |
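| | // Interface dispatch (and platforms where vtable indirections are relative) must go |
| | // through the virtual-stub-dispatch resolver; ordinary vtable calls can instead be |
| | // satisfied by a shared vtable call stub written directly into the indirection cell. |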
| 2296 | if (pMT->IsInterface() || MethodTable::VTableIndir_t::isRelative) |
| 2297 | { |
| 2298 | token = pMT->GetLoaderAllocator()->GetDispatchToken(pMT->GetTypeID(), slot); |
| 2299 | StubCallSite callSite(pIndirection, pEMFrame->GetReturnAddress()); |
| 2300 | pCode = pMgr->ResolveWorker(&callSite, protectedObj, token, VirtualCallStubManager::SK_LOOKUP); |
| 2301 | } |
| 2302 | else |
| 2303 | { |
| 2304 | pCode = pMgr->GetVTableCallStub(slot); |
| 2305 | *EnsureWritableExecutablePages((TADDR *)pIndirection) = pCode; |
| 2306 | } |
| 2307 | _ASSERTE(pCode != NULL); |
| 2308 | } |
| 2309 | else |
| 2310 | { |
| 2311 | _ASSERTE(pMD != NULL); |
| 2312 | |
| 2313 | { |
| 2314 | // Switch to cooperative mode to avoid racing with GC stackwalk |
| 2315 | GCX_COOP_THREAD_EXISTS(CURRENT_THREAD); |
| 2316 | pEMFrame->SetFunction(pMD); |
| 2317 | } |
| 2318 | |
| 2319 | pCode = pMD->GetMethodEntryPoint(); |
| 2320 | |
| 2321 | // |
| 2322 | // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting interception |
| 2323 | // into account and so it would cause otherwise intercepted methods to be JITed. It is a compat issue if the JITing fails. |
| 2324 | // |
| 2325 | if (!DoesSlotCallPrestub(pCode)) |
| 2326 | { |
| 2327 | pCode = PatchNonVirtualExternalMethod(pMD, pCode, pImportSection, pIndirection); |
| 2328 | } |
| 2329 | } |
| 2330 | |
| 2331 | #if defined (FEATURE_JIT_PITCHING) |
| 2332 | DeleteFromPitchingCandidate(pMD); |
| 2333 | #endif |
| 2334 | } |
| 2335 | |
| 2336 | // Force a GC on every jit if the stress level is high enough |
| 2337 | GCStress<cfg_any>::MaybeTrigger(); |
| 2338 | |
| 2339 | // Ready to return |
| 2340 | |
| 2341 | UNINSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 2342 | UNINSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 2343 | |
| 2344 | pEMFrame->Pop(CURRENT_THREAD); // Pop the ExternalMethodFrame from the frame stack |
| 2345 | |
| 2346 | END_PRESERVE_LAST_ERROR; |
| 2347 | |
| 2348 | return pCode; |
| 2349 | } |
| 2350 | |
| 2351 | |
| 2352 | #if !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_) |
| 2353 | |
| 2354 | //========================================================================================== |
| 2355 | // In NGen images, virtual slots inherited from cross-module dependencies point to jump thunks. |
| 2356 | // These jump thunks initially point to VirtualMethodFixupStub, which transfers control here. |
| 2357 | // This method, 'VirtualMethodFixupWorker', patches the jump thunk to point to the actual |
| 2358 | // inherited method body once we have executed the precode and have a stable entry point. |
| 2359 | // |
| 2360 | EXTERN_C PCODE VirtualMethodFixupWorker(Object * pThisPtr, CORCOMPILE_VIRTUAL_IMPORT_THUNK *pThunk) |
| 2361 | { |
| 2362 | CONTRACTL |
| 2363 | { |
| 2364 | NOTHROW; |
| 2365 | GC_NOTRIGGER; |
| 2366 | MODE_COOPERATIVE; |
| 2367 | ENTRY_POINT; |
| 2368 | } |
| 2369 | CONTRACTL_END; |
| 2370 | |
| 2371 | _ASSERTE(pThisPtr != NULL); |
| 2372 | VALIDATEOBJECT(pThisPtr); |
| 2373 | |
| 2374 | MethodTable * pMT = pThisPtr->GetMethodTable(); |
| 2375 | |
| 2376 | WORD slotNumber = pThunk->slotNum; |
| 2377 | _ASSERTE(slotNumber != (WORD)-1); |
| 2378 | |
| 2379 | PCODE pCode = pMT->GetRestoredSlot(slotNumber); |
| 2380 | |
| 2381 | if (!DoesSlotCallPrestub(pCode)) |
| 2382 | { |
| 2383 | // Skip fixup precode jump for better perf |
| 2384 | PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode); |
| 2385 | if (pDirectTarget != NULL) |
| 2386 | pCode = pDirectTarget; |
| 2387 | |
| 2388 | // Patch the thunk to the actual method body |
| 2389 | if (EnsureWritableExecutablePagesNoThrow(&pThunk->m_pTarget, sizeof(pThunk->m_pTarget))) |
| 2390 | pThunk->m_pTarget = pCode; |
| 2391 | } |
| 2392 | #if defined(_TARGET_ARM_) |
| 2393 | // The target address should have the thumb bit set |
| 2394 | _ASSERTE(pCode & THUMB_CODE); |
| 2395 | #endif |
| 2396 | return pCode; |
| 2397 | } |
| 2398 | #endif // !defined(_TARGET_X86_) && !defined(_TARGET_AMD64_) |
| 2399 | |
| 2400 | #ifdef FEATURE_READYTORUN |
| 2401 | |
| 2402 | static PCODE getHelperForInitializedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD) |
| 2403 | { |
| 2404 | STANDARD_VM_CONTRACT; |
| 2405 | |
| 2406 | PCODE pHelper = NULL; |
| 2407 | |
| 2408 | switch (kind) |
| 2409 | { |
| 2410 | case ENCODE_STATIC_BASE_NONGC_HELPER: |
| 2411 | { |
| 2412 | PVOID baseNonGC; |
| 2413 | { |
| 2414 | GCX_COOP(); |
| 2415 | baseNonGC = pMT->GetNonGCStaticsBasePointer(); |
| 2416 | } |
| 2417 | pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseNonGC); |
| 2418 | } |
| 2419 | break; |
| 2420 | case ENCODE_STATIC_BASE_GC_HELPER: |
| 2421 | { |
| 2422 | PVOID baseGC; |
| 2423 | { |
| 2424 | GCX_COOP(); |
| 2425 | baseGC = pMT->GetGCStaticsBasePointer(); |
| 2426 | } |
| 2427 | pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)baseGC); |
| 2428 | } |
| 2429 | break; |
| 2430 | case ENCODE_CCTOR_TRIGGER: |
| 2431 | pHelper = DynamicHelpers::CreateReturn(pModule->GetLoaderAllocator()); |
| 2432 | break; |
| 2433 | case ENCODE_FIELD_ADDRESS: |
| 2434 | { |
| 2435 | _ASSERTE(pFD->IsStatic()); |
| 2436 | |
| 2437 | PTR_VOID pAddress; |
| 2438 | |
| 2439 | { |
| 2440 | GCX_COOP(); |
| 2441 | |
| 2442 | PTR_BYTE base = 0; |
| 2443 | if (!pFD->IsRVA()) // for RVA the base is ignored |
| 2444 | base = pFD->GetBase(); |
| 2445 | pAddress = pFD->GetStaticAddressHandle((void *)dac_cast<TADDR>(base)); |
| 2446 | } |
| 2447 | |
| 2448 | // The following code assumes that the statics are pinned, which is not the case for collectible types |
| 2449 | _ASSERTE(!pFD->GetEnclosingMethodTable()->Collectible()); |
| 2450 | |
| 2451 | // Unbox valuetype fields |
| 2452 | if (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE && !pFD->IsRVA()) |
| 2453 | pHelper = DynamicHelpers::CreateReturnIndirConst(pModule->GetLoaderAllocator(), (TADDR)pAddress, (INT8)Object::GetOffsetOfFirstField()); |
| 2454 | else |
| 2455 | pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), (TADDR)pAddress); |
| 2456 | } |
| 2457 | break; |
| 2458 | default: |
| 2459 | _ASSERTE(!"Unexpected statics CORCOMPILE_FIXUP_BLOB_KIND"); |
| 2460 | ThrowHR(COR_E_BADIMAGEFORMAT); |
| 2461 | } |
| 2462 | |
| 2463 | return pHelper; |
| 2464 | } |
| 2465 | |
| 2466 | static PCODE getHelperForSharedStatic(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT, FieldDesc * pFD) |
| 2467 | { |
| 2468 | STANDARD_VM_CONTRACT; |
| 2469 | |
| 2470 | _ASSERTE(kind == ENCODE_FIELD_ADDRESS); |
| 2471 | |
| 2472 | CorInfoHelpFunc helpFunc = CEEInfo::getSharedStaticsHelper(pFD, pMT); |
| 2473 | |
| 2474 | TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID(); |
| 2475 | |
| 2476 | TADDR classID = 0; |
| 2477 | if (helpFunc != CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR && helpFunc != CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR) |
| 2478 | { |
| 2479 | if (pMT->IsDynamicStatics()) |
| 2480 | { |
| 2481 | classID = pMT->GetModuleDynamicEntryID(); |
| 2482 | } |
| 2483 | else |
| 2484 | { |
| 2485 | classID = pMT->GetClassIndex(); |
| 2486 | } |
| 2487 | } |
| 2488 | |
| 2489 | bool fUnbox = (pFD->GetFieldType() == ELEMENT_TYPE_VALUETYPE); |
| 2490 | |
| 2491 | AllocMemTracker amTracker; |
| 2492 | |
| 2493 | StaticFieldAddressArgs * pArgs = (StaticFieldAddressArgs *)amTracker.Track( |
| 2494 | pModule->GetLoaderAllocator()->GetHighFrequencyHeap()-> |
| 2495 | AllocMem(S_SIZE_T(sizeof(StaticFieldAddressArgs)))); |
| 2496 | |
| 2497 | pArgs->staticBaseHelper = (FnStaticBaseHelper)CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc); |
| 2498 | pArgs->arg0 = moduleID; |
| 2499 | pArgs->arg1 = classID; |
| 2500 | pArgs->offset = pFD->GetOffset(); |
| 2501 | |
| 2502 | PCODE pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), (TADDR)pArgs, |
| 2503 | fUnbox ? GetEEFuncEntryPoint(JIT_StaticFieldAddressUnbox_Dynamic) : GetEEFuncEntryPoint(JIT_StaticFieldAddress_Dynamic)); |
| 2504 | |
| 2505 | amTracker.SuppressRelease(); |
| 2506 | |
| 2507 | return pHelper; |
| 2508 | } |
| 2509 | |
| 2510 | static PCODE getHelperForStaticBase(Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND kind, MethodTable * pMT) |
| 2511 | { |
| 2512 | STANDARD_VM_CONTRACT; |
| 2513 | |
| 2514 | int helpFunc = CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE; |
| 2515 | |
| 2516 | if (kind == ENCODE_STATIC_BASE_GC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER) |
| 2517 | { |
| 2518 | helpFunc = CORINFO_HELP_GETSHARED_GCSTATIC_BASE; |
| 2519 | } |
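| | // The adjustments below rely on CorInfoHelpFunc laying out the DYNAMICCLASS, NOCTOR, |
| | // and THREADSTATIC variants of each statics helper at fixed distances from the shared |
| | // GC-statics base helper, so the right variant is selected by adding constant deltas. |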
| 2520 | |
| 2521 | if (pMT->IsDynamicStatics()) |
| 2522 | { |
| 2523 | const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS - CORINFO_HELP_GETSHARED_GCSTATIC_BASE; |
| 2524 | helpFunc += delta; |
| 2525 | } |
| 2526 | else |
| 2527 | if (!pMT->HasClassConstructor() && !pMT->HasBoxedRegularStatics()) |
| 2528 | { |
| 2529 | const int delta = CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR - CORINFO_HELP_GETSHARED_GCSTATIC_BASE; |
| 2530 | helpFunc += delta; |
| 2531 | } |
| 2532 | |
| 2533 | if (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER || kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER) |
| 2534 | { |
| 2535 | const int delta = CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE - CORINFO_HELP_GETSHARED_GCSTATIC_BASE; |
| 2536 | helpFunc += delta; |
| 2537 | } |
| 2538 | |
| 2539 | PCODE pHelper; |
| 2540 | if (helpFunc == CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR || helpFunc == CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR) |
| 2541 | { |
| 2542 | pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), pMT->GetModule()->GetModuleID(), CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc)); |
| 2543 | } |
| 2544 | else |
| 2545 | { |
| 2546 | TADDR moduleID = pMT->GetModuleForStatics()->GetModuleID(); |
| 2547 | |
| 2548 | TADDR classID; |
| 2549 | if (pMT->IsDynamicStatics()) |
| 2550 | { |
| 2551 | classID = pMT->GetModuleDynamicEntryID(); |
| 2552 | } |
| 2553 | else |
| 2554 | { |
| 2555 | classID = pMT->GetClassIndex(); |
| 2556 | } |
| 2557 | |
| 2558 | pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), moduleID, classID, CEEJitInfo::getHelperFtnStatic((CorInfoHelpFunc)helpFunc)); |
| 2559 | } |
| 2560 | |
| 2561 | return pHelper; |
| 2562 | } |
| 2563 | |
| 2564 | TADDR GetFirstArgumentRegisterValuePtr(TransitionBlock * pTransitionBlock) |
| 2565 | { |
| 2566 | TADDR pArgument = (TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters(); |
| 2567 | #ifdef _TARGET_X86_ |
| 2568 | // x86 is special as always |
| 2569 | pArgument += offsetof(ArgumentRegisters, ECX); |
| 2570 | #endif |
| 2571 | |
| 2572 | return pArgument; |
| 2573 | } |
| 2574 | |
| 2575 | void ProcessDynamicDictionaryLookup(TransitionBlock * pTransitionBlock, |
| 2576 | Module * pModule, |
| 2577 | Module * pInfoModule, |
| 2578 | BYTE kind, |
| 2579 | PCCOR_SIGNATURE pBlob, |
| 2580 | PCCOR_SIGNATURE pBlobStart, |
| 2581 | CORINFO_RUNTIME_LOOKUP * pResult, |
| 2582 | DWORD * pDictionaryIndexAndSlot) |
| 2583 | { |
| 2584 | STANDARD_VM_CONTRACT; |
| 2585 | |
| 2586 | TADDR genericContextPtr = *(TADDR*)GetFirstArgumentRegisterValuePtr(pTransitionBlock); |
| 2587 | |
| 2588 | pResult->testForFixup = pResult->testForNull = false; |
| 2589 | pResult->signature = NULL; |
| 2590 | |
| 2591 | pResult->indirectFirstOffset = 0; |
| 2592 | pResult->indirectSecondOffset = 0; |
| 2593 | |
| 2594 | pResult->indirections = CORINFO_USEHELPER; |
| 2595 | |
| 2596 | DWORD numGenericArgs = 0; |
| 2597 | MethodTable* pContextMT = NULL; |
| 2598 | MethodDesc* pContextMD = NULL; |
| 2599 | |
| 2600 | if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD) |
| 2601 | { |
| 2602 | pContextMD = (MethodDesc*)genericContextPtr; |
| 2603 | numGenericArgs = pContextMD->GetNumGenericMethodArgs(); |
| 2604 | pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_METHOD; |
| 2605 | } |
| 2606 | else |
| 2607 | { |
| 2608 | pContextMT = (MethodTable*)genericContextPtr; |
| 2609 | |
| 2610 | if (kind == ENCODE_DICTIONARY_LOOKUP_THISOBJ) |
| 2611 | { |
| 2612 | TypeHandle contextTypeHandle = ZapSig::DecodeType(pModule, pInfoModule, pBlob); |
| 2613 | |
| 2614 | SigPointer p(pBlob); |
| 2615 | p.SkipExactlyOne(); |
| 2616 | pBlob = p.GetPtr(); |
| 2617 | |
| 2618 | pContextMT = pContextMT->GetMethodTableMatchingParentClass(contextTypeHandle.AsMethodTable()); |
| 2619 | } |
| 2620 | |
| 2621 | numGenericArgs = pContextMT->GetNumGenericArgs(); |
| 2622 | pResult->helper = CORINFO_HELP_RUNTIMEHANDLE_CLASS; |
| 2623 | } |
| 2624 | |
| 2625 | _ASSERTE(numGenericArgs > 0); |
| 2626 | |
| 2627 | CORCOMPILE_FIXUP_BLOB_KIND signatureKind = (CORCOMPILE_FIXUP_BLOB_KIND)CorSigUncompressData(pBlob); |
| 2628 | |
| 2629 | // |
| 2630 | // Optimization cases |
| 2631 | // |
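| | // When the signature is just a bare generic parameter (MVAR/VAR), the result is a |
| | // fixed slot in the method or class dictionary, so we can emit a hard-coded |
| | // indirection chain instead of falling back to the runtime lookup helper. |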
| 2632 | if (signatureKind == ENCODE_TYPE_HANDLE) |
| 2633 | { |
| 2634 | SigPointer sigptr(pBlob, -1); |
| 2635 | |
| 2636 | CorElementType type; |
| 2637 | IfFailThrow(sigptr.GetElemType(&type)); |
| 2638 | |
| 2639 | if ((type == ELEMENT_TYPE_MVAR) && (kind == ENCODE_DICTIONARY_LOOKUP_METHOD)) |
| 2640 | { |
| 2641 | pResult->indirections = 2; |
| 2642 | pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo); |
| 2643 | |
| 2644 | if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative) |
| 2645 | { |
| 2646 | pResult->indirectFirstOffset = 1; |
| 2647 | } |
| 2648 | |
| 2649 | ULONG data; |
| 2650 | IfFailThrow(sigptr.GetData(&data)); |
| 2651 | pResult->offsets[1] = sizeof(TypeHandle) * data; |
| 2652 | |
| 2653 | return; |
| 2654 | } |
| 2655 | else if ((type == ELEMENT_TYPE_VAR) && (kind != ENCODE_DICTIONARY_LOOKUP_METHOD)) |
| 2656 | { |
| 2657 | pResult->indirections = 3; |
| 2658 | pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo(); |
| 2659 | pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1); |
| 2660 | |
| 2661 | ULONG data; |
| 2662 | IfFailThrow(sigptr.GetData(&data)); |
| 2663 | pResult->offsets[2] = sizeof(TypeHandle) * data; |
| 2664 | |
| 2665 | if (MethodTable::IsPerInstInfoRelative()) |
| 2666 | { |
| 2667 | pResult->indirectFirstOffset = 1; |
| 2668 | pResult->indirectSecondOffset = 1; |
| 2669 | } |
| 2670 | |
| 2671 | return; |
| 2672 | } |
| 2673 | } |
| 2674 | |
| 2675 | if (pContextMT != NULL && pContextMT->GetNumDicts() > 0xFFFF) |
| 2676 | ThrowHR(COR_E_BADIMAGEFORMAT); |
| 2677 | |
| 2678 | // Dictionary index and slot number are encoded in a 32-bit DWORD. The higher 16 bits |
| 2679 | // are used for the dictionary index, and the lower 16 bits for the slot number. |
| 2680 | *pDictionaryIndexAndSlot = (pContextMT == NULL ? 0 : pContextMT->GetNumDicts() - 1); |
| 2681 | *pDictionaryIndexAndSlot <<= 16; |
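| | // Illustrative decode of the packed value (hypothetical local names): |
| | //   DWORD dictionaryIndex = packed >> 16;    // upper 16 bits |
| | //   DWORD slotNumber      = packed & 0xFFFF; // lower 16 bits |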
| 2682 | |
| 2683 | WORD dictionarySlot; |
| 2684 | |
| 2685 | if (kind == ENCODE_DICTIONARY_LOOKUP_METHOD) |
| 2686 | { |
| 2687 | if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMD->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 1, FromReadyToRunImage, &dictionarySlot)) |
| 2688 | { |
| 2689 | pResult->testForNull = 1; |
| 2690 | |
| 2691 | // Indirect through dictionary table pointer in InstantiatedMethodDesc |
| 2692 | pResult->offsets[0] = offsetof(InstantiatedMethodDesc, m_pPerInstInfo); |
| 2693 | |
| 2694 | if (decltype(InstantiatedMethodDesc::m_pPerInstInfo)::isRelative) |
| 2695 | { |
| 2696 | pResult->indirectFirstOffset = 1; |
| 2697 | } |
| 2698 | |
| 2699 | *pDictionaryIndexAndSlot |= dictionarySlot; |
| 2700 | } |
| 2701 | } |
| 2702 | |
| 2703 | // It's a class dictionary lookup (CORINFO_LOOKUP_CLASSPARAM or CORINFO_LOOKUP_THISOBJ) |
| 2704 | else |
| 2705 | { |
| 2706 | if (DictionaryLayout::FindToken(pModule->GetLoaderAllocator(), numGenericArgs, pContextMT->GetClass()->GetDictionaryLayout(), pResult, (BYTE*)pBlobStart, 2, FromReadyToRunImage, &dictionarySlot)) |
| 2707 | { |
| 2708 | pResult->testForNull = 1; |
| 2709 | |
| 2710 | // Indirect through dictionary table pointer in vtable |
| 2711 | pResult->offsets[0] = MethodTable::GetOffsetOfPerInstInfo(); |
| 2712 | |
| 2713 | // Next indirect through the dictionary appropriate to this instantiated type |
| 2714 | pResult->offsets[1] = sizeof(TypeHandle*) * (pContextMT->GetNumDicts() - 1); |
| 2715 | |
| 2716 | if (MethodTable::IsPerInstInfoRelative()) |
| 2717 | { |
| 2718 | pResult->indirectFirstOffset = 1; |
| 2719 | pResult->indirectSecondOffset = 1; |
| 2720 | } |
| 2721 | |
| 2722 | *pDictionaryIndexAndSlot |= dictionarySlot; |
| 2723 | } |
| 2724 | } |
| 2725 | } |
| 2726 | |
| 2727 | PCODE DynamicHelperFixup(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, CORCOMPILE_FIXUP_BLOB_KIND * pKind, TypeHandle * pTH, MethodDesc ** ppMD, FieldDesc ** ppFD) |
| 2728 | { |
| 2729 | STANDARD_VM_CONTRACT; |
| 2730 | |
| 2731 | PEImageLayout *pNativeImage = pModule->GetNativeOrReadyToRunImage(); |
| 2732 | |
| 2733 | RVA rva = pNativeImage->GetDataRva((TADDR)pCell); |
| 2734 | |
| 2735 | PTR_CORCOMPILE_IMPORT_SECTION pImportSection = pModule->GetImportSectionFromIndex(sectionIndex); |
| 2736 | _ASSERTE(pImportSection == pModule->GetImportSectionForRVA(rva)); |
| 2737 | |
| 2738 | _ASSERTE(pImportSection->EntrySize == sizeof(TADDR)); |
| 2739 | |
| 2740 | COUNT_T index = (rva - pImportSection->Section.VirtualAddress) / sizeof(TADDR); |
| 2741 | |
| 2742 | PTR_DWORD pSignatures = dac_cast<PTR_DWORD>(pNativeImage->GetRvaData(pImportSection->Signatures)); |
| 2743 | |
| 2744 | PCCOR_SIGNATURE pBlob = (BYTE *)pNativeImage->GetRvaData(pSignatures[index]); |
| 2745 | PCCOR_SIGNATURE pBlobStart = pBlob; |
| 2746 | |
| 2747 | BYTE kind = *pBlob++; |
| 2748 | |
| 2749 | Module * pInfoModule = pModule; |
| 2750 | if (kind & ENCODE_MODULE_OVERRIDE) |
| 2751 | { |
| 2752 | DWORD moduleIndex = CorSigUncompressData(pBlob); |
| 2753 | pInfoModule = pModule->GetModuleFromIndex(moduleIndex); |
| 2754 | kind &= ~ENCODE_MODULE_OVERRIDE; |
| 2755 | } |
| 2756 | |
| 2757 | bool fReliable = false; |
| 2758 | TypeHandle th; |
| 2759 | MethodDesc * pMD = NULL; |
| 2760 | FieldDesc * pFD = NULL; |
| 2761 | CORINFO_RUNTIME_LOOKUP genericLookup; |
| 2762 | DWORD dictionaryIndexAndSlot = -1; |
| 2763 | |
| 2764 | switch (kind) |
| 2765 | { |
| 2766 | case ENCODE_NEW_HELPER: |
| 2767 | th = ZapSig::DecodeType(pModule, pInfoModule, pBlob); |
| 2768 | th.AsMethodTable()->EnsureInstanceActive(); |
| 2769 | break; |
| 2770 | case ENCODE_ISINSTANCEOF_HELPER: |
| 2771 | case ENCODE_CHKCAST_HELPER: |
| 2772 | fReliable = true; |
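| | // Intentional fall-through: the casting helpers also need the type handle decoded below. |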
| 2773 | case ENCODE_NEW_ARRAY_HELPER: |
| 2774 | th = ZapSig::DecodeType(pModule, pInfoModule, pBlob); |
| 2775 | break; |
| 2776 | |
| 2777 | case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER: |
| 2778 | case ENCODE_THREAD_STATIC_BASE_GC_HELPER: |
| 2779 | case ENCODE_STATIC_BASE_NONGC_HELPER: |
| 2780 | case ENCODE_STATIC_BASE_GC_HELPER: |
| 2781 | case ENCODE_CCTOR_TRIGGER: |
| 2782 | th = ZapSig::DecodeType(pModule, pInfoModule, pBlob); |
| 2783 | Statics: |
| 2784 | th.AsMethodTable()->EnsureInstanceActive(); |
| 2785 | th.AsMethodTable()->CheckRunClassInitThrowing(); |
| 2786 | fReliable = true; |
| 2787 | break; |
| 2788 | |
| 2789 | case ENCODE_FIELD_ADDRESS: |
| 2790 | pFD = ZapSig::DecodeField(pModule, pInfoModule, pBlob, &th); |
| 2791 | _ASSERTE(pFD->IsStatic()); |
| 2792 | goto Statics; |
| 2793 | |
| 2794 | case ENCODE_VIRTUAL_ENTRY: |
| 2795 | // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN: |
| 2796 | // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN: |
| 2797 | // case ENCODE_VIRTUAL_ENTRY_SLOT: |
| 2798 | fReliable = true; |
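| | // Intentional fall-through: virtual entries share the method-decoding path below. |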
| 2799 | case ENCODE_DELEGATE_CTOR: |
| 2800 | { |
| 2801 | pMD = ZapSig::DecodeMethod(pModule, pInfoModule, pBlob, &th); |
| 2802 | if (pMD->RequiresInstArg()) |
| 2803 | { |
| 2804 | pMD = MethodDesc::FindOrCreateAssociatedMethodDesc(pMD, |
| 2805 | th.AsMethodTable(), |
| 2806 | FALSE /* forceBoxedEntryPoint */, |
| 2807 | pMD->GetMethodInstantiation(), |
| 2808 | FALSE /* allowInstParam */); |
| 2809 | } |
| 2810 | pMD->EnsureActive(); |
| 2811 | } |
| 2812 | break; |
| 2813 | |
| 2814 | case ENCODE_DICTIONARY_LOOKUP_THISOBJ: |
| 2815 | case ENCODE_DICTIONARY_LOOKUP_TYPE: |
| 2816 | case ENCODE_DICTIONARY_LOOKUP_METHOD: |
| 2817 | ProcessDynamicDictionaryLookup(pTransitionBlock, pModule, pInfoModule, kind, pBlob, pBlobStart, &genericLookup, &dictionaryIndexAndSlot); |
| 2818 | break; |
| 2819 | |
| 2820 | default: |
| 2821 | _ASSERTE(!"Unexpected CORCOMPILE_FIXUP_BLOB_KIND"); |
| 2822 | ThrowHR(COR_E_BADIMAGEFORMAT); |
| 2823 | } |
| 2824 | |
| 2825 | PCODE pHelper = NULL; |
| 2826 | |
| 2827 | if (fReliable) |
| 2828 | { |
| 2829 | // For reliable helpers, exceptions in creating the optimized helper are non-fatal. Swallow them to make CER work well. |
| 2830 | EX_TRY |
| 2831 | { |
| 2832 | switch (kind) |
| 2833 | { |
| 2834 | case ENCODE_ISINSTANCEOF_HELPER: |
| 2835 | case ENCODE_CHKCAST_HELPER: |
| 2836 | { |
| 2837 | bool fClassMustBeRestored; |
| 2838 | CorInfoHelpFunc helpFunc = CEEInfo::getCastingHelperStatic(th, /* throwing */ (kind == ENCODE_CHKCAST_HELPER), &fClassMustBeRestored); |
| 2839 | pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc)); |
| 2840 | } |
| 2841 | break; |
| 2842 | case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER: |
| 2843 | case ENCODE_THREAD_STATIC_BASE_GC_HELPER: |
| 2844 | case ENCODE_STATIC_BASE_NONGC_HELPER: |
| 2845 | case ENCODE_STATIC_BASE_GC_HELPER: |
| 2846 | case ENCODE_CCTOR_TRIGGER: |
| 2847 | case ENCODE_FIELD_ADDRESS: |
| 2848 | { |
| 2849 | MethodTable * pMT = th.AsMethodTable(); |
| 2850 | |
| 2851 | bool fNeedsNonTrivialHelper = false; |
| 2852 | |
| 2853 | if (pMT->Collectible() && (kind != ENCODE_CCTOR_TRIGGER)) |
| 2854 | { |
| 2855 | // Collectible statics are not pinned - the fast getters expect statics to be pinned |
| 2856 | fNeedsNonTrivialHelper = true; |
| 2857 | } |
| 2858 | else |
| 2859 | { |
| 2860 | if (pFD != NULL) |
| 2861 | { |
| 2862 | fNeedsNonTrivialHelper = !!pFD->IsSpecialStatic(); |
| 2863 | } |
| 2864 | else |
| 2865 | { |
| 2866 | fNeedsNonTrivialHelper = (kind == ENCODE_THREAD_STATIC_BASE_NONGC_HELPER) || (kind == ENCODE_THREAD_STATIC_BASE_GC_HELPER); |
| 2867 | } |
| 2868 | } |
| 2869 | |
| 2870 | if (fNeedsNonTrivialHelper) |
| 2871 | { |
| 2872 | if (pFD != NULL) |
| 2873 | { |
| 2874 | if (pFD->IsRVA()) |
| 2875 | { |
| 2876 | _ASSERTE(!"Fast getter for rare kinds of static fields"); |
| 2877 | } |
| 2878 | else |
| 2879 | { |
| 2880 | pHelper = getHelperForSharedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD); |
| 2881 | } |
| 2882 | } |
| 2883 | else |
| 2884 | { |
| 2885 | pHelper = getHelperForStaticBase(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT); |
| 2886 | } |
| 2887 | } |
| 2888 | else |
| 2889 | { |
| 2890 | // Delay the creation of the helper until the type is initialized |
| 2891 | if (pMT->IsClassInited()) |
| 2892 | pHelper = getHelperForInitializedStatic(pModule, (CORCOMPILE_FIXUP_BLOB_KIND)kind, pMT, pFD); |
| 2893 | } |
| 2894 | } |
| 2895 | break; |
| 2896 | |
| 2897 | case ENCODE_VIRTUAL_ENTRY: |
| 2898 | // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN: |
| 2899 | // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN: |
| 2900 | // case ENCODE_VIRTUAL_ENTRY_SLOT: |
| 2901 | { |
| 2902 | if (!pMD->IsVtableMethod()) |
| 2903 | { |
| 2904 | pHelper = DynamicHelpers::CreateReturnConst(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode()); |
| 2905 | } |
| 2906 | else |
| 2907 | { |
| 2908 | AllocMemTracker amTracker; |
| 2909 | |
| 2910 | VirtualFunctionPointerArgs * pArgs = (VirtualFunctionPointerArgs *)amTracker.Track( |
| 2911 | pModule->GetLoaderAllocator()->GetHighFrequencyHeap()-> |
| 2912 | AllocMem(S_SIZE_T(sizeof(VirtualFunctionPointerArgs)))); |
| 2913 | |
| 2914 | pArgs->classHnd = (CORINFO_CLASS_HANDLE)th.AsPtr(); |
| 2915 | pArgs->methodHnd = (CORINFO_METHOD_HANDLE)pMD; |
| 2916 | |
| 2917 | pHelper = DynamicHelpers::CreateHelperWithArg(pModule->GetLoaderAllocator(), (TADDR)pArgs, |
| 2918 | GetEEFuncEntryPoint(JIT_VirtualFunctionPointer_Dynamic)); |
| 2919 | |
| 2920 | amTracker.SuppressRelease(); |
| 2921 | } |
| 2922 | } |
| 2923 | break; |
| 2924 | |
| 2925 | default: |
| 2926 | UNREACHABLE(); |
| 2927 | } |
| 2928 | |
| 2929 | if (pHelper != NULL) |
| 2930 | { |
| 2931 | *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper; |
| 2932 | } |
| 2933 | |
| 2934 | #ifdef _DEBUG |
| 2935 | // Always execute the reliable fallback in debug builds |
| 2936 | pHelper = NULL; |
| 2937 | #endif |
| 2938 | } |
| 2939 | EX_CATCH |
| 2940 | { |
| 2941 | } |
| 2942 | EX_END_CATCH (SwallowAllExceptions); |
| 2943 | } |
| 2944 | else |
| 2945 | { |
| 2946 | switch (kind) |
| 2947 | { |
| 2948 | case ENCODE_NEW_HELPER: |
| 2949 | { |
| 2950 | CorInfoHelpFunc helpFunc = CEEInfo::getNewHelperStatic(th.AsMethodTable()); |
| 2951 | pHelper = DynamicHelpers::CreateHelper(pModule->GetLoaderAllocator(), th.AsTAddr(), CEEJitInfo::getHelperFtnStatic(helpFunc)); |
| 2952 | } |
| 2953 | break; |
| 2954 | case ENCODE_NEW_ARRAY_HELPER: |
| 2955 | { |
| 2956 | CorInfoHelpFunc helpFunc = CEEInfo::getNewArrHelperStatic(th); |
| 2957 | ArrayTypeDesc *pArrayTypeDesc = th.AsArray(); |
| 2958 | MethodTable *pArrayMT = pArrayTypeDesc->GetTemplateMethodTable(); |
| 2959 | pHelper = DynamicHelpers::CreateHelperArgMove(pModule->GetLoaderAllocator(), dac_cast<TADDR>(pArrayMT), CEEJitInfo::getHelperFtnStatic(helpFunc)); |
| 2960 | } |
| 2961 | break; |
| 2962 | |
| 2963 | case ENCODE_DELEGATE_CTOR: |
| 2964 | { |
| 2965 | MethodTable * pDelegateType = NULL; |
| 2966 | |
| 2967 | { |
| 2968 | GCX_COOP(); |
| 2969 | |
| 2970 | TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock); |
| 2971 | |
| 2972 | if (pArgument != NULL) |
| 2973 | { |
| 2974 | pDelegateType = (*(Object **)pArgument)->GetMethodTable(); |
| 2975 | _ASSERTE(pDelegateType->IsDelegate()); |
| 2976 | } |
| 2977 | } |
| 2978 | |
| 2979 | DelegateCtorArgs ctorData; |
| 2980 | ctorData.pMethod = NULL; |
| 2981 | ctorData.pArg3 = NULL; |
| 2982 | ctorData.pArg4 = NULL; |
| 2983 | ctorData.pArg5 = NULL; |
| 2984 | |
| 2985 | MethodDesc * pDelegateCtor = NULL; |
| 2986 | |
| 2987 | if (pDelegateType != NULL) |
| 2988 | { |
| 2989 | pDelegateCtor = COMDelegate::GetDelegateCtor(TypeHandle(pDelegateType), pMD, &ctorData); |
| 2990 | |
| 2991 | if (ctorData.pArg4 != NULL || ctorData.pArg5 != NULL) |
| 2992 | { |
| 2993 | // This should never happen - we should never get collectible or secure delegates here |
| 2994 | _ASSERTE(false); |
| 2995 | pDelegateCtor = NULL; |
| 2996 | } |
| 2997 | } |
| 2998 | |
| 2999 | TADDR target = NULL; |
| 3000 | |
| 3001 | if (pDelegateCtor != NULL) |
| 3002 | { |
| 3003 | target = pDelegateCtor->GetMultiCallableAddrOfCode(); |
| 3004 | } |
| 3005 | else |
| 3006 | { |
| 3007 | target = ECall::GetFCallImpl(MscorlibBinder::GetMethod(METHOD__DELEGATE__CONSTRUCT_DELEGATE)); |
| 3008 | ctorData.pArg3 = NULL; |
| 3009 | } |
| 3010 | |
| 3011 | if (ctorData.pArg3 != NULL) |
| 3012 | { |
| 3013 | pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), (TADDR)ctorData.pArg3, target); |
| 3014 | } |
| 3015 | else |
| 3016 | { |
| 3017 | pHelper = DynamicHelpers::CreateHelperWithTwoArgs(pModule->GetLoaderAllocator(), pMD->GetMultiCallableAddrOfCode(), target); |
| 3018 | } |
| 3019 | } |
| 3020 | break; |
| 3021 | |
| 3022 | case ENCODE_DICTIONARY_LOOKUP_THISOBJ: |
| 3023 | case ENCODE_DICTIONARY_LOOKUP_TYPE: |
| 3024 | case ENCODE_DICTIONARY_LOOKUP_METHOD: |
| 3025 | { |
| 3026 | pHelper = DynamicHelpers::CreateDictionaryLookupHelper(pModule->GetLoaderAllocator(), &genericLookup, dictionaryIndexAndSlot, pModule); |
| 3027 | } |
| 3028 | break; |
| 3029 | |
| 3030 | default: |
| 3031 | UNREACHABLE(); |
| 3032 | } |
| 3033 | |
| 3034 | if (pHelper != NULL) |
| 3035 | { |
| 3036 | *EnsureWritableExecutablePages((TADDR *)pCell) = pHelper; |
| 3037 | } |
| 3038 | } |
| 3039 | |
| 3040 | *pKind = (CORCOMPILE_FIXUP_BLOB_KIND)kind; |
| 3041 | *pTH = th; |
| 3042 | *ppMD = pMD; |
| 3043 | *ppFD = pFD; |
| 3044 | |
| 3045 | return pHelper; |
| 3046 | } |
| 3047 | |
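// Entry point called from the delay-load helper thunks the first time a lazy
// dynamic helper cell is executed. Resolves the fixup, normally patches the cell
// with a specialized helper stub, and returns that stub for the thunk to tail-call.
// If no stub was created, the helper's semantics are executed inline here and the
// result is handed back through the transition block (see the end of this function).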
| 3048 | extern "C" SIZE_T STDCALL DynamicHelperWorker(TransitionBlock * pTransitionBlock, TADDR * pCell, DWORD sectionIndex, Module * pModule, INT frameFlags) |
| 3049 | { |
| 3050 | PCODE pHelper = NULL; |
| 3051 | SIZE_T result = NULL; |
| 3052 | |
| 3053 | STATIC_CONTRACT_THROWS; |
| 3054 | STATIC_CONTRACT_GC_TRIGGERS; |
| 3055 | STATIC_CONTRACT_MODE_COOPERATIVE; |
| 3056 | |
| 3057 | MAKE_CURRENT_THREAD_AVAILABLE(); |
| 3058 | |
| 3059 | #ifdef _DEBUG |
| 3060 | Thread::ObjectRefFlush(CURRENT_THREAD); |
| 3061 | #endif |
| 3062 | |
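// Push an explicit frame so this transition is visible to the stack walker and GC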
| 3063 | FrameWithCookie<DynamicHelperFrame> frame(pTransitionBlock, frameFlags); |
| 3064 | DynamicHelperFrame * pFrame = &frame; |
| 3065 | |
| 3066 | pFrame->Push(CURRENT_THREAD); |
| 3067 | |
| 3068 | INSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 3069 | INSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 3070 | |
| 3071 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
// Decode the indirection cell from the callsite if it was not passed in
| 3073 | if (pCell == NULL) |
| 3074 | { |
// Assume that the callsite is call [xxxxxxxx]
| 3076 | PCODE retAddr = pFrame->GetReturnAddress(); |
| 3077 | #ifdef _TARGET_X86_ |
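// call [imm32]: the 4 bytes preceding the return address hold the absolute
// address of the cell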
| 3078 | pCell = *(((TADDR **)retAddr) - 1); |
| 3079 | #else |
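// call [rip+disp32]: the 4 bytes preceding the return address hold a
// displacement relative to the return address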
| 3080 | pCell = (TADDR *)(*(((INT32 *)retAddr) - 1) + retAddr); |
| 3081 | #endif |
| 3082 | } |
| 3083 | #endif |
| 3084 | _ASSERTE(pCell != NULL); |
| 3085 | |
| 3086 | TypeHandle th; |
| 3087 | MethodDesc * pMD = NULL; |
| 3088 | FieldDesc * pFD = NULL; |
| 3089 | CORCOMPILE_FIXUP_BLOB_KIND kind = ENCODE_NONE; |
| 3090 | |
| 3091 | { |
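// Resolve the fixup in preemptive mode; this can take locks and trigger type loading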
| 3092 | GCX_PREEMP_THREAD_EXISTS(CURRENT_THREAD); |
| 3093 | |
| 3094 | pHelper = DynamicHelperFixup(pTransitionBlock, pCell, sectionIndex, pModule, &kind, &th, &pMD, &pFD); |
| 3095 | } |
| 3096 | |
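// No helper stub was created (debug builds force this path); execute the
// fixup's semantics inline instead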
| 3097 | if (pHelper == NULL) |
| 3098 | { |
| 3099 | TADDR pArgument = GetFirstArgumentRegisterValuePtr(pTransitionBlock); |
| 3100 | |
| 3101 | switch (kind) |
| 3102 | { |
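// Casts: null passes through; otherwise check the object against the target
// type, throwing for chkcast on failure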
| 3103 | case ENCODE_ISINSTANCEOF_HELPER: |
| 3104 | case ENCODE_CHKCAST_HELPER: |
| 3105 | { |
| 3106 | BOOL throwInvalidCast = (kind == ENCODE_CHKCAST_HELPER); |
| 3107 | if (*(Object **)pArgument == NULL || ObjIsInstanceOf(*(Object **)pArgument, th, throwInvalidCast)) |
| 3108 | { |
| 3109 | result = (SIZE_T)(*(Object **)pArgument); |
| 3110 | } |
| 3111 | else |
| 3112 | { |
_ASSERTE(!throwInvalidCast);
| 3114 | result = NULL; |
| 3115 | } |
| 3116 | } |
| 3117 | break; |
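// Static base helpers: return the (thread-)static base pointer, ensuring
// thread statics are allocated first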
| 3118 | case ENCODE_STATIC_BASE_NONGC_HELPER: |
| 3119 | result = (SIZE_T)th.AsMethodTable()->GetNonGCStaticsBasePointer(); |
| 3120 | break; |
| 3121 | case ENCODE_STATIC_BASE_GC_HELPER: |
| 3122 | result = (SIZE_T)th.AsMethodTable()->GetGCStaticsBasePointer(); |
| 3123 | break; |
| 3124 | case ENCODE_THREAD_STATIC_BASE_NONGC_HELPER: |
| 3125 | ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable()); |
| 3126 | result = (SIZE_T)th.AsMethodTable()->GetNonGCThreadStaticsBasePointer(); |
| 3127 | break; |
| 3128 | case ENCODE_THREAD_STATIC_BASE_GC_HELPER: |
| 3129 | ThreadStatics::GetTLM(th.AsMethodTable())->EnsureClassAllocated(th.AsMethodTable()); |
| 3130 | result = (SIZE_T)th.AsMethodTable()->GetGCThreadStaticsBasePointer(); |
| 3131 | break; |
| 3132 | case ENCODE_CCTOR_TRIGGER: |
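// Nothing to return here; ensuring the cctor has run is handled as part of
// resolving the fixup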
| 3133 | break; |
| 3134 | case ENCODE_FIELD_ADDRESS: |
| 3135 | result = (SIZE_T)pFD->GetCurrentStaticAddress(); |
| 3136 | break; |
| 3137 | case ENCODE_VIRTUAL_ENTRY: |
| 3138 | // case ENCODE_VIRTUAL_ENTRY_DEF_TOKEN: |
| 3139 | // case ENCODE_VIRTUAL_ENTRY_REF_TOKEN: |
| 3140 | // case ENCODE_VIRTUAL_ENTRY_SLOT: |
| 3141 | { |
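// Resolve the virtual entry point against the runtime type of the 'this'
// object passed in the first argument register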
| 3142 | OBJECTREF objRef = ObjectToOBJECTREF(*(Object **)pArgument); |
| 3143 | |
| 3144 | GCPROTECT_BEGIN(objRef); |
| 3145 | |
| 3146 | if (objRef == NULL) |
| 3147 | COMPlusThrow(kNullReferenceException); |
| 3148 | |
| 3149 | // Duplicated logic from JIT_VirtualFunctionPointer_Framed |
| 3150 | if (!pMD->IsVtableMethod()) |
| 3151 | { |
| 3152 | result = pMD->GetMultiCallableAddrOfCode(); |
| 3153 | } |
| 3154 | else |
| 3155 | { |
| 3156 | result = pMD->GetMultiCallableAddrOfVirtualizedCode(&objRef, th); |
| 3157 | } |
| 3158 | |
| 3159 | GCPROTECT_END(); |
| 3160 | } |
| 3161 | break; |
| 3162 | default: |
| 3163 | UNREACHABLE(); |
| 3164 | } |
| 3165 | } |
| 3166 | |
| 3167 | UNINSTALL_UNWIND_AND_CONTINUE_HANDLER; |
| 3168 | UNINSTALL_MANAGED_EXCEPTION_DISPATCHER; |
| 3169 | |
| 3170 | pFrame->Pop(CURRENT_THREAD); |
| 3171 | |
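// If no helper stub was created, hand the computed value back to the thunk by
// storing it over the saved first-argument register; the thunk returns it to the caller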
| 3172 | if (pHelper == NULL) |
| 3173 | *(SIZE_T *)((TADDR)pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters()) = result; |
| 3174 | return pHelper; |
| 3175 | } |
| 3176 | |
| 3177 | #endif // FEATURE_READYTORUN |
| 3178 | |
| 3179 | #endif // !DACCESS_COMPILE |
| 3180 | |