// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#include "common.h"
#include "gcinfodumper.h"
#include "gcinfodecoder.h"

// Stolen from gc.h.
#define GC_CALL_INTERIOR 0x1
#define GC_CALL_PINNED 0x2


#ifdef _WIN64
// All stack offsets are INT32's, so this guarantees a disjoint range of
// addresses for each register.
#define ADDRESS_SPACING UI64(0x100000000)
#elif defined(_TARGET_ARM_)
#define ADDRESS_SPACING 0x100000
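// On 32-bit ARM a narrower spacing is enough to keep each register's range of addresses disjoint.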
#else
#error pick suitable ADDRESS_SPACING for platform
#endif

GcInfoDumper::GcInfoDumper (GCInfoToken gcInfoToken)
{
    m_gcTable = gcInfoToken;
    m_pRecords = NULL;
    m_gcInfoSize = 0;
}


GcInfoDumper::~GcInfoDumper ()
{
    FreePointerRecords(m_pRecords);
}

size_t GcInfoDumper::GetGCInfoSize()
{
    return m_gcInfoSize;
}

//static
void GcInfoDumper::LivePointerCallback (
        LPVOID          hCallback,      // callback data
        OBJECTREF*      pObject,        // address of object-reference we are reporting
        uint32_t        flags           // is this a pinned and/or interior pointer
        DAC_ARG(DacSlotLocation loc))   // the location of the slot
{
    GcInfoDumper *pDumper = (GcInfoDumper*)hCallback;
    LivePointerRecord **ppRecords = &pDumper->m_pRecords;
    LivePointerRecord *pRecord = new LivePointerRecord();
    if (!pRecord)
    {
        pDumper->m_Error = OUT_OF_MEMORY;
        return;
    }

    pRecord->ppObject = pObject;
    pRecord->flags = flags;
    pRecord->marked = -1;

    pRecord->pNext = *ppRecords;
    *ppRecords = pRecord;
}


//static
void GcInfoDumper::FreePointerRecords (LivePointerRecord *pRecords)
{
    while (pRecords)
    {
        LivePointerRecord *trash = pRecords;
        pRecords = pRecords->pNext;
        delete trash;
    }
}

// This function tries to find the address of the managed object among the registers of the current
// function's context and, failing that, checks whether it lies in the current function's stack frame.
// If it finds a match, it reports the slot through the appropriate callback.
//
// For AMD64, it additionally probes the caller's stack frame. That behavior exists largely for the
// legacy x64 JIT and is unlikely to be relied on anywhere else.
BOOL GcInfoDumper::ReportPointerRecord (
        UINT32 CodeOffset,
        BOOL fLive,
        REGDISPLAY *pRD,
        LivePointerRecord *pRecord)
{
    //
    // Convert the flags passed to the GC into flags used by GcInfoEncoder.
    //

    int EncodedFlags = 0;

    if (pRecord->flags & GC_CALL_INTERIOR)
        EncodedFlags |= GC_SLOT_INTERIOR;

    if (pRecord->flags & GC_CALL_PINNED)
        EncodedFlags |= GC_SLOT_PINNED;

    //
    // Compare the reported pointer against the REGDISPLAY pointers to
    // figure out the register or register-relative location.
    //

    struct RegisterInfo
    {
        SIZE_T cbContextOffset;
    };

    static RegisterInfo rgRegisters[] = {
#define REG(reg, field) { FIELD_OFFSET(T_CONTEXT, field) }

#ifdef _TARGET_AMD64_
        REG(rax, Rax),
        REG(rcx, Rcx),
        REG(rdx, Rdx),
        REG(rbx, Rbx),
        REG(rsp, Rsp),
        REG(rbp, Rbp),
        REG(rsi, Rsi),
        REG(rdi, Rdi),
        REG(r8, R8),
        REG(r9, R9),
        REG(r10, R10),
        REG(r11, R11),
        REG(r12, R12),
        REG(r13, R13),
        REG(r14, R14),
        REG(r15, R15),
#elif defined(_TARGET_ARM_)
#undef REG
#define REG(reg, field) { FIELD_OFFSET(ArmVolatileContextPointer, field) }
        REG(r0, R0),
        REG(r1, R1),
        REG(r2, R2),
        REG(r3, R3),
#undef REG
#define REG(reg, field) { FIELD_OFFSET(T_KNONVOLATILE_CONTEXT_POINTERS, field) }
        REG(r4, R4),
        REG(r5, R5),
        REG(r6, R6),
        REG(r7, R7),
        REG(r8, R8),
        REG(r9, R9),
        REG(r10, R10),
        REG(r11, R11),
        { FIELD_OFFSET(ArmVolatileContextPointer, R12) },
        { FIELD_OFFSET(T_CONTEXT, Sp) },
        { FIELD_OFFSET(T_KNONVOLATILE_CONTEXT_POINTERS, Lr) },
        { FIELD_OFFSET(T_CONTEXT, Sp) },
        { FIELD_OFFSET(T_KNONVOLATILE_CONTEXT_POINTERS, R7) },
#elif defined(_TARGET_ARM64_)
#undef REG
#define REG(reg, field) { FIELD_OFFSET(Arm64VolatileContextPointer, field) }
        REG(x0, X0),
        REG(x1, X1),
        REG(x2, X2),
        REG(x3, X3),
        REG(x4, X4),
        REG(x5, X5),
        REG(x6, X6),
        REG(x7, X7),
        REG(x8, X8),
        REG(x9, X9),
        REG(x10, X10),
        REG(x11, X11),
        REG(x12, X12),
        REG(x13, X13),
        REG(x14, X14),
        REG(x15, X15),
        REG(x16, X16),
        REG(x17, X17),
#undef REG
#define REG(reg, field) { FIELD_OFFSET(T_KNONVOLATILE_CONTEXT_POINTERS, field) }
        REG(x19, X19),
        REG(x20, X20),
        REG(x21, X21),
        REG(x22, X22),
        REG(x23, X23),
        REG(x24, X24),
        REG(x25, X25),
        REG(x26, X26),
        REG(x27, X27),
        REG(x28, X28),
        REG(Fp, Fp),
        REG(Lr, Lr),
        { FIELD_OFFSET(T_CONTEXT, Sp) },
#undef REG
#else
        PORTABILITY_ASSERT("GcInfoDumper::ReportPointerRecord is not implemented on this platform.")
#endif

    };

    const UINT nCONTEXTRegisters = sizeof(rgRegisters)/sizeof(rgRegisters[0]);

    UINT iFirstRegister;
    UINT iSPRegister;
    UINT nRegisters;

    iFirstRegister = 0;
    nRegisters = nCONTEXTRegisters;
#ifdef _TARGET_AMD64_
    iSPRegister = (FIELD_OFFSET(CONTEXT, Rsp) - FIELD_OFFSET(CONTEXT, Rax)) / sizeof(ULONGLONG);
#elif defined(_TARGET_ARM64_)
    iSPRegister = (FIELD_OFFSET(T_CONTEXT, Sp) - FIELD_OFFSET(T_CONTEXT, X0)) / sizeof(ULONGLONG);
#elif defined(_TARGET_ARM_)
    iSPRegister = (FIELD_OFFSET(T_CONTEXT, Sp) - FIELD_OFFSET(T_CONTEXT, R0)) / sizeof(ULONG);
    UINT iBFRegister = m_StackBaseRegister;
#endif

#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
    BYTE* pContext = (BYTE*)&(pRD->volatileCurrContextPointers);
#else
    BYTE* pContext = (BYTE*)pRD->pCurrentContext;
#endif

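    // Two passes over the register set: on AMD64 the first pass walks the current frame's context and the
    // second the caller's; on ARM and ARM64 the first pass walks the volatile-register pointers and the
    // second the non-volatile context pointers (see the reassignment of pContext at the bottom of the loop).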
    for (int ctx = 0; ctx < 2; ctx++)
    {
        SIZE_T *pReg = NULL;

        for (UINT iReg = 0; iReg < nRegisters; iReg++)
        {
            UINT iEncodedReg = iFirstRegister + iReg;
#ifdef _TARGET_ARM_
            if (ctx == 1)
            {
                if ((iReg < 4 || iReg == 12)) // skip volatile registers for second context
                {
                    continue;
                }
                // Force StackRegister and BaseRegister at the end (r15, r16)
                if (iReg == iSPRegister || iReg == m_StackBaseRegister)
                {
                    continue;
                }
                if (iReg == 15)
                {
                    if (iBFRegister != NO_STACK_BASE_REGISTER)
                    {
                        iEncodedReg = iBFRegister;
                    }
                    else
                    {
                        continue;
                    }
                }
                if (iReg == 16)
                {
                    iEncodedReg = iSPRegister;
                }
            }
            if (ctx == 0 && iReg == 4) // ArmVolatileContextPointer 5th register is R12
            {
                iEncodedReg = 12;
            }
            else if (ctx == 0 && iReg > 4)
            {
                break;
            }
#elif defined (_TARGET_ARM64_)
            iEncodedReg = iEncodedReg + ctx; // We have to compensate for not tracking x18
            if (ctx == 1)
            {
                if (iReg < 18) // skip volatile registers for second context
                {
                    continue;
                }

                if (iReg == 30)
                {
                    iEncodedReg = iSPRegister;
                }
            }

            if (ctx == 0 && iReg > 17)
            {
                break;
            }
#endif
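            // Resolve where this register's value lives (directly in the context, or via a context-pointer
            // entry) so we can compare both its address and its value against the reported slot.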
            {
                _ASSERTE(iReg < nCONTEXTRegisters);
#ifdef _TARGET_ARM_
                pReg = *(SIZE_T**)(pContext + rgRegisters[iReg].cbContextOffset);
                if (iEncodedReg == 12)
                {
                    pReg = *(SIZE_T**)((BYTE*)&pRD->volatileCurrContextPointers + rgRegisters[iEncodedReg].cbContextOffset);
                }
                if (iEncodedReg == iSPRegister)
                {
                    pReg = (SIZE_T*)((BYTE*)pRD->pCurrentContext + rgRegisters[iEncodedReg].cbContextOffset);
                }
                if (iEncodedReg == iBFRegister)
                {
                    pReg = *(SIZE_T**)((BYTE*)pRD->pCurrentContextPointers + rgRegisters[iEncodedReg].cbContextOffset);
                }

#elif defined(_TARGET_ARM64_)
                pReg = *(SIZE_T**)(pContext + rgRegisters[iReg].cbContextOffset);
                if (iEncodedReg == iSPRegister)
                {
                    pReg = (SIZE_T*)((BYTE*)pRD->pCurrentContext + rgRegisters[iReg].cbContextOffset);
                }
#else
                pReg = (SIZE_T*)(pContext + rgRegisters[iReg].cbContextOffset);
#endif

            }

            SIZE_T ptr = (SIZE_T)pRecord->ppObject;


            //
            // Is it reporting the register?
            //
            if (ptr == (SIZE_T)pReg)
            {
                // Make sure the register is in the current frame.
#if defined(_TARGET_AMD64_)
                if (0 != ctx)
                {
                    m_Error = REPORTED_REGISTER_IN_CALLERS_FRAME;
                    return TRUE;
                }
#endif
                // Make sure the register isn't sp or the frame pointer.
                if (   iSPRegister == iEncodedReg
                    || m_StackBaseRegister == iEncodedReg)
                {
                    m_Error = REPORTED_FRAME_POINTER;
                    return TRUE;
                }

                if (m_pfnRegisterStateChange(
                        CodeOffset,
                        iEncodedReg,
                        (GcSlotFlags)EncodedFlags,
                        fLive ? GC_SLOT_LIVE : GC_SLOT_DEAD,
                        m_pvCallbackData))
                {
                    return TRUE;
                }

                return FALSE;
            }

            //
            // Is it reporting an address relative to the register's value?
            //

            SIZE_T regVal = *pReg;

            if (   ptr >= regVal - ADDRESS_SPACING/2
                && ptr <  regVal + ADDRESS_SPACING/2)
            {
                //
                // The register must be sp, caller's sp, or the frame register.
                // The GcInfoEncoder interface doesn't have a way to express
                // anything else.
                //

                if (!(   iSPRegister == iEncodedReg
                      || m_StackBaseRegister == iEncodedReg))
                {
                    continue;
                }

                GcStackSlotBase base;
                if (iSPRegister == iEncodedReg)
                {
#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
                    base = GC_SP_REL;
#else
                    if (0 == ctx)
                        base = GC_SP_REL;
                    else
                        base = GC_CALLER_SP_REL;
#endif // defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
                }
                else
                {
                    base = GC_FRAMEREG_REL;
                }

                if (m_pfnStackSlotStateChange(
                        CodeOffset,
                        (GcSlotFlags)EncodedFlags,
                        base,
                        ptr - regVal,
                        fLive ? GC_SLOT_LIVE : GC_SLOT_DEAD,
                        m_pvCallbackData))
                {
                    return TRUE;
                }

                return FALSE;
            }
        }

#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
        pContext = (BYTE*)pRD->pCurrentContextPointers;
#else
        pContext = (BYTE*)pRD->pCallerContext;
#endif

    }

    m_Error = REPORTED_INVALID_POINTER;
    return TRUE;
}


BOOL GcInfoDumper::ReportPointerDifferences (
        UINT32 offset,
        REGDISPLAY *pRD,
        LivePointerRecord *pPrevState)
{
    LivePointerRecord *pNewRecord;
    LivePointerRecord *pOldRecord;

    //
    // Match up old and new records
    //

    for (pNewRecord = m_pRecords; pNewRecord; pNewRecord = pNewRecord->pNext)
    {
        for (LivePointerRecord *pOldRecord = pPrevState; pOldRecord; pOldRecord = pOldRecord->pNext)
        {
            if (   pOldRecord->flags == pNewRecord->flags
                && pOldRecord->ppObject == pNewRecord->ppObject)
            {
                pOldRecord->marked = offset;
                pNewRecord->marked = offset;
            }
        }
    }

    //
    // Any old record that was not matched above is now dead; report it as such.
    //

    for (pOldRecord = pPrevState; pOldRecord; pOldRecord = pOldRecord->pNext)
    {
        if (pOldRecord->marked != offset)
        {
            if (   ReportPointerRecord(offset, FALSE, pRD, pOldRecord)
                || m_Error)
            {
                return TRUE;
            }
        }
    }

    //
    // Any new record that was not matched above is newly live; report it as such.
    //

    for (pNewRecord = m_pRecords; pNewRecord; pNewRecord = pNewRecord->pNext)
    {
        if (pNewRecord->marked != offset)
        {
            if (   ReportPointerRecord(offset, TRUE, pRD, pNewRecord)
                || m_Error)
            {
                return TRUE;
            }
        }
    }

    return FALSE;
}


GcInfoDumper::EnumerateStateChangesResults GcInfoDumper::EnumerateStateChanges (
        InterruptibleStateChangeProc *pfnInterruptibleStateChange,
        RegisterStateChangeProc *pfnRegisterStateChange,
        StackSlotStateChangeProc *pfnStackSlotStateChange,
        OnSafePointProc *pfnSafePointFunc,
        PVOID pvData)
{
    m_Error = SUCCESS;

    //
    // Save callback functions for use by helper functions
    //

    m_pfnRegisterStateChange = pfnRegisterStateChange;
    m_pfnStackSlotStateChange = pfnStackSlotStateChange;
    m_pvCallbackData = pvData;

    //
    // Decode header information
    //
    GcInfoDecoder hdrdecoder(m_gcTable,
                             (GcInfoDecoderFlags)(  DECODE_SECURITY_OBJECT
                                                  | DECODE_CODE_LENGTH
                                                  | DECODE_GC_LIFETIMES
                                                  | DECODE_VARARG),
                             0);

    UINT32 cbEncodedMethodSize = hdrdecoder.GetCodeLength();
    m_StackBaseRegister = hdrdecoder.GetStackBaseRegister();

    //
    // Set up a bogus REGDISPLAY to pass to EnumerateLiveSlots. This will
    // allow us to later identify registers or stack offsets passed to the
    // callback.
    //

    REGDISPLAY regdisp;

    ZeroMemory(&regdisp, sizeof(regdisp));

    regdisp.pContext = &regdisp.ctxOne;
    regdisp.IsCallerContextValid = TRUE;
    regdisp.pCurrentContext = &regdisp.ctxOne;
    regdisp.pCallerContext = &regdisp.ctxTwo;

#define NEXT_ADDRESS() (UniqueAddress += ADDRESS_SPACING)

    UINT iReg;

#ifdef _WIN64
    ULONG64 UniqueAddress = ADDRESS_SPACING*2;
    ULONG64 *pReg;
#else
    DWORD UniqueAddress = ADDRESS_SPACING*2;
    DWORD *pReg;
#endif

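// FILL_REGS stamps 'count' consecutive context registers starting at 'start' with unique fake addresses
// spaced ADDRESS_SPACING apart, so a slot reported later can be mapped back to exactly one register or
// register-relative range.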
#define FILL_REGS(start, count)                                                 \
    do {                                                                        \
        for (iReg = 0, pReg = &regdisp.start; iReg < count; iReg++, pReg++)     \
        {                                                                       \
            *pReg = NEXT_ADDRESS();                                             \
        }                                                                       \
    } while (0)

#ifdef _TARGET_AMD64_
    FILL_REGS(pCurrentContext->Rax, 16);
    FILL_REGS(pCallerContext->Rax, 16);

    regdisp.pCurrentContextPointers = &regdisp.ctxPtrsOne;
    regdisp.pCallerContextPointers = &regdisp.ctxPtrsTwo;

    ULONGLONG **ppCurrentRax = &regdisp.pCurrentContextPointers->Rax;
    ULONGLONG **ppCallerRax  = &regdisp.pCallerContextPointers->Rax;

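    // Wire each per-register context-pointer slot back to the corresponding fake register value in the
    // current and caller contexts.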
    for (iReg = 0; iReg < 16; iReg++)
    {
        *(ppCurrentRax + iReg) = &regdisp.pCurrentContext->Rax + iReg;
        *(ppCallerRax + iReg)  = &regdisp.pCallerContext->Rax + iReg;
    }
#elif defined(_TARGET_ARM_)
    FILL_REGS(pCurrentContext->R0, 16);
    FILL_REGS(pCallerContext->R0, 16);

    regdisp.pCurrentContextPointers = &regdisp.ctxPtrsOne;
    regdisp.pCallerContextPointers = &regdisp.ctxPtrsTwo;

    ULONG **ppCurrentReg = &regdisp.pCurrentContextPointers->R4;
    ULONG **ppCallerReg  = &regdisp.pCallerContextPointers->R4;

    for (iReg = 0; iReg < 8; iReg++)
    {
        *(ppCurrentReg + iReg) = &regdisp.pCurrentContext->R4 + iReg;
        *(ppCallerReg + iReg)  = &regdisp.pCallerContext->R4 + iReg;
    }
    /// Set Lr
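    // (&R4 + 10 lands on Lr: the ARM context lays out R4..R12, then Sp, then Lr.)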
    *(ppCurrentReg + 8) = &regdisp.pCurrentContext->R4 + 10;
    *(ppCallerReg + 8)  = &regdisp.pCallerContext->R4 + 10;

    ULONG **ppVolatileReg = &regdisp.volatileCurrContextPointers.R0;
    for (iReg = 0; iReg < 4; iReg++)
    {
        *(ppVolatileReg + iReg) = &regdisp.pCurrentContext->R0 + iReg;
    }
    /// Set R12
    *(ppVolatileReg + 4) = &regdisp.pCurrentContext->R0 + 12;

#elif defined(_TARGET_ARM64_)
    FILL_REGS(pCurrentContext->X0, 33);
    FILL_REGS(pCallerContext->X0, 33);

    regdisp.pCurrentContextPointers = &regdisp.ctxPtrsOne;
    regdisp.pCallerContextPointers = &regdisp.ctxPtrsTwo;

    ULONG64 **ppCurrentReg = &regdisp.pCurrentContextPointers->X19;
    ULONG64 **ppCallerReg  = &regdisp.pCallerContextPointers->X19;

    for (iReg = 0; iReg < 11; iReg++)
    {
        *(ppCurrentReg + iReg) = &regdisp.pCurrentContext->X19 + iReg;
        *(ppCallerReg + iReg)  = &regdisp.pCallerContext->X19 + iReg;
    }

    /// Set Lr
    *(ppCurrentReg + 11) = &regdisp.pCurrentContext->Lr;
    *(ppCallerReg + 11)  = &regdisp.pCallerContext->Lr;

    ULONG64 **ppVolatileReg = &regdisp.volatileCurrContextPointers.X0;
    for (iReg = 0; iReg < 18; iReg++)
    {
        *(ppVolatileReg + iReg) = &regdisp.pCurrentContext->X0 + iReg;
    }
#else
    PORTABILITY_ASSERT("GcInfoDumper::EnumerateStateChanges is not implemented on this platform.")
#endif

#undef FILL_REGS
#undef NEXT_ADDRESS

    SyncRegDisplayToCurrentContext(&regdisp);

    //
    // Enumerate pointers at every possible offset.
    //

#ifdef PARTIALLY_INTERRUPTIBLE_GC_SUPPORTED
    GcInfoDecoder safePointDecoder(m_gcTable, (GcInfoDecoderFlags)0, 0);
#endif

    {
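        // Report untracked slots once, up front; the sentinel offset -2 keeps them distinct from any real
        // code offset in the per-offset walk below.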
        GcInfoDecoder untrackedDecoder(m_gcTable, DECODE_GC_LIFETIMES, 0);
        untrackedDecoder.EnumerateUntrackedSlots(&regdisp,
                                                 0,
                                                 &LivePointerCallback,
                                                 this);

        BOOL fStop = ReportPointerDifferences(
                -2,
                &regdisp,
                NULL);

        FreePointerRecords(m_pRecords);
        m_pRecords = NULL;

        if (fStop || m_Error)
            return m_Error;
    }

    LivePointerRecord *pLastState = NULL;
    BOOL fPrevInterruptible = FALSE;

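    // Walk every code offset in the method, decode the GC info at that offset, and report liveness
    // transitions relative to the previous offset.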
    for (UINT32 offset = 0; offset <= cbEncodedMethodSize; offset++)
    {
        BOOL fNewInterruptible = FALSE;

        GcInfoDecoder decoder1(m_gcTable,
                               (GcInfoDecoderFlags)(  DECODE_SECURITY_OBJECT
                                                    | DECODE_CODE_LENGTH
                                                    | DECODE_VARARG
#if defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
                                                    | DECODE_HAS_TAILCALLS
#endif // _TARGET_ARM_ || _TARGET_ARM64_

                                                    | DECODE_INTERRUPTIBILITY),
                               offset);

        fNewInterruptible = decoder1.IsInterruptible();

        if (fNewInterruptible != fPrevInterruptible)
        {
            if (pfnInterruptibleStateChange(offset, fNewInterruptible, pvData))
                break;

            fPrevInterruptible = fNewInterruptible;
        }

        unsigned flags = ActiveStackFrame;

#ifdef PARTIALLY_INTERRUPTIBLE_GC_SUPPORTED
        UINT32 safePointOffset = offset;
#if defined(_TARGET_AMD64_) || defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)
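        // On these targets safe points are recorded at the return address (the offset just past the call),
        // so probe the decoder at offset+1.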
        safePointOffset++;
#endif
        if (safePointDecoder.IsSafePoint(safePointOffset))
        {
            _ASSERTE(!fNewInterruptible);
            if (pfnSafePointFunc(safePointOffset, pvData))
                break;

            flags = 0;
        }
#endif

        GcInfoDecoder decoder2(m_gcTable,
                               (GcInfoDecoderFlags)(  DECODE_SECURITY_OBJECT
                                                    | DECODE_CODE_LENGTH
                                                    | DECODE_VARARG
                                                    | DECODE_GC_LIFETIMES
                                                    | DECODE_NO_VALIDATION),
                               offset);

        _ASSERTE(!m_pRecords);

        if (!fNewInterruptible && (flags == ActiveStackFrame))
        {
            // Decoding at non-interruptible offsets is only
            // valid in the ExecutionAborted case.
            flags |= ExecutionAborted;
        }


        if (!decoder2.EnumerateLiveSlots(
                &regdisp,
                true,
                flags | NoReportUntracked,
                &LivePointerCallback,
                this))
        {
            m_Error = DECODER_FAILED;
        }

        if (m_Error)
            break;

        if (ReportPointerDifferences(
                offset,
                &regdisp,
                pLastState))
        {
            break;
        }

        if (m_Error)
            break;

        FreePointerRecords(pLastState);

        pLastState = m_pRecords;
        m_pRecords = NULL;

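        // Track the largest number of GC info bytes any decoder consumed; that maximum is what
        // GetGCInfoSize() reports.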
        size_t tempSize = decoder2.GetNumBytesRead();
        if (m_gcInfoSize < tempSize)
            m_gcInfoSize = tempSize;
    }

    FreePointerRecords(pLastState);

    FreePointerRecords(m_pRecords);
    m_pRecords = NULL;

    return m_Error;
}