| 1 | // Licensed to the .NET Foundation under one or more agreements. |
| 2 | // The .NET Foundation licenses this file to you under the MIT license. |
| 3 | // See the LICENSE file in the project root for more information. |
| 4 | // |
| 5 | // File: CLASS.CPP |
| 6 | // |
| 7 | |
| 8 | #include "common.h" |
| 9 | |
| 10 | #include "dllimport.h" |
| 11 | #include "dllimportcallback.h" |
| 12 | #include "fieldmarshaler.h" |
| 13 | #include "customattribute.h" |
| 14 | #include "encee.h" |
| 15 | #include "typestring.h" |
| 16 | |
| 17 | #ifdef FEATURE_COMINTEROP |
| 18 | #include "comcallablewrapper.h" |
| 19 | #include "clrtocomcall.h" |
| 20 | #include "runtimecallablewrapper.h" |
| 21 | #endif // FEATURE_COMINTEROP |
| 22 | |
| 23 | //#define DEBUG_LAYOUT |
| 24 | #define SORT_BY_RID |
| 25 | |
| 26 | #ifndef DACCESS_COMPILE |
| 27 | #include "methodtablebuilder.h" |
| 28 | #endif |
| 29 | #include "nsenumhandleallcases.h" |
| 30 | |
| 31 | #ifndef DACCESS_COMPILE |
| 32 | |
| 33 | |
| 34 | //******************************************************************************* |
| 35 | EEClass::EEClass(DWORD cbFixedEEClassFields) |
| 36 | { |
| 37 | LIMITED_METHOD_CONTRACT; |
| 38 | |
| 39 | // Cache size of fixed fields (this instance also contains a set of packed fields whose final size isn't |
| 40 | // determined until the end of class loading). We store the size into a spare byte made available by |
| 41 | // compiler field alignment, so we need to ensure we never allocate a flavor of EEClass more than 255 |
| 42 | // bytes long. |
| 43 | _ASSERTE(cbFixedEEClassFields <= 0xff); |
| 44 | m_cbFixedEEClassFields = (BYTE)cbFixedEEClassFields; |
| 45 | |
| 46 | // All other members are initialized to zero |
| 47 | } |
| 48 | |
| 49 | //******************************************************************************* |
| 50 | void *EEClass::operator new( |
| 51 | size_t size, |
| 52 | LoaderHeap *pHeap, |
| 53 | AllocMemTracker *pamTracker) |
| 54 | { |
| 55 | CONTRACTL |
| 56 | { |
| 57 | THROWS; |
| 58 | GC_NOTRIGGER; |
| 59 | INJECT_FAULT(COMPlusThrowOM()); |
| 60 | } |
| 61 | CONTRACTL_END; |
| 62 | |
| 63 | // EEClass (or sub-type) is always followed immediately by an EEClassPackedFields structure. This is |
| 64 | // maximally sized at runtime but in the ngen scenario will be optimized into a smaller structure (which |
| 65 | // is why it must go after all the fixed sized fields). |
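// S_SIZE_T arithmetic tracks overflow; AllocMem throws on an overflowed size rather than under-allocating.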
| 66 | S_SIZE_T safeSize = S_SIZE_T(size) + S_SIZE_T(sizeof(EEClassPackedFields)); |
| 67 | |
| 68 | void *p = pamTracker->Track(pHeap->AllocMem(safeSize)); |
| 69 | |
| 70 | // No need to memset since this memory came from VirtualAlloc'ed memory |
| 71 | // memset (p, 0, size); |
| 72 | |
| 73 | return p; |
| 74 | } |
| 75 | |
| 76 | //******************************************************************************* |
| 77 | void EEClass::Destruct(MethodTable * pOwningMT) |
| 78 | { |
| 79 | CONTRACTL |
| 80 | { |
| 81 | NOTHROW; |
| 82 | GC_TRIGGERS; |
| 83 | FORBID_FAULT; |
| 84 | PRECONDITION(pOwningMT != NULL); |
| 85 | } |
| 86 | CONTRACTL_END |
| 87 | |
| 88 | #ifndef CROSSGEN_COMPILE |
| 89 | |
| 90 | // Not expected to be called for array EEClass |
| 91 | _ASSERTE(!pOwningMT->IsArray()); |
| 92 | |
| 93 | #ifdef _DEBUG |
| 94 | _ASSERTE(!IsDestroyed()); |
| 95 | SetDestroyed(); |
| 96 | #endif |
| 97 | |
| 98 | #ifdef PROFILING_SUPPORTED |
// If profiling is enabled, notify the profiler that the class is being unloaded.
| 100 | { |
| 101 | BEGIN_PIN_PROFILER(CORProfilerTrackClasses()); |
| 102 | { |
| 103 | // Calls to the profiler callback may throw, or otherwise fail, if |
| 104 | // the profiler AVs/throws an unhandled exception/etc. We don't want |
| 105 | // those failures to affect the runtime, so we'll ignore them. |
| 106 | // |
| 107 | // Note that the profiler callback may turn around and make calls into |
| 108 | // the profiling runtime that may throw. This try/catch block doesn't |
| 109 | // protect the profiler against such failures. To protect the profiler |
| 110 | // against that, we will need try/catch blocks around all calls into the |
| 111 | // profiling API. |
| 112 | // |
| 113 | // (Bug #26467) |
| 114 | // |
| 115 | |
| 116 | FAULT_NOT_FATAL(); |
| 117 | |
| 118 | EX_TRY |
| 119 | { |
| 120 | GCX_PREEMP(); |
| 121 | |
| 122 | g_profControlBlock.pProfInterface->ClassUnloadStarted((ClassID) pOwningMT); |
| 123 | } |
| 124 | EX_CATCH |
| 125 | { |
| 126 | // The exception here came from the profiler itself. We'll just |
| 127 | // swallow the exception, since we don't want the profiler to bring |
| 128 | // down the runtime. |
| 129 | } |
| 130 | EX_END_CATCH(RethrowTerminalExceptions); |
| 131 | } |
| 132 | END_PIN_PROFILER(); |
| 133 | } |
| 134 | #endif // PROFILING_SUPPORTED |
| 135 | |
| 136 | #ifdef FEATURE_COMINTEROP |
| 137 | // clean up any COM Data |
| 138 | if (m_pccwTemplate) |
| 139 | { |
| 140 | m_pccwTemplate->Release(); |
| 141 | m_pccwTemplate = NULL; |
| 142 | } |
| 143 | |
| 144 | |
| 145 | #ifdef FEATURE_COMINTEROP_UNMANAGED_ACTIVATION |
| 146 | if (GetComClassFactory()) |
| 147 | { |
| 148 | GetComClassFactory()->Cleanup(); |
| 149 | } |
| 150 | #endif // FEATURE_COMINTEROP_UNMANAGED_ACTIVATION |
| 151 | #endif // FEATURE_COMINTEROP |
| 152 | |
| 153 | |
| 154 | if (IsDelegate()) |
| 155 | { |
| 156 | DelegateEEClass* pDelegateEEClass = (DelegateEEClass*)this; |
| 157 | |
| 158 | if (pDelegateEEClass->m_pStaticCallStub) |
| 159 | { |
| 160 | BOOL fStubDeleted = pDelegateEEClass->m_pStaticCallStub->DecRef(); |
| 161 | if (fStubDeleted) |
| 162 | { |
| 163 | DelegateInvokeStubManager::g_pManager->RemoveStub(pDelegateEEClass->m_pStaticCallStub); |
| 164 | } |
| 165 | } |
| 166 | if (pDelegateEEClass->m_pInstRetBuffCallStub) |
| 167 | { |
| 168 | pDelegateEEClass->m_pInstRetBuffCallStub->DecRef(); |
| 169 | } |
| 170 | // While m_pMultiCastInvokeStub is also a member, |
| 171 | // it is owned by the m_pMulticastStubCache, not by the class |
| 172 | // - it is shared across classes. So we don't decrement |
| 173 | // its ref count here |
| 174 | delete pDelegateEEClass->m_pUMThunkMarshInfo; |
| 175 | } |
| 176 | |
| 177 | #ifdef FEATURE_COMINTEROP |
| 178 | if (GetSparseCOMInteropVTableMap() != NULL && !pOwningMT->IsZapped()) |
| 179 | delete GetSparseCOMInteropVTableMap(); |
| 180 | #endif // FEATURE_COMINTEROP |
| 181 | |
| 182 | #ifdef PROFILING_SUPPORTED |
// If profiling is enabled, notify the profiler that the class is being unloaded.
| 184 | { |
| 185 | BEGIN_PIN_PROFILER(CORProfilerTrackClasses()); |
| 186 | { |
| 187 | // See comments in the call to ClassUnloadStarted for details on this |
| 188 | // FAULT_NOT_FATAL marker and exception swallowing. |
| 189 | FAULT_NOT_FATAL(); |
| 190 | EX_TRY |
| 191 | { |
| 192 | GCX_PREEMP(); |
| 193 | g_profControlBlock.pProfInterface->ClassUnloadFinished((ClassID) pOwningMT, S_OK); |
| 194 | } |
| 195 | EX_CATCH |
| 196 | { |
| 197 | } |
| 198 | EX_END_CATCH(RethrowTerminalExceptions); |
| 199 | } |
| 200 | END_PIN_PROFILER(); |
| 201 | } |
| 202 | #endif // PROFILING_SUPPORTED |
| 203 | |
| 204 | #endif // CROSSGEN_COMPILE |
| 205 | } |
| 206 | |
| 207 | //******************************************************************************* |
| 208 | /*static*/ EEClass * |
| 209 | EEClass::CreateMinimalClass(LoaderHeap *pHeap, AllocMemTracker *pamTracker) |
| 210 | { |
| 211 | CONTRACTL |
| 212 | { |
| 213 | THROWS; |
| 214 | GC_NOTRIGGER; |
| 215 | } |
| 216 | CONTRACTL_END; |
| 217 | |
| 218 | return new (pHeap, pamTracker) EEClass(sizeof(EEClass)); |
| 219 | } |
| 220 | |
| 221 | |
| 222 | //******************************************************************************* |
| 223 | |
| 224 | //----------------------------------------------------------------------------------- |
| 225 | // Note: this only loads the type to CLASS_DEPENDENCIES_LOADED as this can be called |
| 226 | // indirectly from DoFullyLoad() as part of accessibility checking. |
| 227 | //----------------------------------------------------------------------------------- |
| 228 | MethodTable *MethodTable::LoadEnclosingMethodTable(ClassLoadLevel targetLevel) |
| 229 | { |
| 230 | CONTRACTL |
| 231 | { |
| 232 | THROWS; |
| 233 | GC_TRIGGERS; |
| 234 | INJECT_FAULT(COMPlusThrowOM();); |
| 235 | MODE_ANY; |
| 236 | } |
| 237 | CONTRACTL_END |
| 238 | |
| 239 | mdTypeDef tdEnclosing = GetEnclosingCl(); |
| 240 | |
| 241 | if (tdEnclosing == mdTypeDefNil) |
| 242 | { |
| 243 | return NULL; |
| 244 | } |
| 245 | |
| 246 | return ClassLoader::LoadTypeDefThrowing(GetModule(), |
| 247 | tdEnclosing, |
| 248 | ClassLoader::ThrowIfNotFound, |
| 249 | ClassLoader::PermitUninstDefOrRef, |
| 250 | tdNoTypes, |
| 251 | targetLevel |
| 252 | ).GetMethodTable(); |
| 253 | |
| 254 | } |
| 255 | |
| 256 | #ifdef EnC_SUPPORTED |
| 257 | |
| 258 | //******************************************************************************* |
| 259 | VOID EEClass::FixupFieldDescForEnC(MethodTable * pMT, EnCFieldDesc *pFD, mdFieldDef fieldDef) |
| 260 | { |
| 261 | CONTRACTL |
| 262 | { |
| 263 | THROWS; |
| 264 | MODE_COOPERATIVE; |
| 265 | WRAPPER(GC_TRIGGERS); |
| 266 | INJECT_FAULT(COMPlusThrowOM();); |
| 267 | } |
| 268 | CONTRACTL_END |
| 269 | |
| 270 | Module * pModule = pMT->GetModule(); |
| 271 | IMDInternalImport *pImport = pModule->GetMDImport(); |
| 272 | |
| 273 | #ifdef LOGGING |
| 274 | if (LoggingEnabled()) |
| 275 | { |
| 276 | LPCSTR szFieldName; |
| 277 | if (FAILED(pImport->GetNameOfFieldDef(fieldDef, &szFieldName))) |
| 278 | { |
szFieldName = "Invalid FieldDef record";
| 280 | } |
LOG((LF_ENC, LL_INFO100, "EEClass::InitializeFieldDescForEnC %s\n", szFieldName));
| 282 | } |
| 283 | #endif //LOGGING |
| 284 | |
| 285 | |
| 286 | #ifdef _DEBUG |
| 287 | BOOL shouldBreak = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_EncFixupFieldBreak); |
| 288 | if (shouldBreak > 0) { |
| 289 | _ASSERTE(!"EncFixupFieldBreak" ); |
| 290 | } |
| 291 | #endif // _DEBUG |
| 292 | |
// MethodTableBuilder uses the stacking allocator for most of its
| 294 | // working memory requirements, so this makes sure to free the memory |
| 295 | // once this function is out of scope. |
| 296 | CheckPointHolder cph(GetThread()->m_MarshalAlloc.GetCheckpoint()); |
| 297 | |
| 298 | MethodTableBuilder::bmtMetaDataInfo bmtMetaData; |
| 299 | bmtMetaData.cFields = 1; |
| 300 | bmtMetaData.pFields = (mdToken*)_alloca(sizeof(mdToken)); |
| 301 | bmtMetaData.pFields[0] = fieldDef; |
| 302 | bmtMetaData.pFieldAttrs = (DWORD*)_alloca(sizeof(DWORD)); |
| 303 | IfFailThrow(pImport->GetFieldDefProps(fieldDef, &bmtMetaData.pFieldAttrs[0])); |
| 304 | |
| 305 | MethodTableBuilder::bmtMethAndFieldDescs bmtMFDescs; |
| 306 | // We need to alloc the memory, but don't have to fill it in. InitializeFieldDescs |
| 307 | // will copy pFD (1st arg) into here. |
| 308 | bmtMFDescs.ppFieldDescList = (FieldDesc**)_alloca(sizeof(FieldDesc*)); |
| 309 | |
| 310 | MethodTableBuilder::bmtFieldPlacement bmtFP; |
| 311 | |
// This simulates just enough of the environment that BuildMethodTableThrowing creates
// to run InitializeFieldDescs.
| 314 | MethodTableBuilder::bmtErrorInfo bmtError; |
| 315 | bmtError.pModule = pModule; |
| 316 | bmtError.cl = pMT->GetCl(); |
| 317 | bmtError.dMethodDefInError = mdTokenNil; |
| 318 | bmtError.szMethodNameForError = NULL; |
| 319 | |
| 320 | MethodTableBuilder::bmtInternalInfo bmtInternal; |
| 321 | bmtInternal.pModule = pModule; |
| 322 | bmtInternal.pInternalImport = pImport; |
| 323 | bmtInternal.pParentMT = pMT->GetParentMethodTable(); |
| 324 | |
| 325 | MethodTableBuilder::bmtProperties bmtProp; |
| 326 | bmtProp.fIsValueClass = !!pMT->IsValueType(); |
| 327 | |
| 328 | MethodTableBuilder::bmtEnumFieldInfo bmtEnumFields(bmtInternal.pInternalImport); |
| 329 | |
| 330 | if (pFD->IsStatic()) |
| 331 | { |
| 332 | bmtEnumFields.dwNumStaticFields = 1; |
| 333 | } |
| 334 | else |
| 335 | { |
| 336 | bmtEnumFields.dwNumInstanceFields = 1; |
| 337 | } |
| 338 | |
// We shouldn't have to fill this in because we're not allowed to EnC value classes, or
// anything else that has layout info associated with it.
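// (Two entries: one for the single field plus room for a terminating sentinel entry.)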
| 341 | LayoutRawFieldInfo *pLayoutRawFieldInfos = (LayoutRawFieldInfo*)_alloca((2) * sizeof(LayoutRawFieldInfo)); |
| 342 | |
// If not NULL, this means there are some by-value fields, and it contains an entry for each instance or static field:
// NULL if the field is not a by-value field, or a pointer to the MethodTable of the field's type if it is. Instance fields
// come first, statics come second.
| 346 | MethodTable **pByValueClassCache = NULL; |
| 347 | |
| 348 | EEClass * pClass = pMT->GetClass(); |
| 349 | |
// InitializeFieldDescs is going to change these numbers to something wrong,
// even though we already have the right numbers. Save them here and restore them afterwards.
| 352 | WORD wNumInstanceFields = pMT->GetNumInstanceFields(); |
| 353 | WORD wNumStaticFields = pMT->GetNumStaticFields(); |
| 354 | unsigned totalDeclaredFieldSize = 0; |
| 355 | |
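// Allocations tracked here are intentionally kept; SuppressRelease is called below instead of rolling them back.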
| 356 | AllocMemTracker dummyAmTracker; |
| 357 | |
| 358 | BaseDomain * pDomain = pMT->GetDomain(); |
| 359 | MethodTableBuilder builder(pMT, pClass, |
| 360 | &GetThread()->m_MarshalAlloc, |
| 361 | &dummyAmTracker); |
| 362 | |
| 363 | MethodTableBuilder::bmtGenericsInfo genericsInfo; |
| 364 | |
| 365 | OBJECTREF pThrowable = NULL; |
| 366 | GCPROTECT_BEGIN(pThrowable); |
| 367 | |
| 368 | builder.SetBMTData(pMT->GetLoaderAllocator(), |
| 369 | &bmtError, |
| 370 | &bmtProp, |
| 371 | NULL, |
| 372 | NULL, |
| 373 | NULL, |
| 374 | &bmtMetaData, |
| 375 | NULL, |
| 376 | &bmtMFDescs, |
| 377 | &bmtFP, |
| 378 | &bmtInternal, |
| 379 | NULL, |
| 380 | NULL, |
| 381 | &genericsInfo, |
| 382 | &bmtEnumFields); |
| 383 | |
| 384 | EX_TRY |
| 385 | { |
| 386 | GCX_PREEMP(); |
| 387 | builder.InitializeFieldDescs(pFD, |
| 388 | pLayoutRawFieldInfos, |
| 389 | &bmtInternal, |
| 390 | &genericsInfo, |
| 391 | &bmtMetaData, |
| 392 | &bmtEnumFields, |
| 393 | &bmtError, |
| 394 | &pByValueClassCache, |
| 395 | &bmtMFDescs, |
| 396 | &bmtFP, |
| 397 | &totalDeclaredFieldSize); |
| 398 | } |
| 399 | EX_CATCH_THROWABLE(&pThrowable); |
| 400 | |
| 401 | dummyAmTracker.SuppressRelease(); |
| 402 | |
| 403 | // Restore now |
| 404 | pClass->SetNumInstanceFields(wNumInstanceFields); |
| 405 | pClass->SetNumStaticFields(wNumStaticFields); |
| 406 | |
// PERF: For now, we turn off the fast equality check for valuetypes when
// a field is modified by EnC. Consider doing a check and setting the bit only when
| 409 | // necessary. |
| 410 | if (pMT->IsValueType()) |
| 411 | { |
| 412 | pClass->SetIsNotTightlyPacked(); |
| 413 | } |
| 414 | |
| 415 | if (pThrowable != NULL) |
| 416 | { |
| 417 | COMPlusThrow(pThrowable); |
| 418 | } |
| 419 | |
| 420 | GCPROTECT_END(); |
| 421 | |
| 422 | pFD->SetMethodTable(pMT); |
| 423 | |
| 424 | // We set this when we first created the FieldDesc, but initializing the FieldDesc |
| 425 | // may have overwritten it so we need to set it again. |
| 426 | pFD->SetEnCNew(); |
| 427 | |
| 428 | return; |
| 429 | } |
| 430 | |
| 431 | //--------------------------------------------------------------------------------------- |
| 432 | // |
| 433 | // AddField - called when a new field is added by EnC |
| 434 | // |
| 435 | // Since instances of this class may already exist on the heap, we can't change the |
// runtime layout of the object to accommodate the new field. Instead we hang the field
| 437 | // off the syncblock (for instance fields) or in the FieldDesc for static fields. |
| 438 | // |
| 439 | // Here we just create the FieldDesc and link it to the class. The actual storage will |
| 440 | // be created lazily on demand. |
| 441 | // |
| 442 | HRESULT EEClass::AddField(MethodTable * pMT, mdFieldDef fieldDef, EnCFieldDesc **ppNewFD) |
| 443 | { |
| 444 | CONTRACTL |
| 445 | { |
| 446 | THROWS; |
| 447 | GC_NOTRIGGER; |
| 448 | MODE_COOPERATIVE; |
| 449 | } |
| 450 | CONTRACTL_END; |
| 451 | |
| 452 | Module * pModule = pMT->GetModule(); |
| 453 | IMDInternalImport *pImport = pModule->GetMDImport(); |
| 454 | |
| 455 | #ifdef LOGGING |
| 456 | if (LoggingEnabled()) |
| 457 | { |
| 458 | LPCSTR szFieldName; |
| 459 | if (FAILED(pImport->GetNameOfFieldDef(fieldDef, &szFieldName))) |
| 460 | { |
szFieldName = "Invalid FieldDef record";
| 462 | } |
LOG((LF_ENC, LL_INFO100, "EEClass::AddField %s\n", szFieldName));
| 464 | } |
| 465 | #endif //LOGGING |
| 466 | |
| 467 | // We can only add fields to normal classes |
| 468 | if (pMT->HasLayout() || pMT->IsValueType()) |
| 469 | { |
| 470 | return CORDBG_E_ENC_CANT_ADD_FIELD_TO_VALUE_OR_LAYOUT_CLASS; |
| 471 | } |
| 472 | |
| 473 | // We only add private fields. |
| 474 | // This may not be strictly necessary, but helps avoid any semantic confusion with |
| 475 | // existing code etc. |
| 476 | DWORD dwFieldAttrs; |
| 477 | IfFailThrow(pImport->GetFieldDefProps(fieldDef, &dwFieldAttrs)); |
| 478 | |
| 479 | LoaderAllocator* pAllocator = pMT->GetLoaderAllocator(); |
| 480 | |
| 481 | // Here we allocate a FieldDesc and set just enough info to be able to fix it up later |
| 482 | // when we're running in managed code. |
| 483 | EnCAddedFieldElement *pAddedField = (EnCAddedFieldElement *) |
| 484 | (void*)pAllocator->GetHighFrequencyHeap()->AllocMem_NoThrow(S_SIZE_T(sizeof(EnCAddedFieldElement))); |
| 485 | if (!pAddedField) |
| 486 | { |
| 487 | return E_OUTOFMEMORY; |
| 488 | } |
| 489 | pAddedField->Init( fieldDef, IsFdStatic(dwFieldAttrs) ); |
| 490 | |
| 491 | EnCFieldDesc *pNewFD = &pAddedField->m_fieldDesc; |
| 492 | |
| 493 | // Get the EnCEEClassData for this class |
// Don't adjust EEClass stats because EnC fields shouldn't touch EE data structures.
| 495 | // We'll just update our private EnC structures instead. |
| 496 | EnCEEClassData *pEnCClass = ((EditAndContinueModule*)pModule)->GetEnCEEClassData(pMT); |
| 497 | if (! pEnCClass) |
| 498 | return E_FAIL; |
| 499 | |
| 500 | // Add the field element to the list of added fields for this class |
| 501 | pEnCClass->AddField(pAddedField); |
| 502 | |
| 503 | // Store the FieldDesc into the module's field list |
| 504 | { |
| 505 | CONTRACT_VIOLATION(ThrowsViolation); // B#25680 (Fix Enc violations): Must handle OOM's from Ensure |
| 506 | pModule->EnsureFieldDefCanBeStored(fieldDef); |
| 507 | } |
| 508 | pModule->EnsuredStoreFieldDef(fieldDef, pNewFD); |
| 509 | pNewFD->SetMethodTable(pMT); |
| 510 | |
| 511 | // Success, return the new FieldDesc |
| 512 | if (ppNewFD) |
| 513 | { |
| 514 | *ppNewFD = pNewFD; |
| 515 | } |
| 516 | return S_OK; |
| 517 | } |
| 518 | |
| 519 | //--------------------------------------------------------------------------------------- |
| 520 | // |
| 521 | // AddMethod - called when a new method is added by EnC |
| 522 | // |
| 523 | // The method has already been added to the metadata with token methodDef. |
| 524 | // Create a new MethodDesc for the method. |
| 525 | // |
| 526 | HRESULT EEClass::AddMethod(MethodTable * pMT, mdMethodDef methodDef, RVA newRVA, MethodDesc **ppMethod) |
| 527 | { |
| 528 | CONTRACTL |
| 529 | { |
| 530 | THROWS; |
| 531 | GC_NOTRIGGER; |
| 532 | MODE_COOPERATIVE; |
| 533 | } |
| 534 | CONTRACTL_END; |
| 535 | |
| 536 | Module * pModule = pMT->GetModule(); |
| 537 | IMDInternalImport *pImport = pModule->GetMDImport(); |
| 538 | |
| 539 | #ifdef LOGGING |
| 540 | if (LoggingEnabled()) |
| 541 | { |
| 542 | LPCSTR szMethodName; |
| 543 | if (FAILED(pImport->GetNameOfMethodDef(methodDef, &szMethodName))) |
| 544 | { |
szMethodName = "Invalid MethodDef record";
| 546 | } |
LOG((LF_ENC, LL_INFO100, "EEClass::AddMethod %s\n", szMethodName));
| 548 | } |
| 549 | #endif //LOGGING |
| 550 | |
| 551 | DWORD dwDescrOffset; |
| 552 | DWORD dwImplFlags; |
| 553 | HRESULT hr = S_OK; |
| 554 | |
| 555 | if (FAILED(pImport->GetMethodImplProps(methodDef, &dwDescrOffset, &dwImplFlags))) |
| 556 | { |
| 557 | return COR_E_BADIMAGEFORMAT; |
| 558 | } |
| 559 | |
| 560 | DWORD dwMemberAttrs; |
| 561 | IfFailThrow(pImport->GetMethodDefProps(methodDef, &dwMemberAttrs)); |
| 562 | |
| 563 | // Refuse to add other special cases |
| 564 | if (IsReallyMdPinvokeImpl(dwMemberAttrs) || |
| 565 | (pMT->IsInterface() && !IsMdStatic(dwMemberAttrs)) || |
| 566 | IsMiRuntime(dwImplFlags)) |
| 567 | { |
_ASSERTE(!"**Error** EEClass::AddMethod only IL private non-virtual methods are supported");
LOG((LF_ENC, LL_INFO100, "**Error** EEClass::AddMethod only IL private non-virtual methods are supported\n"));
| 570 | return CORDBG_E_ENC_EDIT_NOT_SUPPORTED; |
| 571 | } |
| 572 | |
| 573 | #ifdef _DEBUG |
| 574 | // Validate that this methodDef correctly has a parent typeDef |
| 575 | mdTypeDef parentTypeDef; |
| 576 | if (FAILED(hr = pImport->GetParentToken(methodDef, &parentTypeDef))) |
| 577 | { |
_ASSERTE(!"**Error** EEClass::AddMethod parent token not found");
LOG((LF_ENC, LL_INFO100, "**Error** EEClass::AddMethod parent token not found\n"));
| 580 | return E_FAIL; |
| 581 | } |
| 582 | #endif // _DEBUG |
| 583 | |
| 584 | EEClass * pClass = pMT->GetClass(); |
| 585 | |
| 586 | // @todo: OOM: InitMethodDesc will allocate loaderheap memory but leak it |
| 587 | // on failure. This AllocMemTracker should be replaced with a real one. |
| 588 | AllocMemTracker dummyAmTracker; |
| 589 | |
| 590 | LoaderAllocator* pAllocator = pMT->GetLoaderAllocator(); |
| 591 | |
| 592 | // Create a new MethodDescChunk to hold the new MethodDesc |
| 593 | // Create the chunk somewhere we'll know is within range of the VTable |
| 594 | MethodDescChunk *pChunk = MethodDescChunk::CreateChunk(pAllocator->GetHighFrequencyHeap(), |
| 595 | 1, // methodDescCount |
| 596 | mcInstantiated, |
| 597 | TRUE /* fNonVtableSlot */, |
| 598 | TRUE /* fNativeCodeSlot */, |
| 599 | FALSE /* fComPlusCallInfo */, |
| 600 | pMT, |
| 601 | &dummyAmTracker); |
| 602 | |
| 603 | // Get the new MethodDesc (Note: The method desc memory is zero initialized) |
| 604 | MethodDesc *pNewMD = pChunk->GetFirstMethodDesc(); |
| 605 | |
| 606 | // Initialize the new MethodDesc |
| 607 | MethodTableBuilder builder(pMT, |
| 608 | pClass, |
| 609 | &GetThread()->m_MarshalAlloc, |
| 610 | &dummyAmTracker); |
| 611 | EX_TRY |
| 612 | { |
| 613 | INDEBUG(LPCSTR debug_szFieldName); |
INDEBUG(if (FAILED(pImport->GetNameOfMethodDef(methodDef, &debug_szFieldName))) { debug_szFieldName = "Invalid MethodDef record"; });
| 615 | builder.InitMethodDesc(pNewMD, |
| 616 | mcInstantiated, // Use instantiated methoddesc for EnC added methods to get space for slot |
| 617 | methodDef, |
| 618 | dwImplFlags, |
| 619 | dwMemberAttrs, |
| 620 | TRUE, // fEnC |
| 621 | newRVA, |
| 622 | pImport, |
| 623 | NULL |
| 624 | COMMA_INDEBUG(debug_szFieldName) |
| 625 | COMMA_INDEBUG(pMT->GetDebugClassName()) |
| 626 | COMMA_INDEBUG(NULL) |
| 627 | ); |
| 628 | |
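// Give the new method a temporary (prestub) entry point so it can be invoked before it is jitted.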
| 629 | pNewMD->SetTemporaryEntryPoint(pAllocator, &dummyAmTracker); |
| 630 | } |
| 631 | EX_CATCH_HRESULT(hr); |
| 632 | if (S_OK != hr) |
| 633 | return hr; |
| 634 | |
| 635 | dummyAmTracker.SuppressRelease(); |
| 636 | |
| 637 | _ASSERTE(pNewMD->IsEnCAddedMethod()); |
| 638 | |
| 639 | pNewMD->SetSlot(MethodTable::NO_SLOT); // we can't ever use the slot for EnC methods |
| 640 | |
| 641 | pClass->AddChunk(pChunk); |
| 642 | |
| 643 | // Store the new MethodDesc into the collection for this class |
| 644 | pModule->EnsureMethodDefCanBeStored(methodDef); |
| 645 | pModule->EnsuredStoreMethodDef(methodDef, pNewMD); |
| 646 | |
LOG((LF_ENC, LL_INFO100, "EEClass::AddMethod new methoddesc %p for token %p\n", pNewMD, methodDef));
| 648 | |
| 649 | // Success - return the new MethodDesc |
| 650 | _ASSERTE( SUCCEEDED(hr) ); |
| 651 | if (ppMethod) |
| 652 | { |
| 653 | *ppMethod = pNewMD; |
| 654 | } |
| 655 | return S_OK; |
| 656 | } |
| 657 | |
| 658 | #endif // EnC_SUPPORTED |
| 659 | |
| 660 | //--------------------------------------------------------------------------------------- |
| 661 | // |
| 662 | // Check that the class type parameters are used consistently in this signature blob |
| 663 | // in accordance with their variance annotations |
| 664 | // The signature is assumed to be well-formed but indices and arities might not be correct |
| 665 | // |
| 666 | BOOL |
| 667 | EEClass::CheckVarianceInSig( |
| 668 | DWORD numGenericArgs, |
| 669 | BYTE * pVarianceInfo, |
| 670 | Module * pModule, |
| 671 | SigPointer psig, |
| 672 | CorGenericParamAttr position) |
| 673 | { |
| 674 | CONTRACTL |
| 675 | { |
| 676 | THROWS; |
| 677 | GC_TRIGGERS; |
| 678 | MODE_ANY; |
| 679 | } |
| 680 | CONTRACTL_END; |
| 681 | |
| 682 | if (pVarianceInfo == NULL) |
| 683 | return TRUE; |
| 684 | |
| 685 | CorElementType typ; |
| 686 | IfFailThrow(psig.GetElemType(&typ)); |
| 687 | |
| 688 | switch (typ) |
| 689 | { |
| 690 | case ELEMENT_TYPE_STRING: |
| 691 | case ELEMENT_TYPE_U: |
| 692 | case ELEMENT_TYPE_I: |
| 693 | case ELEMENT_TYPE_I1: |
| 694 | case ELEMENT_TYPE_U1: |
| 695 | case ELEMENT_TYPE_BOOLEAN: |
| 696 | case ELEMENT_TYPE_I2: |
| 697 | case ELEMENT_TYPE_U2: |
| 698 | case ELEMENT_TYPE_CHAR: |
| 699 | case ELEMENT_TYPE_I4: |
| 700 | case ELEMENT_TYPE_U4: |
| 701 | case ELEMENT_TYPE_I8: |
| 702 | case ELEMENT_TYPE_U8: |
| 703 | case ELEMENT_TYPE_R4: |
| 704 | case ELEMENT_TYPE_R8: |
| 705 | case ELEMENT_TYPE_VOID: |
| 706 | case ELEMENT_TYPE_OBJECT: |
| 707 | case ELEMENT_TYPE_TYPEDBYREF: |
| 708 | case ELEMENT_TYPE_MVAR: |
| 709 | case ELEMENT_TYPE_CLASS: |
| 710 | case ELEMENT_TYPE_VALUETYPE: |
| 711 | return TRUE; |
| 712 | |
| 713 | case ELEMENT_TYPE_VAR: |
| 714 | { |
| 715 | DWORD index; |
| 716 | IfFailThrow(psig.GetData(&index)); |
| 717 | |
| 718 | // This will be checked later anyway; so give up and don't indicate a variance failure |
if (index >= numGenericArgs) // index is unsigned, so only the upper bound needs checking
| 720 | return TRUE; |
| 721 | |
| 722 | // Non-variant parameters are allowed to appear anywhere |
| 723 | if (pVarianceInfo[index] == gpNonVariant) |
| 724 | return TRUE; |
| 725 | |
| 726 | // Covariant and contravariant parameters can *only* appear in resp. covariant and contravariant positions |
| 727 | return ((CorGenericParamAttr) (pVarianceInfo[index]) == position); |
| 728 | } |
| 729 | |
| 730 | case ELEMENT_TYPE_GENERICINST: |
| 731 | { |
| 732 | IfFailThrow(psig.GetElemType(&typ)); |
| 733 | mdTypeRef typeref; |
| 734 | IfFailThrow(psig.GetToken(&typeref)); |
| 735 | |
| 736 | // The number of type parameters follows |
| 737 | DWORD ntypars; |
| 738 | IfFailThrow(psig.GetData(&ntypars)); |
| 739 | |
// If this is a value type, or position == gpNonVariant, then
// covariant and contravariant type parameters are disallowed completely
| 742 | if (typ == ELEMENT_TYPE_VALUETYPE || position == gpNonVariant) |
| 743 | { |
| 744 | for (unsigned i = 0; i < ntypars; i++) |
| 745 | { |
| 746 | if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant)) |
| 747 | return FALSE; |
| 748 | |
| 749 | IfFailThrow(psig.SkipExactlyOne()); |
| 750 | } |
| 751 | } |
| 752 | // Otherwise we need to take notice of the variance annotation on each type parameter to the generic type |
| 753 | else |
| 754 | { |
| 755 | mdTypeDef typeDef; |
| 756 | Module * pDefModule; |
| 757 | // This will also be resolved later; so, give up and don't indicate a variance failure |
| 758 | if (!ClassLoader::ResolveTokenToTypeDefThrowing(pModule, typeref, &pDefModule, &typeDef)) |
| 759 | return TRUE; |
| 760 | |
| 761 | HENUMInternal hEnumGenericPars; |
| 762 | if (FAILED(pDefModule->GetMDImport()->EnumInit(mdtGenericParam, typeDef, &hEnumGenericPars))) |
| 763 | { |
| 764 | pDefModule->GetAssembly()->ThrowTypeLoadException(pDefModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT); |
| 765 | } |
| 766 | |
| 767 | for (unsigned i = 0; i < ntypars; i++) |
| 768 | { |
| 769 | mdGenericParam tkTyPar; |
| 770 | pDefModule->GetMDImport()->EnumNext(&hEnumGenericPars, &tkTyPar); |
| 771 | DWORD flags; |
| 772 | if (FAILED(pDefModule->GetMDImport()->GetGenericParamProps(tkTyPar, NULL, &flags, NULL, NULL, NULL))) |
| 773 | { |
| 774 | pDefModule->GetAssembly()->ThrowTypeLoadException(pDefModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT); |
| 775 | } |
| 776 | CorGenericParamAttr genPosition = (CorGenericParamAttr) (flags & gpVarianceMask); |
| 777 | // If the surrounding context is contravariant then we need to flip the variance of this parameter |
| 778 | if (position == gpContravariant) |
| 779 | { |
| 780 | genPosition = genPosition == gpCovariant ? gpContravariant |
| 781 | : genPosition == gpContravariant ? gpCovariant |
| 782 | : gpNonVariant; |
| 783 | } |
| 784 | if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, genPosition)) |
| 785 | return FALSE; |
| 786 | |
| 787 | IfFailThrow(psig.SkipExactlyOne()); |
| 788 | } |
| 789 | pDefModule->GetMDImport()->EnumClose(&hEnumGenericPars); |
| 790 | } |
| 791 | |
| 792 | return TRUE; |
| 793 | } |
| 794 | |
| 795 | // Arrays behave covariantly |
| 796 | case ELEMENT_TYPE_ARRAY: |
| 797 | case ELEMENT_TYPE_SZARRAY: |
| 798 | return CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, position); |
| 799 | |
| 800 | // Pointers behave non-variantly |
| 801 | case ELEMENT_TYPE_BYREF: |
| 802 | case ELEMENT_TYPE_PTR: |
| 803 | return CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant); |
| 804 | |
| 805 | case ELEMENT_TYPE_FNPTR: |
| 806 | { |
| 807 | // Calling convention |
| 808 | IfFailThrow(psig.GetData(NULL)); |
| 809 | |
// Get the argument count
| 811 | ULONG cArgs; |
| 812 | IfFailThrow(psig.GetData(&cArgs)); |
| 813 | |
| 814 | // Conservatively, assume non-variance of function pointer types |
| 815 | if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant)) |
| 816 | return FALSE; |
| 817 | |
| 818 | IfFailThrow(psig.SkipExactlyOne()); |
| 819 | |
| 820 | for (unsigned i = 0; i < cArgs; i++) |
| 821 | { |
| 822 | if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant)) |
| 823 | return FALSE; |
| 824 | |
| 825 | IfFailThrow(psig.SkipExactlyOne()); |
| 826 | } |
| 827 | |
| 828 | return TRUE; |
| 829 | } |
| 830 | |
| 831 | default: |
| 832 | THROW_BAD_FORMAT(IDS_CLASSLOAD_BAD_VARIANCE_SIG, pModule); |
| 833 | } |
| 834 | |
| 835 | return FALSE; |
| 836 | } // EEClass::CheckVarianceInSig |
| 837 | |
| 838 | void |
| 839 | ClassLoader::LoadExactParentAndInterfacesTransitively(MethodTable *pMT) |
| 840 | { |
| 841 | CONTRACTL |
| 842 | { |
| 843 | STANDARD_VM_CHECK; |
| 844 | PRECONDITION(CheckPointer(pMT)); |
| 845 | } |
| 846 | CONTRACTL_END; |
| 847 | |
| 848 | |
| 849 | TypeHandle thisTH(pMT); |
| 850 | SigTypeContext typeContext(thisTH); |
| 851 | IMDInternalImport* pInternalImport = pMT->GetMDImport(); |
| 852 | MethodTable *pParentMT = pMT->GetParentMethodTable(); |
| 853 | |
| 854 | if (pParentMT != NULL && pParentMT->HasInstantiation()) |
| 855 | { |
| 856 | // Fill in exact parent if it's instantiated |
| 857 | mdToken crExtends; |
| 858 | IfFailThrow(pInternalImport->GetTypeDefProps( |
| 859 | pMT->GetCl(), |
| 860 | NULL, |
| 861 | &crExtends)); |
| 862 | |
| 863 | _ASSERTE(!IsNilToken(crExtends)); |
| 864 | _ASSERTE(TypeFromToken(crExtends) == mdtTypeSpec); |
| 865 | |
| 866 | TypeHandle newParent = ClassLoader::LoadTypeDefOrRefOrSpecThrowing(pMT->GetModule(), crExtends, &typeContext, |
| 867 | ClassLoader::ThrowIfNotFound, |
| 868 | ClassLoader::FailIfUninstDefOrRef, |
| 869 | ClassLoader::LoadTypes, |
| 870 | CLASS_LOAD_EXACTPARENTS, |
| 871 | TRUE); |
| 872 | |
| 873 | MethodTable* pNewParentMT = newParent.AsMethodTable(); |
| 874 | if (pNewParentMT != pParentMT) |
| 875 | { |
LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Replaced approximate parent %s with exact parent %s from token %x\n", pParentMT->GetDebugClassName(), pNewParentMT->GetDebugClassName(), crExtends));
| 877 | |
| 878 | // SetParentMethodTable is not used here since we want to update the indirection cell in the NGen case |
| 879 | if (pMT->IsParentMethodTableIndirectPointerMaybeNull()) |
| 880 | { |
| 881 | *EnsureWritablePages(pMT->GetParentMethodTableValuePtr()) = pNewParentMT; |
| 882 | } |
| 883 | else |
| 884 | { |
| 885 | EnsureWritablePages(pMT->GetParentMethodTablePointerPtr()); |
| 886 | pMT->GetParentMethodTablePointerPtr()->SetValueMaybeNull(pNewParentMT); |
| 887 | } |
| 888 | |
| 889 | pParentMT = pNewParentMT; |
| 890 | } |
| 891 | } |
| 892 | |
| 893 | if (pParentMT != NULL) |
| 894 | { |
| 895 | EnsureLoaded(pParentMT, CLASS_LOAD_EXACTPARENTS); |
| 896 | } |
| 897 | |
| 898 | |
| 899 | if (pParentMT != NULL && pParentMT->HasPerInstInfo()) |
| 900 | { |
| 901 | // Copy down all inherited dictionary pointers which we |
| 902 | // could not embed. |
| 903 | DWORD nDicts = pParentMT->GetNumDicts(); |
| 904 | for (DWORD iDict = 0; iDict < nDicts; iDict++) |
| 905 | { |
| 906 | if (pMT->GetPerInstInfo()[iDict].GetValueMaybeNull() != pParentMT->GetPerInstInfo()[iDict].GetValueMaybeNull()) |
| 907 | { |
| 908 | EnsureWritablePages(&pMT->GetPerInstInfo()[iDict]); |
| 909 | pMT->GetPerInstInfo()[iDict].SetValueMaybeNull(pParentMT->GetPerInstInfo()[iDict].GetValueMaybeNull()); |
| 910 | } |
| 911 | } |
| 912 | } |
| 913 | |
| 914 | #ifdef FEATURE_PREJIT |
| 915 | // Restore action, not in MethodTable::Restore because we may have had approx parents at that point |
| 916 | if (pMT->IsZapped()) |
| 917 | { |
| 918 | MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMap(); |
| 919 | while (it.Next()) |
| 920 | { |
| 921 | Module::RestoreMethodTablePointer(&it.GetInterfaceInfo()->m_pMethodTable, pMT->GetLoaderModule(), CLASS_LOAD_EXACTPARENTS); |
| 922 | } |
| 923 | } |
| 924 | else |
| 925 | #endif |
| 926 | { |
| 927 | MethodTableBuilder::LoadExactInterfaceMap(pMT); |
| 928 | } |
| 929 | |
| 930 | #ifdef _DEBUG |
| 931 | if (g_pConfig->ShouldDumpOnClassLoad(pMT->GetDebugClassName())) |
| 932 | { |
pMT->Debug_DumpInterfaceMap("Exact");
| 934 | } |
| 935 | #endif //_DEBUG |
| 936 | } // ClassLoader::LoadExactParentAndInterfacesTransitively |
| 937 | |
| 938 | // CLASS_LOAD_EXACTPARENTS phase of loading: |
| 939 | // * Load the base class at exact instantiation |
| 940 | // * Recurse LoadExactParents up parent hierarchy |
| 941 | // * Load explicitly declared interfaces on this class at exact instantiation |
| 942 | // * Fixup vtable |
| 943 | // |
| 944 | /*static*/ |
| 945 | void ClassLoader::LoadExactParents(MethodTable *pMT) |
| 946 | { |
| 947 | CONTRACT_VOID |
| 948 | { |
| 949 | STANDARD_VM_CHECK; |
| 950 | PRECONDITION(CheckPointer(pMT)); |
| 951 | POSTCONDITION(pMT->CheckLoadLevel(CLASS_LOAD_EXACTPARENTS)); |
| 952 | } |
| 953 | CONTRACT_END; |
| 954 | |
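// Capture the approximate parent now; it may be replaced with the exact instantiation below.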
| 955 | MethodTable *pApproxParentMT = pMT->GetParentMethodTable(); |
| 956 | |
| 957 | if (!pMT->IsCanonicalMethodTable()) |
| 958 | { |
| 959 | EnsureLoaded(TypeHandle(pMT->GetCanonicalMethodTable()), CLASS_LOAD_EXACTPARENTS); |
| 960 | } |
| 961 | |
| 962 | LoadExactParentAndInterfacesTransitively(pMT); |
| 963 | |
| 964 | MethodTableBuilder::CopyExactParentSlots(pMT, pApproxParentMT); |
| 965 | |
| 966 | // We can now mark this type as having exact parents |
| 967 | pMT->SetHasExactParent(); |
| 968 | |
| 969 | RETURN; |
| 970 | } |
| 971 | |
| 972 | //******************************************************************************* |
// This is the routine that computes the internal type of a given type. It normalizes
// structs that have only one field (of int/ptr-sized values) to be that underlying type.
//
// * see code:MethodTable#KindsOfElementTypes for more
// * It is used by code:TypeHandle::GetInternalCorElementType
| 978 | CorElementType EEClass::ComputeInternalCorElementTypeForValueType(MethodTable * pMT) |
| 979 | { |
| 980 | CONTRACTL { |
| 981 | THROWS; |
| 982 | GC_TRIGGERS; |
| 983 | } CONTRACTL_END; |
| 984 | |
| 985 | if (pMT->GetNumInstanceFields() == 1 && (!pMT->HasLayout() |
| 986 | || pMT->GetNumInstanceFieldBytes() == 4 |
| 987 | #ifdef _TARGET_64BIT_ |
| 988 | || pMT->GetNumInstanceFieldBytes() == 8 |
| 989 | #endif // _TARGET_64BIT_ |
)) // Don't do the optimization if anything other than the trivial layout was specified.
| 991 | { |
| 992 | FieldDesc * pFD = pMT->GetApproxFieldDescListRaw(); |
| 993 | CorElementType type = pFD->GetFieldType(); |
| 994 | |
| 995 | if (type == ELEMENT_TYPE_VALUETYPE) |
| 996 | { |
| 997 | //@todo: Is it more apropos to call LookupApproxFieldTypeHandle() here? |
| 998 | TypeHandle fldHnd = pFD->GetApproxFieldTypeHandleThrowing(); |
| 999 | CONSISTENCY_CHECK(!fldHnd.IsNull()); |
| 1000 | |
| 1001 | type = fldHnd.GetInternalCorElementType(); |
| 1002 | } |
| 1003 | |
| 1004 | switch (type) |
| 1005 | { |
| 1006 | // "DDB 20951: vc8 unmanaged pointer bug." |
| 1007 | // If ELEMENT_TYPE_PTR were returned, Compiler::verMakeTypeInfo would have problem |
| 1008 | // creating a TI_STRUCT out of CORINFO_TYPE_PTR. |
| 1009 | // As a result, the importer would not be able to realize that the thing on the stack |
| 1010 | // is an instance of a valuetype (that contains one single "void*" field), rather than |
| 1011 | // a pointer to a valuetype. |
| 1012 | // Returning ELEMENT_TYPE_U allows verMakeTypeInfo to go down the normal code path |
| 1013 | // for creating a TI_STRUCT. |
| 1014 | case ELEMENT_TYPE_PTR: |
| 1015 | type = ELEMENT_TYPE_U; |
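// Intentional fall-through to the cases below so the normalized type is returned.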
| 1016 | |
| 1017 | case ELEMENT_TYPE_I: |
| 1018 | case ELEMENT_TYPE_U: |
| 1019 | case ELEMENT_TYPE_I4: |
| 1020 | case ELEMENT_TYPE_U4: |
| 1021 | #ifdef _TARGET_64BIT_ |
| 1022 | case ELEMENT_TYPE_I8: |
| 1023 | case ELEMENT_TYPE_U8: |
| 1024 | #endif // _TARGET_64BIT_ |
| 1025 | |
| 1026 | { |
| 1027 | return type; |
| 1028 | } |
| 1029 | |
| 1030 | default: |
| 1031 | break; |
| 1032 | } |
| 1033 | } |
| 1034 | |
| 1035 | return ELEMENT_TYPE_VALUETYPE; |
| 1036 | } |
| 1037 | |
| 1038 | //******************************************************************************* |
| 1039 | // |
| 1040 | // Debugger notification |
| 1041 | // |
| 1042 | BOOL TypeHandle::NotifyDebuggerLoad(AppDomain *pDomain, BOOL attaching) const |
| 1043 | { |
| 1044 | LIMITED_METHOD_CONTRACT; |
| 1045 | |
| 1046 | if (!CORDebuggerAttached()) |
| 1047 | { |
| 1048 | return FALSE; |
| 1049 | } |
| 1050 | |
| 1051 | if (!GetModule()->IsVisibleToDebugger()) |
| 1052 | { |
| 1053 | return FALSE; |
| 1054 | } |
| 1055 | |
| 1056 | return g_pDebugInterface->LoadClass( |
| 1057 | *this, GetCl(), GetModule(), pDomain); |
| 1058 | } |
| 1059 | |
| 1060 | //******************************************************************************* |
| 1061 | void TypeHandle::NotifyDebuggerUnload(AppDomain *pDomain) const |
| 1062 | { |
| 1063 | LIMITED_METHOD_CONTRACT; |
| 1064 | |
| 1065 | if (!GetModule()->IsVisibleToDebugger()) |
| 1066 | return; |
| 1067 | |
| 1068 | if (!pDomain->IsDebuggerAttached()) |
| 1069 | return; |
| 1070 | |
| 1071 | g_pDebugInterface->UnloadClass(GetCl(), GetModule(), pDomain); |
| 1072 | } |
| 1073 | |
| 1074 | //******************************************************************************* |
| 1075 | // Given the (generics-shared or generics-exact) value class method, find the |
// (generics-shared) unboxing stub for the given method. We search the vtable.
| 1077 | // |
| 1078 | // This is needed when creating a delegate to an instance method in a value type |
| 1079 | MethodDesc* MethodTable::GetBoxedEntryPointMD(MethodDesc *pMD) |
| 1080 | { |
| 1081 | CONTRACT (MethodDesc *) { |
| 1082 | THROWS; |
| 1083 | GC_TRIGGERS; |
| 1084 | INJECT_FAULT(COMPlusThrowOM();); |
| 1085 | PRECONDITION(IsValueType()); |
| 1086 | PRECONDITION(!pMD->ContainsGenericVariables()); |
| 1087 | PRECONDITION(!pMD->IsUnboxingStub()); |
| 1088 | POSTCONDITION(RETVAL->IsUnboxingStub()); |
| 1089 | } CONTRACT_END; |
| 1090 | |
| 1091 | RETURN MethodDesc::FindOrCreateAssociatedMethodDesc(pMD, |
| 1092 | pMD->GetMethodTable(), |
| 1093 | TRUE /* get unboxing entry point */, |
| 1094 | pMD->GetMethodInstantiation(), |
| 1095 | FALSE /* no allowInstParam */ ); |
| 1096 | |
| 1097 | } |
| 1098 | |
| 1099 | //******************************************************************************* |
| 1100 | // Given the unboxing value class method, find the non-unboxing method |
// This is used when generating the code for a BoxedEntryPointStub.
| 1102 | MethodDesc* MethodTable::GetUnboxedEntryPointMD(MethodDesc *pMD) |
| 1103 | { |
| 1104 | CONTRACT (MethodDesc *) { |
| 1105 | THROWS; |
| 1106 | GC_TRIGGERS; |
| 1107 | INJECT_FAULT(COMPlusThrowOM();); |
| 1108 | PRECONDITION(IsValueType()); |
// reflection needs to call this for methods in non-instantiated classes,
| 1110 | // so move the assert to the caller when needed |
| 1111 | //PRECONDITION(!pMD->ContainsGenericVariables()); |
| 1112 | PRECONDITION(pMD->IsUnboxingStub()); |
| 1113 | POSTCONDITION(!RETVAL->IsUnboxingStub()); |
| 1114 | } CONTRACT_END; |
| 1115 | |
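// Only non-generic methods may resolve to the shared form that takes a hidden instantiation parameter.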
| 1116 | BOOL allowInstParam = (pMD->GetNumGenericMethodArgs() == 0); |
| 1117 | RETURN MethodDesc::FindOrCreateAssociatedMethodDesc(pMD, |
| 1118 | this, |
| 1119 | FALSE /* don't get unboxing entry point */, |
| 1120 | pMD->GetMethodInstantiation(), |
| 1121 | allowInstParam); |
| 1122 | } |
| 1123 | |
| 1124 | |
| 1125 | //******************************************************************************* |
| 1126 | // Given the unboxing value class method, find the non-unboxing method |
// This is used when generating the code for a BoxedEntryPointStub.
| 1128 | MethodDesc* MethodTable::GetExistingUnboxedEntryPointMD(MethodDesc *pMD) |
| 1129 | { |
| 1130 | CONTRACT (MethodDesc *) { |
| 1131 | THROWS; |
| 1132 | GC_NOTRIGGER; |
| 1133 | INJECT_FAULT(COMPlusThrowOM();); |
| 1134 | PRECONDITION(IsValueType()); |
// reflection needs to call this for methods in non-instantiated classes,
| 1136 | // so move the assert to the caller when needed |
| 1137 | //PRECONDITION(!pMD->ContainsGenericVariables()); |
| 1138 | PRECONDITION(pMD->IsUnboxingStub()); |
| 1139 | POSTCONDITION(!RETVAL->IsUnboxingStub()); |
| 1140 | } CONTRACT_END; |
| 1141 | |
| 1142 | BOOL allowInstParam = (pMD->GetNumGenericMethodArgs() == 0); |
| 1143 | RETURN MethodDesc::FindOrCreateAssociatedMethodDesc(pMD, |
| 1144 | this, |
| 1145 | FALSE /* don't get unboxing entry point */, |
| 1146 | pMD->GetMethodInstantiation(), |
| 1147 | allowInstParam, |
| 1148 | FALSE, /* forceRemotableMethod */ |
| 1149 | FALSE /* allowCreate */ |
| 1150 | ); |
| 1151 | } |
| 1152 | |
| 1153 | #endif // !DACCESS_COMPILE |
| 1154 | |
| 1155 | //******************************************************************************* |
| 1156 | #if !defined(FEATURE_HFA) |
| 1157 | bool MethodTable::IsHFA() |
| 1158 | { |
| 1159 | LIMITED_METHOD_CONTRACT; |
| 1160 | #ifdef DACCESS_COMPILE |
| 1161 | return false; |
| 1162 | #else |
| 1163 | if (GetClass()->GetMethodTable()->IsValueType()) |
| 1164 | { |
| 1165 | return GetClass()->CheckForHFA(); |
| 1166 | } |
| 1167 | else |
| 1168 | { |
| 1169 | return false; |
| 1170 | } |
| 1171 | #endif |
| 1172 | } |
| 1173 | #endif // !FEATURE_HFA |
| 1174 | |
| 1175 | //******************************************************************************* |
| 1176 | CorElementType MethodTable::GetHFAType() |
| 1177 | { |
| 1178 | CONTRACTL |
| 1179 | { |
| 1180 | WRAPPER(THROWS); // we end up in the class loader which has the conditional contracts |
| 1181 | WRAPPER(GC_TRIGGERS); |
| 1182 | } |
| 1183 | CONTRACTL_END; |
| 1184 | |
| 1185 | if (!IsHFA()) |
| 1186 | return ELEMENT_TYPE_END; |
| 1187 | |
| 1188 | MethodTable * pMT = this; |
| 1189 | for (;;) |
| 1190 | { |
| 1191 | _ASSERTE(pMT->IsValueType()); |
| 1192 | _ASSERTE(pMT->GetNumInstanceFields() > 0); |
| 1193 | |
| 1194 | PTR_FieldDesc pFirstField = pMT->GetApproxFieldDescListRaw(); |
| 1195 | |
| 1196 | CorElementType fieldType = pFirstField->GetFieldType(); |
| 1197 | |
| 1198 | // All HFA fields have to be of the same type, so we can just return the type of the first field |
| 1199 | switch (fieldType) |
| 1200 | { |
| 1201 | case ELEMENT_TYPE_VALUETYPE: |
| 1202 | pMT = pFirstField->LookupApproxFieldTypeHandle().GetMethodTable(); |
| 1203 | break; |
| 1204 | |
| 1205 | case ELEMENT_TYPE_R4: |
| 1206 | case ELEMENT_TYPE_R8: |
| 1207 | return fieldType; |
| 1208 | |
| 1209 | default: |
// This should never happen. The HFA flag should be set only on types
// that have a valid HFA element type when the flag is used to track HFA status.
| 1212 | _ASSERTE(false); |
| 1213 | return ELEMENT_TYPE_END; |
| 1214 | } |
| 1215 | } |
| 1216 | } |
| 1217 | |
| 1218 | bool MethodTable::IsNativeHFA() |
| 1219 | { |
| 1220 | LIMITED_METHOD_CONTRACT; |
| 1221 | return HasLayout() ? GetLayoutInfo()->IsNativeHFA() : IsHFA(); |
| 1222 | } |
| 1223 | |
| 1224 | CorElementType MethodTable::GetNativeHFAType() |
| 1225 | { |
| 1226 | LIMITED_METHOD_CONTRACT; |
| 1227 | return HasLayout() ? GetLayoutInfo()->GetNativeHFAType() : GetHFAType(); |
| 1228 | } |
| 1229 | |
| 1230 | //--------------------------------------------------------------------------------------- |
| 1231 | // |
| 1232 | // When FEATURE_HFA is defined, we cache the value; otherwise we recompute it with each |
| 1233 | // call. The latter is only for the armaltjit and the arm64altjit. |
| 1234 | bool |
| 1235 | #if defined(FEATURE_HFA) |
| 1236 | EEClass::CheckForHFA(MethodTable ** pByValueClassCache) |
| 1237 | #else |
| 1238 | EEClass::CheckForHFA() |
| 1239 | #endif |
| 1240 | { |
| 1241 | STANDARD_VM_CONTRACT; |
| 1242 | |
| 1243 | // This method should be called for valuetypes only |
| 1244 | _ASSERTE(GetMethodTable()->IsValueType()); |
| 1245 | |
// No HFAs with explicit layout. There may be cases where explicit layout may still be
| 1247 | // eligible for HFA, but it is hard to tell the real intent. Make it simple and just |
| 1248 | // unconditionally disable HFAs for explicit layout. |
| 1249 | if (HasExplicitFieldOffsetLayout()) |
| 1250 | return false; |
| 1251 | |
| 1252 | // The SIMD Intrinsic types are meant to be handled specially and should not be treated as HFA |
| 1253 | if (GetMethodTable()->IsIntrinsicType()) |
| 1254 | { |
| 1255 | LPCUTF8 namespaceName; |
| 1256 | LPCUTF8 className = GetMethodTable()->GetFullyQualifiedNameInfo(&namespaceName); |
| 1257 | |
if ((strcmp(className, "Vector256`1") == 0) || (strcmp(className, "Vector128`1") == 0) ||
(strcmp(className, "Vector64`1") == 0))
{
assert(strcmp(namespaceName, "System.Runtime.Intrinsics") == 0);
| 1262 | return false; |
| 1263 | } |
| 1264 | } |
| 1265 | |
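// ELEMENT_TYPE_END is used as "no HFA element type determined yet".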
| 1266 | CorElementType hfaType = ELEMENT_TYPE_END; |
| 1267 | |
| 1268 | FieldDesc *pFieldDescList = GetFieldDescList(); |
| 1269 | for (UINT i = 0; i < GetNumInstanceFields(); i++) |
| 1270 | { |
| 1271 | FieldDesc *pFD = &pFieldDescList[i]; |
| 1272 | CorElementType fieldType = pFD->GetFieldType(); |
| 1273 | |
| 1274 | switch (fieldType) |
| 1275 | { |
| 1276 | case ELEMENT_TYPE_VALUETYPE: |
| 1277 | #if defined(FEATURE_HFA) |
| 1278 | fieldType = pByValueClassCache[i]->GetHFAType(); |
| 1279 | #else |
| 1280 | fieldType = pFD->LookupApproxFieldTypeHandle().AsMethodTable()->GetHFAType(); |
| 1281 | #endif |
| 1282 | break; |
| 1283 | |
| 1284 | case ELEMENT_TYPE_R4: |
| 1285 | case ELEMENT_TYPE_R8: |
| 1286 | break; |
| 1287 | |
| 1288 | default: |
| 1289 | // Not HFA |
| 1290 | return false; |
| 1291 | } |
| 1292 | |
| 1293 | // Field type should be a valid HFA type. |
| 1294 | if (fieldType == ELEMENT_TYPE_END) |
| 1295 | { |
| 1296 | return false; |
| 1297 | } |
| 1298 | |
| 1299 | // Initialize with a valid HFA type. |
| 1300 | if (hfaType == ELEMENT_TYPE_END) |
| 1301 | { |
| 1302 | hfaType = fieldType; |
| 1303 | } |
| 1304 | // All field types should be equal. |
| 1305 | else if (fieldType != hfaType) |
| 1306 | { |
| 1307 | return false; |
| 1308 | } |
| 1309 | } |
| 1310 | |
| 1311 | if (hfaType == ELEMENT_TYPE_END) |
| 1312 | return false; |
| 1313 | |
| 1314 | int elemSize = (hfaType == ELEMENT_TYPE_R8) ? sizeof(double) : sizeof(float); |
| 1315 | |
| 1316 | // Note that we check the total size, but do not perform any checks on number of fields: |
| 1317 | // - Type of fields can be HFA valuetype itself |
| 1318 | // - Managed C++ HFA valuetypes have just one <alignment member> of type float to signal that |
// the valuetype is an HFA and has an explicitly specified size
| 1320 | |
| 1321 | DWORD totalSize = GetMethodTable()->GetNumInstanceFieldBytes(); |
| 1322 | |
| 1323 | if (totalSize % elemSize != 0) |
| 1324 | return false; |
| 1325 | |
| 1326 | // On ARM, HFAs can have a maximum of four fields regardless of whether those are float or double. |
| 1327 | if (totalSize / elemSize > 4) |
| 1328 | return false; |
| 1329 | |
| 1330 | // All the above tests passed. It's HFA! |
| 1331 | #if defined(FEATURE_HFA) |
| 1332 | GetMethodTable()->SetIsHFA(); |
| 1333 | #endif |
| 1334 | return true; |
| 1335 | } |
| 1336 | |
| 1337 | CorElementType EEClassLayoutInfo::GetNativeHFATypeRaw() |
| 1338 | { |
| 1339 | UINT numReferenceFields = GetNumCTMFields(); |
| 1340 | |
| 1341 | CorElementType hfaType = ELEMENT_TYPE_END; |
| 1342 | |
| 1343 | #ifndef DACCESS_COMPILE |
| 1344 | const FieldMarshaler *pFieldMarshaler = GetFieldMarshalers(); |
| 1345 | while (numReferenceFields--) |
| 1346 | { |
| 1347 | CorElementType fieldType = ELEMENT_TYPE_END; |
| 1348 | |
| 1349 | switch (pFieldMarshaler->GetNStructFieldType()) |
| 1350 | { |
| 1351 | case NFT_COPY4: |
| 1352 | case NFT_COPY8: |
| 1353 | fieldType = pFieldMarshaler->GetFieldDesc()->GetFieldType(); |
| 1354 | if (fieldType != ELEMENT_TYPE_R4 && fieldType != ELEMENT_TYPE_R8) |
| 1355 | return ELEMENT_TYPE_END; |
| 1356 | break; |
| 1357 | |
| 1358 | case NFT_NESTEDLAYOUTCLASS: |
| 1359 | fieldType = ((FieldMarshaler_NestedLayoutClass *)pFieldMarshaler)->GetMethodTable()->GetNativeHFAType(); |
| 1360 | break; |
| 1361 | |
| 1362 | case NFT_NESTEDVALUECLASS: |
| 1363 | fieldType = ((FieldMarshaler_NestedValueClass *)pFieldMarshaler)->GetMethodTable()->GetNativeHFAType(); |
| 1364 | break; |
| 1365 | |
| 1366 | case NFT_FIXEDARRAY: |
| 1367 | fieldType = ((FieldMarshaler_FixedArray *)pFieldMarshaler)->GetElementTypeHandle().GetMethodTable()->GetNativeHFAType(); |
| 1368 | break; |
| 1369 | |
| 1370 | case NFT_DATE: |
| 1371 | fieldType = ELEMENT_TYPE_R8; |
| 1372 | break; |
| 1373 | |
| 1374 | default: |
| 1375 | // Not HFA |
| 1376 | return ELEMENT_TYPE_END; |
| 1377 | } |
| 1378 | |
| 1379 | // Field type should be a valid HFA type. |
| 1380 | if (fieldType == ELEMENT_TYPE_END) |
| 1381 | { |
| 1382 | return ELEMENT_TYPE_END; |
| 1383 | } |
| 1384 | |
| 1385 | // Initialize with a valid HFA type. |
| 1386 | if (hfaType == ELEMENT_TYPE_END) |
| 1387 | { |
| 1388 | hfaType = fieldType; |
| 1389 | } |
| 1390 | // All field types should be equal. |
| 1391 | else if (fieldType != hfaType) |
| 1392 | { |
| 1393 | return ELEMENT_TYPE_END; |
| 1394 | } |
| 1395 | |
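// Field marshalers are stored contiguously at a fixed stride; advance to the next one.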
| 1396 | ((BYTE*&)pFieldMarshaler) += MAXFIELDMARSHALERSIZE; |
| 1397 | } |
| 1398 | |
| 1399 | if (hfaType == ELEMENT_TYPE_END) |
| 1400 | return ELEMENT_TYPE_END; |
| 1401 | |
| 1402 | int elemSize = (hfaType == ELEMENT_TYPE_R8) ? sizeof(double) : sizeof(float); |
| 1403 | |
| 1404 | // Note that we check the total size, but do not perform any checks on number of fields: |
| 1405 | // - Type of fields can be HFA valuetype itself |
| 1406 | // - Managed C++ HFA valuetypes have just one <alignment member> of type float to signal that |
// the valuetype is an HFA and has an explicitly specified size
| 1408 | |
| 1409 | DWORD totalSize = GetNativeSize(); |
| 1410 | |
| 1411 | if (totalSize % elemSize != 0) |
| 1412 | return ELEMENT_TYPE_END; |
| 1413 | |
| 1414 | // On ARM, HFAs can have a maximum of four fields regardless of whether those are float or double. |
| 1415 | if (totalSize / elemSize > 4) |
| 1416 | return ELEMENT_TYPE_END; |
| 1417 | |
| 1418 | #endif // !DACCESS_COMPILE |
| 1419 | |
| 1420 | return hfaType; |
| 1421 | } |
| 1422 | |
| 1423 | #ifdef FEATURE_HFA |
| 1424 | // |
// The managed and unmanaged views of a type can differ for non-blittable types. This method
| 1426 | // mirrors the HFA type computation for the unmanaged view. |
| 1427 | // |
| 1428 | VOID EEClass::CheckForNativeHFA() |
| 1429 | { |
| 1430 | STANDARD_VM_CONTRACT; |
| 1431 | |
| 1432 | // No HFAs with inheritance |
| 1433 | if (!(GetMethodTable()->IsValueType() || (GetMethodTable()->GetParentMethodTable() == g_pObjectClass))) |
| 1434 | return; |
| 1435 | |
// No HFAs with explicit layout. There may be cases where explicit layout may still be
| 1437 | // eligible for HFA, but it is hard to tell the real intent. Make it simple and just |
| 1438 | // unconditionally disable HFAs for explicit layout. |
| 1439 | if (HasExplicitFieldOffsetLayout()) |
| 1440 | return; |
| 1441 | |
| 1442 | CorElementType hfaType = GetLayoutInfo()->GetNativeHFATypeRaw(); |
| 1443 | if (hfaType == ELEMENT_TYPE_END) |
| 1444 | { |
| 1445 | return; |
| 1446 | } |
| 1447 | |
| 1448 | // All the above tests passed. It's HFA! |
| 1449 | GetLayoutInfo()->SetNativeHFAType(hfaType); |
| 1450 | } |
| 1451 | #endif // FEATURE_HFA |
| 1452 | |
| 1453 | #ifdef FEATURE_64BIT_ALIGNMENT |
// Returns true iff the native view of this type requires 64-bit alignment.
| 1455 | bool MethodTable::NativeRequiresAlign8() |
| 1456 | { |
| 1457 | LIMITED_METHOD_CONTRACT; |
| 1458 | |
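// For types with native layout, use the largest native member alignment; otherwise fall back to the managed requirement.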
| 1459 | if (HasLayout()) |
| 1460 | { |
| 1461 | return (GetLayoutInfo()->GetLargestAlignmentRequirementOfAllMembers() >= 8); |
| 1462 | } |
| 1463 | return RequiresAlign8(); |
| 1464 | } |
| 1465 | #endif // FEATURE_64BIT_ALIGNMENT |
| 1466 | |
| 1467 | #ifndef DACCESS_COMPILE |
| 1468 | |
| 1469 | #ifdef FEATURE_COMINTEROP |
| 1470 | //========================================================================================== |
| 1471 | TypeHandle MethodTable::GetCoClassForInterface() |
| 1472 | { |
| 1473 | CONTRACTL |
| 1474 | { |
| 1475 | THROWS; |
| 1476 | GC_TRIGGERS; |
| 1477 | INJECT_FAULT(COMPlusThrowOM();); |
| 1478 | } |
| 1479 | CONTRACTL_END |
| 1480 | |
| 1481 | EEClass * pClass = GetClass(); |
| 1482 | |
| 1483 | if (!pClass->IsComClassInterface()) |
| 1484 | return TypeHandle(); |
| 1485 | |
| 1486 | _ASSERTE(IsInterface()); |
| 1487 | |
| 1488 | TypeHandle th = pClass->GetCoClassForInterface(); |
| 1489 | if (!th.IsNull()) |
| 1490 | return th; |
| 1491 | |
| 1492 | return SetupCoClassForInterface(); |
| 1493 | } |
| 1494 | |
| 1495 | //******************************************************************************* |
| 1496 | TypeHandle MethodTable::SetupCoClassForInterface() |
| 1497 | { |
| 1498 | CONTRACTL |
| 1499 | { |
| 1500 | THROWS; |
| 1501 | GC_TRIGGERS; |
| 1502 | INJECT_FAULT(COMPlusThrowOM();); |
| 1503 | PRECONDITION(IsComClassInterface()); |
| 1504 | |
| 1505 | } |
| 1506 | CONTRACTL_END |
| 1507 | |
| 1508 | TypeHandle CoClassType; |
| 1509 | const BYTE *pVal = NULL; |
| 1510 | ULONG cbVal = 0; |
| 1511 | |
| 1512 | if (!IsProjectedFromWinRT()) // ignore classic COM interop CA on WinRT types |
| 1513 | { |
HRESULT hr = GetMDImport()->GetCustomAttributeByName(GetCl(), INTEROP_COCLASS_TYPE, (const void **)&pVal, &cbVal);
| 1515 | if (hr == S_OK) |
| 1516 | { |
| 1517 | CustomAttributeParser cap(pVal, cbVal); |
| 1518 | |
| 1519 | IfFailThrow(cap.SkipProlog()); |
| 1520 | |
// Retrieve the coclass type name.
| 1522 | ULONG cbName; |
| 1523 | LPCUTF8 szName; |
| 1524 | IfFailThrow(cap.GetNonNullString(&szName, &cbName)); |
| 1525 | |
| 1526 | // Copy the name to a temporary buffer and NULL terminate it. |
| 1527 | StackSString ss(SString::Utf8, szName, cbName); |
| 1528 | |
| 1529 | // Try to load the class using its name as a fully qualified name. If that fails, |
| 1530 | // then we try to load it in the assembly of the current class. |
| 1531 | CoClassType = TypeName::GetTypeUsingCASearchRules(ss.GetUnicode(), GetAssembly()); |
| 1532 | |
| 1533 | // Cache the coclass type |
| 1534 | g_IBCLogger.LogEEClassCOWTableAccess(this); |
| 1535 | GetClass_NoLogging()->SetCoClassForInterface(CoClassType); |
| 1536 | } |
| 1537 | } |
| 1538 | return CoClassType; |
| 1539 | } |
| 1540 | |
| 1541 | //******************************************************************************* |
| 1542 | void MethodTable::GetEventInterfaceInfo(MethodTable **ppSrcItfClass, MethodTable **ppEvProvClass) |
| 1543 | { |
| 1544 | CONTRACTL |
| 1545 | { |
| 1546 | THROWS; |
| 1547 | GC_TRIGGERS; |
| 1548 | INJECT_FAULT(COMPlusThrowOM();); |
| 1549 | } |
| 1550 | CONTRACTL_END |
| 1551 | |
| 1552 | |
| 1553 | TypeHandle EventProvType; |
| 1554 | TypeHandle SrcItfType; |
| 1555 | const BYTE *pVal = NULL; |
| 1556 | ULONG cbVal = 0; |
| 1557 | |
    // Retrieve the ComEventInterfaceAttribute CA.
| 1559 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(GetCl(), INTEROP_COMEVENTINTERFACE_TYPE, (const void**)&pVal, &cbVal); |
| 1560 | if (FAILED(hr)) |
| 1561 | { |
| 1562 | COMPlusThrowHR(hr); |
| 1563 | } |
| 1564 | |
| 1565 | CustomAttributeParser cap(pVal, cbVal); |
| 1566 | |
| 1567 | // Skip the CA type prefix. |
| 1568 | IfFailThrow(cap.SkipProlog()); |
| 1569 | |
| 1570 | // Retrieve the COM source interface class name. |
| 1571 | LPCUTF8 szName; |
| 1572 | ULONG cbName; |
| 1573 | IfFailThrow(cap.GetNonNullString(&szName, &cbName)); |
| 1574 | |
| 1575 | // Copy the name to a temporary buffer and NULL terminate it. |
| 1576 | StackSString ss(SString::Utf8, szName, cbName); |
| 1577 | |
| 1578 | // Try to load the class using its name as a fully qualified name. If that fails, |
| 1579 | // then we try to load it in the assembly of the current class. |
| 1580 | SrcItfType = TypeName::GetTypeUsingCASearchRules(ss.GetUnicode(), GetAssembly()); |
| 1581 | |
| 1582 | // Retrieve the COM event provider class name. |
| 1583 | IfFailThrow(cap.GetNonNullString(&szName, &cbName)); |
| 1584 | |
| 1585 | // Copy the name to a temporary buffer and NULL terminate it. |
| 1586 | ss.SetUTF8(szName, cbName); |
| 1587 | |
| 1588 | // Try to load the class using its name as a fully qualified name. If that fails, |
| 1589 | // then we try to load it in the assembly of the current class. |
| 1590 | EventProvType = TypeName::GetTypeUsingCASearchRules(ss.GetUnicode(), GetAssembly()); |
| 1591 | |
| 1592 | // Set the source interface and event provider classes. |
| 1593 | *ppSrcItfClass = SrcItfType.GetMethodTable(); |
| 1594 | *ppEvProvClass = EventProvType.GetMethodTable(); |
| 1595 | } |
| 1596 | |
| 1597 | //******************************************************************************* |
| 1598 | TypeHandle MethodTable::GetDefItfForComClassItf() |
| 1599 | { |
| 1600 | CONTRACTL |
| 1601 | { |
| 1602 | THROWS; |
| 1603 | GC_TRIGGERS; |
| 1604 | INJECT_FAULT(COMPlusThrowOM();); |
| 1605 | } |
| 1606 | CONTRACTL_END |
| 1607 | |
| 1608 | BAD_FORMAT_NOTHROW_ASSERT(GetClass()->IsComClassInterface()); |
| 1609 | |
    // The COM class interface uses the normal scheme, which is to have no
    // methods and to implement the default interface and optionally the
    // default source interface. In this scheme, the first implemented
    // interface is the default interface, which we return.
| 1614 | InterfaceMapIterator it = IterateInterfaceMap(); |
| 1615 | if (it.Next()) |
| 1616 | { |
| 1617 | return TypeHandle(it.GetInterface()); |
| 1618 | } |
| 1619 | else |
| 1620 | { |
        // The COM class interface has the methods directly on itself.
        // Because of this we need to consider it to be the default interface.
| 1623 | return TypeHandle(this); |
| 1624 | } |
| 1625 | } |
| 1626 | |
| 1627 | #endif // FEATURE_COMINTEROP |
| 1628 | |
| 1629 | |
| 1630 | #endif // !DACCESS_COMPILE |
| 1631 | |
| 1632 | //--------------------------------------------------------------------------------------- |
| 1633 | // |
| 1634 | // Get the metadata token of the outer type for a nested type |
| 1635 | // |
| 1636 | // Return Value: |
//    The token of the enclosing class if this type is nested, or mdTypeDefNil if this
//    is not a nested type
| 1639 | // |
| 1640 | |
| 1641 | mdTypeDef MethodTable::GetEnclosingCl() |
| 1642 | { |
| 1643 | CONTRACTL |
| 1644 | { |
| 1645 | THROWS; |
| 1646 | GC_TRIGGERS; |
| 1647 | MODE_ANY; |
| 1648 | } |
| 1649 | CONTRACTL_END; |
| 1650 | |
| 1651 | mdTypeDef tdEnclosing = mdTypeDefNil; |
| 1652 | |
| 1653 | if (GetClass()->IsNested()) |
| 1654 | { |
| 1655 | HRESULT hr = GetMDImport()->GetNestedClassProps(GetCl(), &tdEnclosing); |
| 1656 | if (FAILED(hr)) |
| 1657 | { |
| 1658 | ThrowHR(hr, BFA_UNABLE_TO_GET_NESTED_PROPS); |
| 1659 | } |
| 1660 | } |
| 1661 | |
| 1662 | return tdEnclosing; |
| 1663 | } |
| 1664 | |
| 1665 | //******************************************************************************* |
| 1666 | // |
| 1667 | // Helper routines for the macros defined at the top of this class. |
| 1668 | // You probably should not use these functions directly. |
| 1669 | // |
| 1670 | template<typename RedirectFunctor> |
| 1671 | SString &MethodTable::_GetFullyQualifiedNameForClassNestedAwareInternal(SString &ssBuf) |
| 1672 | { |
| 1673 | CONTRACTL { |
| 1674 | THROWS; |
| 1675 | GC_NOTRIGGER; |
| 1676 | INJECT_FAULT(COMPlusThrowOM();); |
| 1677 | } CONTRACTL_END; |
| 1678 | |
| 1679 | ssBuf.Clear(); |
| 1680 | |
| 1681 | LPCUTF8 pszNamespace; |
| 1682 | LPCUTF8 pszName; |
| 1683 | pszName = GetFullyQualifiedNameInfo(&pszNamespace); |
| 1684 | if (pszName == NULL) |
| 1685 | { |
| 1686 | return ssBuf; |
| 1687 | } |
| 1688 | |
| 1689 | StackSString ssName(SString::Utf8, pszName); |
| 1690 | |
| 1691 | mdTypeDef mdEncl = GetCl(); |
| 1692 | IMDInternalImport *pImport = GetMDImport(); |
| 1693 | |
| 1694 | // Check if the type is nested |
| 1695 | DWORD dwAttr; |
| 1696 | IfFailThrow(pImport->GetTypeDefProps(GetCl(), &dwAttr, NULL)); |
| 1697 | |
| 1698 | RedirectFunctor redirectFunctor; |
| 1699 | if (IsTdNested(dwAttr)) |
| 1700 | { |
| 1701 | StackSString ssFullyQualifiedName; |
| 1702 | StackSString ssPath; |
| 1703 | |
| 1704 | // Build the nesting chain. |
| 1705 | while (SUCCEEDED(pImport->GetNestedClassProps(mdEncl, &mdEncl))) |
| 1706 | { |
| 1707 | LPCUTF8 szEnclName; |
| 1708 | LPCUTF8 szEnclNameSpace; |
| 1709 | IfFailThrow(pImport->GetNameOfTypeDef( |
| 1710 | mdEncl, |
| 1711 | &szEnclName, |
| 1712 | &szEnclNameSpace)); |
| 1713 | |
| 1714 | ns::MakePath(ssPath, |
| 1715 | StackSString(SString::Utf8, redirectFunctor(szEnclNameSpace)), |
| 1716 | StackSString(SString::Utf8, szEnclName)); |
| 1717 | ns::MakeNestedTypeName(ssFullyQualifiedName, ssPath, ssName); |
| 1718 | |
| 1719 | ssName = ssFullyQualifiedName; |
| 1720 | } |
| 1721 | } |
| 1722 | |
| 1723 | ns::MakePath( |
| 1724 | ssBuf, |
| 1725 | StackSString(SString::Utf8, redirectFunctor(pszNamespace)), ssName); |
| 1726 | |
| 1727 | return ssBuf; |
| 1728 | } |
| 1729 | |
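// Identity redirect functor: hands each namespace back unchanged. Used by the plain
// (non-redirecting) flavor of the fully qualified name builder below.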
| 1730 | class PassThrough |
| 1731 | { |
| 1732 | public : |
| 1733 | LPCUTF8 operator() (LPCUTF8 szEnclNamespace) |
| 1734 | { |
| 1735 | LIMITED_METHOD_CONTRACT; |
| 1736 | |
| 1737 | return szEnclNamespace; |
| 1738 | } |
| 1739 | }; |
| 1740 | |
| 1741 | SString &MethodTable::_GetFullyQualifiedNameForClassNestedAware(SString &ssBuf) |
| 1742 | { |
| 1743 | LIMITED_METHOD_CONTRACT; |
| 1744 | |
| 1745 | return _GetFullyQualifiedNameForClassNestedAwareInternal<PassThrough>(ssBuf); |
| 1746 | } |
| 1747 | |
| 1748 | //******************************************************************************* |
| 1749 | SString &MethodTable::_GetFullyQualifiedNameForClass(SString &ssBuf) |
| 1750 | { |
| 1751 | CONTRACTL |
| 1752 | { |
| 1753 | THROWS; |
| 1754 | GC_NOTRIGGER; |
| 1755 | INJECT_FAULT(COMPlusThrowOM();); |
| 1756 | } |
| 1757 | CONTRACTL_END |
| 1758 | |
| 1759 | ssBuf.Clear(); |
| 1760 | |
| 1761 | if (IsArray()) |
| 1762 | { |
| 1763 | TypeDesc::ConstructName(GetInternalCorElementType(), |
| 1764 | GetApproxArrayElementTypeHandle(), |
| 1765 | GetRank(), |
| 1766 | ssBuf); |
| 1767 | } |
| 1768 | else if (!IsNilToken(GetCl())) |
| 1769 | { |
| 1770 | LPCUTF8 szNamespace; |
| 1771 | LPCUTF8 szName; |
| 1772 | IfFailThrow(GetMDImport()->GetNameOfTypeDef(GetCl(), &szName, &szNamespace)); |
| 1773 | |
| 1774 | ns::MakePath(ssBuf, |
| 1775 | StackSString(SString::Utf8, szNamespace), |
| 1776 | StackSString(SString::Utf8, szName)); |
| 1777 | } |
| 1778 | |
| 1779 | return ssBuf; |
| 1780 | } |
| 1781 | |
| 1782 | //******************************************************************************* |
| 1783 | // |
// Gets the namespace and class name for the class. The namespace can
// legitimately come back NULL; however, a NULL class name return value
// indicates an error.
| 1787 | // |
| 1788 | // NOTE: this used to return array class names, which were sometimes squirreled away by the |
| 1789 | // class loader hash table. It's been removed because it wasted space and was basically broken |
| 1790 | // in general (sometimes wasn't set, sometimes set wrong). If you need array class names, |
| 1791 | // use GetFullyQualifiedNameForClass instead. |
| 1792 | // |
| 1793 | LPCUTF8 MethodTable::GetFullyQualifiedNameInfo(LPCUTF8 *ppszNamespace) |
| 1794 | { |
| 1795 | CONTRACTL |
| 1796 | { |
| 1797 | NOTHROW; |
| 1798 | GC_NOTRIGGER; |
| 1799 | FORBID_FAULT; |
| 1800 | SO_TOLERANT; |
| 1801 | } |
| 1802 | CONTRACTL_END |
| 1803 | |
| 1804 | if (IsArray()) |
| 1805 | { |
| 1806 | *ppszNamespace = NULL; |
| 1807 | return NULL; |
| 1808 | } |
| 1809 | else |
| 1810 | { |
| 1811 | LPCUTF8 szName; |
| 1812 | if (FAILED(GetMDImport()->GetNameOfTypeDef(GetCl(), &szName, ppszNamespace))) |
| 1813 | { |
| 1814 | *ppszNamespace = NULL; |
| 1815 | return NULL; |
| 1816 | } |
| 1817 | return szName; |
| 1818 | } |
| 1819 | } |
| 1820 | |
| 1821 | #ifndef DACCESS_COMPILE |
| 1822 | |
| 1823 | #ifdef FEATURE_COMINTEROP |
| 1824 | |
| 1825 | //******************************************************************************* |
| 1826 | CorIfaceAttr MethodTable::GetComInterfaceType() |
| 1827 | { |
| 1828 | CONTRACTL |
| 1829 | { |
| 1830 | THROWS; |
| 1831 | GC_NOTRIGGER; |
| 1832 | FORBID_FAULT; |
| 1833 | } |
| 1834 | CONTRACTL_END |
| 1835 | |
| 1836 | // This should only be called on interfaces. |
| 1837 | BAD_FORMAT_NOTHROW_ASSERT(IsInterface()); |
| 1838 | |
| 1839 | // Check to see if we have already determined the COM interface type |
| 1840 | // of this interface. |
| 1841 | CorIfaceAttr ItfType = GetClass()->GetComInterfaceType(); |
| 1842 | |
| 1843 | if (ItfType != (CorIfaceAttr)-1) |
| 1844 | return ItfType; |
| 1845 | |
| 1846 | if (IsProjectedFromWinRT()) |
| 1847 | { |
| 1848 | // WinRT interfaces are always IInspectable-based |
| 1849 | ItfType = ifInspectable; |
| 1850 | } |
| 1851 | else |
| 1852 | { |
| 1853 | // Retrieve the interface type from the metadata. |
| 1854 | HRESULT hr = GetMDImport()->GetIfaceTypeOfTypeDef(GetCl(), (ULONG*)&ItfType); |
| 1855 | IfFailThrow(hr); |
| 1856 | |
| 1857 | if (hr != S_OK) |
| 1858 | { |
| 1859 | // if not found in metadata, use the default |
| 1860 | ItfType = ifDual; |
| 1861 | } |
| 1862 | } |
| 1863 | |
| 1864 | // Cache the interface type |
| 1865 | g_IBCLogger.LogEEClassCOWTableAccess(this); |
| 1866 | GetClass_NoLogging()->SetComInterfaceType(ItfType); |
| 1867 | |
| 1868 | return ItfType; |
| 1869 | } |
| 1870 | |
| 1871 | #endif // FEATURE_COMINTEROP |
| 1872 | |
| 1873 | //******************************************************************************* |
| 1874 | void EEClass::GetBestFitMapping(MethodTable * pMT, BOOL *pfBestFitMapping, BOOL *pfThrowOnUnmappableChar) |
| 1875 | { |
| 1876 | CONTRACTL |
| 1877 | { |
| 1878 | THROWS; // OOM only |
| 1879 | GC_NOTRIGGER; |
| 1880 | MODE_ANY; |
| 1881 | } |
| 1882 | CONTRACTL_END; |
| 1883 | |
| 1884 | EEClass * pClass = pMT->GetClass(); |
| 1885 | |
| 1886 | // lazy init |
| 1887 | if (!(pClass->m_VMFlags & VMFLAG_BESTFITMAPPING_INITED)) |
| 1888 | { |
| 1889 | *pfBestFitMapping = FALSE; |
| 1890 | *pfThrowOnUnmappableChar = FALSE; |
| 1891 | |
| 1892 | ReadBestFitCustomAttribute(pMT->GetMDImport(), pMT->GetCl(), pfBestFitMapping, pfThrowOnUnmappableChar); |
| 1893 | |
| 1894 | DWORD flags = VMFLAG_BESTFITMAPPING_INITED; |
| 1895 | if (*pfBestFitMapping) flags |= VMFLAG_BESTFITMAPPING; |
| 1896 | if (*pfThrowOnUnmappableChar) flags |= VMFLAG_THROWONUNMAPPABLECHAR; |
| 1897 | |
| 1898 | FastInterlockOr(EnsureWritablePages(&pClass->m_VMFlags), flags); |
| 1899 | } |
| 1900 | else |
| 1901 | { |
| 1902 | *pfBestFitMapping = (pClass->m_VMFlags & VMFLAG_BESTFITMAPPING); |
| 1903 | *pfThrowOnUnmappableChar = (pClass->m_VMFlags & VMFLAG_THROWONUNMAPPABLECHAR); |
| 1904 | } |
| 1905 | } |
| 1906 | |
| 1907 | #ifdef _DEBUG |
| 1908 | |
| 1909 | //******************************************************************************* |
| 1910 | void MethodTable::DebugRecursivelyDumpInstanceFields(LPCUTF8 pszClassName, BOOL debug) |
| 1911 | { |
| 1912 | WRAPPER_NO_CONTRACT; // It's a dev helper, who cares about contracts |
| 1913 | |
| 1914 | EX_TRY |
| 1915 | { |
| 1916 | StackSString ssBuff; |
| 1917 | |
| 1918 | DWORD cParentInstanceFields; |
| 1919 | DWORD i; |
| 1920 | |
| 1921 | CONSISTENCY_CHECK(CheckLoadLevel(CLASS_LOAD_APPROXPARENTS)); |
| 1922 | |
| 1923 | MethodTable *pParentMT = GetParentMethodTable(); |
| 1924 | if (pParentMT != NULL) |
| 1925 | { |
| 1926 | cParentInstanceFields = pParentMT->GetClass()->GetNumInstanceFields(); |
| 1927 | DefineFullyQualifiedNameForClass(); |
| 1928 | LPCUTF8 name = GetFullyQualifiedNameForClass(pParentMT); |
| 1929 | pParentMT->DebugRecursivelyDumpInstanceFields(name, debug); |
| 1930 | } |
| 1931 | else |
| 1932 | { |
| 1933 | cParentInstanceFields = 0; |
| 1934 | } |
| 1935 | |
| 1936 | // Are there any new instance fields declared by this class? |
| 1937 | if (GetNumInstanceFields() > cParentInstanceFields) |
| 1938 | { |
| 1939 | // Display them |
| 1940 | if(debug) { |
| 1941 | ssBuff.Printf(W("%S:\n" ), pszClassName); |
| 1942 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 1943 | } |
| 1944 | else { |
| 1945 | LOG((LF_CLASSLOADER, LL_ALWAYS, "%s:\n" , pszClassName)); |
| 1946 | } |
| 1947 | |
| 1948 | for (i = 0; i < (GetNumInstanceFields()-cParentInstanceFields); i++) |
| 1949 | { |
| 1950 | FieldDesc *pFD = &GetClass()->GetFieldDescList()[i]; |
| 1951 | #ifdef DEBUG_LAYOUT |
| 1952 | printf("offset %s%3d %s\n" , pFD->IsByValue() ? "byvalue " : "" , pFD->GetOffset_NoLogging(), pFD->GetName()); |
| 1953 | #endif |
| 1954 | if(debug) { |
| 1955 | ssBuff.Printf(W("offset %3d %S\n" ), pFD->GetOffset_NoLogging(), pFD->GetName()); |
| 1956 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 1957 | } |
| 1958 | else { |
| 1959 | LOG((LF_CLASSLOADER, LL_ALWAYS, "offset %3d %s\n" , pFD->GetOffset_NoLogging(), pFD->GetName())); |
| 1960 | } |
| 1961 | } |
| 1962 | } |
| 1963 | } |
| 1964 | EX_CATCH |
| 1965 | { |
| 1966 | if(debug) |
| 1967 | { |
| 1968 | WszOutputDebugString(W("<Exception Thrown>\n" )); |
| 1969 | } |
| 1970 | else |
| 1971 | { |
| 1972 | LOG((LF_CLASSLOADER, LL_ALWAYS, "<Exception Thrown>\n" )); |
| 1973 | } |
| 1974 | } |
| 1975 | EX_END_CATCH(SwallowAllExceptions); |
| 1976 | } |
| 1977 | |
| 1978 | //******************************************************************************* |
| 1979 | void MethodTable::DebugDumpFieldLayout(LPCUTF8 pszClassName, BOOL debug) |
| 1980 | { |
| 1981 | WRAPPER_NO_CONTRACT; // It's a dev helper, who cares about contracts |
| 1982 | |
| 1983 | if (GetNumStaticFields() == 0 && GetNumInstanceFields() == 0) |
| 1984 | return; |
| 1985 | |
| 1986 | EX_TRY |
| 1987 | { |
| 1988 | StackSString ssBuff; |
| 1989 | |
| 1990 | DWORD i; |
| 1991 | DWORD cParentInstanceFields; |
| 1992 | |
| 1993 | CONSISTENCY_CHECK(CheckLoadLevel(CLASS_LOAD_APPROXPARENTS)); |
| 1994 | |
| 1995 | if (GetParentMethodTable() != NULL) |
| 1996 | cParentInstanceFields = GetParentMethodTable()->GetNumInstanceFields(); |
| 1997 | else |
| 1998 | { |
| 1999 | cParentInstanceFields = 0; |
| 2000 | } |
| 2001 | |
| 2002 | if (debug) |
| 2003 | { |
| 2004 | ssBuff.Printf(W("Field layout for '%S':\n\n" ), pszClassName); |
| 2005 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 2006 | } |
| 2007 | else |
| 2008 | { |
| 2009 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2010 | LOG((LF_ALWAYS, LL_ALWAYS, "Field layout for '%s':\n\n" , pszClassName)); |
| 2011 | } |
| 2012 | |
| 2013 | if (GetNumStaticFields() > 0) |
| 2014 | { |
| 2015 | if (debug) |
| 2016 | { |
| 2017 | WszOutputDebugString(W("Static fields (stored at vtable offsets)\n" )); |
| 2018 | WszOutputDebugString(W("----------------------------------------\n" )); |
| 2019 | } |
| 2020 | else |
| 2021 | { |
| 2022 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2023 | LOG((LF_ALWAYS, LL_ALWAYS, "Static fields (stored at vtable offsets)\n" )); |
| 2024 | LOG((LF_ALWAYS, LL_ALWAYS, "----------------------------------------\n" )); |
| 2025 | } |
| 2026 | |
| 2027 | for (i = 0; i < GetNumStaticFields(); i++) |
| 2028 | { |
| 2029 | FieldDesc *pFD = GetClass()->GetFieldDescList() + ((GetNumInstanceFields()-cParentInstanceFields) + i); |
| 2030 | if(debug) { |
| 2031 | ssBuff.Printf(W("offset %3d %S\n" ), pFD->GetOffset_NoLogging(), pFD->GetName()); |
| 2032 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 2033 | } |
| 2034 | else |
| 2035 | { |
| 2036 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2037 | LOG((LF_ALWAYS, LL_ALWAYS, "offset %3d %s\n" , pFD->GetOffset_NoLogging(), pFD->GetName())); |
| 2038 | } |
| 2039 | } |
| 2040 | } |
| 2041 | |
| 2042 | if (GetNumInstanceFields() > 0) |
| 2043 | { |
| 2044 | if (GetNumStaticFields()) { |
| 2045 | if(debug) { |
| 2046 | WszOutputDebugString(W("\n" )); |
| 2047 | } |
| 2048 | else |
| 2049 | { |
| 2050 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2051 | LOG((LF_ALWAYS, LL_ALWAYS, "\n" )); |
| 2052 | } |
| 2053 | } |
| 2054 | |
| 2055 | if (debug) |
| 2056 | { |
| 2057 | WszOutputDebugString(W("Instance fields\n" )); |
| 2058 | WszOutputDebugString(W("---------------\n" )); |
| 2059 | } |
| 2060 | else |
| 2061 | { |
| 2062 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2063 | LOG((LF_ALWAYS, LL_ALWAYS, "Instance fields\n" )); |
| 2064 | LOG((LF_ALWAYS, LL_ALWAYS, "---------------\n" )); |
| 2065 | } |
| 2066 | |
| 2067 | DebugRecursivelyDumpInstanceFields(pszClassName, debug); |
| 2068 | } |
| 2069 | |
| 2070 | if (debug) |
| 2071 | { |
| 2072 | WszOutputDebugString(W("\n" )); |
| 2073 | } |
| 2074 | else |
| 2075 | { |
| 2076 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2077 | LOG((LF_ALWAYS, LL_ALWAYS, "\n" )); |
| 2078 | } |
| 2079 | } |
| 2080 | EX_CATCH |
| 2081 | { |
| 2082 | if (debug) |
| 2083 | { |
| 2084 | WszOutputDebugString(W("<Exception Thrown>\n" )); |
| 2085 | } |
| 2086 | else |
| 2087 | { |
| 2088 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2089 | LOG((LF_ALWAYS, LL_ALWAYS, "<Exception Thrown>\n" )); |
| 2090 | } |
| 2091 | } |
| 2092 | EX_END_CATCH(SwallowAllExceptions); |
| 2093 | } // MethodTable::DebugDumpFieldLayout |
| 2094 | |
| 2095 | //******************************************************************************* |
| 2096 | void |
| 2097 | MethodTable::DebugDumpGCDesc( |
| 2098 | LPCUTF8 pszClassName, |
| 2099 | BOOL fDebug) |
| 2100 | { |
| 2101 | WRAPPER_NO_CONTRACT; // It's a dev helper, who cares about contracts |
| 2102 | |
| 2103 | EX_TRY |
| 2104 | { |
| 2105 | StackSString ssBuff; |
| 2106 | |
| 2107 | if (fDebug) |
| 2108 | { |
| 2109 | ssBuff.Printf(W("GC description for '%S':\n\n" ), pszClassName); |
| 2110 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 2111 | } |
| 2112 | else |
| 2113 | { |
| 2114 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2115 | LOG((LF_ALWAYS, LL_ALWAYS, "GC description for '%s':\n\n" , pszClassName)); |
| 2116 | } |
| 2117 | |
| 2118 | if (ContainsPointersOrCollectible()) |
| 2119 | { |
| 2120 | CGCDescSeries *pSeries; |
| 2121 | CGCDescSeries *pHighest; |
| 2122 | |
| 2123 | if (fDebug) |
| 2124 | { |
| 2125 | WszOutputDebugString(W("GCDesc:\n" )); |
| 2126 | } else |
| 2127 | { |
| 2128 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2129 | LOG((LF_ALWAYS, LL_ALWAYS, "GCDesc:\n" )); |
| 2130 | } |
| 2131 | |
| 2132 | pSeries = CGCDesc::GetCGCDescFromMT(this)->GetLowestSeries(); |
| 2133 | pHighest = CGCDesc::GetCGCDescFromMT(this)->GetHighestSeries(); |
| 2134 | |
| 2135 | while (pSeries <= pHighest) |
| 2136 | { |
| 2137 | if (fDebug) |
| 2138 | { |
| 2139 | ssBuff.Printf(W(" offset %5d (%d w/o Object), size %5d (%5d w/o BaseSize subtr)\n" ), |
| 2140 | pSeries->GetSeriesOffset(), |
| 2141 | pSeries->GetSeriesOffset() - OBJECT_SIZE, |
| 2142 | pSeries->GetSeriesSize(), |
| 2143 | pSeries->GetSeriesSize() + GetBaseSize() ); |
| 2144 | WszOutputDebugString(ssBuff.GetUnicode()); |
| 2145 | } |
| 2146 | else |
| 2147 | { |
| 2148 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2149 | LOG((LF_ALWAYS, LL_ALWAYS, " offset %5d (%d w/o Object), size %5d (%5d w/o BaseSize subtr)\n" , |
| 2150 | pSeries->GetSeriesOffset(), |
| 2151 | pSeries->GetSeriesOffset() - OBJECT_SIZE, |
| 2152 | pSeries->GetSeriesSize(), |
| 2153 | pSeries->GetSeriesSize() + GetBaseSize() |
| 2154 | )); |
| 2155 | } |
| 2156 | pSeries++; |
| 2157 | } |
| 2158 | |
| 2159 | if (fDebug) |
| 2160 | { |
| 2161 | WszOutputDebugString(W("\n" )); |
| 2162 | } else |
| 2163 | { |
| 2164 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2165 | LOG((LF_ALWAYS, LL_ALWAYS, "\n" )); |
| 2166 | } |
| 2167 | } |
| 2168 | } |
| 2169 | EX_CATCH |
| 2170 | { |
| 2171 | if (fDebug) |
| 2172 | { |
| 2173 | WszOutputDebugString(W("<Exception Thrown>\n" )); |
| 2174 | } |
| 2175 | else |
| 2176 | { |
| 2177 | //LF_ALWAYS allowed here because this is controlled by special env var ShouldDumpOnClassLoad |
| 2178 | LOG((LF_ALWAYS, LL_ALWAYS, "<Exception Thrown>\n" )); |
| 2179 | } |
| 2180 | } |
| 2181 | EX_END_CATCH(SwallowAllExceptions); |
| 2182 | } // MethodTable::DebugDumpGCDesc |
| 2183 | |
| 2184 | #endif // _DEBUG |
| 2185 | |
| 2186 | #ifdef FEATURE_COMINTEROP |
| 2187 | //******************************************************************************* |
| 2188 | CorClassIfaceAttr MethodTable::GetComClassInterfaceType() |
| 2189 | { |
| 2190 | CONTRACTL |
| 2191 | { |
| 2192 | THROWS; |
| 2193 | GC_TRIGGERS; |
| 2194 | MODE_ANY; |
| 2195 | PRECONDITION(!IsInterface()); |
| 2196 | } |
| 2197 | CONTRACTL_END |
| 2198 | |
| 2199 | // If the type is an open generic type, then it is considered ClassInterfaceType.None. |
| 2200 | if (ContainsGenericVariables()) |
| 2201 | return clsIfNone; |
| 2202 | |
| 2203 | // Classes that either have generic instantiations (G<int>) or derive from classes |
| 2204 | // with generic instantiations (D : B<int>) are always considered ClassInterfaceType.None. |
| 2205 | if (HasGenericClassInstantiationInHierarchy()) |
| 2206 | return clsIfNone; |
| 2207 | |
| 2208 | // If the class does not support IClassX because it derives from or implements WinRT types, |
    // then it is considered ClassInterfaceType.None unless explicitly overridden by the CA
| 2210 | if (!ClassSupportsIClassX(this)) |
| 2211 | return clsIfNone; |
| 2212 | |
| 2213 | return ReadClassInterfaceTypeCustomAttribute(TypeHandle(this)); |
| 2214 | } |
| 2215 | #endif // FEATURE_COMINTEROP |
| 2216 | |
| 2217 | //--------------------------------------------------------------------------------------- |
| 2218 | // |
| 2219 | Substitution |
| 2220 | MethodTable::GetSubstitutionForParent( |
| 2221 | const Substitution * pSubst) |
| 2222 | { |
| 2223 | CONTRACTL |
| 2224 | { |
| 2225 | THROWS; |
| 2226 | GC_NOTRIGGER; |
| 2227 | FORBID_FAULT; |
| 2228 | } |
| 2229 | CONTRACTL_END |
| 2230 | |
| 2231 | mdToken crExtends; |
| 2232 | DWORD dwAttrClass; |
| 2233 | |
| 2234 | if (IsArray()) |
| 2235 | { |
| 2236 | return Substitution(GetModule(), SigPointer(), pSubst); |
| 2237 | } |
| 2238 | |
| 2239 | IfFailThrow(GetMDImport()->GetTypeDefProps( |
| 2240 | GetCl(), |
| 2241 | &dwAttrClass, |
| 2242 | &crExtends)); |
| 2243 | |
| 2244 | return Substitution(crExtends, GetModule(), pSubst); |
| 2245 | } // MethodTable::GetSubstitutionForParent |
| 2246 | |
| 2247 | #endif //!DACCESS_COMPILE |
| 2248 | |
| 2249 | |
| 2250 | //******************************************************************************* |
| 2251 | #ifdef FEATURE_PREJIT |
| 2252 | DWORD EEClass::GetSize() |
| 2253 | { |
| 2254 | CONTRACTL |
| 2255 | { |
| 2256 | NOTHROW; |
| 2257 | GC_NOTRIGGER; |
| 2258 | FORBID_FAULT; |
| 2259 | } |
| 2260 | CONTRACTL_END; |
| 2261 | |
| 2262 | // Total instance size consists of the fixed ("normal") fields, cached at construction time and dependent |
| 2263 | // on whether we're a vanilla EEClass or DelegateEEClass etc., and a portion for the packed fields tacked on |
| 2264 | // the end. The size of the packed fields can be retrieved from the fields themselves or, if we were |
| 2265 | // unsuccessful in our attempts to compress the data, the full size of the EEClassPackedFields structure |
| 2266 | // (which is essentially just a DWORD array of all the field values). |
| 2267 | return m_cbFixedEEClassFields + |
| 2268 | (m_fFieldsArePacked ? GetPackedFields()->GetPackedSize() : sizeof(EEClassPackedFields)); |
| 2269 | } |
| 2270 | #endif // FEATURE_PREJIT |
| 2271 | |
| 2272 | #ifndef DACCESS_COMPILE |
| 2273 | #ifdef FEATURE_COMINTEROP |
| 2274 | |
| 2275 | // |
| 2276 | // Implementations of SparseVTableMap methods. |
| 2277 | // |
| 2278 | |
| 2279 | //******************************************************************************* |
| 2280 | SparseVTableMap::SparseVTableMap() |
| 2281 | { |
| 2282 | LIMITED_METHOD_CONTRACT; |
| 2283 | |
| 2284 | // Note that this will also zero out all gaps. It is important for NGen determinism. |
| 2285 | ZeroMemory(this, sizeof(*this)); |
| 2286 | } |
| 2287 | |
| 2288 | //******************************************************************************* |
| 2289 | SparseVTableMap::~SparseVTableMap() |
| 2290 | { |
| 2291 | LIMITED_METHOD_CONTRACT; |
| 2292 | |
| 2293 | if (m_MapList != NULL) |
| 2294 | { |
| 2295 | delete [] m_MapList; |
| 2296 | m_MapList = NULL; |
| 2297 | } |
| 2298 | } |
| 2299 | |
| 2300 | //******************************************************************************* |
| 2301 | // Allocate or expand the mapping list for a new entry. |
| 2302 | void SparseVTableMap::AllocOrExpand() |
| 2303 | { |
| 2304 | STANDARD_VM_CONTRACT; |
| 2305 | |
| 2306 | if (m_MapEntries == m_Allocated) { |
| 2307 | |
| 2308 | Entry *maplist = new Entry[m_Allocated + MapGrow]; |
| 2309 | |
| 2310 | if (m_MapList != NULL) |
| 2311 | memcpy(maplist, m_MapList, m_MapEntries * sizeof(Entry)); |
| 2312 | |
| 2313 | m_Allocated += MapGrow; |
| 2314 | delete [] m_MapList; |
| 2315 | m_MapList = maplist; |
| 2316 | } |
| 2317 | } |
| 2318 | |
| 2319 | //******************************************************************************* |
// While building the mapping list, record a gap in VTable slot numbers.
| 2321 | void SparseVTableMap::RecordGap(WORD StartMTSlot, WORD NumSkipSlots) |
| 2322 | { |
| 2323 | STANDARD_VM_CONTRACT; |
| 2324 | |
| 2325 | _ASSERTE((StartMTSlot == 0) || (StartMTSlot > m_MTSlot)); |
| 2326 | _ASSERTE(NumSkipSlots > 0); |
| 2327 | |
| 2328 | // We use the information about the current gap to complete a map entry for |
| 2329 | // the last non-gap. There is a special case where the vtable begins with a |
| 2330 | // gap, so we don't have a non-gap to record. |
| 2331 | if (StartMTSlot == 0) { |
| 2332 | _ASSERTE((m_MTSlot == 0) && (m_VTSlot == 0)); |
| 2333 | m_VTSlot = NumSkipSlots; |
| 2334 | return; |
| 2335 | } |
| 2336 | |
| 2337 | // We need an entry, allocate or expand the list as necessary. |
| 2338 | AllocOrExpand(); |
| 2339 | |
| 2340 | // Update the list with an entry describing the last non-gap in vtable |
| 2341 | // entries. |
| 2342 | m_MapList[m_MapEntries].m_Start = m_MTSlot; |
| 2343 | m_MapList[m_MapEntries].m_Span = StartMTSlot - m_MTSlot; |
| 2344 | m_MapList[m_MapEntries].m_MapTo = m_VTSlot; |
| 2345 | |
| 2346 | m_VTSlot += (StartMTSlot - m_MTSlot) + NumSkipSlots; |
| 2347 | m_MTSlot = StartMTSlot; |
| 2348 | |
| 2349 | m_MapEntries++; |
| 2350 | } |
| 2351 | |
| 2352 | //******************************************************************************* |
| 2353 | // Finish creation of mapping list. |
| 2354 | void SparseVTableMap::FinalizeMapping(WORD TotalMTSlots) |
| 2355 | { |
| 2356 | STANDARD_VM_CONTRACT; |
| 2357 | |
| 2358 | _ASSERTE(TotalMTSlots >= m_MTSlot); |
| 2359 | |
| 2360 | // If mapping ended with a gap, we have nothing else to record. |
| 2361 | if (TotalMTSlots == m_MTSlot) |
| 2362 | return; |
| 2363 | |
| 2364 | // Allocate or expand the list as necessary. |
| 2365 | AllocOrExpand(); |
| 2366 | |
| 2367 | // Update the list with an entry describing the last non-gap in vtable |
| 2368 | // entries. |
| 2369 | m_MapList[m_MapEntries].m_Start = m_MTSlot; |
| 2370 | m_MapList[m_MapEntries].m_Span = TotalMTSlots - m_MTSlot; |
| 2371 | m_MapList[m_MapEntries].m_MapTo = m_VTSlot; |
| 2372 | |
| 2373 | // Update VT slot cursor, because we use it to determine total number of |
| 2374 | // vtable slots for GetNumVirtuals |
| 2375 | m_VTSlot += TotalMTSlots - m_MTSlot; |
| 2376 | |
| 2377 | m_MapEntries++; |
| 2378 | } |
| 2379 | |
| 2380 | //******************************************************************************* |
| 2381 | // Lookup a VTable slot number from a method table slot number. |
| 2382 | WORD SparseVTableMap::LookupVTSlot(WORD MTSlot) |
| 2383 | { |
| 2384 | CONTRACTL |
| 2385 | { |
| 2386 | NOTHROW; |
| 2387 | GC_NOTRIGGER; |
| 2388 | FORBID_FAULT; |
| 2389 | SO_TOLERANT; |
| 2390 | } |
| 2391 | CONTRACTL_END |
| 2392 | |
| 2393 | // As an optimization, check the last entry which yielded a correct result. |
| 2394 | if ((MTSlot >= m_MapList[m_LastUsed].m_Start) && |
| 2395 | (MTSlot < (m_MapList[m_LastUsed].m_Start + m_MapList[m_LastUsed].m_Span))) |
| 2396 | return (MTSlot - m_MapList[m_LastUsed].m_Start) + m_MapList[m_LastUsed].m_MapTo; |
| 2397 | |
    // Check all MT slot spans to see which one our input slot lies in.
| 2399 | for (WORD i = 0; i < m_MapEntries; i++) { |
| 2400 | if ((MTSlot >= m_MapList[i].m_Start) && |
| 2401 | (MTSlot < (m_MapList[i].m_Start + m_MapList[i].m_Span))) { |
| 2402 | m_LastUsed = i; |
| 2403 | return (MTSlot - m_MapList[i].m_Start) + m_MapList[i].m_MapTo; |
| 2404 | } |
| 2405 | } |
| 2406 | |
| 2407 | _ASSERTE(!"Invalid MethodTable slot" ); |
| 2408 | return ~0; |
| 2409 | } |
| 2410 | |
| 2411 | //******************************************************************************* |
| 2412 | // Retrieve the number of slots in the vtable (both empty and full). |
| 2413 | WORD SparseVTableMap::GetNumVTableSlots() |
| 2414 | { |
| 2415 | LIMITED_METHOD_CONTRACT; |
| 2416 | |
| 2417 | return m_VTSlot; |
| 2418 | } |
| 2419 | |
| 2420 | #ifdef FEATURE_NATIVE_IMAGE_GENERATION |
| 2421 | //******************************************************************************* |
| 2422 | void SparseVTableMap::Save(DataImage *image) |
| 2423 | { |
| 2424 | STANDARD_VM_CONTRACT; |
| 2425 | |
| 2426 | image->StoreStructure(this, sizeof(SparseVTableMap), |
| 2427 | DataImage::ITEM_SPARSE_VTABLE_MAP_TABLE); |
| 2428 | |
| 2429 | // Trim unused portion of the table |
| 2430 | m_Allocated = m_MapEntries; |
| 2431 | |
| 2432 | image->StoreInternedStructure(m_MapList, m_Allocated * sizeof(Entry), |
| 2433 | DataImage::ITEM_SPARSE_VTABLE_MAP_ENTRIES); |
| 2434 | } |
| 2435 | |
| 2436 | //******************************************************************************* |
| 2437 | void SparseVTableMap::Fixup(DataImage *image) |
| 2438 | { |
| 2439 | STANDARD_VM_CONTRACT; |
| 2440 | |
| 2441 | image->FixupPointerField(this, offsetof(SparseVTableMap, m_MapList)); |
| 2442 | } |
| 2443 | #endif //FEATURE_NATIVE_IMAGE_GENERATION |
| 2444 | #endif //FEATURE_COMINTEROP |
| 2445 | |
| 2446 | #ifdef FEATURE_NATIVE_IMAGE_GENERATION |
| 2447 | |
| 2448 | //******************************************************************************* |
| 2449 | void EEClass::Save(DataImage *image, MethodTable *pMT) |
| 2450 | { |
| 2451 | CONTRACTL |
| 2452 | { |
| 2453 | STANDARD_VM_CHECK; |
| 2454 | PRECONDITION(this == pMT->GetClass()); |
| 2455 | PRECONDITION(pMT->IsCanonicalMethodTable()); |
| 2456 | PRECONDITION(pMT->IsFullyLoaded()); |
| 2457 | PRECONDITION(!image->IsStored(this)); |
| 2458 | PRECONDITION(image->GetModule()->GetAssembly() == |
| 2459 | GetAppDomain()->ToCompilationDomain()->GetTargetAssembly()); |
| 2460 | } |
| 2461 | CONTRACTL_END; |
| 2462 | |
| 2463 | LOG((LF_ZAP, LL_INFO10000, "EEClass::Save %s (%p)\n" , m_szDebugClassName, this)); |
| 2464 | |
| 2465 | m_fFieldsArePacked = GetPackedFields()->PackFields(); |
| 2466 | |
| 2467 | DWORD cbSize = GetSize(); |
| 2468 | |
| 2469 | // *************************************************************** |
| 2470 | // Only put new actions in this function if they really relate to EEClass |
| 2471 | // rather than MethodTable. For example, if you need to allocate |
| 2472 | // a per-type entry in some table in the NGEN image, then you will probably |
| 2473 | // need to allocate one such entry per MethodTable, e.g. per generic |
| 2474 | // instantiation. You probably don't want to allocate one that is common |
| 2475 | // to a group of shared instantiations. |
| 2476 | // *************************************************************** |
| 2477 | |
| 2478 | DataImage::ItemKind item = |
| 2479 | (!pMT->IsGenericTypeDefinition() && pMT->ContainsGenericVariables()) |
| 2480 | ? DataImage::ITEM_EECLASS_COLD |
| 2481 | // Until we get all the access paths for generics tidied up, many paths touch the EEClass, e.g. GetInstantiation() |
| 2482 | : pMT->HasInstantiation() |
| 2483 | ? DataImage::ITEM_EECLASS_WARM |
| 2484 | : DataImage::ITEM_EECLASS; |
| 2485 | |
| 2486 | // Save optional fields if we have any. |
| 2487 | if (HasOptionalFields()) |
| 2488 | image->StoreStructure(GetOptionalFields(), |
| 2489 | sizeof(EEClassOptionalFields), |
| 2490 | item); |
| 2491 | |
| 2492 | #ifdef _DEBUG |
| 2493 | if (!image->IsStored(m_szDebugClassName)) |
| 2494 | image->StoreStructure(m_szDebugClassName, (ULONG)(strlen(m_szDebugClassName)+1), |
| 2495 | DataImage::ITEM_DEBUG, |
| 2496 | 1); |
| 2497 | #endif // _DEBUG |
| 2498 | |
| 2499 | #ifdef FEATURE_COMINTEROP |
| 2500 | if (GetSparseCOMInteropVTableMap() != NULL) |
| 2501 | GetSparseCOMInteropVTableMap()->Save(image); |
| 2502 | #endif // FEATURE_COMINTEROP |
| 2503 | |
| 2504 | // |
| 2505 | // Save MethodDescs |
| 2506 | // |
| 2507 | |
| 2508 | MethodDescChunk *chunk = GetChunks(); |
| 2509 | if (chunk != NULL) |
| 2510 | { |
| 2511 | MethodDesc::SaveChunk methodDescSaveChunk(image); |
| 2512 | |
| 2513 | MethodTable::IntroducedMethodIterator it(pMT, TRUE); |
| 2514 | for (; it.IsValid(); it.Next()) |
| 2515 | { |
| 2516 | MethodDesc * pMD = it.GetMethodDesc(); |
| 2517 | |
| 2518 | // Do not save IL stubs that we have failed to generate code for |
| 2519 | if (pMD->IsILStub() && image->GetCodeAddress(pMD) == NULL) |
| 2520 | continue; |
| 2521 | |
| 2522 | methodDescSaveChunk.Append(pMD); |
| 2523 | } |
| 2524 | |
| 2525 | ZapStoredStructure * pChunksNode = methodDescSaveChunk.Save(); |
| 2526 | if (pChunksNode != NULL) |
| 2527 | image->BindPointer(chunk, pChunksNode, 0); |
| 2528 | |
| 2529 | } |
| 2530 | |
| 2531 | // |
| 2532 | // Save FieldDescs |
| 2533 | // |
| 2534 | |
| 2535 | SIZE_T fieldCount = FieldDescListSize(pMT); |
| 2536 | |
| 2537 | if (fieldCount != 0) |
| 2538 | { |
| 2539 | FieldDesc *pFDStart = GetFieldDescList(); |
| 2540 | FieldDesc *pFDEnd = pFDStart + fieldCount; |
| 2541 | |
| 2542 | FieldDesc *pFD = pFDStart; |
| 2543 | while (pFD < pFDEnd) |
| 2544 | { |
| 2545 | pFD->PrecomputeNameHash(); |
| 2546 | pFD++; |
| 2547 | } |
| 2548 | |
| 2549 | ZapStoredStructure * pFDNode = image->StoreStructure(pFDStart, (ULONG)(fieldCount * sizeof(FieldDesc)), |
| 2550 | DataImage::ITEM_FIELD_DESC_LIST); |
| 2551 | |
| 2552 | pFD = pFDStart; |
| 2553 | while (pFD < pFDEnd) |
| 2554 | { |
| 2555 | pFD->SaveContents(image); |
| 2556 | if (pFD != pFDStart) |
| 2557 | image->BindPointer(pFD, pFDNode, (BYTE *)pFD - (BYTE *)pFDStart); |
| 2558 | pFD++; |
| 2559 | } |
| 2560 | } |
| 2561 | |
| 2562 | // |
| 2563 | // Save MethodDescs |
| 2564 | // |
| 2565 | |
| 2566 | if (HasLayout()) |
| 2567 | { |
| 2568 | EEClassLayoutInfo *pInfo = &((LayoutEEClass*)this)->m_LayoutInfo; |
| 2569 | |
| 2570 | if (pInfo->m_numCTMFields > 0) |
| 2571 | { |
| 2572 | ZapStoredStructure * pNode = image->StoreStructure(pInfo->GetFieldMarshalers(), |
| 2573 | pInfo->m_numCTMFields * MAXFIELDMARSHALERSIZE, |
| 2574 | DataImage::ITEM_FIELD_MARSHALERS); |
| 2575 | |
| 2576 | for (UINT iField = 0; iField < pInfo->m_numCTMFields; iField++) |
| 2577 | { |
| 2578 | FieldMarshaler *pFM = (FieldMarshaler*)((BYTE *)pInfo->GetFieldMarshalers() + iField * MAXFIELDMARSHALERSIZE); |
| 2579 | pFM->Save(image); |
| 2580 | |
| 2581 | if (iField > 0) |
| 2582 | image->BindPointer(pFM, pNode, iField * MAXFIELDMARSHALERSIZE); |
| 2583 | } |
| 2584 | } |
| 2585 | } |
| 2586 | |
| 2587 | // Save dictionary layout information |
| 2588 | DictionaryLayout *pDictLayout = GetDictionaryLayout(); |
| 2589 | if (pMT->IsSharedByGenericInstantiations() && pDictLayout != NULL) |
| 2590 | { |
| 2591 | pDictLayout->Save(image); |
| 2592 | LOG((LF_ZAP, LL_INFO10000, "ZAP: dictionary for %s has %d slots used out of possible %d\n" , m_szDebugClassName, |
| 2593 | pDictLayout->GetNumUsedSlots(), pDictLayout->GetMaxSlots())); |
| 2594 | } |
| 2595 | |
| 2596 | if (GetVarianceInfo() != NULL) |
| 2597 | image->StoreInternedStructure(GetVarianceInfo(), |
| 2598 | pMT->GetNumGenericArgs(), |
| 2599 | DataImage::ITEM_CLASS_VARIANCE_INFO); |
| 2600 | |
| 2601 | image->StoreStructure(this, cbSize, item); |
| 2602 | |
| 2603 | if (pMT->IsInterface()) |
| 2604 | { |
| 2605 | // Make sure our guid is computed |
| 2606 | |
| 2607 | #ifdef FEATURE_COMINTEROP |
| 2608 | // Generic WinRT types can have their GUID computed only if the instantiation is WinRT-legal |
| 2609 | if (!pMT->IsProjectedFromWinRT() || |
| 2610 | !pMT->SupportsGenericInterop(TypeHandle::Interop_NativeToManaged) || |
| 2611 | pMT->IsLegalNonArrayWinRTType()) |
| 2612 | #endif // FEATURE_COMINTEROP |
| 2613 | { |
| 2614 | GUID dummy; |
| 2615 | if (SUCCEEDED(pMT->GetGuidNoThrow(&dummy, TRUE, FALSE))) |
| 2616 | { |
| 2617 | GuidInfo* pGuidInfo = pMT->GetGuidInfo(); |
| 2618 | _ASSERTE(pGuidInfo != NULL); |
| 2619 | |
| 2620 | image->StoreStructure(pGuidInfo, sizeof(GuidInfo), |
| 2621 | DataImage::ITEM_GUID_INFO); |
| 2622 | |
| 2623 | #ifdef FEATURE_COMINTEROP |
| 2624 | if (pMT->IsLegalNonArrayWinRTType()) |
| 2625 | { |
| 2626 | Module *pModule = pMT->GetModule(); |
| 2627 | if (pModule->CanCacheWinRTTypeByGuid(pMT)) |
| 2628 | { |
| 2629 | pModule->CacheWinRTTypeByGuid(pMT, pGuidInfo); |
| 2630 | } |
| 2631 | } |
| 2632 | #endif // FEATURE_COMINTEROP |
| 2633 | } |
| 2634 | else |
| 2635 | { |
| 2636 | // make sure we don't store a GUID_NULL guid in the NGEN image |
| 2637 | // instead we'll compute the GUID at runtime, and throw, if appropriate |
| 2638 | m_pGuidInfo.SetValueMaybeNull(NULL); |
| 2639 | } |
| 2640 | } |
| 2641 | } |
| 2642 | |
| 2643 | #ifdef FEATURE_COMINTEROP |
| 2644 | if (IsDelegate()) |
| 2645 | { |
| 2646 | DelegateEEClass *pDelegateClass = (DelegateEEClass *)this; |
| 2647 | ComPlusCallInfo *pComInfo = pDelegateClass->m_pComPlusCallInfo; |
| 2648 | |
| 2649 | if (pComInfo != NULL && pComInfo->ShouldSave(image)) |
| 2650 | { |
| 2651 | image->StoreStructure(pDelegateClass->m_pComPlusCallInfo, |
| 2652 | sizeof(ComPlusCallInfo), |
| 2653 | item); |
| 2654 | } |
| 2655 | } |
| 2656 | #endif // FEATURE_COMINTEROP |
| 2657 | |
| 2658 | LOG((LF_ZAP, LL_INFO10000, "EEClass::Save %s (%p) complete.\n" , m_szDebugClassName, this)); |
| 2659 | } |
| 2660 | |
| 2661 | //******************************************************************************* |
| 2662 | DWORD EEClass::FieldDescListSize(MethodTable * pMT) |
| 2663 | { |
| 2664 | LIMITED_METHOD_CONTRACT; |
| 2665 | |
| 2666 | EEClass * pClass = pMT->GetClass(); |
| 2667 | DWORD fieldCount = pClass->GetNumInstanceFields() + pClass->GetNumStaticFields(); |
| 2668 | |
| 2669 | MethodTable * pParentMT = pMT->GetParentMethodTable(); |
| 2670 | if (pParentMT != NULL) |
| 2671 | fieldCount -= pParentMT->GetNumInstanceFields(); |
| 2672 | return fieldCount; |
| 2673 | } |
| 2674 | |
| 2675 | //******************************************************************************* |
| 2676 | void EEClass::Fixup(DataImage *image, MethodTable *pMT) |
| 2677 | { |
| 2678 | CONTRACTL |
| 2679 | { |
| 2680 | STANDARD_VM_CHECK; |
| 2681 | PRECONDITION(this == pMT->GetClass()); |
| 2682 | PRECONDITION(pMT->IsCanonicalMethodTable()); |
| 2683 | PRECONDITION(pMT->IsFullyLoaded()); |
| 2684 | PRECONDITION(image->IsStored(this)); |
| 2685 | } |
| 2686 | CONTRACTL_END; |
| 2687 | |
| 2688 | LOG((LF_ZAP, LL_INFO10000, "EEClass::Fixup %s (%p)\n" , GetDebugClassName(), this)); |
| 2689 | |
| 2690 | // Fixup pointer to optional fields if this class has any. This pointer is a relative pointer (to avoid |
| 2691 | // the need for base relocation fixups) and thus needs to use the IMAGE_REL_BASED_RELPTR fixup type. |
| 2692 | if (HasOptionalFields()) |
| 2693 | image->FixupRelativePointerField(this, offsetof(EEClass, m_rpOptionalFields)); |
| 2694 | |
| 2695 | #ifdef _DEBUG |
| 2696 | image->FixupPointerField(this, offsetof(EEClass, m_szDebugClassName)); |
| 2697 | #endif |
| 2698 | |
| 2699 | #ifdef FEATURE_COMINTEROP |
| 2700 | if (GetSparseCOMInteropVTableMap() != NULL) |
| 2701 | { |
| 2702 | image->FixupPointerField(GetOptionalFields(), offsetof(EEClassOptionalFields, m_pSparseVTableMap)); |
| 2703 | GetSparseCOMInteropVTableMap()->Fixup(image); |
| 2704 | } |
| 2705 | #endif // FEATURE_COMINTEROP |
| 2706 | |
| 2707 | DictionaryLayout *pDictLayout = GetDictionaryLayout(); |
| 2708 | if (pDictLayout != NULL) |
| 2709 | { |
| 2710 | pDictLayout->Fixup(image, FALSE); |
| 2711 | image->FixupPointerField(GetOptionalFields(), offsetof(EEClassOptionalFields, m_pDictLayout)); |
| 2712 | } |
| 2713 | |
| 2714 | if (HasOptionalFields()) |
| 2715 | image->FixupRelativePointerField(GetOptionalFields(), offsetof(EEClassOptionalFields, m_pVarianceInfo)); |
| 2716 | |
| 2717 | // |
| 2718 | // We pass in the method table, because some classes (e.g. remoting proxy) |
| 2719 | // have fake method tables set up in them & we want to restore the regular |
| 2720 | // one. |
| 2721 | // |
| 2722 | image->FixupField(this, offsetof(EEClass, m_pMethodTable), pMT, 0, IMAGE_REL_BASED_RelativePointer); |
| 2723 | |
| 2724 | // |
| 2725 | // Fixup MethodDescChunk and MethodDescs |
| 2726 | // |
| 2727 | MethodDescChunk* pChunks = GetChunks(); |
| 2728 | |
| 2729 | if (pChunks!= NULL && image->IsStored(pChunks)) |
| 2730 | { |
| 2731 | image->FixupRelativePointerField(this, offsetof(EEClass, m_pChunks)); |
| 2732 | |
| 2733 | MethodTable::IntroducedMethodIterator it(pMT, TRUE); |
| 2734 | for (; it.IsValid(); it.Next()) |
| 2735 | { |
| 2736 | MethodDesc * pMD = it.GetMethodDesc(); |
| 2737 | |
| 2738 | // Skip IL stubs that were not saved into the image |
| 2739 | if (pMD->IsILStub() && !image->IsStored(pMD)) |
| 2740 | continue; |
| 2741 | |
| 2742 | it.GetMethodDesc()->Fixup(image); |
| 2743 | } |
| 2744 | |
| 2745 | } |
| 2746 | else |
| 2747 | { |
| 2748 | image->ZeroPointerField(this, offsetof(EEClass, m_pChunks)); |
| 2749 | } |
| 2750 | |
| 2751 | // |
| 2752 | // Fixup FieldDescs |
| 2753 | // |
| 2754 | |
| 2755 | SIZE_T fieldCount = FieldDescListSize(pMT); |
| 2756 | |
| 2757 | if (fieldCount != 0) |
| 2758 | { |
| 2759 | image->FixupRelativePointerField(this, offsetof(EEClass, m_pFieldDescList)); |
| 2760 | |
| 2761 | FieldDesc *pField = GetFieldDescList(); |
| 2762 | FieldDesc *pFieldEnd = pField + fieldCount; |
| 2763 | while (pField < pFieldEnd) |
| 2764 | { |
| 2765 | pField->Fixup(image); |
| 2766 | pField++; |
| 2767 | } |
| 2768 | } |
| 2769 | |
| 2770 | #ifdef FEATURE_COMINTEROP |
    // These fields will be lazily initialized at runtime if we zero them here
| 2772 | if (HasOptionalFields()) |
| 2773 | image->ZeroPointerField(GetOptionalFields(), offsetof(EEClassOptionalFields, m_pCoClassForIntf)); |
| 2774 | #ifdef FEATURE_COMINTEROP_UNMANAGED_ACTIVATION |
| 2775 | if (HasOptionalFields()) |
| 2776 | image->ZeroPointerField(GetOptionalFields(), offsetof(EEClassOptionalFields, m_pClassFactory)); |
| 2777 | #endif |
| 2778 | image->ZeroPointerField(this, offsetof(EEClass, m_pccwTemplate)); |
| 2779 | #endif // FEATURE_COMINTEROP |
| 2780 | |
| 2781 | if (HasLayout()) |
| 2782 | { |
| 2783 | image->FixupRelativePointerField(this, offsetof(LayoutEEClass, m_LayoutInfo.m_pFieldMarshalers)); |
| 2784 | |
| 2785 | EEClassLayoutInfo *pInfo = &((LayoutEEClass*)this)->m_LayoutInfo; |
| 2786 | |
| 2787 | FieldMarshaler *pFM = pInfo->GetFieldMarshalers(); |
| 2788 | FieldMarshaler *pFMEnd = (FieldMarshaler*) ((BYTE *)pFM + pInfo->m_numCTMFields*MAXFIELDMARSHALERSIZE); |
| 2789 | while (pFM < pFMEnd) |
| 2790 | { |
| 2791 | pFM->Fixup(image); |
| 2792 | ((BYTE*&)pFM) += MAXFIELDMARSHALERSIZE; |
| 2793 | } |
| 2794 | } |
| 2795 | else if (IsDelegate()) |
| 2796 | { |
| 2797 | image->FixupRelativePointerField(this, offsetof(DelegateEEClass, m_pInvokeMethod)); |
| 2798 | image->FixupRelativePointerField(this, offsetof(DelegateEEClass, m_pBeginInvokeMethod)); |
| 2799 | image->FixupRelativePointerField(this, offsetof(DelegateEEClass, m_pEndInvokeMethod)); |
| 2800 | |
| 2801 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pUMThunkMarshInfo)); |
| 2802 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pStaticCallStub)); |
| 2803 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pMultiCastInvokeStub)); |
| 2804 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pSecureDelegateInvokeStub)); |
| 2805 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pMarshalStub)); |
| 2806 | |
| 2807 | #ifdef FEATURE_COMINTEROP |
| 2808 | DelegateEEClass *pDelegateClass = (DelegateEEClass *)this; |
| 2809 | ComPlusCallInfo *pComInfo = pDelegateClass->m_pComPlusCallInfo; |
| 2810 | |
| 2811 | if (image->IsStored(pComInfo)) |
| 2812 | { |
| 2813 | image->FixupPointerField(this, offsetof(DelegateEEClass, m_pComPlusCallInfo)); |
| 2814 | pComInfo->Fixup(image); |
| 2815 | } |
| 2816 | else |
| 2817 | { |
| 2818 | image->ZeroPointerField(this, offsetof(DelegateEEClass, m_pComPlusCallInfo)); |
| 2819 | } |
| 2820 | #endif // FEATURE_COMINTEROP |
| 2821 | |
| 2822 | image->FixupPointerField(this, offsetof(DelegateEEClass, m_pForwardStubMD)); |
| 2823 | image->FixupPointerField(this, offsetof(DelegateEEClass, m_pReverseStubMD)); |
| 2824 | } |
| 2825 | |
| 2826 | // |
| 2827 | // This field must be initialized at |
| 2828 | // load time |
| 2829 | // |
| 2830 | |
| 2831 | if (IsInterface() && GetGuidInfo() != NULL) |
| 2832 | image->FixupRelativePointerField(this, offsetof(EEClass, m_pGuidInfo)); |
| 2833 | else |
| 2834 | image->ZeroPointerField(this, offsetof(EEClass, m_pGuidInfo)); |
| 2835 | |
| 2836 | LOG((LF_ZAP, LL_INFO10000, "EEClass::Fixup %s (%p) complete.\n" , GetDebugClassName(), this)); |
| 2837 | } |
| 2838 | #endif // FEATURE_NATIVE_IMAGE_GENERATION |
| 2839 | |
| 2840 | |
| 2841 | //******************************************************************************* |
| 2842 | void EEClass::AddChunk (MethodDescChunk* pNewChunk) |
| 2843 | { |
| 2844 | STATIC_CONTRACT_NOTHROW; |
| 2845 | STATIC_CONTRACT_GC_NOTRIGGER; |
| 2846 | STATIC_CONTRACT_FORBID_FAULT; |
| 2847 | |
| 2848 | _ASSERTE(pNewChunk->GetNextChunk() == NULL); |
| 2849 | pNewChunk->SetNextChunk(GetChunks()); |
| 2850 | SetChunks(pNewChunk); |
| 2851 | } |
| 2852 | |
| 2853 | //******************************************************************************* |
| 2854 | void EEClass::AddChunkIfItHasNotBeenAdded (MethodDescChunk* pNewChunk) |
| 2855 | { |
| 2856 | STATIC_CONTRACT_NOTHROW; |
| 2857 | STATIC_CONTRACT_GC_NOTRIGGER; |
| 2858 | STATIC_CONTRACT_FORBID_FAULT; |
| 2859 | |
| 2860 | // return if the chunk has been added |
| 2861 | if (pNewChunk->GetNextChunk() != NULL) |
| 2862 | return; |
| 2863 | |
| 2864 | // even if pNewChunk->GetNextChunk() is NULL, this may still be the first chunk we added |
| 2865 | // (last in the list) so find the end of the list and verify that |
| 2866 | MethodDescChunk *chunk = GetChunks(); |
| 2867 | if (chunk != NULL) |
| 2868 | { |
| 2869 | while (chunk->GetNextChunk() != NULL) |
| 2870 | chunk = chunk->GetNextChunk(); |
| 2871 | |
| 2872 | if (chunk == pNewChunk) |
| 2873 | return; |
| 2874 | } |
| 2875 | |
| 2876 | pNewChunk->SetNextChunk(GetChunks()); |
| 2877 | SetChunks(pNewChunk); |
| 2878 | } |
| 2879 | |
| 2880 | #endif // !DACCESS_COMPILE |
| 2881 | |
| 2882 | //******************************************************************************* |
| 2883 | // ApproxFieldDescIterator is used to iterate over fields in a given class. |
// It does not include EnC fields or inherited fields.
| 2885 | // <NICE> ApproxFieldDescIterator is only used to iterate over static fields in one place, |
| 2886 | // and this will probably change anyway. After |
| 2887 | // we clean this up we should make ApproxFieldDescIterator work |
| 2888 | // over instance fields only </NICE> |
| 2889 | ApproxFieldDescIterator::ApproxFieldDescIterator() |
| 2890 | { |
| 2891 | CONTRACTL |
| 2892 | { |
| 2893 | NOTHROW; |
| 2894 | GC_NOTRIGGER; |
| 2895 | FORBID_FAULT; |
| 2896 | } |
| 2897 | CONTRACTL_END |
| 2898 | |
| 2899 | m_iteratorType = 0; |
| 2900 | m_pFieldDescList = NULL; |
| 2901 | m_currField = -1; |
| 2902 | m_totalFields = 0; |
| 2903 | } |
| 2904 | |
| 2905 | //******************************************************************************* |
| 2906 | void ApproxFieldDescIterator::Init(MethodTable *pMT, int iteratorType) |
| 2907 | { |
| 2908 | CONTRACTL |
| 2909 | { |
| 2910 | NOTHROW; |
| 2911 | GC_NOTRIGGER; |
| 2912 | FORBID_FAULT; |
| 2913 | SUPPORTS_DAC; |
| 2914 | } |
| 2915 | CONTRACTL_END |
| 2916 | |
| 2917 | m_iteratorType = iteratorType; |
| 2918 | m_pFieldDescList = pMT->GetApproxFieldDescListRaw(); |
| 2919 | m_currField = -1; |
| 2920 | |
| 2921 | // This gets non-EnC fields. |
| 2922 | m_totalFields = pMT->GetNumIntroducedInstanceFields(); |
| 2923 | |
| 2924 | if (!(iteratorType & (int)INSTANCE_FIELDS)) |
| 2925 | { |
| 2926 | // if not handling instances then skip them by setting curr to last one |
| 2927 | m_currField = m_totalFields - 1; |
| 2928 | } |
| 2929 | |
| 2930 | if (iteratorType & (int)STATIC_FIELDS) |
| 2931 | { |
| 2932 | m_totalFields += pMT->GetNumStaticFields(); |
| 2933 | } |
| 2934 | } |
| 2935 | |
| 2936 | //******************************************************************************* |
| 2937 | PTR_FieldDesc ApproxFieldDescIterator::Next() |
| 2938 | { |
| 2939 | CONTRACTL |
| 2940 | { |
| 2941 | NOTHROW; |
| 2942 | GC_NOTRIGGER; |
| 2943 | FORBID_FAULT; |
| 2944 | SUPPORTS_DAC; |
| 2945 | } |
| 2946 | CONTRACTL_END |
| 2947 | |
| 2948 | // This will iterate through all non-inherited and non-EnC fields. |
| 2949 | ++m_currField; |
| 2950 | if (m_currField >= m_totalFields) |
| 2951 | { |
| 2952 | return NULL; |
| 2953 | } |
| 2954 | |
| 2955 | return m_pFieldDescList + m_currField; |
| 2956 | } |
| 2957 | |
| 2958 | //******************************************************************************* |
| 2959 | bool |
| 2960 | DeepFieldDescIterator::NextClass() |
| 2961 | { |
| 2962 | WRAPPER_NO_CONTRACT; |
| 2963 | |
| 2964 | if (m_curClass <= 0) |
| 2965 | { |
| 2966 | return false; |
| 2967 | } |
| 2968 | |
| 2969 | if (m_numClasses <= 0) { |
| 2970 | _ASSERTE(m_numClasses > 0); |
| 2971 | return false; |
| 2972 | } |
| 2973 | |
| 2974 | MethodTable * pMT; |
| 2975 | |
| 2976 | // |
| 2977 | // If we're in the cache just grab the cache entry. |
| 2978 | // |
| 2979 | // If we're deeper in the hierarchy than the |
| 2980 | // portion we cached we need to take the |
| 2981 | // deepest cache entry and search down manually. |
| 2982 | // |
| 2983 | |
| 2984 | if (--m_curClass < m_numClasses) |
| 2985 | { |
| 2986 | pMT = m_classes[m_curClass]; |
| 2987 | } |
| 2988 | else |
| 2989 | { |
| 2990 | pMT = m_classes[m_numClasses - 1]; |
| 2991 | int depthDiff = m_curClass - m_numClasses + 1; |
| 2992 | while (depthDiff--) |
| 2993 | { |
| 2994 | pMT = pMT->GetParentMethodTable(); |
| 2995 | } |
| 2996 | } |
| 2997 | |
| 2998 | m_fieldIter.Init(pMT, m_fieldIter.GetIteratorType()); |
| 2999 | return true; |
| 3000 | } |
| 3001 | |
| 3002 | //******************************************************************************* |
| 3003 | void |
| 3004 | DeepFieldDescIterator::Init(MethodTable* pMT, int iteratorType, |
| 3005 | bool includeParents) |
| 3006 | { |
| 3007 | WRAPPER_NO_CONTRACT; |
| 3008 | |
| 3009 | MethodTable * lastClass = NULL; |
| 3010 | int numClasses; |
| 3011 | |
| 3012 | // |
| 3013 | // Walk up the parent chain, collecting |
| 3014 | // parent pointers and counting fields. |
| 3015 | // |
| 3016 | |
| 3017 | numClasses = 0; |
| 3018 | m_numClasses = 0; |
| 3019 | m_deepTotalFields = 0; |
| 3020 | m_lastNextFromParentClass = false; |
| 3021 | |
| 3022 | while (pMT) |
| 3023 | { |
| 3024 | if (m_numClasses < (int)NumItems(m_classes)) |
| 3025 | { |
| 3026 | m_classes[m_numClasses++] = pMT; |
| 3027 | } |
| 3028 | |
| 3029 | if ((iteratorType & ApproxFieldDescIterator::INSTANCE_FIELDS) != 0) |
| 3030 | { |
| 3031 | m_deepTotalFields += pMT->GetNumIntroducedInstanceFields(); |
| 3032 | } |
| 3033 | if ((iteratorType & ApproxFieldDescIterator::STATIC_FIELDS) != 0) |
| 3034 | { |
| 3035 | m_deepTotalFields += pMT->GetNumStaticFields(); |
| 3036 | } |
| 3037 | |
| 3038 | numClasses++; |
| 3039 | lastClass = pMT; |
| 3040 | |
| 3041 | if (includeParents) |
| 3042 | { |
| 3043 | pMT = pMT->GetParentMethodTable(); |
| 3044 | } |
| 3045 | else |
| 3046 | { |
| 3047 | break; |
| 3048 | } |
| 3049 | } |
| 3050 | |
| 3051 | // Start the per-class field iterator on the base-most parent. |
| 3052 | if (numClasses) |
| 3053 | { |
| 3054 | m_curClass = numClasses - 1; |
| 3055 | m_fieldIter.Init(lastClass, iteratorType); |
| 3056 | } |
| 3057 | else |
| 3058 | { |
| 3059 | m_curClass = 0; |
| 3060 | } |
| 3061 | } |
| 3062 | |
| 3063 | //******************************************************************************* |
| 3064 | FieldDesc* |
| 3065 | DeepFieldDescIterator::Next() |
| 3066 | { |
| 3067 | WRAPPER_NO_CONTRACT; |
| 3068 | |
| 3069 | FieldDesc* field; |
| 3070 | |
| 3071 | do |
| 3072 | { |
| 3073 | m_lastNextFromParentClass = m_curClass > 0; |
| 3074 | |
| 3075 | field = m_fieldIter.Next(); |
| 3076 | |
| 3077 | if (!field && !NextClass()) |
| 3078 | { |
| 3079 | return NULL; |
| 3080 | } |
| 3081 | } |
| 3082 | while (!field); |
| 3083 | |
| 3084 | return field; |
| 3085 | } |
| 3086 | |
| 3087 | //******************************************************************************* |
| 3088 | bool |
| 3089 | DeepFieldDescIterator::Skip(int numSkip) |
| 3090 | { |
| 3091 | WRAPPER_NO_CONTRACT; |
| 3092 | |
| 3093 | while (numSkip >= m_fieldIter.CountRemaining()) |
| 3094 | { |
| 3095 | numSkip -= m_fieldIter.CountRemaining(); |
| 3096 | |
| 3097 | if (!NextClass()) |
| 3098 | { |
| 3099 | return false; |
| 3100 | } |
| 3101 | } |
| 3102 | |
| 3103 | while (numSkip--) |
| 3104 | { |
| 3105 | m_fieldIter.Next(); |
| 3106 | } |
| 3107 | |
| 3108 | return true; |
| 3109 | } |
| 3110 | |
| 3111 | #ifdef DACCESS_COMPILE |
| 3112 | |
| 3113 | //******************************************************************************* |
| 3114 | void |
| 3115 | EEClass::EnumMemoryRegions(CLRDataEnumMemoryFlags flags, MethodTable * pMT) |
| 3116 | { |
| 3117 | SUPPORTS_DAC; |
| 3118 | DAC_ENUM_DTHIS(); |
| 3119 | EMEM_OUT(("MEM: %p EEClass\n" , dac_cast<TADDR>(this))); |
| 3120 | |
| 3121 | // The DAC_ENUM_DTHIS above won't have reported the packed fields tacked on the end of this instance (they |
| 3122 | // aren't part of the static class definition because the fields are variably sized and thus have to come |
| 3123 | // right at the end of the structure, even for sub-types such as LayoutEEClass or DelegateEEClass). |
| 3124 | DacEnumMemoryRegion(dac_cast<TADDR>(GetPackedFields()), sizeof(EEClassPackedFields)); |
| 3125 | |
| 3126 | if (HasOptionalFields()) |
| 3127 | DacEnumMemoryRegion(dac_cast<TADDR>(GetOptionalFields()), sizeof(EEClassOptionalFields)); |
| 3128 | |
| 3129 | if (flags != CLRDATA_ENUM_MEM_MINI && flags != CLRDATA_ENUM_MEM_TRIAGE) |
| 3130 | { |
| 3131 | PTR_Module pModule = pMT->GetModule(); |
| 3132 | if (pModule.IsValid()) |
| 3133 | { |
| 3134 | pModule->EnumMemoryRegions(flags, true); |
| 3135 | } |
| 3136 | PTR_MethodDescChunk chunk = GetChunks(); |
| 3137 | while (chunk.IsValid()) |
| 3138 | { |
| 3139 | chunk->EnumMemoryRegions(flags); |
| 3140 | chunk = chunk->GetNextChunk(); |
| 3141 | } |
| 3142 | } |
| 3143 | |
| 3144 | PTR_FieldDesc pFieldDescList = GetFieldDescList(); |
| 3145 | if (pFieldDescList.IsValid()) |
| 3146 | { |
| 3147 | // add one to make sos's code happy. |
| 3148 | DacEnumMemoryRegion(dac_cast<TADDR>(pFieldDescList), |
| 3149 | (pMT->GetNumIntroducedInstanceFields() + |
| 3150 | GetNumStaticFields() + 1) * |
| 3151 | sizeof(FieldDesc)); |
| 3152 | } |
| 3153 | |
| 3154 | } |
| 3155 | |
| 3156 | #endif // DACCESS_COMPILE |
| 3157 | |
| 3158 | // Get pointer to the packed fields structure attached to this instance. |
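// The packed fields live immediately after the fixed portion of the (possibly derived)
// EEClass, so the cached fixed-field size doubles as their offset from 'this'.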
| 3159 | PTR_EEClassPackedFields EEClass::GetPackedFields() |
| 3160 | { |
| 3161 | LIMITED_METHOD_DAC_CONTRACT; |
| 3162 | |
| 3163 | return dac_cast<PTR_EEClassPackedFields>(PTR_HOST_TO_TADDR(this) + m_cbFixedEEClassFields); |
| 3164 | } |
| 3165 | |
| 3166 | // Get the value of the given field. Works regardless of whether the field is currently in its packed or |
| 3167 | // unpacked state. |
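// At runtime the fields may be in either representation: the unpacked form is a plain DWORD
// array, while the packed form is the compressed layout produced when the class was saved
// into a native image (see m_fFieldsArePacked).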
| 3168 | DWORD EEClass::GetPackableField(EEClassFieldId eField) |
| 3169 | { |
| 3170 | CONTRACTL |
| 3171 | { |
| 3172 | NOTHROW; |
| 3173 | GC_NOTRIGGER; |
| 3174 | MODE_ANY; |
| 3175 | SUPPORTS_DAC; |
| 3176 | SO_TOLERANT; |
| 3177 | } |
| 3178 | CONTRACTL_END; |
| 3179 | |
| 3180 | return m_fFieldsArePacked ? |
| 3181 | GetPackedFields()->GetPackedField(eField) : |
| 3182 | GetPackedFields()->GetUnpackedField(eField); |
| 3183 | } |
| 3184 | |
| 3185 | // Set the value of the given field. The field *must* be in the unpacked state for this to be legal (in |
| 3186 | // practice all packable fields must be initialized during class construction and from then on remain |
| 3187 | // immutable). |
| 3188 | void EEClass::SetPackableField(EEClassFieldId eField, DWORD dwValue) |
| 3189 | { |
| 3190 | CONTRACTL |
| 3191 | { |
| 3192 | NOTHROW; |
| 3193 | GC_NOTRIGGER; |
| 3194 | MODE_ANY; |
| 3195 | SO_TOLERANT; |
| 3196 | } |
| 3197 | CONTRACTL_END; |
| 3198 | |
| 3199 | _ASSERTE(!m_fFieldsArePacked); |
| 3200 | GetPackedFields()->SetUnpackedField(eField, dwValue); |
| 3201 | } |
| 3202 | |