1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | // |
5 | // File: methodtable.cpp |
6 | // |
7 | |
8 | #include "common.h" |
9 | |
10 | #include "clsload.hpp" |
11 | #include "method.hpp" |
12 | #include "class.h" |
13 | #include "classcompat.h" |
14 | #include "object.h" |
15 | #include "field.h" |
16 | #include "util.hpp" |
17 | #include "excep.h" |
18 | #include "siginfo.hpp" |
19 | #include "threads.h" |
20 | #include "stublink.h" |
21 | #include "ecall.h" |
22 | #include "dllimport.h" |
23 | #include "gcdesc.h" |
24 | #include "jitinterface.h" |
25 | #include "eeconfig.h" |
26 | #include "log.h" |
27 | #include "fieldmarshaler.h" |
28 | #include "cgensys.h" |
29 | #include "gcheaputilities.h" |
30 | #include "dbginterface.h" |
31 | #include "comdelegate.h" |
32 | #include "eventtrace.h" |
33 | #include "fieldmarshaler.h" |
34 | |
35 | |
36 | #include "eeprofinterfaces.h" |
37 | #include "dllimportcallback.h" |
38 | #include "listlock.h" |
39 | #include "methodimpl.h" |
40 | #include "guidfromname.h" |
41 | #include "stackprobe.h" |
42 | #include "encee.h" |
43 | #include "encee.h" |
44 | #include "comsynchronizable.h" |
45 | #include "customattribute.h" |
46 | #include "virtualcallstub.h" |
47 | #include "contractimpl.h" |
48 | #ifdef FEATURE_PREJIT |
49 | #include "zapsig.h" |
50 | #endif //FEATURE_PREJIT |
51 | |
52 | #ifdef FEATURE_COMINTEROP |
53 | #include "comcallablewrapper.h" |
54 | #include "clrtocomcall.h" |
55 | #include "runtimecallablewrapper.h" |
56 | #include "winrttypenameconverter.h" |
57 | #endif // FEATURE_COMINTEROP |
58 | |
59 | #include "typeequivalencehash.hpp" |
60 | |
61 | #include "generics.h" |
62 | #include "genericdict.h" |
63 | #include "typestring.h" |
64 | #include "typedesc.h" |
65 | #include "array.h" |
66 | |
67 | #ifdef FEATURE_INTERPRETER |
68 | #include "interpreter.h" |
69 | #endif // FEATURE_INTERPRETER |
70 | |
71 | #ifndef DACCESS_COMPILE |
72 | |
// Typedef for string comparison functions.
74 | typedef int (__cdecl *UTF8StringCompareFuncPtr)(const char *, const char *); |
75 | |
76 | MethodDataCache *MethodTable::s_pMethodDataCache = NULL; |
77 | BOOL MethodTable::s_fUseMethodDataCache = FALSE; |
78 | BOOL MethodTable::s_fUseParentMethodData = FALSE; |
79 | |
80 | #ifdef _DEBUG |
81 | extern unsigned g_dupMethods; |
82 | #endif |
83 | |
84 | #endif // !DACCESS_COMPILE |
85 | |
86 | #ifndef DACCESS_COMPILE |
87 | //========================================================================================== |
88 | class MethodDataCache |
89 | { |
90 | typedef MethodTable::MethodData MethodData; |
91 | |
public:
// Ctor. Allocates cEntries entries. Throws.
static UINT32 GetObjectSize(UINT32 cEntries);
94 | MethodDataCache(UINT32 cEntries); |
95 | |
96 | MethodData *Find(MethodTable *pMT); |
97 | MethodData *Find(MethodTable *pMTDecl, MethodTable *pMTImpl); |
98 | void Insert(MethodData *pMData); |
99 | void Clear(); |
100 | |
101 | protected: |
102 | // This describes each entry in the cache. |
103 | struct Entry |
104 | { |
105 | MethodData *m_pMData; |
106 | UINT32 m_iTimestamp; |
107 | }; |
108 | |
109 | MethodData *FindHelper(MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx); |
110 | |
111 | inline UINT32 GetNextTimestamp() |
112 | { return ++m_iCurTimestamp; } |
113 | |
114 | inline UINT32 NumEntries() |
115 | { LIMITED_METHOD_CONTRACT; return m_cEntries; } |
116 | |
117 | inline void TouchEntry(UINT32 i) |
118 | { WRAPPER_NO_CONTRACT; m_iLastTouched = i; GetEntry(i)->m_iTimestamp = GetNextTimestamp(); } |
119 | |
120 | inline UINT32 GetLastTouchedEntryIndex() |
121 | { WRAPPER_NO_CONTRACT; return m_iLastTouched; } |
122 | |
123 | // The end of this object contains an array of Entry |
124 | inline Entry *GetEntryData() |
125 | { LIMITED_METHOD_CONTRACT; return (Entry *)(this + 1); } |
126 | |
127 | inline Entry *GetEntry(UINT32 i) |
128 | { WRAPPER_NO_CONTRACT; return GetEntryData() + i; } |
129 | |
130 | private: |
131 | // This serializes access to the cache |
132 | SimpleRWLock m_lock; |
133 | |
// This allows aging of entries to decide which entry to evict when
// inserting a new entry.
136 | UINT32 m_iCurTimestamp; |
137 | |
138 | // The number of entries in the cache |
139 | UINT32 m_cEntries; |
140 | UINT32 m_iLastTouched; |
141 | |
142 | #ifdef _WIN64 |
UINT32 pad; // ensures that the object size is a multiple of 8 bytes
144 | #endif |
145 | }; // class MethodDataCache |
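
// A usage sketch, for illustration only (pMT and the surrounding code are
// hypothetical, not part of this file). Find() hands back an AddRef'ed
// MethodData, so the caller owns one reference and must Release() it;
// Insert() takes its own reference on the entry it caches.
//
//   MethodTable::MethodData *pData = s_pMethodDataCache->Find(pMT);
//   if (pData != NULL)
//   {
//       // ... use the cached method data ...
//       pData->Release();   // balance the AddRef performed by Find()
//   }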
146 | |
147 | //========================================================================================== |
148 | UINT32 MethodDataCache::GetObjectSize(UINT32 cEntries) |
149 | { |
150 | LIMITED_METHOD_CONTRACT; |
151 | return sizeof(MethodDataCache) + (sizeof(Entry) * cEntries); |
152 | } |
153 | |
154 | //========================================================================================== |
155 | MethodDataCache::MethodDataCache(UINT32 cEntries) |
156 | : m_lock(COOPERATIVE_OR_PREEMPTIVE, LOCK_TYPE_DEFAULT), |
157 | m_iCurTimestamp(0), |
158 | m_cEntries(cEntries), |
159 | m_iLastTouched(0) |
160 | { |
161 | WRAPPER_NO_CONTRACT; |
162 | ZeroMemory(GetEntryData(), cEntries * sizeof(Entry)); |
163 | } |
164 | |
165 | //========================================================================================== |
166 | MethodTable::MethodData *MethodDataCache::FindHelper( |
167 | MethodTable *pMTDecl, MethodTable *pMTImpl, UINT32 idx) |
168 | { |
169 | CONTRACTL { |
170 | NOTHROW; |
171 | GC_NOTRIGGER; |
172 | INSTANCE_CHECK; |
173 | } CONTRACTL_END; |
174 | |
175 | MethodData *pEntry = GetEntry(idx)->m_pMData; |
176 | if (pEntry != NULL) { |
177 | MethodTable *pMTDeclEntry = pEntry->GetDeclMethodTable(); |
178 | MethodTable *pMTImplEntry = pEntry->GetImplMethodTable(); |
179 | if (pMTDeclEntry == pMTDecl && pMTImplEntry == pMTImpl) { |
180 | return pEntry; |
181 | } |
182 | else if (pMTDecl == pMTImpl) { |
183 | if (pMTDeclEntry == pMTDecl) { |
184 | return pEntry->GetDeclMethodData(); |
185 | } |
186 | if (pMTImplEntry == pMTDecl) { |
187 | return pEntry->GetImplMethodData(); |
188 | } |
189 | } |
190 | } |
191 | |
192 | return NULL; |
193 | } |
194 | |
195 | //========================================================================================== |
196 | MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMTDecl, MethodTable *pMTImpl) |
197 | { |
198 | CONTRACTL { |
199 | NOTHROW; |
200 | GC_NOTRIGGER; |
201 | INSTANCE_CHECK; |
202 | } CONTRACTL_END; |
203 | |
204 | #ifdef LOGGING |
205 | g_sdStats.m_cCacheLookups++; |
206 | #endif |
207 | |
208 | SimpleReadLockHolder lh(&m_lock); |
209 | |
210 | // Check the last touched entry. |
211 | MethodData *pEntry = FindHelper(pMTDecl, pMTImpl, GetLastTouchedEntryIndex()); |
212 | |
213 | // Now search the entire cache. |
214 | if (pEntry == NULL) { |
215 | for (UINT32 i = 0; i < NumEntries(); i++) { |
216 | pEntry = FindHelper(pMTDecl, pMTImpl, i); |
217 | if (pEntry != NULL) { |
218 | TouchEntry(i); |
219 | break; |
220 | } |
221 | } |
222 | } |
223 | |
224 | if (pEntry != NULL) { |
225 | pEntry->AddRef(); |
226 | } |
227 | |
228 | #ifdef LOGGING |
229 | else { |
230 | // Failure to find the entry in the cache. |
231 | g_sdStats.m_cCacheMisses++; |
232 | } |
233 | #endif // LOGGING |
234 | |
235 | return pEntry; |
236 | } |
237 | |
238 | //========================================================================================== |
239 | MethodTable::MethodData *MethodDataCache::Find(MethodTable *pMT) |
240 | { |
241 | WRAPPER_NO_CONTRACT; |
242 | return Find(pMT, pMT); |
243 | } |
244 | |
245 | //========================================================================================== |
246 | void MethodDataCache::Insert(MethodData *pMData) |
247 | { |
248 | CONTRACTL { |
249 | NOTHROW; // for now, because it does not yet resize. |
250 | GC_NOTRIGGER; |
251 | INSTANCE_CHECK; |
252 | } CONTRACTL_END; |
253 | |
254 | SimpleWriteLockHolder hLock(&m_lock); |
255 | |
256 | UINT32 iMin = UINT32_MAX; |
257 | UINT32 idxMin = UINT32_MAX; |
258 | for (UINT32 i = 0; i < NumEntries(); i++) { |
259 | if (GetEntry(i)->m_iTimestamp < iMin) { |
260 | idxMin = i; |
261 | iMin = GetEntry(i)->m_iTimestamp; |
262 | } |
263 | } |
264 | Entry *pEntry = GetEntry(idxMin); |
265 | if (pEntry->m_pMData != NULL) { |
266 | pEntry->m_pMData->Release(); |
267 | } |
268 | pMData->AddRef(); |
269 | pEntry->m_pMData = pMData; |
270 | pEntry->m_iTimestamp = GetNextTimestamp(); |
271 | } |
272 | |
273 | //========================================================================================== |
274 | void MethodDataCache::Clear() |
275 | { |
276 | CONTRACTL { |
277 | NOTHROW; // for now, because it does not yet resize. |
278 | GC_NOTRIGGER; |
279 | INSTANCE_CHECK; |
280 | } CONTRACTL_END; |
281 | |
282 | // Taking the lock here is just a precaution. Really, the runtime |
283 | // should be suspended because this is called while unloading an |
284 | // AppDomain at the SysSuspendEE stage. But, if someone calls it |
285 | // outside of that context, we should be extra cautious. |
286 | SimpleWriteLockHolder lh(&m_lock); |
287 | |
288 | for (UINT32 i = 0; i < NumEntries(); i++) { |
289 | Entry *pEntry = GetEntry(i); |
290 | if (pEntry->m_pMData != NULL) { |
291 | pEntry->m_pMData->Release(); |
292 | } |
293 | } |
294 | ZeroMemory(GetEntryData(), NumEntries() * sizeof(Entry)); |
295 | m_iCurTimestamp = 0; |
296 | } // MethodDataCache::Clear |
297 | |
298 | #endif // !DACCESS_COMPILE |
299 | |
300 | |
301 | //========================================================================================== |
302 | // |
303 | // Initialize the offsets of multipurpose slots at compile time using template metaprogramming |
304 | // |
305 | |
306 | template<int N> |
307 | struct CountBitsAtCompileTime |
308 | { |
309 | enum { value = (N & 1) + CountBitsAtCompileTime<(N >> 1)>::value }; |
310 | }; |
311 | |
312 | template<> |
313 | struct CountBitsAtCompileTime<0> |
314 | { |
315 | enum { value = 0 }; |
316 | }; |
317 | |
318 | // "mask" is mask of used slots. |
319 | template<int mask> |
320 | struct MethodTable::MultipurposeSlotOffset |
321 | { |
// This is the raw index of the slot, assigned on a first come, first served basis
323 | enum { raw = CountBitsAtCompileTime<mask>::value }; |
324 | |
// This is the actual index of the slot. It is equal to the raw index except
// when the first fixed slot is not used but the second one is. In that case
// the first fixed slot has to be assigned instead of the second one. This
// assumes that there are exactly two fixed slots.
329 | enum { index = (((mask & 3) == 2) && (raw == 1)) ? 0 : raw }; |
330 | |
331 | // Offset of slot |
332 | enum { slotOffset = (index == 0) ? offsetof(MethodTable, m_pMultipurposeSlot1) : |
333 | (index == 1) ? offsetof(MethodTable, m_pMultipurposeSlot2) : |
334 | (sizeof(MethodTable) + index * sizeof(TADDR) - 2 * sizeof(TADDR)) }; |
335 | |
// Size of the MethodTable with overflow slots. It is used to compute the start offset of optional members.
337 | enum { totalSize = (slotOffset >= sizeof(MethodTable)) ? slotOffset : sizeof(MethodTable) }; |
338 | }; |
339 | |
340 | // |
341 | // These macros recursively expand to create 2^N values for the offset arrays |
342 | // |
343 | #define MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET (mask) MULTIPURPOSE_SLOT_OFFSET (mask | 0x01) |
344 | #define MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask) MULTIPURPOSE_SLOT_OFFSET_1(mask | 0x02) |
345 | #define MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask) MULTIPURPOSE_SLOT_OFFSET_2(mask | 0x04) |
346 | #define MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask) MULTIPURPOSE_SLOT_OFFSET_3(mask | 0x08) |
347 | #define MULTIPURPOSE_SLOT_OFFSET_5(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask) MULTIPURPOSE_SLOT_OFFSET_4(mask | 0x10) |
348 | |
349 | #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::slotOffset, |
350 | const BYTE MethodTable::c_DispatchMapSlotOffsets[] = { |
351 | MULTIPURPOSE_SLOT_OFFSET_2(0) |
352 | }; |
353 | const BYTE MethodTable::c_NonVirtualSlotsOffsets[] = { |
354 | MULTIPURPOSE_SLOT_OFFSET_3(0) |
355 | }; |
356 | const BYTE MethodTable::c_ModuleOverrideOffsets[] = { |
357 | MULTIPURPOSE_SLOT_OFFSET_4(0) |
358 | }; |
359 | #undef MULTIPURPOSE_SLOT_OFFSET |
360 | |
361 | #define MULTIPURPOSE_SLOT_OFFSET(mask) MultipurposeSlotOffset<mask>::totalSize, |
const BYTE MethodTable::c_OptionalMembersStartOffsets[] = {
363 | MULTIPURPOSE_SLOT_OFFSET_5(0) |
364 | }; |
365 | #undef MULTIPURPOSE_SLOT_OFFSET |
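
// For illustration, MULTIPURPOSE_SLOT_OFFSET_2(0) expands to one entry per
// mask value, in order 0, 1, 2, 3:
//
//   MultipurposeSlotOffset<0>::slotOffset, MultipurposeSlotOffset<1>::slotOffset,
//   MultipurposeSlotOffset<2>::slotOffset, MultipurposeSlotOffset<3>::slotOffset,
//
// so each array can be indexed directly by the bitmask of slots claimed by
// higher-priority multipurpose members.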
366 | |
367 | |
368 | //========================================================================================== |
369 | // Optimization intended for MethodTable::GetModule, MethodTable::GetDispatchMap and MethodTable::GetNonVirtualSlotsPtr |
370 | |
371 | #include <optsmallperfcritical.h> |
372 | |
373 | PTR_Module MethodTable::GetModule() |
374 | { |
375 | LIMITED_METHOD_DAC_CONTRACT; |
376 | |
377 | g_IBCLogger.LogMethodTableAccess(this); |
378 | |
379 | // Fast path for non-generic non-array case |
380 | if ((m_dwFlags & (enum_flag_HasComponentSize | enum_flag_GenericsMask)) == 0) |
381 | return GetLoaderModule(); |
382 | |
383 | MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable(); |
384 | if (!pMTForModule->HasModuleOverride()) |
385 | return pMTForModule->GetLoaderModule(); |
386 | |
387 | TADDR pSlot = pMTForModule->GetMultipurposeSlotPtr(enum_flag_HasModuleOverride, c_ModuleOverrideOffsets); |
388 | return RelativeFixupPointer<PTR_Module>::GetValueAtPtr(pSlot); |
389 | } |
390 | |
391 | //========================================================================================== |
392 | PTR_Module MethodTable::GetModule_NoLogging() |
393 | { |
394 | LIMITED_METHOD_DAC_CONTRACT; |
395 | |
396 | // Fast path for non-generic non-array case |
397 | if ((m_dwFlags & (enum_flag_HasComponentSize | enum_flag_GenericsMask)) == 0) |
398 | return GetLoaderModule(); |
399 | |
400 | MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable(); |
401 | if (!pMTForModule->HasModuleOverride()) |
402 | return pMTForModule->GetLoaderModule(); |
403 | |
404 | TADDR pSlot = pMTForModule->GetMultipurposeSlotPtr(enum_flag_HasModuleOverride, c_ModuleOverrideOffsets); |
405 | return RelativeFixupPointer<PTR_Module>::GetValueAtPtr(pSlot); |
406 | } |
407 | |
408 | //========================================================================================== |
409 | PTR_DispatchMap MethodTable::GetDispatchMap() |
410 | { |
411 | LIMITED_METHOD_DAC_CONTRACT; |
412 | |
413 | MethodTable * pMT = this; |
414 | |
415 | if (!pMT->HasDispatchMapSlot()) |
416 | { |
417 | pMT = pMT->GetCanonicalMethodTable(); |
418 | if (!pMT->HasDispatchMapSlot()) |
419 | return NULL; |
420 | } |
421 | |
422 | g_IBCLogger.LogDispatchMapAccess(pMT); |
423 | |
424 | TADDR pSlot = pMT->GetMultipurposeSlotPtr(enum_flag_HasDispatchMapSlot, c_DispatchMapSlotOffsets); |
425 | return RelativePointer<PTR_DispatchMap>::GetValueAtPtr(pSlot); |
426 | } |
427 | |
428 | //========================================================================================== |
429 | TADDR MethodTable::GetNonVirtualSlotsPtr() |
430 | { |
431 | LIMITED_METHOD_DAC_CONTRACT; |
432 | |
433 | _ASSERTE(GetFlag(enum_flag_HasNonVirtualSlots)); |
434 | return GetMultipurposeSlotPtr(enum_flag_HasNonVirtualSlots, c_NonVirtualSlotsOffsets); |
435 | } |
436 | |
437 | #include <optdefault.h> |
438 | |
439 | |
440 | //========================================================================================== |
441 | PTR_Module MethodTable::GetModuleIfLoaded() |
442 | { |
443 | CONTRACTL |
444 | { |
445 | NOTHROW; |
446 | GC_NOTRIGGER; |
447 | MODE_ANY; |
448 | FORBID_FAULT; |
449 | SUPPORTS_DAC; |
450 | } |
451 | CONTRACTL_END; |
452 | |
453 | g_IBCLogger.LogMethodTableAccess(this); |
454 | |
455 | MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable(); |
456 | if (!pMTForModule->HasModuleOverride()) |
457 | return pMTForModule->GetLoaderModule(); |
458 | |
459 | return Module::RestoreModulePointerIfLoaded(pMTForModule->GetModuleOverridePtr(), pMTForModule->GetLoaderModule()); |
460 | } |
461 | |
462 | #ifndef DACCESS_COMPILE |
463 | //========================================================================================== |
464 | void MethodTable::SetModule(Module * pModule) |
465 | { |
466 | LIMITED_METHOD_CONTRACT; |
467 | |
468 | if (HasModuleOverride()) |
469 | { |
470 | GetModuleOverridePtr()->SetValue(pModule); |
471 | } |
472 | |
473 | _ASSERTE(GetModule() == pModule); |
474 | } |
475 | #endif // DACCESS_COMPILE |
476 | |
477 | //========================================================================================== |
478 | BOOL MethodTable::ValidateWithPossibleAV() |
479 | { |
480 | CANNOT_HAVE_CONTRACT; |
481 | SUPPORTS_DAC; |
482 | |
// MethodTables have the canonicalization property described below:
// canonicalize, then canonicalize again, and check that the results are
// the same. This is a property that holds for every single valid object in
// the system, but which should hold for very few other addresses.
487 | |
488 | // For non-generic classes, we can rely on comparing |
489 | // object->methodtable->class->methodtable |
490 | // to |
491 | // object->methodtable |
492 | // |
493 | // However, for generic instantiation this does not work. There we must |
494 | // compare |
495 | // |
496 | // object->methodtable->class->methodtable->class |
497 | // to |
498 | // object->methodtable->class |
499 | // |
500 | // Of course, that's not necessarily enough to verify that the method |
501 | // table and class are absolutely valid - we rely on type soundness |
502 | // for that. We need to do more sanity checking to |
503 | // make sure that our pointer here is in fact a valid object. |
504 | PTR_EEClass pEEClass = this->GetClassWithPossibleAV(); |
505 | return ((this == pEEClass->GetMethodTableWithPossibleAV()) || |
506 | ((HasInstantiation() || IsArray()) && |
507 | (pEEClass->GetMethodTableWithPossibleAV()->GetClassWithPossibleAV() == pEEClass))); |
508 | } |
509 | |
510 | #ifndef DACCESS_COMPILE |
511 | |
512 | //========================================================================================== |
513 | BOOL MethodTable::IsClassInited(AppDomain* pAppDomain /* = NULL */) |
514 | { |
515 | WRAPPER_NO_CONTRACT; |
516 | |
517 | if (IsClassPreInited()) |
518 | return TRUE; |
519 | |
520 | if (IsSharedByGenericInstantiations()) |
521 | return FALSE; |
522 | |
523 | DomainLocalModule *pLocalModule; |
524 | if (pAppDomain == NULL) |
525 | { |
526 | pLocalModule = GetDomainLocalModule(); |
527 | } |
528 | else |
529 | { |
530 | pLocalModule = GetDomainLocalModule(pAppDomain); |
531 | } |
532 | |
533 | _ASSERTE(pLocalModule != NULL); |
534 | |
535 | return pLocalModule->IsClassInitialized(this); |
536 | } |
537 | |
538 | //========================================================================================== |
539 | BOOL MethodTable::IsInitError() |
540 | { |
541 | WRAPPER_NO_CONTRACT; |
542 | |
543 | DomainLocalModule *pLocalModule = GetDomainLocalModule(); |
544 | _ASSERTE(pLocalModule != NULL); |
545 | |
546 | return pLocalModule->IsClassInitError(this); |
547 | } |
548 | |
549 | //========================================================================================== |
550 | // mark the class as having its .cctor run |
551 | void MethodTable::SetClassInited() |
552 | { |
553 | WRAPPER_NO_CONTRACT; |
554 | _ASSERTE(!IsClassPreInited()); |
555 | GetDomainLocalModule()->SetClassInitialized(this); |
556 | } |
557 | |
558 | //========================================================================================== |
559 | void MethodTable::SetClassInitError() |
560 | { |
561 | WRAPPER_NO_CONTRACT; |
562 | GetDomainLocalModule()->SetClassInitError(this); |
563 | } |
564 | |
565 | //========================================================================================== |
566 | // mark the class as having been restored. |
567 | void MethodTable::SetIsRestored() |
568 | { |
569 | CONTRACTL |
570 | { |
571 | THROWS; |
572 | GC_TRIGGERS; |
573 | } |
574 | CONTRACTL_END |
575 | |
576 | PRECONDITION(!IsFullyLoaded()); |
577 | |
578 | // If functions on this type have already been requested for rejit, then give the rejit |
579 | // manager a chance to jump-stamp the code we are implicitly restoring. This ensures the |
580 | // first thread entering the function will jump to the prestub and trigger the |
581 | // rejit. Note that the PublishMethodTableHolder may take a lock to avoid a rejit race. |
582 | // See code:ReJitManager::PublishMethodHolder::PublishMethodHolder#PublishCode |
583 | // for details on the race. |
584 | // |
585 | { |
586 | PublishMethodTableHolder(this); |
587 | FastInterlockAnd(EnsureWritablePages(&(GetWriteableDataForWrite()->m_dwFlags)), ~MethodTableWriteableData::enum_flag_Unrestored); |
588 | } |
589 | #ifndef DACCESS_COMPILE |
590 | if (ETW_PROVIDER_ENABLED(MICROSOFT_WINDOWS_DOTNETRUNTIME_PROVIDER)) |
591 | { |
592 | ETW::MethodLog::MethodTableRestored(this); |
593 | } |
594 | #endif |
595 | } |
596 | |
597 | //========================================================================================== |
598 | // mark as COM object type (System.__ComObject and types deriving from it) |
599 | void MethodTable::SetComObjectType() |
600 | { |
601 | LIMITED_METHOD_CONTRACT; |
602 | SetFlag(enum_flag_ComObject); |
603 | } |
604 | |
605 | #ifdef FEATURE_ICASTABLE |
606 | void MethodTable::SetICastable() |
607 | { |
608 | LIMITED_METHOD_CONTRACT; |
609 | SetFlag(enum_flag_ICastable); |
610 | } |
611 | #endif |
612 | |
613 | BOOL MethodTable::IsICastable() |
614 | { |
615 | LIMITED_METHOD_DAC_CONTRACT; |
616 | #ifdef FEATURE_ICASTABLE |
617 | return GetFlag(enum_flag_ICastable); |
618 | #else |
619 | return FALSE; |
620 | #endif |
621 | } |
622 | |
623 | |
624 | #endif // !DACCESS_COMPILE |
625 | |
626 | //========================================================================================== |
627 | WORD MethodTable::GetNumMethods() |
628 | { |
629 | LIMITED_METHOD_DAC_CONTRACT; |
630 | return GetClass()->GetNumMethods(); |
631 | } |
632 | |
633 | //========================================================================================== |
634 | PTR_BaseDomain MethodTable::GetDomain() |
635 | { |
636 | LIMITED_METHOD_DAC_CONTRACT; |
637 | return dac_cast<PTR_BaseDomain>(AppDomain::GetCurrentDomain()); |
638 | } |
639 | |
640 | //========================================================================================== |
641 | BOOL MethodTable::HasSameTypeDefAs(MethodTable *pMT) |
642 | { |
643 | LIMITED_METHOD_DAC_CONTRACT; |
644 | |
645 | if (this == pMT) |
646 | return TRUE; |
647 | |
648 | // optimize for the negative case where we expect RID mismatch |
649 | if (GetTypeDefRid() != pMT->GetTypeDefRid()) |
650 | return FALSE; |
651 | |
652 | if (GetCanonicalMethodTable() == pMT->GetCanonicalMethodTable()) |
653 | return TRUE; |
654 | |
655 | return (GetModule() == pMT->GetModule()); |
656 | } |
657 | |
658 | //========================================================================================== |
659 | BOOL MethodTable::HasSameTypeDefAs_NoLogging(MethodTable *pMT) |
660 | { |
661 | LIMITED_METHOD_DAC_CONTRACT; |
662 | |
663 | if (this == pMT) |
664 | return TRUE; |
665 | |
666 | // optimize for the negative case where we expect RID mismatch |
667 | if (GetTypeDefRid_NoLogging() != pMT->GetTypeDefRid_NoLogging()) |
668 | return FALSE; |
669 | |
670 | if (GetCanonicalMethodTable() == pMT->GetCanonicalMethodTable()) |
671 | return TRUE; |
672 | |
673 | return (GetModule_NoLogging() == pMT->GetModule_NoLogging()); |
674 | } |
675 | |
676 | #ifndef DACCESS_COMPILE |
677 | |
678 | //========================================================================================== |
679 | PTR_MethodTable InterfaceInfo_t::GetApproxMethodTable(Module * pContainingModule) |
680 | { |
681 | CONTRACTL |
682 | { |
683 | THROWS; |
684 | GC_TRIGGERS; |
685 | MODE_ANY; |
686 | } |
687 | CONTRACTL_END; |
688 | #ifdef FEATURE_PREJIT |
689 | if (m_pMethodTable.IsTagged()) |
690 | { |
691 | // Ideally, we would use Module::RestoreMethodTablePointer here. Unfortunately, it is not |
692 | // possible because of the current type loader architecture that restores types incrementally |
693 | // even in the NGen case. |
694 | MethodTable * pItfMT = *(m_pMethodTable.GetValuePtr()); |
695 | |
696 | // Restore the method table, but do not write it back if it has instantiation. We do not want |
697 | // to write back the approximate instantiations. |
698 | Module::RestoreMethodTablePointerRaw(&pItfMT, pContainingModule, CLASS_LOAD_APPROXPARENTS); |
699 | |
700 | if (!pItfMT->HasInstantiation()) |
701 | { |
702 | // m_pMethodTable.SetValue() is not used here since we want to update the indirection cell |
703 | *EnsureWritablePages(m_pMethodTable.GetValuePtr()) = pItfMT; |
704 | } |
705 | |
706 | return pItfMT; |
707 | } |
708 | #endif |
709 | MethodTable * pItfMT = m_pMethodTable.GetValue(); |
710 | ClassLoader::EnsureLoaded(TypeHandle(pItfMT), CLASS_LOAD_APPROXPARENTS); |
711 | return pItfMT; |
712 | } |
713 | |
714 | #ifndef CROSSGEN_COMPILE |
715 | //========================================================================================== |
716 | // get the method desc given the interface method desc |
717 | /* static */ MethodDesc *MethodTable::GetMethodDescForInterfaceMethodAndServer( |
718 | TypeHandle ownerType, MethodDesc *pItfMD, OBJECTREF *pServer) |
719 | { |
720 | CONTRACT(MethodDesc*) |
721 | { |
722 | THROWS; |
723 | GC_TRIGGERS; |
724 | MODE_COOPERATIVE; |
725 | PRECONDITION(CheckPointer(pItfMD)); |
726 | PRECONDITION(pItfMD->IsInterface()); |
727 | PRECONDITION(!ownerType.IsNull()); |
728 | PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pItfMD->GetMethodTable())); |
729 | POSTCONDITION(CheckPointer(RETVAL)); |
730 | } |
731 | CONTRACT_END; |
732 | VALIDATEOBJECTREF(*pServer); |
733 | |
734 | #ifdef _DEBUG |
735 | MethodTable * pItfMT = ownerType.GetMethodTable(); |
736 | PREFIX_ASSUME(pItfMT != NULL); |
737 | #endif // _DEBUG |
738 | |
739 | MethodTable *pServerMT = (*pServer)->GetMethodTable(); |
740 | PREFIX_ASSUME(pServerMT != NULL); |
741 | |
742 | #ifdef FEATURE_ICASTABLE |
// In the case of ICastable, instead of trying to find the method implementation in the real object type,
// we call ICastableHelpers.GetImplType(obj, interfaceType) and call GetMethodDescForInterfaceMethod() again with whatever type it returns.
// This allows objects that implement ICastable to mimic the behavior of other types.
746 | if (pServerMT->IsICastable() && |
747 | !pItfMD->HasMethodInstantiation() && |
748 | !TypeHandle(pServerMT).CanCastTo(ownerType)) // we need to make sure object doesn't implement this interface in a natural way |
749 | { |
750 | GCStress<cfg_any>::MaybeTrigger(); |
751 | |
752 | // Make call to ICastableHelpers.GetImplType(obj, interfaceTypeObj) |
753 | PREPARE_NONVIRTUAL_CALLSITE(METHOD__ICASTABLEHELPERS__GETIMPLTYPE); |
754 | |
755 | OBJECTREF ownerManagedType = ownerType.GetManagedClassObject(); //GC triggers |
756 | |
757 | DECLARE_ARGHOLDER_ARRAY(args, 2); |
758 | args[ARGNUM_0] = OBJECTREF_TO_ARGHOLDER(*pServer); |
759 | args[ARGNUM_1] = OBJECTREF_TO_ARGHOLDER(ownerManagedType); |
760 | |
761 | OBJECTREF impTypeObj = NULL; |
762 | CALL_MANAGED_METHOD_RETREF(impTypeObj, OBJECTREF, args); |
763 | |
764 | INDEBUG(ownerManagedType = NULL); //ownerManagedType wasn't protected during the call |
765 | if (impTypeObj == NULL) // GetImplType returns default(RuntimeTypeHandle) |
766 | { |
767 | COMPlusThrow(kEntryPointNotFoundException); |
768 | } |
769 | |
770 | ReflectClassBaseObject* resultTypeObj = ((ReflectClassBaseObject*)OBJECTREFToObject(impTypeObj)); |
TypeHandle resultTypeHnd = resultTypeObj->GetType();
MethodTable *pResultMT = resultTypeHnd.GetMethodTable();
773 | |
774 | RETURN(pResultMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD, TRUE /* throwOnConflict */)); |
775 | } |
776 | #endif |
777 | |
778 | #ifdef FEATURE_COMINTEROP |
779 | if (pServerMT->IsComObjectType() && !pItfMD->HasMethodInstantiation()) |
780 | { |
781 | // interop needs an exact MethodDesc |
782 | pItfMD = MethodDesc::FindOrCreateAssociatedMethodDesc( |
783 | pItfMD, |
784 | ownerType.GetMethodTable(), |
785 | FALSE, // forceBoxedEntryPoint |
786 | Instantiation(), // methodInst |
787 | FALSE, // allowInstParam |
788 | TRUE); // forceRemotableMethod |
789 | |
790 | RETURN(pServerMT->GetMethodDescForComInterfaceMethod(pItfMD, false)); |
791 | } |
#endif // FEATURE_COMINTEROP
793 | |
794 | // Handle pure COM+ types. |
795 | RETURN (pServerMT->GetMethodDescForInterfaceMethod(ownerType, pItfMD, TRUE /* throwOnConflict */)); |
796 | } |
797 | |
798 | #ifdef FEATURE_COMINTEROP |
799 | //========================================================================================== |
800 | // get the method desc given the interface method desc on a COM implemented server |
801 | // (if fNullOk is set then NULL is an allowable return value) |
802 | MethodDesc *MethodTable::GetMethodDescForComInterfaceMethod(MethodDesc *pItfMD, bool fNullOk) |
803 | { |
804 | CONTRACT(MethodDesc*) |
805 | { |
806 | THROWS; |
807 | GC_TRIGGERS; |
808 | MODE_COOPERATIVE; |
809 | PRECONDITION(CheckPointer(pItfMD)); |
810 | PRECONDITION(pItfMD->IsInterface()); |
811 | PRECONDITION(IsComObjectType()); |
812 | POSTCONDITION(fNullOk || CheckPointer(RETVAL)); |
813 | } |
814 | CONTRACT_END; |
815 | |
816 | MethodTable * pItfMT = pItfMD->GetMethodTable(); |
817 | PREFIX_ASSUME(pItfMT != NULL); |
818 | |
// First handle the __ComObject class itself, which doesn't have a Dynamic Interface Map
820 | if (!HasDynamicInterfaceMap()) |
821 | { |
822 | RETURN(pItfMD); |
823 | } |
824 | else |
825 | { |
// Now we handle the more complex extensible RCWs. The first thing to do is check
// to see if the static definition of the extensible RCW specifies that the class
// implements the interface.
829 | DWORD slot = (DWORD) -1; |
830 | |
831 | // Calling GetTarget here instead of FindDispatchImpl gives us caching functionality to increase speed. |
832 | PCODE tgt = VirtualCallStubManager::GetTarget( |
833 | pItfMT->GetLoaderAllocator()->GetDispatchToken(pItfMT->GetTypeID(), pItfMD->GetSlot()), this, TRUE /* throwOnConflict */); |
834 | |
835 | if (tgt != NULL) |
836 | { |
837 | RETURN(MethodTable::GetMethodDescForSlotAddress(tgt)); |
838 | } |
839 | |
840 | // The interface is not in the static class definition so we need to look at the |
841 | // dynamic interfaces. |
842 | else if (FindDynamicallyAddedInterface(pItfMT)) |
843 | { |
// This interface was added to the class dynamically, so it is implemented
// by the COM object. We treat these dynamically added interfaces the same
// way we treat COM objects, that is, by using the interface vtable.
847 | RETURN(pItfMD); |
848 | } |
849 | else |
850 | { |
851 | RETURN(NULL); |
852 | } |
853 | } |
854 | } |
855 | #endif // FEATURE_COMINTEROP |
856 | |
#endif // !CROSSGEN_COMPILE
858 | |
859 | //--------------------------------------------------------------------------------------- |
860 | // |
861 | MethodTable* CreateMinimalMethodTable(Module* pContainingModule, |
862 | LoaderHeap* pCreationHeap, |
863 | AllocMemTracker* pamTracker) |
864 | { |
865 | CONTRACTL |
866 | { |
867 | THROWS; |
868 | GC_NOTRIGGER; |
869 | MODE_ANY; |
870 | INJECT_FAULT(COMPlusThrowOM()); |
871 | } |
872 | CONTRACTL_END; |
873 | |
874 | EEClass* pClass = EEClass::CreateMinimalClass(pCreationHeap, pamTracker); |
875 | |
876 | LOG((LF_BCL, LL_INFO100, "Level2 - Creating MethodTable {0x%p}...\n" , pClass)); |
877 | |
878 | MethodTable* pMT = (MethodTable *)(void *)pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTable)))); |
879 | |
880 | // Note: Memory allocated on loader heap is zero filled |
881 | // memset(pMT, 0, sizeof(MethodTable)); |
882 | |
883 | // Allocate the private data block ("private" during runtime in the ngen'ed case). |
884 | BYTE* pMTWriteableData = (BYTE *) |
885 | pamTracker->Track(pCreationHeap->AllocMem(S_SIZE_T(sizeof(MethodTableWriteableData)))); |
886 | pMT->SetWriteableData((PTR_MethodTableWriteableData)pMTWriteableData); |
887 | |
888 | // |
889 | // Set up the EEClass |
890 | // |
891 | pClass->SetMethodTable(pMT); // in the EEClass set the pointer to this MethodTable |
892 | pClass->SetAttrClass(tdPublic | tdSealed); |
893 | |
894 | // |
895 | // Set up the MethodTable |
896 | // |
// Does not need a parent. Note that the MethodTable for COR_GLOBAL_PARENT_TOKEN does not have a parent either,
// so the system has to be wired for dealing with no parent anyway.
899 | pMT->SetParentMethodTable(NULL); |
900 | pMT->SetClass(pClass); |
901 | pMT->SetLoaderModule(pContainingModule); |
902 | pMT->SetLoaderAllocator(pContainingModule->GetLoaderAllocator()); |
903 | pMT->SetInternalCorElementType(ELEMENT_TYPE_CLASS); |
904 | pMT->SetBaseSize(OBJECT_BASESIZE); |
905 | |
906 | #ifdef _DEBUG |
907 | pClass->SetDebugClassName("dynamicClass" ); |
908 | pMT->SetDebugClassName("dynamicClass" ); |
909 | #endif |
910 | |
LOG((LF_BCL, LL_INFO10, "Level1 - MethodTable created {0x%p}\n" , pMT));
912 | |
913 | return pMT; |
914 | } |
915 | |
916 | |
917 | #ifdef FEATURE_COMINTEROP |
918 | #ifndef CROSSGEN_COMPILE |
919 | //========================================================================================== |
920 | OBJECTREF MethodTable::GetObjCreateDelegate() |
921 | { |
922 | CONTRACTL |
923 | { |
924 | MODE_COOPERATIVE; |
925 | GC_NOTRIGGER; |
926 | NOTHROW; |
927 | } |
928 | CONTRACTL_END; |
_ASSERTE(!IsInterface());
930 | if (GetOHDelegate()) |
931 | return ObjectFromHandle(GetOHDelegate()); |
932 | else |
933 | return NULL; |
934 | } |
935 | |
936 | //========================================================================================== |
937 | void MethodTable::SetObjCreateDelegate(OBJECTREF orDelegate) |
938 | { |
939 | CONTRACTL |
940 | { |
941 | MODE_COOPERATIVE; |
942 | GC_NOTRIGGER; |
943 | THROWS; // From CreateHandle |
944 | } |
945 | CONTRACTL_END; |
946 | |
947 | if (GetOHDelegate()) |
948 | StoreObjectInHandle(GetOHDelegate(), orDelegate); |
949 | else |
950 | SetOHDelegate (GetAppDomain()->CreateHandle(orDelegate)); |
951 | } |
#endif // !CROSSGEN_COMPILE
953 | #endif // FEATURE_COMINTEROP |
954 | |
955 | |
956 | //========================================================================================== |
957 | void MethodTable::SetInterfaceMap(WORD wNumInterfaces, InterfaceInfo_t* iMap) |
958 | { |
959 | LIMITED_METHOD_CONTRACT; |
960 | if (wNumInterfaces == 0) |
961 | { |
962 | _ASSERTE(!HasInterfaceMap()); |
963 | return; |
964 | } |
965 | |
966 | m_wNumInterfaces = wNumInterfaces; |
967 | |
968 | CONSISTENCY_CHECK(IS_ALIGNED(iMap, sizeof(void*))); |
969 | m_pInterfaceMap.SetValue(iMap); |
970 | } |
971 | |
972 | //========================================================================================== |
// Called after GetExtraInterfaceInfoSize (defined later in this file) to set up a new MethodTable with the additional
// memory to track extra interface info. If there is a non-zero number of interfaces implemented on this class but
// GetExtraInterfaceInfoSize() returned zero, this call must still be made (with a NULL argument).
976 | void MethodTable::InitializeExtraInterfaceInfo(PVOID pInfo) |
977 | { |
978 | STANDARD_VM_CONTRACT; |
979 | |
980 | // Check that memory was allocated or not allocated in the right scenarios. |
981 | _ASSERTE(((pInfo == NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) == 0)) || |
982 | ((pInfo != NULL) && (GetExtraInterfaceInfoSize(GetNumInterfaces()) != 0))); |
983 | |
984 | // This call is a no-op if we don't require extra interface info (in which case a buffer should never have |
985 | // been allocated). |
986 | if (!HasExtraInterfaceInfo()) |
987 | { |
988 | _ASSERTE(pInfo == NULL); |
989 | return; |
990 | } |
991 | |
992 | // Get pointer to optional slot that holds either a small inlined bitmap of flags or the pointer to a |
993 | // larger bitmap. |
994 | PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr(); |
995 | |
996 | // In either case, data inlined or held in an external buffer, the correct thing to do is to write pInfo |
997 | // to the slot. In the inlined case we wish to set all flags to their default value (zero, false) and |
998 | // writing NULL does that. Otherwise we simply want to dump the buffer pointer directly into the slot (no |
999 | // need for a discriminator bit, we can always infer which format we're using based on the interface |
1000 | // count). |
1001 | *pInfoSlot = (TADDR)pInfo; |
1002 | |
1003 | // There shouldn't be any need for further initialization in the buffered case since loader heap |
1004 | // allocation zeroes data. |
1005 | #ifdef _DEBUG |
1006 | if (pInfo != NULL) |
1007 | for (DWORD i = 0; i < GetExtraInterfaceInfoSize(GetNumInterfaces()); i++) |
1008 | _ASSERTE(*((BYTE*)pInfo + i) == 0); |
1009 | #endif // _DEBUG |
1010 | } |
1011 | |
1012 | #ifdef FEATURE_NATIVE_IMAGE_GENERATION |
1013 | // Ngen support. |
1014 | void MethodTable::SaveExtraInterfaceInfo(DataImage *pImage) |
1015 | { |
1016 | STANDARD_VM_CONTRACT; |
1017 | |
// No extra data to save if the number of interfaces is below the threshold -- there is either no data or
// it all fits into the optional members inline.
1020 | if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold) |
1021 | return; |
1022 | |
1023 | pImage->StoreStructure((LPVOID)*GetExtraInterfaceInfoPtr(), |
1024 | GetExtraInterfaceInfoSize(GetNumInterfaces()), |
1025 | DataImage::ITEM_INTERFACE_MAP); |
1026 | } |
1027 | |
1028 | void MethodTable::FixupExtraInterfaceInfo(DataImage *pImage) |
1029 | { |
1030 | STANDARD_VM_CONTRACT; |
1031 | |
// No pointer to extra data to fixup if the number of interfaces is below the threshold -- there is
// either no data or it all fits into the optional members inline.
1034 | if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold) |
1035 | return; |
1036 | |
1037 | pImage->FixupPointerField(this, (BYTE*)GetExtraInterfaceInfoPtr() - (BYTE*)this); |
1038 | } |
1039 | #endif // FEATURE_NATIVE_IMAGE_GENERATION |
1040 | |
1041 | // Define a macro that generates a mask for a given bit in a TADDR correctly on either 32 or 64 bit platforms. |
1042 | #ifdef _WIN64 |
1043 | #define SELECT_TADDR_BIT(_index) (1ULL << (_index)) |
1044 | #else |
1045 | #define SELECT_TADDR_BIT(_index) (1U << (_index)) |
1046 | #endif |
1047 | |
1048 | //========================================================================================== |
1049 | // For the given interface in the map (specified via map index) mark the interface as declared explicitly on |
1050 | // this class. This is not legal for dynamically added interfaces (as used by RCWs). |
1051 | void MethodTable::SetInterfaceDeclaredOnClass(DWORD index) |
1052 | { |
1053 | STANDARD_VM_CONTRACT; |
1054 | |
1055 | _ASSERTE(HasExtraInterfaceInfo()); |
1056 | _ASSERTE(index < GetNumInterfaces()); |
1057 | |
1058 | // Get address of optional slot for extra info. |
1059 | PTR_TADDR pInfoSlot = GetExtraInterfaceInfoPtr(); |
1060 | |
1061 | if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold) |
1062 | { |
1063 | // Bitmap of flags is stored inline in the optional slot. |
1064 | *pInfoSlot |= SELECT_TADDR_BIT(index); |
1065 | } |
1066 | else |
1067 | { |
1068 | // Slot points to a buffer containing a larger bitmap. |
1069 | TADDR *pBitmap = (PTR_TADDR)*pInfoSlot; |
1070 | |
1071 | DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit |
1072 | DWORD idxInTaddr = index % (sizeof(TADDR) * 8); |
1073 | TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr); |
1074 | |
1075 | pBitmap[idxTaddr] |= bitmask; |
1076 | _ASSERTE((pBitmap[idxTaddr] & bitmask) == bitmask); |
1077 | } |
1078 | } |
1079 | |
1080 | //========================================================================================== |
1081 | // For the given interface return true if the interface was declared explicitly on this class. |
1082 | bool MethodTable::IsInterfaceDeclaredOnClass(DWORD index) |
1083 | { |
1084 | STANDARD_VM_CONTRACT; |
1085 | |
1086 | _ASSERTE(HasExtraInterfaceInfo()); |
1087 | |
1088 | // Dynamic interfaces are always marked as not DeclaredOnClass (I don't know why but this is how the code |
1089 | // was originally authored). |
1090 | if (index >= GetNumInterfaces()) |
1091 | { |
1092 | #ifdef FEATURE_COMINTEROP |
1093 | _ASSERTE(HasDynamicInterfaceMap()); |
1094 | #endif // FEATURE_COMINTEROP |
1095 | return false; |
1096 | } |
1097 | |
1098 | // Get data from the optional extra info slot. |
1099 | TADDR taddrInfo = *GetExtraInterfaceInfoPtr(); |
1100 | |
1101 | if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold) |
1102 | { |
1103 | // Bitmap of flags is stored directly in the value. |
1104 | return (taddrInfo & SELECT_TADDR_BIT(index)) != 0; |
1105 | } |
1106 | else |
1107 | { |
1108 | // Slot points to a buffer containing a larger bitmap. |
1109 | TADDR *pBitmap = (PTR_TADDR)taddrInfo; |
1110 | |
1111 | DWORD idxTaddr = index / (sizeof(TADDR) * 8); // Select TADDR in array that covers the target bit |
1112 | DWORD idxInTaddr = index % (sizeof(TADDR) * 8); |
1113 | TADDR bitmask = SELECT_TADDR_BIT(idxInTaddr); |
1114 | |
1115 | return (pBitmap[idxTaddr] & bitmask) != 0; |
1116 | } |
1117 | } |
1118 | |
1119 | #ifdef FEATURE_COMINTEROP |
1120 | |
1121 | //========================================================================================== |
1122 | PTR_InterfaceInfo MethodTable::GetDynamicallyAddedInterfaceMap() |
1123 | { |
1124 | LIMITED_METHOD_DAC_CONTRACT; |
1125 | PRECONDITION(HasDynamicInterfaceMap()); |
1126 | |
1127 | return GetInterfaceMap() + GetNumInterfaces(); |
1128 | } |
1129 | |
1130 | //========================================================================================== |
1131 | unsigned MethodTable::GetNumDynamicallyAddedInterfaces() |
1132 | { |
1133 | LIMITED_METHOD_DAC_CONTRACT; |
1134 | PRECONDITION(HasDynamicInterfaceMap()); |
1135 | |
1136 | PTR_InterfaceInfo pInterfaces = GetInterfaceMap(); |
1137 | PREFIX_ASSUME(pInterfaces != NULL); |
1138 | return (unsigned)*(dac_cast<PTR_SIZE_T>(pInterfaces) - 1); |
1139 | } |
1140 | |
1141 | //========================================================================================== |
1142 | BOOL MethodTable::FindDynamicallyAddedInterface(MethodTable *pInterface) |
1143 | { |
1144 | LIMITED_METHOD_CONTRACT; |
1145 | |
1146 | _ASSERTE(IsRestored_NoLogging()); |
1147 | _ASSERTE(HasDynamicInterfaceMap()); // This should never be called on for a type that is not an extensible RCW. |
1148 | |
1149 | unsigned cDynInterfaces = GetNumDynamicallyAddedInterfaces(); |
1150 | InterfaceInfo_t *pDynItfMap = GetDynamicallyAddedInterfaceMap(); |
1151 | |
1152 | for (unsigned i = 0; i < cDynInterfaces; i++) |
1153 | { |
1154 | if (pDynItfMap[i].GetMethodTable() == pInterface) |
1155 | return TRUE; |
1156 | } |
1157 | |
1158 | return FALSE; |
1159 | } |
1160 | |
1161 | //========================================================================================== |
1162 | void MethodTable::AddDynamicInterface(MethodTable *pItfMT) |
1163 | { |
1164 | CONTRACTL |
1165 | { |
1166 | THROWS; |
1167 | GC_NOTRIGGER; |
1168 | MODE_ANY; |
1169 | PRECONDITION(IsRestored_NoLogging()); |
1170 | PRECONDITION(HasDynamicInterfaceMap()); // This should never be called on for a type that is not an extensible RCW. |
1171 | } |
1172 | CONTRACTL_END; |
1173 | |
1174 | unsigned NumDynAddedInterfaces = GetNumDynamicallyAddedInterfaces(); |
1175 | unsigned TotalNumInterfaces = GetNumInterfaces() + NumDynAddedInterfaces; |
1176 | |
1177 | InterfaceInfo_t *pNewItfMap = NULL; |
1178 | S_SIZE_T AllocSize = (S_SIZE_T(S_UINT32(TotalNumInterfaces) + S_UINT32(1)) * S_SIZE_T(sizeof(InterfaceInfo_t))) + S_SIZE_T(sizeof(DWORD_PTR)); |
1179 | if (AllocSize.IsOverflow()) |
1180 | ThrowHR(COR_E_OVERFLOW); |
1181 | |
// Allocate the new interface table, adding one entry for the new interface and one
// more for the dummy slot before the start of the table.
1184 | pNewItfMap = (InterfaceInfo_t*)(void*)GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(AllocSize); |
1185 | |
1186 | pNewItfMap = (InterfaceInfo_t*)(((BYTE *)pNewItfMap) + sizeof(DWORD_PTR)); |
1187 | |
1188 | // Copy the old map into the new one. |
1189 | if (TotalNumInterfaces > 0) { |
1190 | InterfaceInfo_t *pInterfaceMap = GetInterfaceMap(); |
1191 | PREFIX_ASSUME(pInterfaceMap != NULL); |
1192 | |
1193 | for (unsigned index = 0; index < TotalNumInterfaces; ++index) |
1194 | { |
1195 | InterfaceInfo_t *pIntInfo = (InterfaceInfo_t *) (pNewItfMap + index); |
1196 | pIntInfo->SetMethodTable((pInterfaceMap + index)->GetMethodTable()); |
1197 | } |
1198 | } |
1199 | |
1200 | // Add the new interface at the end of the map. |
1201 | pNewItfMap[TotalNumInterfaces].SetMethodTable(pItfMT); |
1202 | |
1203 | // Update the count of dynamically added interfaces. |
1204 | *(((DWORD_PTR *)pNewItfMap) - 1) = NumDynAddedInterfaces + 1; |
1205 | |
1206 | // Switch the old interface map with the new one. |
1207 | EnsureWritablePages(&m_pInterfaceMap); |
1208 | m_pInterfaceMap.SetValueVolatile(pNewItfMap); |
1209 | |
1210 | // Log the fact that we leaked the interface vtable map. |
1211 | #ifdef _DEBUG |
1212 | LOG((LF_INTEROP, LL_EVERYTHING, |
1213 | "Extensible RCW %s being cast to interface %s caused an interface vtable map leak" , |
1214 | GetClass()->GetDebugClassName(), pItfMT->GetClass()->m_szDebugClassName)); |
1215 | #else // !_DEBUG |
1216 | LOG((LF_INTEROP, LL_EVERYTHING, |
1217 | "Extensible RCW being cast to an interface caused an interface vtable map leak" )); |
1218 | #endif // !_DEBUG |
1219 | } // MethodTable::AddDynamicInterface |
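
// The allocation built above is laid out as follows (m_pInterfaceMap points
// just past the leading count word):
//
//   [DWORD_PTR : count of dynamically added interfaces]
//   [InterfaceInfo_t 0 .. GetNumInterfaces()-1]   static interface map
//   [InterfaceInfo_t GetNumInterfaces() .. ]      dynamically added interfaces
//
// which is why GetDynamicallyAddedInterfaceMap() returns
// GetInterfaceMap() + GetNumInterfaces(), and GetNumDynamicallyAddedInterfaces()
// reads the SIZE_T immediately before the map.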
1220 | |
1221 | #endif // FEATURE_COMINTEROP |
1222 | |
1223 | void MethodTable::SetupGenericsStaticsInfo(FieldDesc* pStaticFieldDescs) |
1224 | { |
1225 | CONTRACTL |
1226 | { |
1227 | THROWS; |
1228 | GC_TRIGGERS; |
1229 | MODE_ANY; |
1230 | } |
1231 | CONTRACTL_END; |
1232 | |
// No need to generate IDs for open types. Indeed, since we don't save them
// in the NGEN image, it would be actively incorrect to do so. However,
// we still leave the optional member in the MethodTable holding the value -1 for the ID.
1236 | |
1237 | GenericsStaticsInfo *pInfo = GetGenericsStaticsInfo(); |
1238 | if (!ContainsGenericVariables() && !IsSharedByGenericInstantiations()) |
1239 | { |
1240 | Module * pModuleForStatics = GetLoaderModule(); |
1241 | |
1242 | pInfo->m_DynamicTypeID = pModuleForStatics->AllocateDynamicEntry(this); |
1243 | } |
1244 | else |
1245 | { |
1246 | pInfo->m_DynamicTypeID = (SIZE_T)-1; |
1247 | } |
1248 | |
1249 | pInfo->m_pFieldDescs.SetValueMaybeNull(pStaticFieldDescs); |
1250 | } |
1251 | |
1252 | #endif // !DACCESS_COMPILE |
1253 | |
1254 | //========================================================================================== |
1255 | // Calculate how many bytes of storage will be required to track additional information for interfaces. This |
1256 | // will be zero if there are no interfaces, but can also be zero for small numbers of interfaces as well, and |
1257 | // callers should be ready to handle this. |
/* static */ SIZE_T MethodTable::GetExtraInterfaceInfoSize(DWORD cInterfaces)
1259 | { |
1260 | LIMITED_METHOD_DAC_CONTRACT; |
1261 | |
1262 | // For small numbers of interfaces we can record the info in the TADDR of the optional member itself (use |
1263 | // the TADDR as a bitmap). |
1264 | if (cInterfaces <= kInlinedInterfaceInfoThreshhold) |
1265 | return 0; |
1266 | |
1267 | // Otherwise we'll cause an array of TADDRs to be allocated (use TADDRs since the heap space allocated |
1268 | // will almost certainly need to be TADDR aligned anyway). |
1269 | return ALIGN_UP(cInterfaces, sizeof(TADDR) * 8) / 8; |
1270 | } |
1271 | |
1272 | #ifdef DACCESS_COMPILE |
1273 | //========================================================================================== |
void MethodTable::EnumMemoryRegionsForExtraInterfaceInfo()
1275 | { |
1276 | SUPPORTS_DAC; |
1277 | |
// No extra data to enum if the number of interfaces is below the threshold -- there is either no data or
// it all fits into the optional members inline.
1280 | if (GetNumInterfaces() <= kInlinedInterfaceInfoThreshhold) |
1281 | return; |
1282 | |
1283 | DacEnumMemoryRegion(*GetExtraInterfaceInfoPtr(), GetExtraInterfaceInfoSize(GetNumInterfaces())); |
1284 | } |
1285 | #endif // DACCESS_COMPILE |
1286 | |
1287 | //========================================================================================== |
1288 | Module* MethodTable::GetModuleForStatics() |
1289 | { |
1290 | WRAPPER_NO_CONTRACT; |
1291 | SUPPORTS_DAC; |
1292 | |
1293 | g_IBCLogger.LogMethodTableAccess(this); |
1294 | |
1295 | if (HasGenericsStaticsInfo()) |
1296 | { |
1297 | DWORD dwDynamicClassDomainID; |
1298 | return GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID); |
1299 | } |
1300 | else |
1301 | { |
1302 | return GetLoaderModule(); |
1303 | } |
1304 | } |
1305 | |
1306 | //========================================================================================== |
1307 | DWORD MethodTable::GetModuleDynamicEntryID() |
1308 | { |
1309 | WRAPPER_NO_CONTRACT; |
1310 | SUPPORTS_DAC; |
1311 | |
1312 | _ASSERTE(IsDynamicStatics() && "Only memory reflection emit types and generics can have a dynamic ID" ); |
1313 | |
1314 | if (HasGenericsStaticsInfo()) |
1315 | { |
1316 | DWORD dwDynamicClassDomainID; |
1317 | GetGenericsStaticsModuleAndID(&dwDynamicClassDomainID); |
1318 | return dwDynamicClassDomainID; |
1319 | } |
1320 | else |
1321 | { |
1322 | return GetClass()->GetModuleDynamicID(); |
1323 | } |
1324 | } |
1325 | |
1326 | #ifndef DACCESS_COMPILE |
1327 | |
1328 | #ifdef FEATURE_TYPEEQUIVALENCE |
1329 | //========================================================================================== |
1330 | // Equivalence based on Guid and TypeIdentifier attributes to support the "no-PIA" feature. |
1331 | BOOL MethodTable::IsEquivalentTo_Worker(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited)) |
1332 | { |
1333 | CONTRACTL |
1334 | { |
1335 | THROWS; |
1336 | GC_TRIGGERS; |
1337 | MODE_ANY; |
1338 | SO_TOLERANT; // we are called from MethodTable::CanCastToClass |
1339 | } |
1340 | CONTRACTL_END; |
1341 | |
1342 | _ASSERTE(HasTypeEquivalence() && pOtherMT->HasTypeEquivalence()); |
1343 | |
1344 | |
1345 | #ifdef _DEBUG |
1346 | if (TypeHandlePairList::Exists(pVisited, TypeHandle(this), TypeHandle(pOtherMT))) |
1347 | { |
1348 | _ASSERTE(!"We are in the process of comparing these types already. That should never happen!" ); |
1349 | return TRUE; |
1350 | } |
1351 | TypeHandlePairList newVisited(TypeHandle(this), TypeHandle(pOtherMT), pVisited); |
1352 | #endif |
1353 | |
1354 | |
1355 | if (HasInstantiation() != pOtherMT->HasInstantiation()) |
1356 | return FALSE; |
1357 | |
1358 | if (IsArray()) |
1359 | { |
1360 | if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank()) |
1361 | return FALSE; |
1362 | |
1363 | // arrays of structures have their own unshared MTs and will take this path |
1364 | return (GetApproxArrayElementTypeHandle().IsEquivalentTo(pOtherMT->GetApproxArrayElementTypeHandle() COMMA_INDEBUG(&newVisited))); |
1365 | } |
1366 | |
1367 | BOOL bResult = FALSE; |
1368 | |
1369 | BEGIN_SO_INTOLERANT_CODE(GetThread()); |
1370 | bResult = IsEquivalentTo_WorkerInner(pOtherMT COMMA_INDEBUG(&newVisited)); |
1371 | END_SO_INTOLERANT_CODE; |
1372 | |
1373 | return bResult; |
1374 | } |
1375 | |
1376 | //========================================================================================== |
1377 | // Type equivalence - SO intolerant part. |
1378 | BOOL MethodTable::IsEquivalentTo_WorkerInner(MethodTable *pOtherMT COMMA_INDEBUG(TypeHandlePairList *pVisited)) |
1379 | { |
1380 | CONTRACTL |
1381 | { |
1382 | THROWS; |
1383 | GC_TRIGGERS; |
1384 | MODE_ANY; |
1385 | SO_INTOLERANT; |
1386 | LOADS_TYPE(CLASS_DEPENDENCIES_LOADED); |
1387 | } |
1388 | CONTRACTL_END; |
1389 | |
1390 | TypeEquivalenceHashTable *typeHashTable = NULL; |
1391 | AppDomain *pDomain = GetAppDomain(); |
1392 | if (pDomain != NULL) |
1393 | { |
1394 | typeHashTable = pDomain->GetTypeEquivalenceCache(); |
1395 | TypeEquivalenceHashTable::EquivalenceMatch match = typeHashTable->CheckEquivalence(TypeHandle(this), TypeHandle(pOtherMT)); |
1396 | switch (match) |
1397 | { |
1398 | case TypeEquivalenceHashTable::Match: |
1399 | return TRUE; |
1400 | case TypeEquivalenceHashTable::NoMatch: |
1401 | return FALSE; |
1402 | case TypeEquivalenceHashTable::MatchUnknown: |
1403 | break; |
1404 | default: |
1405 | _ASSERTE(FALSE); |
1406 | break; |
1407 | } |
1408 | } |
1409 | |
1410 | BOOL fEquivalent = FALSE; |
1411 | |
1412 | // Check if type is generic |
1413 | if (HasInstantiation()) |
1414 | { |
1415 | // Limit variance on generics only to interfaces |
1416 | if (!IsInterface() || !pOtherMT->IsInterface()) |
1417 | { |
1418 | fEquivalent = FALSE; |
1419 | goto EquivalenceCalculated; |
1420 | } |
1421 | |
1422 | // check whether the instantiations are equivalent |
1423 | Instantiation inst1 = GetInstantiation(); |
1424 | Instantiation inst2 = pOtherMT->GetInstantiation(); |
1425 | |
1426 | // Verify generic argument count |
1427 | if (inst1.GetNumArgs() != inst2.GetNumArgs()) |
1428 | { |
1429 | fEquivalent = FALSE; |
1430 | goto EquivalenceCalculated; |
1431 | } |
1432 | |
1433 | // Verify each generic argument type |
1434 | for (DWORD i = 0; i < inst1.GetNumArgs(); i++) |
1435 | { |
1436 | if (!inst1[i].IsEquivalentTo(inst2[i] COMMA_INDEBUG(pVisited))) |
1437 | { |
1438 | fEquivalent = FALSE; |
1439 | goto EquivalenceCalculated; |
1440 | } |
1441 | } |
1442 | |
1443 | if (GetTypeDefRid() == pOtherMT->GetTypeDefRid() && GetModule() == pOtherMT->GetModule()) |
1444 | { |
1445 | // it's OK to declare the MTs equivalent at this point; the cases we care |
1446 | // about are IList<IFoo> and IList<IBar> where IFoo and IBar are equivalent |
1447 | fEquivalent = TRUE; |
1448 | } |
1449 | else |
1450 | { |
1451 | fEquivalent = FALSE; |
1452 | } |
1453 | goto EquivalenceCalculated; |
1454 | } |
1455 | |
1456 | if (IsArray()) |
1457 | { |
1458 | if (!pOtherMT->IsArray() || GetRank() != pOtherMT->GetRank()) |
1459 | { |
1460 | fEquivalent = FALSE; |
1461 | goto EquivalenceCalculated; |
1462 | } |
1463 | |
1464 | // arrays of structures have their own unshared MTs and will take this path |
1465 | TypeHandle elementType1 = GetApproxArrayElementTypeHandle(); |
1466 | TypeHandle elementType2 = pOtherMT->GetApproxArrayElementTypeHandle(); |
1467 | fEquivalent = elementType1.IsEquivalentTo(elementType2 COMMA_INDEBUG(pVisited)); |
1468 | goto EquivalenceCalculated; |
1469 | } |
1470 | |
1471 | fEquivalent = CompareTypeDefsForEquivalence(GetCl(), pOtherMT->GetCl(), GetModule(), pOtherMT->GetModule(), NULL); |
1472 | |
1473 | EquivalenceCalculated: |
1474 | // Record equivalence matches if a table exists |
1475 | if (typeHashTable != NULL) |
1476 | { |
1477 | // Collectible type results will not get cached. |
1478 | if ((!Collectible() && !pOtherMT->Collectible())) |
1479 | { |
1480 | auto match = fEquivalent ? TypeEquivalenceHashTable::Match : TypeEquivalenceHashTable::NoMatch; |
1481 | typeHashTable->RecordEquivalence(TypeHandle(this), TypeHandle(pOtherMT), match); |
1482 | } |
1483 | } |
1484 | |
1485 | return fEquivalent; |
1486 | } |
1487 | #endif // FEATURE_TYPEEQUIVALENCE |
1488 | |
1489 | //========================================================================================== |
1490 | BOOL MethodTable::CanCastToInterface(MethodTable *pTargetMT, TypeHandlePairList *pVisited) |
1491 | { |
1492 | CONTRACTL |
1493 | { |
1494 | THROWS; |
1495 | GC_TRIGGERS; |
1496 | MODE_ANY; |
1497 | INSTANCE_CHECK; |
1498 | PRECONDITION(CheckPointer(pTargetMT)); |
1499 | PRECONDITION(pTargetMT->IsInterface()); |
1500 | PRECONDITION(IsRestored_NoLogging()); |
1501 | } |
1502 | CONTRACTL_END |
1503 | |
1504 | if (!pTargetMT->HasVariance()) |
1505 | { |
1506 | if (HasTypeEquivalence() || pTargetMT->HasTypeEquivalence()) |
1507 | { |
1508 | if (IsInterface() && IsEquivalentTo(pTargetMT)) |
1509 | return TRUE; |
1510 | |
1511 | return ImplementsEquivalentInterface(pTargetMT); |
1512 | } |
1513 | |
1514 | return CanCastToNonVariantInterface(pTargetMT); |
1515 | } |
1516 | else |
1517 | { |
1518 | if (CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited)) |
1519 | return TRUE; |
1520 | |
1521 | InterfaceMapIterator it = IterateInterfaceMap(); |
1522 | while (it.Next()) |
1523 | { |
1524 | if (it.GetInterface()->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited)) |
1525 | return TRUE; |
1526 | } |
1527 | } |
1528 | return FALSE; |
1529 | } |
1530 | |
1531 | //========================================================================================== |
1532 | BOOL MethodTable::CanCastByVarianceToInterfaceOrDelegate(MethodTable *pTargetMT, TypeHandlePairList *pVisited) |
1533 | { |
1534 | CONTRACTL |
1535 | { |
1536 | THROWS; |
1537 | GC_TRIGGERS; |
1538 | MODE_ANY; |
1539 | INSTANCE_CHECK; |
1540 | PRECONDITION(CheckPointer(pTargetMT)); |
1541 | PRECONDITION(pTargetMT->HasVariance()); |
1542 | PRECONDITION(pTargetMT->IsInterface() || pTargetMT->IsDelegate()); |
1543 | PRECONDITION(IsRestored_NoLogging()); |
1544 | } |
1545 | CONTRACTL_END |
1546 | |
1547 | BOOL returnValue = FALSE; |
1548 | |
1549 | EEClass *pClass = NULL; |
1550 | |
1551 | TypeHandlePairList pairList(this, pTargetMT, pVisited); |
1552 | |
1553 | if (TypeHandlePairList::Exists(pVisited, this, pTargetMT)) |
1554 | goto Exit; |
1555 | |
1556 | if (GetTypeDefRid() != pTargetMT->GetTypeDefRid() || GetModule() != pTargetMT->GetModule()) |
1557 | { |
1558 | goto Exit; |
1559 | } |
1560 | |
1561 | { |
1562 | pClass = pTargetMT->GetClass(); |
1563 | Instantiation inst = GetInstantiation(); |
1564 | Instantiation targetInst = pTargetMT->GetInstantiation(); |
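
        // For example (illustrative only): IEnumerable<string> can be cast to
        // IEnumerable<object> because the type parameter of IEnumerable<out T> is
        // covariant and string is castable to object, while Action<object> can be
        // cast to Action<string> because the parameter of Action<in T> is
        // contravariant.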
1565 | |
1566 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
1567 | { |
1568 | TypeHandle thArg = inst[i]; |
1569 | TypeHandle thTargetArg = targetInst[i]; |
1570 | |
            // If argument types are not equivalent, test them for compatibility
            // in accordance with the variance annotation
1573 | if (!thArg.IsEquivalentTo(thTargetArg)) |
1574 | { |
1575 | switch (pClass->GetVarianceOfTypeParameter(i)) |
1576 | { |
1577 | case gpCovariant : |
1578 | if (!thArg.IsBoxedAndCanCastTo(thTargetArg, &pairList)) |
1579 | goto Exit; |
1580 | break; |
1581 | |
1582 | case gpContravariant : |
1583 | if (!thTargetArg.IsBoxedAndCanCastTo(thArg, &pairList)) |
1584 | goto Exit; |
1585 | break; |
1586 | |
1587 | case gpNonVariant : |
1588 | goto Exit; |
1589 | |
1590 | default : |
1591 | _ASSERTE(!"Illegal variance annotation" ); |
1592 | goto Exit; |
1593 | } |
1594 | } |
1595 | } |
1596 | } |
1597 | |
1598 | returnValue = TRUE; |
1599 | |
1600 | Exit: |
1601 | |
1602 | return returnValue; |
1603 | } |
1604 | |
1605 | //========================================================================================== |
1606 | BOOL MethodTable::CanCastToClass(MethodTable *pTargetMT, TypeHandlePairList *pVisited) |
1607 | { |
1608 | CONTRACTL |
1609 | { |
1610 | THROWS; |
1611 | GC_TRIGGERS; |
1612 | MODE_ANY; |
1613 | INSTANCE_CHECK; |
1614 | PRECONDITION(CheckPointer(pTargetMT)); |
1615 | PRECONDITION(!pTargetMT->IsArray()); |
1616 | PRECONDITION(!pTargetMT->IsInterface()); |
1617 | } |
1618 | CONTRACTL_END |
1619 | |
1620 | MethodTable *pMT = this; |
1621 | |
1622 | // If the target type has variant type parameters, we take a slower path |
1623 | if (pTargetMT->HasVariance()) |
1624 | { |
1625 | // At present, we support variance only on delegates and interfaces |
1626 | CONSISTENCY_CHECK(pTargetMT->IsDelegate()); |
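
        // For example (illustrative only): casting Func<string> to Func<object> lands
        // here because Func<out TResult> is a variant delegate type; each level of the
        // inheritance chain gets the per-argument variance check.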
1627 | |
1628 | // First chase inheritance hierarchy until we hit a class that only differs in its instantiation |
1629 | do { |
1630 | // Cheap check for equivalence |
1631 | if (pMT->IsEquivalentTo(pTargetMT)) |
1632 | return TRUE; |
1633 | |
1634 | g_IBCLogger.LogMethodTableAccess(pMT); |
1635 | |
1636 | if (pMT->CanCastByVarianceToInterfaceOrDelegate(pTargetMT, pVisited)) |
1637 | return TRUE; |
1638 | |
1639 | pMT = pMT->GetParentMethodTable(); |
1640 | } while (pMT); |
1641 | } |
1642 | |
1643 | // If there are no variant type parameters, just chase the hierarchy |
1644 | else |
1645 | { |
1646 | do { |
1647 | if (pMT->IsEquivalentTo(pTargetMT)) |
1648 | return TRUE; |
1649 | |
1650 | g_IBCLogger.LogMethodTableAccess(pMT); |
1651 | |
1652 | pMT = pMT->GetParentMethodTable(); |
1653 | } while (pMT); |
1654 | } |
1655 | |
1656 | return FALSE; |
1657 | } |
1658 | |
1659 | #include <optsmallperfcritical.h> |
1660 | //========================================================================================== |
1661 | BOOL MethodTable::CanCastToNonVariantInterface(MethodTable *pTargetMT) |
1662 | { |
1663 | CONTRACTL |
1664 | { |
1665 | NOTHROW; |
1666 | GC_NOTRIGGER; |
1667 | MODE_ANY; |
1668 | INSTANCE_CHECK; |
1669 | SO_TOLERANT; |
1670 | PRECONDITION(CheckPointer(pTargetMT)); |
1671 | PRECONDITION(pTargetMT->IsInterface()); |
1672 | PRECONDITION(!pTargetMT->HasVariance()); |
1673 | PRECONDITION(IsRestored_NoLogging()); |
1674 | } |
1675 | CONTRACTL_END |
1676 | |
1677 | // Check to see if the current class is for the interface passed in. |
1678 | if (this == pTargetMT) |
1679 | return TRUE; |
1680 | |
1681 | // Check to see if the static class definition indicates we implement the interface. |
1682 | return ImplementsInterfaceInline(pTargetMT); |
1683 | } |
1684 | |
1685 | //========================================================================================== |
1686 | TypeHandle::CastResult MethodTable::CanCastToInterfaceNoGC(MethodTable *pTargetMT) |
1687 | { |
1688 | CONTRACTL |
1689 | { |
1690 | NOTHROW; |
1691 | GC_NOTRIGGER; |
1692 | MODE_ANY; |
1693 | INSTANCE_CHECK; |
1694 | SO_TOLERANT; |
1695 | PRECONDITION(CheckPointer(pTargetMT)); |
1696 | PRECONDITION(pTargetMT->IsInterface()); |
1697 | PRECONDITION(IsRestored_NoLogging()); |
1698 | } |
1699 | CONTRACTL_END |
1700 | |
1701 | if (!pTargetMT->HasVariance() && !IsArray() && !HasTypeEquivalence() && !pTargetMT->HasTypeEquivalence()) |
1702 | { |
1703 | return CanCastToNonVariantInterface(pTargetMT) ? TypeHandle::CanCast : TypeHandle::CannotCast; |
1704 | } |
1705 | else |
1706 | { |
1707 | // We're conservative on variant interfaces and types with equivalence |
1708 | return TypeHandle::MaybeCast; |
1709 | } |
1710 | } |
1711 | |
1712 | //========================================================================================== |
1713 | TypeHandle::CastResult MethodTable::CanCastToClassNoGC(MethodTable *pTargetMT) |
1714 | { |
1715 | CONTRACTL |
1716 | { |
1717 | NOTHROW; |
1718 | GC_NOTRIGGER; |
1719 | MODE_ANY; |
1720 | INSTANCE_CHECK; |
1721 | SO_TOLERANT; |
1722 | PRECONDITION(CheckPointer(pTargetMT)); |
1723 | PRECONDITION(!pTargetMT->IsArray()); |
1724 | PRECONDITION(!pTargetMT->IsInterface()); |
1725 | } |
1726 | CONTRACTL_END |
1727 | |
1728 | // We're conservative on variant classes |
1729 | if (pTargetMT->HasVariance() || g_IBCLogger.InstrEnabled()) |
1730 | { |
1731 | return TypeHandle::MaybeCast; |
1732 | } |
1733 | |
1734 | // Type equivalence needs the slow path |
1735 | if (HasTypeEquivalence() || pTargetMT->HasTypeEquivalence()) |
1736 | { |
1737 | return TypeHandle::MaybeCast; |
1738 | } |
1739 | |
1740 | // If there are no variant type parameters, just chase the hierarchy |
1741 | else |
1742 | { |
1743 | PTR_VOID pMT = this; |
1744 | |
1745 | do { |
1746 | if (pMT == pTargetMT) |
1747 | return TypeHandle::CanCast; |
1748 | |
1749 | pMT = MethodTable::GetParentMethodTableOrIndirection(pMT); |
1750 | } while (pMT); |
1751 | } |
1752 | |
1753 | return TypeHandle::CannotCast; |
1754 | } |
1755 | #include <optdefault.h> |
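
// A minimal sketch of the intended caller pattern for the NoGC probes above
// (illustrative only, not part of the runtime):
//
//   TypeHandle::CastResult result = pSourceMT->CanCastToClassNoGC(pTargetMT);
//   if (result == TypeHandle::MaybeCast)
//   {
//       // Conservative answer: fall back to the throwing, GC-triggering path,
//       // which handles variance, type equivalence and IBC instrumentation.
//       fCanCast = pSourceMT->CanCastToClass(pTargetMT, NULL);
//   }
//   else
//   {
//       fCanCast = (result == TypeHandle::CanCast);
//   }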
1756 | |
1757 | BOOL |
1758 | MethodTable::IsExternallyVisible() |
1759 | { |
1760 | CONTRACTL |
1761 | { |
1762 | THROWS; |
1763 | MODE_ANY; |
1764 | GC_TRIGGERS; |
1765 | SO_INTOLERANT; |
1766 | } |
1767 | CONTRACTL_END; |
1768 | |
1769 | BOOL bIsVisible = IsTypeDefExternallyVisible(GetCl(), GetModule(), GetClass()->GetAttrClass()); |
1770 | |
1771 | if (bIsVisible && HasInstantiation() && !IsGenericTypeDefinition()) |
1772 | { |
1773 | for (COUNT_T i = 0; i < GetNumGenericArgs(); i++) |
1774 | { |
1775 | if (!GetInstantiation()[i].IsExternallyVisible()) |
1776 | return FALSE; |
1777 | } |
1778 | } |
1779 | |
1780 | return bIsVisible; |
1781 | } // MethodTable::IsExternallyVisible |
1782 | |
1783 | #ifdef FEATURE_PREJIT |
1784 | |
1785 | BOOL MethodTable::CanShareVtableChunksFrom(MethodTable *pTargetMT, Module *pCurrentLoaderModule, Module *pCurrentPreferredZapModule) |
1786 | { |
1787 | WRAPPER_NO_CONTRACT; |
1788 | |
1789 | // These constraints come from two places: |
1790 | // 1. A non-zapped MT cannot share with a zapped MT since it may result in SetSlot() on a read-only slot |
1791 | // 2. Zapping this MT in MethodTable::Save cannot "unshare" something we decide to share now |
1792 | // |
1793 | // We could fix both of these and allow non-zapped MTs to share chunks fully by doing the following |
1794 | // 1. Fix the few dangerous callers of SetSlot to first check whether the chunk itself is zapped |
1795 | // (see MethodTableBuilder::CopyExactParentSlots, or we could use ExecutionManager::FindZapModule) |
1796 | // 2. Have this function return FALSE if IsCompilationProcess and rely on MethodTable::Save to do all sharing for the NGen case |
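    //
    // For example (illustrative only): a JIT-loaded instantiation must not reuse a
    // vtable chunk from an NGen image, because a later SetSlot() on that chunk would
    // write to read-only (zapped) memory.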
1797 | |
1798 | return !pTargetMT->IsZapped() && |
1799 | pTargetMT->GetLoaderModule() == pCurrentLoaderModule && |
1800 | pCurrentLoaderModule == pCurrentPreferredZapModule && |
1801 | pCurrentPreferredZapModule == Module::GetPreferredZapModuleForMethodTable(pTargetMT); |
1802 | } |
1803 | |
1804 | #else |
1805 | |
1806 | BOOL MethodTable::CanShareVtableChunksFrom(MethodTable *pTargetMT, Module *pCurrentLoaderModule) |
1807 | { |
1808 | WRAPPER_NO_CONTRACT; |
1809 | |
1810 | return pTargetMT->GetLoaderModule() == pCurrentLoaderModule; |
1811 | } |
1812 | |
1813 | #endif |
1814 | |
1815 | #ifdef _DEBUG |
1816 | |
1817 | void |
1818 | MethodTable::DebugDumpVtable(LPCUTF8 szClassName, BOOL fDebug) |
1819 | { |
    // Diagnostic functions shouldn't affect normal behavior
1821 | CONTRACTL |
1822 | { |
1823 | NOTHROW; |
1824 | GC_TRIGGERS; |
1825 | } |
1826 | CONTRACTL_END; |
1827 | |
1828 | CQuickBytes qb; |
1829 | const size_t cchBuff = MAX_CLASSNAME_LENGTH + 30; |
1830 | LPWSTR buff = fDebug ? (LPWSTR) qb.AllocNoThrow(cchBuff * sizeof(WCHAR)) : NULL; |
1831 | |
1832 | if ((buff == NULL) && fDebug) |
1833 | { |
1834 | WszOutputDebugString(W("OOM when dumping VTable - falling back to logging" )); |
1835 | fDebug = FALSE; |
1836 | } |
1837 | |
1838 | if (fDebug) |
1839 | { |
1840 | swprintf_s(buff, cchBuff, W("Vtable (with interface dupes) for '%S':\n" ), szClassName); |
1841 | #ifdef _DEBUG |
1842 | swprintf_s(&buff[wcslen(buff)], cchBuff - wcslen(buff) , W(" Total duplicate slots = %d\n" ), g_dupMethods); |
1843 | #endif |
1844 | WszOutputDebugString(buff); |
1845 | } |
1846 | else |
1847 | { |
1848 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
1849 | LOG((LF_ALWAYS, LL_ALWAYS, "Vtable (with interface dupes) for '%s':\n" , szClassName)); |
1850 | LOG((LF_ALWAYS, LL_ALWAYS, " Total duplicate slots = %d\n" , g_dupMethods)); |
1851 | } |
1852 | |
1853 | HRESULT hr; |
1854 | EX_TRY |
1855 | { |
1856 | MethodIterator it(this); |
1857 | for (; it.IsValid(); it.Next()) |
1858 | { |
1859 | MethodDesc *pMD = it.GetMethodDesc(); |
1860 | LPCUTF8 pszName = pMD->GetName((USHORT) it.GetSlotNumber()); |
1861 | DWORD dwAttrs = pMD->GetAttrs(); |
1862 | |
1863 | if (fDebug) |
1864 | { |
1865 | DefineFullyQualifiedNameForClass(); |
1866 | LPCUTF8 name = GetFullyQualifiedNameForClass(pMD->GetMethodTable()); |
1867 | swprintf_s(buff, cchBuff, |
1868 | W(" slot %2d: %S::%S%S 0x%p (slot = %2d)\n" ), |
1869 | it.GetSlotNumber(), |
1870 | name, |
1871 | pszName, |
1872 | IsMdFinal(dwAttrs) ? " (final)" : "" , |
1873 | pMD->GetMethodEntryPoint(), |
1874 | pMD->GetSlot() |
1875 | ); |
1876 | WszOutputDebugString(buff); |
1877 | } |
1878 | else |
1879 | { |
1880 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
1881 | LOG((LF_ALWAYS, LL_ALWAYS, |
1882 | " slot %2d: %s::%s%s 0x%p (slot = %2d)\n" , |
1883 | it.GetSlotNumber(), |
1884 | pMD->GetClass()->GetDebugClassName(), |
1885 | pszName, |
1886 | IsMdFinal(dwAttrs) ? " (final)" : "" , |
1887 | pMD->GetMethodEntryPoint(), |
1888 | pMD->GetSlot() |
1889 | )); |
1890 | } |
1891 | if (it.GetSlotNumber() == (DWORD)(GetNumMethods()-1)) |
1892 | { |
1893 | if (fDebug) |
1894 | { |
1895 | WszOutputDebugString(W(" <-- vtable ends here\n" )); |
1896 | } |
1897 | else |
1898 | { |
1899 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
1900 | LOG((LF_ALWAYS, LL_ALWAYS, " <-- vtable ends here\n" )); |
1901 | } |
1902 | } |
1903 | } |
1904 | } |
1905 | EX_CATCH_HRESULT(hr); |
1906 | |
1907 | if (fDebug) |
1908 | { |
1909 | WszOutputDebugString(W("\n" )); |
1910 | } |
1911 | else |
1912 | { |
1913 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
1914 | LOG((LF_ALWAYS, LL_ALWAYS, "\n" )); |
1915 | } |
1916 | } // MethodTable::DebugDumpVtable |
1917 | |
1918 | void |
1919 | MethodTable::Debug_DumpInterfaceMap( |
1920 | LPCSTR szInterfaceMapPrefix) |
1921 | { |
1922 | // Diagnostic functions shouldn't affect normal behavior |
1923 | CONTRACTL |
1924 | { |
1925 | NOTHROW; |
1926 | GC_TRIGGERS; |
1927 | } |
1928 | CONTRACTL_END; |
1929 | |
1930 | if (GetNumInterfaces() == 0) |
1931 | { // There are no interfaces, no point in printing interface map info |
1932 | return; |
1933 | } |
1934 | |
1935 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
    LOG((LF_ALWAYS, LL_ALWAYS,
        "%s Interface Map for '%s':\n",
        szInterfaceMapPrefix,
        GetDebugClassName()));
    LOG((LF_ALWAYS, LL_ALWAYS,
        " Number of interfaces = %d\n",
        GetNumInterfaces()));
1943 | |
1944 | HRESULT hr; |
1945 | EX_TRY |
1946 | { |
1947 | InterfaceMapIterator it(this); |
1948 | while (it.Next()) |
1949 | { |
1950 | MethodTable *pInterfaceMT = it.GetInterface(); |
1951 | |
1952 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
            LOG((LF_ALWAYS, LL_ALWAYS,
                " index %2d: %s 0x%p\n",
                it.GetIndex(),
                pInterfaceMT->GetDebugClassName(),
                pInterfaceMT));
        }
        //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
        LOG((LF_ALWAYS, LL_ALWAYS, " <-- interface map ends here\n"));
    }
    EX_CATCH_HRESULT(hr);

    //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad
    LOG((LF_ALWAYS, LL_ALWAYS, "\n"));
1966 | } // MethodTable::Debug_DumpInterfaceMap |
1967 | |
1968 | void |
1969 | MethodTable::Debug_DumpDispatchMap() |
1970 | { |
1971 | WRAPPER_NO_CONTRACT; // It's a dev helper, we don't care about contracts |
1972 | |
1973 | if (!HasDispatchMap()) |
    { // There is no dispatch map for this type, no point in printing the info
1975 | return; |
1976 | } |
1977 | |
1978 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
1979 | LOG((LF_ALWAYS, LL_ALWAYS, "Dispatch Map for '%s':\n" , GetDebugClassName())); |
1980 | |
1981 | InterfaceInfo_t * pInterfaceMap = GetInterfaceMap(); |
1982 | DispatchMap::EncodedMapIterator it(this); |
1983 | |
1984 | while (it.IsValid()) |
1985 | { |
1986 | DispatchMapEntry *pEntry = it.Entry(); |
1987 | |
1988 | UINT32 nInterfaceIndex = pEntry->GetTypeID().GetInterfaceNum(); |
1989 | _ASSERTE(nInterfaceIndex < GetNumInterfaces()); |
1990 | |
1991 | MethodTable * pInterface = pInterfaceMap[nInterfaceIndex].GetMethodTable(); |
1992 | UINT32 nInterfaceSlotNumber = pEntry->GetSlotNumber(); |
1993 | UINT32 nImplementationSlotNumber = pEntry->GetTargetSlotNumber(); |
1994 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
        LOG((LF_ALWAYS, LL_ALWAYS,
            " Interface %d (%s) slot %d (%s) implemented in slot %d (%s)\n",
1997 | nInterfaceIndex, |
1998 | pInterface->GetDebugClassName(), |
1999 | nInterfaceSlotNumber, |
2000 | pInterface->GetMethodDescForSlot(nInterfaceSlotNumber)->GetName(), |
2001 | nImplementationSlotNumber, |
2002 | GetMethodDescForSlot(nImplementationSlotNumber)->GetName())); |
2003 | |
2004 | it.Next(); |
2005 | } |
2006 | //LF_ALWAYS allowed here because this is controlled by special env var code:EEConfig::ShouldDumpOnClassLoad |
2007 | LOG((LF_ALWAYS, LL_ALWAYS, " <-- Dispatch map ends here\n" )); |
2008 | } // MethodTable::Debug_DumpDispatchMap |
2009 | |
2010 | #endif //_DEBUG |
2011 | |
2012 | //========================================================================================== |
2013 | NOINLINE BOOL MethodTable::ImplementsInterface(MethodTable *pInterface) |
2014 | { |
2015 | WRAPPER_NO_CONTRACT; |
2016 | return ImplementsInterfaceInline(pInterface); |
2017 | } |
2018 | |
2019 | //========================================================================================== |
2020 | BOOL MethodTable::ImplementsEquivalentInterface(MethodTable *pInterface) |
2021 | { |
2022 | CONTRACTL |
2023 | { |
2024 | THROWS; |
2025 | GC_TRIGGERS; |
2026 | SO_TOLERANT; |
2027 | PRECONDITION(pInterface->IsInterface()); // class we are looking up should be an interface |
2028 | } |
2029 | CONTRACTL_END; |
2030 | |
2031 | // look for exact match first (optimize for success) |
2032 | if (ImplementsInterfaceInline(pInterface)) |
2033 | return TRUE; |
2034 | |
2035 | if (!pInterface->HasTypeEquivalence()) |
2036 | return FALSE; |
2037 | |
2038 | DWORD numInterfaces = GetNumInterfaces(); |
2039 | if (numInterfaces == 0) |
2040 | return FALSE; |
2041 | |
2042 | InterfaceInfo_t *pInfo = GetInterfaceMap(); |
2043 | |
2044 | do |
2045 | { |
2046 | if (pInfo->GetMethodTable()->IsEquivalentTo(pInterface)) |
2047 | return TRUE; |
2048 | |
2049 | pInfo++; |
2050 | } |
2051 | while (--numInterfaces); |
2052 | |
2053 | return FALSE; |
2054 | } |
2055 | |
2056 | //========================================================================================== |
2057 | MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(MethodDesc *pInterfaceMD, BOOL throwOnConflict) |
2058 | { |
2059 | CONTRACTL |
2060 | { |
2061 | THROWS; |
2062 | GC_TRIGGERS; |
2063 | PRECONDITION(!pInterfaceMD->HasClassOrMethodInstantiation()); |
2064 | } |
2065 | CONTRACTL_END; |
2066 | WRAPPER_NO_CONTRACT; |
2067 | |
2068 | return GetMethodDescForInterfaceMethod(TypeHandle(pInterfaceMD->GetMethodTable()), pInterfaceMD, throwOnConflict); |
2069 | } |
2070 | |
2071 | //========================================================================================== |
2072 | MethodDesc *MethodTable::GetMethodDescForInterfaceMethod(TypeHandle ownerType, MethodDesc *pInterfaceMD, BOOL throwOnConflict) |
2073 | { |
2074 | CONTRACTL |
2075 | { |
2076 | THROWS; |
2077 | GC_TRIGGERS; |
2078 | PRECONDITION(!ownerType.IsNull()); |
2079 | PRECONDITION(ownerType.GetMethodTable()->IsInterface()); |
2080 | PRECONDITION(ownerType.GetMethodTable()->HasSameTypeDefAs(pInterfaceMD->GetMethodTable())); |
2081 | PRECONDITION(IsArray() || ImplementsEquivalentInterface(ownerType.GetMethodTable()) || ownerType.GetMethodTable()->HasVariance()); |
2082 | } |
2083 | CONTRACTL_END; |
2084 | |
2085 | MethodDesc *pMD = NULL; |
2086 | |
2087 | MethodTable *pInterfaceMT = ownerType.AsMethodTable(); |
2088 | |
2089 | #ifdef CROSSGEN_COMPILE |
2090 | DispatchSlot implSlot(FindDispatchSlot(pInterfaceMT->GetTypeID(), pInterfaceMD->GetSlot(), throwOnConflict)); |
2091 | if (implSlot.IsNull()) |
2092 | { |
2093 | _ASSERTE(!throwOnConflict); |
2094 | return NULL; |
2095 | } |
2096 | PCODE pTgt = implSlot.GetTarget(); |
2097 | #else |
2098 | PCODE pTgt = VirtualCallStubManager::GetTarget( |
2099 | pInterfaceMT->GetLoaderAllocator()->GetDispatchToken(pInterfaceMT->GetTypeID(), pInterfaceMD->GetSlot()), |
2100 | this, throwOnConflict); |
2101 | if (pTgt == NULL) |
2102 | { |
2103 | _ASSERTE(!throwOnConflict); |
2104 | return NULL; |
2105 | } |
2106 | #endif |
2107 | pMD = MethodTable::GetMethodDescForSlotAddress(pTgt); |
2108 | |
2109 | #ifdef _DEBUG |
2110 | MethodDesc *pDispSlotMD = FindDispatchSlotForInterfaceMD(ownerType, pInterfaceMD, throwOnConflict).GetMethodDesc(); |
2111 | _ASSERTE(pDispSlotMD == pMD); |
2112 | #endif // _DEBUG |
2113 | |
2114 | pMD->CheckRestore(); |
2115 | |
2116 | return pMD; |
2117 | } |
2118 | #endif // DACCESS_COMPILE |
2119 | |
2120 | //========================================================================================== |
2121 | PTR_FieldDesc MethodTable::GetFieldDescByIndex(DWORD fieldIndex) |
2122 | { |
2123 | LIMITED_METHOD_CONTRACT; |
2124 | |
2125 | if (HasGenericsStaticsInfo() && |
2126 | fieldIndex >= GetNumIntroducedInstanceFields()) |
2127 | { |
2128 | return GetGenericsStaticFieldDescs() + (fieldIndex - GetNumIntroducedInstanceFields()); |
2129 | } |
2130 | else |
2131 | { |
2132 | return GetClass()->GetFieldDescList() + fieldIndex; |
2133 | } |
2134 | } |
2135 | |
2136 | //========================================================================================== |
2137 | DWORD MethodTable::GetIndexForFieldDesc(FieldDesc *pField) |
2138 | { |
2139 | LIMITED_METHOD_CONTRACT; |
2140 | |
2141 | if (pField->IsStatic() && HasGenericsStaticsInfo()) |
2142 | { |
2143 | FieldDesc *pStaticFields = GetGenericsStaticFieldDescs(); |
2144 | |
2145 | return GetNumIntroducedInstanceFields() + DWORD(pField - pStaticFields); |
2146 | |
2147 | } |
2148 | else |
2149 | { |
2150 | FieldDesc *pFields = GetClass()->GetFieldDescList(); |
2151 | |
2152 | return DWORD(pField - pFields); |
2153 | } |
2154 | } |
2155 | |
2156 | //========================================================================================== |
2157 | #ifdef _MSC_VER |
2158 | #pragma optimize("t", on) |
2159 | #endif // _MSC_VER |
2160 | // compute whether the type can be considered to have had its |
2161 | // static initialization run without doing anything at all, i.e. whether we know |
2162 | // immediately that the type requires nothing to do for initialization |
2163 | // |
// If a type used as a representative during JITting is PreInit then
2165 | // any types that it may represent within a code-sharing |
2166 | // group are also PreInit. For example, if List<object> is PreInit then List<string> |
2167 | // and List<MyType> are also PreInit. This is because the dynamicStatics, staticRefHandles |
2168 | // and hasCCtor are all identical given a head type, and weakening the domainNeutrality |
2169 | // to DomainSpecific only makes more types PreInit. |
2170 | BOOL MethodTable::IsClassPreInited() |
2171 | { |
2172 | LIMITED_METHOD_CONTRACT; |
2173 | |
2174 | if (ContainsGenericVariables()) |
2175 | return TRUE; |
2176 | |
2177 | if (HasClassConstructor()) |
2178 | return FALSE; |
2179 | |
2180 | if (HasBoxedRegularStatics()) |
2181 | return FALSE; |
2182 | |
2183 | if (IsDynamicStatics()) |
2184 | return FALSE; |
2185 | |
2186 | return TRUE; |
2187 | } |
2188 | #ifdef _MSC_VER |
2189 | #pragma optimize("", on) |
2190 | #endif // _MSC_VER |
2191 | |
2192 | //======================================================================================== |
2193 | |
2194 | #if defined(UNIX_AMD64_ABI_ITF) |
2195 | |
2196 | #if defined(_DEBUG) && defined(LOGGING) |
2197 | static |
2198 | const char* GetSystemVClassificationTypeName(SystemVClassificationType t) |
2199 | { |
2200 | switch (t) |
2201 | { |
    case SystemVClassificationTypeUnknown: return "Unknown";
    case SystemVClassificationTypeStruct: return "Struct";
    case SystemVClassificationTypeNoClass: return "NoClass";
    case SystemVClassificationTypeMemory: return "Memory";
    case SystemVClassificationTypeInteger: return "Integer";
    case SystemVClassificationTypeIntegerReference: return "IntegerReference";
    case SystemVClassificationTypeIntegerByRef: return "IntegerByReference";
    case SystemVClassificationTypeSSE: return "SSE";
    case SystemVClassificationTypeTypedReference: return "TypedReference";
    default: return "ERROR";
    }
}
2214 | #endif // _DEBUG && LOGGING |
2215 | |
2216 | // Returns 'true' if the struct is passed in registers, 'false' otherwise. |
2217 | bool MethodTable::ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool useNativeLayout) |
2218 | { |
2219 | if (useNativeLayout) |
2220 | { |
2221 | return ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout); |
2222 | } |
2223 | else |
2224 | { |
2225 | return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout); |
2226 | } |
2227 | } |
2228 | |
2229 | // If we have a field classification already, but there is a union, we must merge the classification type of the field. Returns the |
2230 | // new, merged classification type. |
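// For example (illustrative only): in a union of a float and an int, both fields sit at
// offset 0, so the SSE classification from the float merged with the Integer
// classification from the int yields Integer, per the merge rules asserted below.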
2231 | /* static */ |
2232 | static SystemVClassificationType ReClassifyField(SystemVClassificationType originalClassification, SystemVClassificationType newFieldClassification) |
2233 | { |
2234 | _ASSERTE((newFieldClassification == SystemVClassificationTypeInteger) || |
2235 | (newFieldClassification == SystemVClassificationTypeIntegerReference) || |
2236 | (newFieldClassification == SystemVClassificationTypeIntegerByRef) || |
2237 | (newFieldClassification == SystemVClassificationTypeSSE)); |
2238 | |
2239 | switch (newFieldClassification) |
2240 | { |
2241 | case SystemVClassificationTypeInteger: |
2242 | // Integer overrides everything; the resulting classification is Integer. Can't merge Integer and IntegerReference. |
2243 | _ASSERTE((originalClassification == SystemVClassificationTypeInteger) || |
2244 | (originalClassification == SystemVClassificationTypeSSE)); |
2245 | |
2246 | return SystemVClassificationTypeInteger; |
2247 | |
2248 | case SystemVClassificationTypeSSE: |
2249 | // If the old and new classifications are both SSE, then the merge is SSE, otherwise it will be integer. Can't merge SSE and IntegerReference. |
2250 | _ASSERTE((originalClassification == SystemVClassificationTypeInteger) || |
2251 | (originalClassification == SystemVClassificationTypeSSE)); |
2252 | |
2253 | if (originalClassification == SystemVClassificationTypeSSE) |
2254 | { |
2255 | return SystemVClassificationTypeSSE; |
2256 | } |
2257 | else |
2258 | { |
2259 | return SystemVClassificationTypeInteger; |
2260 | } |
2261 | |
2262 | case SystemVClassificationTypeIntegerReference: |
2263 | // IntegerReference can only merge with IntegerReference. |
2264 | _ASSERTE(originalClassification == SystemVClassificationTypeIntegerReference); |
2265 | return SystemVClassificationTypeIntegerReference; |
2266 | |
2267 | case SystemVClassificationTypeIntegerByRef: |
2268 | // IntegerByReference can only merge with IntegerByReference. |
2269 | _ASSERTE(originalClassification == SystemVClassificationTypeIntegerByRef); |
2270 | return SystemVClassificationTypeIntegerByRef; |
2271 | |
2272 | default: |
2273 | _ASSERTE(false); // Unexpected type. |
2274 | return SystemVClassificationTypeUnknown; |
2275 | } |
2276 | } |
2277 | |
2278 | // Returns 'true' if the struct is passed in registers, 'false' otherwise. |
2279 | bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr, |
2280 | unsigned int nestingLevel, |
2281 | unsigned int startOffsetOfStruct, |
2282 | bool useNativeLayout) |
2283 | { |
2284 | CONTRACTL |
2285 | { |
2286 | THROWS; |
2287 | GC_TRIGGERS; |
2288 | SO_TOLERANT; |
2289 | MODE_ANY; |
2290 | } |
2291 | CONTRACTL_END; |
2292 | |
2293 | WORD numIntroducedFields = GetNumIntroducedInstanceFields(); |
2294 | |
    // The VM appears to give a struct with no fields a size of 1.
    // Do not pass such a structure in registers.
2297 | if (numIntroducedFields == 0) |
2298 | { |
2299 | return false; |
2300 | } |
2301 | |
    // No struct register passing with explicit layout. There may be cases where explicit layout may still be
    // eligible for register struct passing, but it is hard to tell the real intent. Make it simple and just
    // unconditionally disable register struct passing for explicit layout.
    if (GetClass()->HasExplicitFieldOffsetLayout())
    {
        LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithManagedLayout: struct %s has explicit layout; will not be enregistered\n",
            nestingLevel * 5, "", this->GetDebugClassName()));
        return false;
    }
2311 | |
2312 | // The SIMD Intrinsic types are meant to be handled specially and should not be passed as struct registers |
2313 | if (IsIntrinsicType()) |
2314 | { |
2315 | LPCUTF8 namespaceName; |
2316 | LPCUTF8 className = GetFullyQualifiedNameInfo(&namespaceName); |
2317 | |
2318 | if ((strcmp(className, "Vector256`1" ) == 0) || (strcmp(className, "Vector128`1" ) == 0) || |
2319 | (strcmp(className, "Vector64`1" ) == 0)) |
2320 | { |
2321 | assert(strcmp(namespaceName, "System.Runtime.Intrinsics" ) == 0); |
2322 | |
2323 | LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithManagedLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n" , |
2324 | nestingLevel * 5, "" , this->GetDebugClassName())); |
2325 | |
2326 | return false; |
2327 | } |
2328 | } |
2329 | |
2330 | #ifdef _DEBUG |
2331 | LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify %s (%p), startOffset %d, total struct size %d\n" , |
2332 | nestingLevel * 5, "" , this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize)); |
2333 | int fieldNum = -1; |
2334 | #endif // _DEBUG |
2335 | |
2336 | FieldDesc *pField = GetApproxFieldDescListRaw(); |
2337 | FieldDesc *pFieldEnd = pField + numIntroducedFields; |
2338 | |
2339 | // System types are loaded before others, so ByReference<T> would be loaded before Span<T> or any other type that has a |
2340 | // ByReference<T> field. ByReference<T> is the first by-ref-like system type to be loaded (see |
2341 | // SystemDomain::LoadBaseSystemClasses), so if the current method table is marked as by-ref-like and g_pByReferenceClass is |
2342 | // null, it must be the initial load of ByReference<T>. |
2343 | bool isThisByReferenceOfT = IsByRefLike() && (g_pByReferenceClass == nullptr || HasSameTypeDefAs(g_pByReferenceClass)); |
2344 | |
2345 | for (; pField < pFieldEnd; pField++) |
2346 | { |
2347 | #ifdef _DEBUG |
2348 | ++fieldNum; |
2349 | #endif // _DEBUG |
2350 | |
2351 | DWORD fieldOffset = pField->GetOffset(); |
2352 | unsigned normalizedFieldOffset = fieldOffset + startOffsetOfStruct; |
2353 | |
2354 | unsigned int fieldSize = pField->GetSize(); |
2355 | _ASSERTE(fieldSize != (unsigned int)-1); |
2356 | |
2357 | // The field can't span past the end of the struct. |
2358 | if ((normalizedFieldOffset + fieldSize) > helperPtr->structSize) |
2359 | { |
2360 | _ASSERTE(false && "Invalid struct size. The size of fields and overall size don't agree" ); |
2361 | return false; |
2362 | } |
2363 | |
2364 | CorElementType fieldType = pField->GetFieldType(); |
2365 | |
2366 | SystemVClassificationType fieldClassificationType; |
2367 | if (isThisByReferenceOfT) |
2368 | { |
2369 | // ByReference<T> is a special type whose single IntPtr field holds a by-ref potentially interior pointer to GC |
2370 | // memory, so classify its field as such |
2371 | _ASSERTE(numIntroducedFields == 1); |
2372 | _ASSERTE(fieldType == CorElementType::ELEMENT_TYPE_I); |
2373 | fieldClassificationType = SystemVClassificationTypeIntegerByRef; |
2374 | } |
2375 | else |
2376 | { |
2377 | fieldClassificationType = CorInfoType2UnixAmd64Classification(fieldType); |
2378 | } |
2379 | |
2380 | #ifdef _DEBUG |
2381 | LPCUTF8 fieldName; |
2382 | pField->GetName_NoThrow(&fieldName); |
2383 | #endif // _DEBUG |
2384 | if (fieldClassificationType == SystemVClassificationTypeStruct) |
2385 | { |
2386 | TypeHandle th = pField->GetApproxFieldTypeHandleThrowing(); |
2387 | _ASSERTE(!th.IsNull()); |
2388 | MethodTable* pFieldMT = th.GetMethodTable(); |
2389 | |
2390 | bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct; |
2391 | helperPtr->inEmbeddedStruct = true; |
2392 | |
2393 | bool structRet = false; |
2394 | // If classifying for marshaling/PInvoke and the aggregated struct has a native layout |
2395 | // use the native classification. If not, continue using the managed layout. |
2396 | if (useNativeLayout && pFieldMT->HasLayout()) |
2397 | { |
2398 | structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout); |
2399 | } |
2400 | else |
2401 | { |
2402 | structRet = pFieldMT->ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout); |
2403 | } |
2404 | |
2405 | helperPtr->inEmbeddedStruct = inEmbeddedStructPrev; |
2406 | |
2407 | if (!structRet) |
2408 | { |
                // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return "do not enregister".
2410 | return false; |
2411 | } |
2412 | |
2413 | continue; |
2414 | } |
2415 | |
2416 | if (fieldClassificationType == SystemVClassificationTypeTypedReference || |
2417 | CorInfoType2UnixAmd64Classification(GetClass_NoLogging()->GetInternalCorElementType()) == SystemVClassificationTypeTypedReference) |
2418 | { |
            // TypedReference is a very special type.
            // In source/metadata it has two fields - Type and Value - and both are defined as type IntPtr.
            // When the VM creates the layout of the type, it changes the type of the Value field to a ByRef type and
            // leaves the type of the Type field as IntPtr (TYPE_I internally - the native int type).
            // This requires special treatment of this type. The code below handles both fields (and thus the entire type).
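            // The net effect (illustrative only): TypedReference classifies as two
            // eightbytes, the Value field at offset 0 as IntegerByRef and the Type
            // field at offset 8 as Integer, provided both land at natural alignment.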
2424 | |
2425 | for (unsigned i = 0; i < 2; i++) |
2426 | { |
2427 | fieldSize = 8; |
2428 | fieldOffset = (i == 0 ? 0 : 8); |
2429 | normalizedFieldOffset = fieldOffset + startOffsetOfStruct; |
2430 | fieldClassificationType = (i == 0 ? SystemVClassificationTypeIntegerByRef : SystemVClassificationTypeInteger); |
2431 | if ((normalizedFieldOffset % fieldSize) != 0) |
2432 | { |
                    // The spec requires fields of a struct passed in registers
                    // to be at their natural alignment.

                    LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d not at natural alignment; not enregistering struct\n",
                        nestingLevel * 5, "", fieldNum, (i == 0 ? "Value" : "Type"), fieldOffset, normalizedFieldOffset, fieldSize));
2438 | return false; |
2439 | } |
2440 | |
2441 | helperPtr->largestFieldOffset = (int)normalizedFieldOffset; |
2442 | |
2443 | // Set the data for a new field. |
2444 | |
2445 | // The new field classification must not have been initialized yet. |
2446 | _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass); |
2447 | |
2448 | // There are only a few field classifications that are allowed. |
2449 | _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) || |
2450 | (fieldClassificationType == SystemVClassificationTypeIntegerReference) || |
2451 | (fieldClassificationType == SystemVClassificationTypeIntegerByRef) || |
2452 | (fieldClassificationType == SystemVClassificationTypeSSE)); |
2453 | |
2454 | helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType; |
2455 | helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldSize; |
2456 | helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset; |
2457 | |
2458 | LOG((LF_JIT, LL_EVERYTHING, " %*s**** Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n" , |
2459 | nestingLevel * 5, "" , fieldNum, (i == 0 ? "Value" : "Type" ), fieldOffset, normalizedFieldOffset, fieldSize, helperPtr->currentUniqueOffsetField, |
2460 | GetSystemVClassificationTypeName(fieldClassificationType), |
2461 | GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField]))); |
2462 | |
2463 | helperPtr->currentUniqueOffsetField++; |
2464 | #ifdef _DEBUG |
2465 | ++fieldNum; |
2466 | #endif // _DEBUG |
2467 | } |
2468 | |
2469 | // Both fields of the special TypedReference struct are handled. |
2470 | pField = pFieldEnd; |
2471 | |
2472 | // Done classifying the System.TypedReference struct fields. |
2473 | continue; |
2474 | } |
2475 | |
2476 | if ((normalizedFieldOffset % fieldSize) != 0) |
2477 | { |
            // The spec requires fields of a struct passed in registers
            // to be at their natural alignment.

            LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d not at natural alignment; not enregistering struct\n",
                nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize));
2483 | return false; |
2484 | } |
2485 | |
2486 | if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset) |
2487 | { |
2488 | // Find the field corresponding to this offset and update the size if needed. |
2489 | // We assume that either it matches the offset of a previously seen field, or |
2490 | // it is an out-of-order offset (the VM does give us structs in non-increasing |
2491 | // offset order sometimes) that doesn't overlap any other field. |
2492 | |
2493 | // REVIEW: will the offset ever match a previously seen field offset for cases that are NOT ExplicitLayout? |
2494 | // If not, we can get rid of this loop, and just assume the offset is from an out-of-order field. We wouldn't |
2495 | // need to maintain largestFieldOffset, either, since we would then assume all fields are unique. We could |
2496 | // also get rid of ReClassifyField(). |
2497 | int i; |
2498 | for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--) |
2499 | { |
2500 | if (helperPtr->fieldOffsets[i] == normalizedFieldOffset) |
2501 | { |
2502 | if (fieldSize > helperPtr->fieldSizes[i]) |
2503 | { |
2504 | helperPtr->fieldSizes[i] = fieldSize; |
2505 | } |
2506 | |
2507 | helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType); |
2508 | |
2509 | LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Field %d %s: offset %d (normalized %d), size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n" , |
2510 | nestingLevel * 5, "" , fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, i, |
2511 | GetSystemVClassificationTypeName(fieldClassificationType), |
2512 | GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i]))); |
2513 | |
2514 | break; |
2515 | } |
2516 | // Make sure the field doesn't start in the middle of another field. |
2517 | _ASSERTE((normalizedFieldOffset < helperPtr->fieldOffsets[i]) || |
2518 | (normalizedFieldOffset >= helperPtr->fieldOffsets[i] + helperPtr->fieldSizes[i])); |
2519 | } |
2520 | |
2521 | if (i >= 0) |
2522 | { |
2523 | // The proper size of the union set of fields has been set above; continue to the next field. |
2524 | continue; |
2525 | } |
2526 | } |
2527 | else |
2528 | { |
2529 | helperPtr->largestFieldOffset = (int)normalizedFieldOffset; |
2530 | } |
2531 | |
2532 | // Set the data for a new field. |
2533 | |
2534 | // The new field classification must not have been initialized yet. |
2535 | _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass); |
2536 | |
2537 | // There are only a few field classifications that are allowed. |
2538 | _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) || |
2539 | (fieldClassificationType == SystemVClassificationTypeIntegerReference) || |
2540 | (fieldClassificationType == SystemVClassificationTypeIntegerByRef) || |
2541 | (fieldClassificationType == SystemVClassificationTypeSSE)); |
2542 | |
2543 | helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType; |
2544 | helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldSize; |
2545 | helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset; |
2546 | |
2547 | LOG((LF_JIT, LL_EVERYTHING, " %*s**** Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n" , |
2548 | nestingLevel * 5, "" , fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldSize, helperPtr->currentUniqueOffsetField, |
2549 | GetSystemVClassificationTypeName(fieldClassificationType), |
2550 | GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField]))); |
2551 | |
2552 | _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT); |
2553 | helperPtr->currentUniqueOffsetField++; |
2554 | } // end per-field for loop |
2555 | |
2556 | AssignClassifiedEightByteTypes(helperPtr, nestingLevel); |
2557 | |
2558 | return true; |
2559 | } |
2560 | |
2561 | // Returns 'true' if the struct is passed in registers, 'false' otherwise. |
2562 | bool MethodTable::ClassifyEightBytesWithNativeLayout(SystemVStructRegisterPassingHelperPtr helperPtr, |
2563 | unsigned int nestingLevel, |
2564 | unsigned int startOffsetOfStruct, |
2565 | bool useNativeLayout) |
2566 | { |
2567 | CONTRACTL |
2568 | { |
2569 | THROWS; |
2570 | GC_TRIGGERS; |
2571 | SO_TOLERANT; |
2572 | MODE_ANY; |
2573 | } |
2574 | CONTRACTL_END; |
2575 | |
    // We should only be in this method when doing a native layout classification.
2577 | _ASSERTE(useNativeLayout); |
2578 | |
2579 | #ifdef DACCESS_COMPILE |
2580 | // No register classification for this case. |
2581 | return false; |
2582 | #else // DACCESS_COMPILE |
2583 | |
2584 | if (!HasLayout()) |
2585 | { |
2586 | // If there is no native layout for this struct use the managed layout instead. |
2587 | return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout); |
2588 | } |
2589 | |
2590 | const FieldMarshaler *pFieldMarshaler = GetLayoutInfo()->GetFieldMarshalers(); |
2591 | UINT numIntroducedFields = GetLayoutInfo()->GetNumCTMFields(); |
2592 | |
2593 | // No fields. |
2594 | if (numIntroducedFields == 0) |
2595 | { |
2596 | return false; |
2597 | } |
2598 | |
    // No struct register passing with explicit layout. There may be cases where explicit layout may still be
    // eligible for register struct passing, but it is hard to tell the real intent. Make it simple and just
    // unconditionally disable register struct passing for explicit layout.
    if (GetClass()->HasExplicitFieldOffsetLayout())
    {
        LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithNativeLayout: struct %s has explicit layout; will not be enregistered\n",
            nestingLevel * 5, "", this->GetDebugClassName()));
2606 | return false; |
2607 | } |
2608 | |
2609 | // The SIMD Intrinsic types are meant to be handled specially and should not be passed as struct registers |
2610 | if (IsIntrinsicType()) |
2611 | { |
2612 | LPCUTF8 namespaceName; |
2613 | LPCUTF8 className = GetFullyQualifiedNameInfo(&namespaceName); |
2614 | |
2615 | if ((strcmp(className, "Vector256`1" ) == 0) || (strcmp(className, "Vector128`1" ) == 0) || |
2616 | (strcmp(className, "Vector64`1" ) == 0)) |
2617 | { |
2618 | assert(strcmp(namespaceName, "System.Runtime.Intrinsics" ) == 0); |
2619 | |
2620 | LOG((LF_JIT, LL_EVERYTHING, "%*s**** ClassifyEightBytesWithNativeLayout: struct %s is a SIMD intrinsic type; will not be enregistered\n" , |
2621 | nestingLevel * 5, "" , this->GetDebugClassName())); |
2622 | |
2623 | return false; |
2624 | } |
2625 | } |
2626 | |
2627 | #ifdef _DEBUG |
2628 | LOG((LF_JIT, LL_EVERYTHING, "%*s**** Classify for native struct %s (%p), startOffset %d, total struct size %d\n" , |
2629 | nestingLevel * 5, "" , this->GetDebugClassName(), this, startOffsetOfStruct, helperPtr->structSize)); |
2630 | int fieldNum = -1; |
2631 | #endif // _DEBUG |
2632 | |
2633 | while (numIntroducedFields--) |
2634 | { |
2635 | #ifdef _DEBUG |
2636 | ++fieldNum; |
2637 | #endif // _DEBUG |
2638 | |
2639 | FieldDesc *pField = pFieldMarshaler->GetFieldDesc(); |
2640 | CorElementType fieldType = pField->GetFieldType(); |
2641 | |
2642 | // Invalid field type. |
2643 | if (fieldType == ELEMENT_TYPE_END) |
2644 | { |
2645 | return false; |
2646 | } |
2647 | |
2648 | DWORD fieldOffset = pFieldMarshaler->GetExternalOffset(); |
2649 | unsigned normalizedFieldOffset = fieldOffset + startOffsetOfStruct; |
2650 | |
2651 | unsigned int fieldNativeSize = pFieldMarshaler->NativeSize(); |
2652 | if (fieldNativeSize > SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES) |
2653 | { |
2654 | // Pass on stack in this case. |
2655 | return false; |
2656 | } |
2657 | |
2658 | _ASSERTE(fieldNativeSize != (unsigned int)-1); |
2659 | |
2660 | // The field can't span past the end of the struct. |
2661 | if ((normalizedFieldOffset + fieldNativeSize) > helperPtr->structSize) |
2662 | { |
2663 | _ASSERTE(false && "Invalid native struct size. The size of fields and overall size don't agree" ); |
2664 | return false; |
2665 | } |
2666 | |
2667 | SystemVClassificationType fieldClassificationType = SystemVClassificationTypeUnknown; |
2668 | |
2669 | #ifdef _DEBUG |
2670 | LPCUTF8 fieldName; |
2671 | pField->GetName_NoThrow(&fieldName); |
2672 | #endif // _DEBUG |
2673 | |
2674 | // Some NStruct Field Types have extra information and require special handling |
2675 | NStructFieldType cls = pFieldMarshaler->GetNStructFieldType(); |
2676 | if (cls == NFT_FIXEDCHARARRAYANSI) |
2677 | { |
2678 | fieldClassificationType = SystemVClassificationTypeInteger; |
2679 | } |
2680 | else if (cls == NFT_FIXEDARRAY) |
2681 | { |
2682 | VARTYPE vtElement = ((FieldMarshaler_FixedArray*)pFieldMarshaler)->GetElementVT(); |
2683 | switch (vtElement) |
2684 | { |
2685 | case VT_EMPTY: |
2686 | case VT_NULL: |
2687 | case VT_BOOL: |
2688 | case VT_I1: |
2689 | case VT_I2: |
2690 | case VT_I4: |
2691 | case VT_I8: |
2692 | case VT_UI1: |
2693 | case VT_UI2: |
2694 | case VT_UI4: |
2695 | case VT_UI8: |
2696 | case VT_PTR: |
2697 | case VT_INT: |
2698 | case VT_UINT: |
2699 | case VT_LPSTR: |
2700 | case VT_LPWSTR: |
2701 | fieldClassificationType = SystemVClassificationTypeInteger; |
2702 | break; |
2703 | case VT_R4: |
2704 | case VT_R8: |
2705 | fieldClassificationType = SystemVClassificationTypeSSE; |
2706 | break; |
2707 | case VT_DECIMAL: |
2708 | case VT_DATE: |
2709 | case VT_BSTR: |
2710 | case VT_UNKNOWN: |
2711 | case VT_DISPATCH: |
2712 | case VT_SAFEARRAY: |
2713 | case VT_ERROR: |
2714 | case VT_HRESULT: |
2715 | case VT_CARRAY: |
2716 | case VT_USERDEFINED: |
2717 | case VT_RECORD: |
2718 | case VT_FILETIME: |
2719 | case VT_BLOB: |
2720 | case VT_STREAM: |
2721 | case VT_STORAGE: |
2722 | case VT_STREAMED_OBJECT: |
2723 | case VT_STORED_OBJECT: |
2724 | case VT_BLOB_OBJECT: |
2725 | case VT_CF: |
2726 | case VT_CLSID: |
2727 | default: |
2728 | // Not supported. |
2729 | return false; |
2730 | } |
2731 | } |
2732 | #ifdef FEATURE_COMINTEROP |
2733 | else if (cls == NFT_INTERFACE) |
2734 | { |
2735 | // COMInterop not supported for CORECLR. |
2736 | _ASSERTE(false && "COMInterop not supported for CORECLR." ); |
2737 | return false; |
2738 | } |
2739 | #ifdef FEATURE_CLASSIC_COMINTEROP |
2740 | else if (cls == NFT_SAFEARRAY) |
2741 | { |
2742 | // COMInterop not supported for CORECLR. |
2743 | _ASSERTE(false && "COMInterop not supported for CORECLR." ); |
2744 | return false; |
2745 | } |
2746 | #endif // FEATURE_CLASSIC_COMINTEROP |
2747 | #endif // FEATURE_COMINTEROP |
2748 | else if (cls == NFT_NESTEDLAYOUTCLASS) |
2749 | { |
2750 | MethodTable* pFieldMT = ((FieldMarshaler_NestedLayoutClass*)pFieldMarshaler)->GetMethodTable(); |
2751 | |
2752 | bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct; |
2753 | helperPtr->inEmbeddedStruct = true; |
2754 | bool structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout); |
2755 | helperPtr->inEmbeddedStruct = inEmbeddedStructPrev; |
2756 | |
2757 | if (!structRet) |
2758 | { |
                // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return "do not enregister".
2760 | return false; |
2761 | } |
2762 | |
2763 | continue; |
2764 | } |
2765 | else if (cls == NFT_NESTEDVALUECLASS) |
2766 | { |
2767 | MethodTable* pFieldMT = ((FieldMarshaler_NestedValueClass*)pFieldMarshaler)->GetMethodTable(); |
2768 | |
2769 | bool inEmbeddedStructPrev = helperPtr->inEmbeddedStruct; |
2770 | helperPtr->inEmbeddedStruct = true; |
2771 | bool structRet = pFieldMT->ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout); |
2772 | helperPtr->inEmbeddedStruct = inEmbeddedStructPrev; |
2773 | |
2774 | if (!structRet) |
2775 | { |
                // If the nested struct says not to enregister, there's no need to continue analyzing at this level. Just return "do not enregister".
2777 | return false; |
2778 | } |
2779 | |
2780 | continue; |
2781 | } |
2782 | else if (cls == NFT_COPY1) |
2783 | { |
2784 | // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy1. |
2785 | switch (fieldType) |
2786 | { |
2787 | case ELEMENT_TYPE_I1: |
2788 | fieldClassificationType = SystemVClassificationTypeInteger; |
2789 | break; |
2790 | |
2791 | case ELEMENT_TYPE_U1: |
2792 | fieldClassificationType = SystemVClassificationTypeInteger; |
2793 | break; |
2794 | |
2795 | default: |
2796 | // Invalid entry. |
2797 | return false; // Pass on stack. |
2798 | } |
2799 | } |
2800 | else if (cls == NFT_COPY2) |
2801 | { |
2802 | // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy2. |
2803 | switch (fieldType) |
2804 | { |
2805 | case ELEMENT_TYPE_CHAR: |
2806 | case ELEMENT_TYPE_I2: |
2807 | case ELEMENT_TYPE_U2: |
2808 | fieldClassificationType = SystemVClassificationTypeInteger; |
2809 | break; |
2810 | |
2811 | default: |
2812 | // Invalid entry. |
2813 | return false; // Pass on stack. |
2814 | } |
2815 | } |
2816 | else if (cls == NFT_COPY4) |
2817 | { |
2818 | // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy4. |
2819 | switch (fieldType) |
2820 | { |
2821 | // At this point, ELEMENT_TYPE_I must be 4 bytes long. Same for ELEMENT_TYPE_U. |
2822 | case ELEMENT_TYPE_I: |
2823 | case ELEMENT_TYPE_I4: |
2824 | case ELEMENT_TYPE_U: |
2825 | case ELEMENT_TYPE_U4: |
2826 | case ELEMENT_TYPE_PTR: |
2827 | fieldClassificationType = SystemVClassificationTypeInteger; |
2828 | break; |
2829 | |
2830 | case ELEMENT_TYPE_R4: |
2831 | fieldClassificationType = SystemVClassificationTypeSSE; |
2832 | break; |
2833 | |
2834 | default: |
2835 | // Invalid entry. |
2836 | return false; // Pass on stack. |
2837 | } |
2838 | } |
2839 | else if (cls == NFT_COPY8) |
2840 | { |
2841 | // The following CorElementTypes are the only ones handled with FieldMarshaler_Copy8. |
2842 | switch (fieldType) |
2843 | { |
2844 | // At this point, ELEMENT_TYPE_I must be 8 bytes long. Same for ELEMENT_TYPE_U. |
2845 | case ELEMENT_TYPE_I: |
2846 | case ELEMENT_TYPE_I8: |
2847 | case ELEMENT_TYPE_U: |
2848 | case ELEMENT_TYPE_U8: |
2849 | case ELEMENT_TYPE_PTR: |
2850 | fieldClassificationType = SystemVClassificationTypeInteger; |
2851 | break; |
2852 | |
2853 | case ELEMENT_TYPE_R8: |
2854 | fieldClassificationType = SystemVClassificationTypeSSE; |
2855 | break; |
2856 | |
2857 | default: |
2858 | // Invalid entry. |
2859 | return false; // Pass on stack. |
2860 | } |
2861 | } |
2862 | else if (cls == NFT_FIXEDSTRINGUNI) |
2863 | { |
2864 | fieldClassificationType = SystemVClassificationTypeInteger; |
2865 | } |
2866 | else if (cls == NFT_FIXEDSTRINGANSI) |
2867 | { |
2868 | fieldClassificationType = SystemVClassificationTypeInteger; |
2869 | } |
2870 | else |
2871 | { |
2872 | // All other NStruct Field Types which do not require special handling. |
2873 | switch (cls) |
2874 | { |
2875 | #ifdef FEATURE_COMINTEROP |
2876 | case NFT_HSTRING: |
2877 | case NFT_VARIANT: |
2878 | case NFT_VARIANTBOOL: |
2879 | case NFT_CURRENCY: |
2880 | // COMInterop not supported for CORECLR. |
2881 | _ASSERTE(false && "COMInterop not supported for CORECLR." ); |
2882 | return false; |
2883 | #endif // FEATURE_COMINTEROP |
2884 | case NFT_STRINGUNI: |
2885 | case NFT_STRINGANSI: |
2886 | case NFT_ANSICHAR: |
2887 | case NFT_STRINGUTF8: |
2888 | case NFT_WINBOOL: |
2889 | case NFT_CBOOL: |
2890 | case NFT_DELEGATE: |
2891 | case NFT_SAFEHANDLE: |
2892 | case NFT_CRITICALHANDLE: |
2893 | case NFT_BSTR: |
2894 | fieldClassificationType = SystemVClassificationTypeInteger; |
2895 | break; |
2896 | |
        // It's not clear what the right behavior for NFT_DECIMAL and NFT_DATE is,
        // but those two types would only make sense on Windows. We can revisit this later.
2899 | case NFT_DECIMAL: |
2900 | case NFT_DATE: |
2901 | case NFT_ILLEGAL: |
2902 | default: |
2903 | return false; |
2904 | } |
2905 | } |
2906 | |
2907 | if ((normalizedFieldOffset % fieldNativeSize) != 0) |
2908 | { |
            // The spec requires fields of a struct passed in registers
            // to be at their natural alignment.

            LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), native size %d not at natural alignment; not enregistering struct\n",
                nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize));
2914 | return false; |
2915 | } |
2916 | |
2917 | if ((int)normalizedFieldOffset <= helperPtr->largestFieldOffset) |
2918 | { |
2919 | // Find the field corresponding to this offset and update the size if needed. |
2920 | // We assume that either it matches the offset of a previously seen field, or |
2921 | // it is an out-of-order offset (the VM does give us structs in non-increasing |
2922 | // offset order sometimes) that doesn't overlap any other field. |
2923 | |
2924 | int i; |
2925 | for (i = helperPtr->currentUniqueOffsetField - 1; i >= 0; i--) |
2926 | { |
2927 | if (helperPtr->fieldOffsets[i] == normalizedFieldOffset) |
2928 | { |
2929 | if (fieldNativeSize > helperPtr->fieldSizes[i]) |
2930 | { |
2931 | helperPtr->fieldSizes[i] = fieldNativeSize; |
2932 | } |
2933 | |
2934 | helperPtr->fieldClassifications[i] = ReClassifyField(helperPtr->fieldClassifications[i], fieldClassificationType); |
2935 | |
2936 | LOG((LF_JIT, LL_EVERYTHING, " %*sxxxx Native Field %d %s: offset %d (normalized %d), native size %d, union with uniqueOffsetField %d, field type classification %s, reclassified field to %s\n" , |
2937 | nestingLevel * 5, "" , fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, i, |
2938 | GetSystemVClassificationTypeName(fieldClassificationType), |
2939 | GetSystemVClassificationTypeName(helperPtr->fieldClassifications[i]))); |
2940 | |
2941 | break; |
2942 | } |
2943 | // Make sure the field doesn't start in the middle of another field. |
2944 | _ASSERTE((normalizedFieldOffset < helperPtr->fieldOffsets[i]) || |
2945 | (normalizedFieldOffset >= helperPtr->fieldOffsets[i] + helperPtr->fieldSizes[i])); |
2946 | } |
2947 | |
2948 | if (i >= 0) |
2949 | { |
2950 | // The proper size of the union set of fields has been set above; continue to the next field. |
2951 | continue; |
2952 | } |
2953 | } |
2954 | else |
2955 | { |
2956 | helperPtr->largestFieldOffset = (int)normalizedFieldOffset; |
2957 | } |
2958 | |
2959 | // Set the data for a new field. |
2960 | |
2961 | // The new field classification must not have been initialized yet. |
2962 | _ASSERTE(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] == SystemVClassificationTypeNoClass); |
2963 | |
2964 | // There are only a few field classifications that are allowed. |
2965 | _ASSERTE((fieldClassificationType == SystemVClassificationTypeInteger) || |
2966 | (fieldClassificationType == SystemVClassificationTypeIntegerReference) || |
2967 | (fieldClassificationType == SystemVClassificationTypeIntegerByRef) || |
2968 | (fieldClassificationType == SystemVClassificationTypeSSE)); |
2969 | |
2970 | helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField] = fieldClassificationType; |
2971 | helperPtr->fieldSizes[helperPtr->currentUniqueOffsetField] = fieldNativeSize; |
2972 | helperPtr->fieldOffsets[helperPtr->currentUniqueOffsetField] = normalizedFieldOffset; |
2973 | |
LOG((LF_JIT, LL_EVERYTHING, " %*s**** Native Field %d %s: offset %d (normalized %d), size %d, currentUniqueOffsetField %d, field type classification %s, chosen field classification %s\n",
nestingLevel * 5, "", fieldNum, fieldName, fieldOffset, normalizedFieldOffset, fieldNativeSize, helperPtr->currentUniqueOffsetField,
2976 | GetSystemVClassificationTypeName(fieldClassificationType), |
2977 | GetSystemVClassificationTypeName(helperPtr->fieldClassifications[helperPtr->currentUniqueOffsetField]))); |
2978 | |
2979 | _ASSERTE(helperPtr->currentUniqueOffsetField < SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT); |
2980 | helperPtr->currentUniqueOffsetField++; |
2981 | ((BYTE*&)pFieldMarshaler) += MAXFIELDMARSHALERSIZE; |
2982 | } // end per-field for loop |
2983 | |
2984 | AssignClassifiedEightByteTypes(helperPtr, nestingLevel); |
2985 | |
2986 | return true; |
2987 | #endif // DACCESS_COMPILE |
2988 | } |
2989 | |
2990 | // Assigns the classification types to the array with eightbyte types. |
2991 | void MethodTable::AssignClassifiedEightByteTypes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel) const |
2992 | { |
2993 | static const size_t CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS = CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS * SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES; |
2994 | static_assert_no_msg(CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS == SYSTEMV_MAX_NUM_FIELDS_IN_REGISTER_PASSED_STRUCT); |
2995 | |
2996 | if (!helperPtr->inEmbeddedStruct) |
2997 | { |
2998 | _ASSERTE(nestingLevel == 0); |
2999 | |
3000 | int largestFieldOffset = helperPtr->largestFieldOffset; |
3001 | _ASSERTE(largestFieldOffset != -1); |
3002 | |
3003 | // We're at the top level of the recursion, and we're done looking at the fields. |
3004 | // Now sort the fields by offset and set the output data. |
3005 | |
3006 | int sortedFieldOrder[CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS]; |
3007 | for (unsigned i = 0; i < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS; i++) |
3008 | { |
3009 | sortedFieldOrder[i] = -1; |
3010 | } |
3011 | |
3012 | unsigned numFields = helperPtr->currentUniqueOffsetField; |
3013 | for (unsigned i = 0; i < numFields; i++) |
3014 | { |
3015 | _ASSERTE(helperPtr->fieldOffsets[i] < CLR_SYSTEMV_MAX_BYTES_TO_PASS_IN_REGISTERS); |
3016 | _ASSERTE(sortedFieldOrder[helperPtr->fieldOffsets[i]] == -1); // we haven't seen this field offset yet. |
3017 | sortedFieldOrder[helperPtr->fieldOffsets[i]] = i; |
3018 | } |
3019 | |
3020 | // Calculate the eightbytes and their types. |
3021 | unsigned int accumulatedSizeForEightByte = 0; |
3022 | unsigned int currentEightByteOffset = 0; |
3023 | unsigned int currentEightByte = 0; |
3024 | |
3025 | int lastFieldOrdinal = sortedFieldOrder[largestFieldOffset]; |
3026 | unsigned int offsetAfterLastFieldByte = largestFieldOffset + helperPtr->fieldSizes[lastFieldOrdinal]; |
3027 | SystemVClassificationType lastFieldClassification = helperPtr->fieldClassifications[lastFieldOrdinal]; |
3028 | |
3029 | unsigned offset = 0; |
3030 | for (unsigned fieldSize = 0; offset < helperPtr->structSize; offset += fieldSize) |
3031 | { |
3032 | SystemVClassificationType fieldClassificationType; |
3033 | |
3034 | int ordinal = sortedFieldOrder[offset]; |
3035 | if (ordinal == -1) |
3036 | { |
// If there is no field that starts at this offset, treat its contents as padding.
3038 | // Any padding that follows the last field receives the same classification as the |
3039 | // last field; padding between fields receives the NO_CLASS classification as per |
3040 | // the SysV ABI spec. |
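// (Illustrative example: in struct { int a; double d; } the four padding bytes
// between 'a' and 'd' are NO_CLASS, so the first eightbyte stays INTEGER and
// the second is classified SSE by the field 'd'.)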
3041 | fieldSize = 1; |
3042 | fieldClassificationType = offset < offsetAfterLastFieldByte ? SystemVClassificationTypeNoClass : lastFieldClassification; |
3043 | } |
3044 | else |
3045 | { |
3046 | fieldSize = helperPtr->fieldSizes[ordinal]; |
3047 | _ASSERTE(fieldSize > 0 && fieldSize <= SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES); |
3048 | |
3049 | fieldClassificationType = helperPtr->fieldClassifications[ordinal]; |
3050 | _ASSERTE(fieldClassificationType != SystemVClassificationTypeMemory && fieldClassificationType != SystemVClassificationTypeUnknown); |
3051 | } |
3052 | |
3053 | if (helperPtr->eightByteClassifications[currentEightByte] == fieldClassificationType) |
3054 | { |
3055 | // Do nothing. The eight-byte already has this classification. |
3056 | } |
3057 | else if (helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeNoClass) |
3058 | { |
3059 | helperPtr->eightByteClassifications[currentEightByte] = fieldClassificationType; |
3060 | } |
3061 | else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeInteger) || |
3062 | (fieldClassificationType == SystemVClassificationTypeInteger)) |
3063 | { |
3064 | _ASSERTE((fieldClassificationType != SystemVClassificationTypeIntegerReference) && |
3065 | (fieldClassificationType != SystemVClassificationTypeIntegerByRef)); |
3066 | |
3067 | helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeInteger; |
3068 | } |
3069 | else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeIntegerReference) || |
3070 | (fieldClassificationType == SystemVClassificationTypeIntegerReference)) |
3071 | { |
3072 | helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeIntegerReference; |
3073 | } |
3074 | else if ((helperPtr->eightByteClassifications[currentEightByte] == SystemVClassificationTypeIntegerByRef) || |
3075 | (fieldClassificationType == SystemVClassificationTypeIntegerByRef)) |
3076 | { |
3077 | helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeIntegerByRef; |
3078 | } |
3079 | else |
3080 | { |
3081 | helperPtr->eightByteClassifications[currentEightByte] = SystemVClassificationTypeSSE; |
3082 | } |
3083 | |
3084 | accumulatedSizeForEightByte += fieldSize; |
3085 | if (accumulatedSizeForEightByte == SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES) |
3086 | { |
3087 | // Save data for this eightbyte. |
3088 | helperPtr->eightByteSizes[currentEightByte] = SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES; |
3089 | helperPtr->eightByteOffsets[currentEightByte] = currentEightByteOffset; |
3090 | |
3091 | // Set up for next eightbyte. |
3092 | currentEightByte++; |
3093 | _ASSERTE(currentEightByte <= CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS); |
3094 | |
3095 | currentEightByteOffset = offset + fieldSize; |
3096 | accumulatedSizeForEightByte = 0; |
3097 | } |
3098 | |
3099 | _ASSERTE(accumulatedSizeForEightByte < SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES); |
3100 | } |
3101 | |
3102 | // Handle structs that end in the middle of an eightbyte. |
3103 | if (accumulatedSizeForEightByte > 0 && accumulatedSizeForEightByte < SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES) |
3104 | { |
3105 | _ASSERTE((helperPtr->structSize % SYSTEMV_EIGHT_BYTE_SIZE_IN_BYTES) != 0); |
3106 | |
3107 | helperPtr->eightByteSizes[currentEightByte] = accumulatedSizeForEightByte; |
3108 | helperPtr->eightByteOffsets[currentEightByte] = currentEightByteOffset; |
3109 | currentEightByte++; |
3110 | } |
3111 | |
3112 | helperPtr->eightByteCount = currentEightByte; |
3113 | |
3114 | _ASSERTE(helperPtr->eightByteCount <= CLR_SYSTEMV_MAX_EIGHTBYTES_COUNT_TO_PASS_IN_REGISTERS); |
3115 | |
3116 | #ifdef _DEBUG |
LOG((LF_JIT, LL_EVERYTHING, " ----\n"));
LOG((LF_JIT, LL_EVERYTHING, " **** Number EightBytes: %d\n", helperPtr->eightByteCount));
3119 | for (unsigned i = 0; i < helperPtr->eightByteCount; i++) |
3120 | { |
LOG((LF_JIT, LL_EVERYTHING, " **** eightByte %d -- classType: %s, eightByteOffset: %d, eightByteSize: %d\n",
3122 | i, GetSystemVClassificationTypeName(helperPtr->eightByteClassifications[i]), helperPtr->eightByteOffsets[i], helperPtr->eightByteSizes[i])); |
3123 | } |
3124 | #endif // _DEBUG |
3125 | } |
3126 | } |
3127 | |
3128 | #endif // defined(UNIX_AMD64_ABI_ITF) |
3129 | |
3130 | #if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE) |
3131 | //========================================================================================== |
3132 | void MethodTable::AllocateRegularStaticBoxes() |
3133 | { |
3134 | CONTRACTL |
3135 | { |
3136 | THROWS; |
3137 | GC_TRIGGERS; |
3138 | PRECONDITION(!ContainsGenericVariables()); |
3139 | PRECONDITION(HasBoxedRegularStatics()); |
3140 | MODE_ANY; |
3141 | } |
3142 | CONTRACTL_END; |
3143 | |
LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Instantiating static handles for %s\n", GetDebugClassName()));
3145 | |
3146 | GCX_COOP(); |
3147 | |
3148 | PTR_BYTE pStaticBase = GetGCStaticsBasePointer(); |
3149 | |
3150 | GCPROTECT_BEGININTERIOR(pStaticBase); |
3151 | |
// In the NGENed case, we have a cached array with the boxed statics' MethodTables. In the JITed case, we have just the FieldDescs.
3153 | ClassCtorInfoEntry *pClassCtorInfoEntry = GetClassCtorInfoIfExists(); |
3154 | if (pClassCtorInfoEntry != NULL) |
3155 | { |
3156 | OBJECTREF* pStaticSlots = (OBJECTREF*)(pStaticBase + pClassCtorInfoEntry->firstBoxedStaticOffset); |
3157 | GCPROTECT_BEGININTERIOR(pStaticSlots); |
3158 | |
3159 | ArrayDPTR(RelativeFixupPointer<PTR_MethodTable>) ppMTs = GetLoaderModule()->GetZapModuleCtorInfo()-> |
3160 | GetGCStaticMTs(pClassCtorInfoEntry->firstBoxedStaticMTIndex); |
3161 | |
3162 | DWORD numBoxedStatics = pClassCtorInfoEntry->numBoxedStatics; |
3163 | for (DWORD i = 0; i < numBoxedStatics; i++) |
3164 | { |
3165 | #ifdef FEATURE_PREJIT |
3166 | Module::RestoreMethodTablePointer(&(ppMTs[i]), GetLoaderModule()); |
3167 | #endif |
3168 | MethodTable *pFieldMT = ppMTs[i].GetValue(); |
3169 | |
3170 | _ASSERTE(pFieldMT); |
3171 | |
LOG((LF_CLASSLOADER, LL_INFO10000, "\tInstantiating static of type %s\n", pFieldMT->GetDebugClassName()));
3173 | OBJECTREF obj = AllocateStaticBox(pFieldMT, pClassCtorInfoEntry->hasFixedAddressVTStatics); |
3174 | |
3175 | SetObjectReference( &(pStaticSlots[i]), obj, GetAppDomain() ); |
3176 | } |
3177 | GCPROTECT_END(); |
3178 | } |
3179 | else |
3180 | { |
3181 | // We should never take this codepath in zapped images. |
3182 | _ASSERTE(!IsZapped()); |
3183 | |
3184 | FieldDesc *pField = HasGenericsStaticsInfo() ? |
3185 | GetGenericsStaticFieldDescs() : (GetApproxFieldDescListRaw() + GetNumIntroducedInstanceFields()); |
3186 | FieldDesc *pFieldEnd = pField + GetNumStaticFields(); |
3187 | |
3188 | while (pField < pFieldEnd) |
3189 | { |
3190 | _ASSERTE(pField->IsStatic()); |
3191 | |
3192 | if (!pField->IsSpecialStatic() && pField->IsByValue()) |
3193 | { |
3194 | TypeHandle th = pField->GetFieldTypeHandleThrowing(); |
3195 | MethodTable* pFieldMT = th.GetMethodTable(); |
3196 | |
LOG((LF_CLASSLOADER, LL_INFO10000, "\tInstantiating static of type %s\n", pFieldMT->GetDebugClassName()));
3198 | OBJECTREF obj = AllocateStaticBox(pFieldMT, HasFixedAddressVTStatics()); |
3199 | |
3200 | SetObjectReference( (OBJECTREF*)(pStaticBase + pField->GetOffset()), obj, GetAppDomain() ); |
3201 | } |
3202 | |
3203 | pField++; |
3204 | } |
3205 | } |
3206 | GCPROTECT_END(); |
3207 | } |
3208 | |
3209 | //========================================================================================== |
3210 | OBJECTREF MethodTable::AllocateStaticBox(MethodTable* pFieldMT, BOOL fPinned, OBJECTHANDLE* pHandle) |
3211 | { |
3212 | CONTRACTL |
3213 | { |
3214 | THROWS; |
3215 | GC_TRIGGERS; |
MODE_ANY;
}
CONTRACTL_END;
3219 | |
3220 | _ASSERTE(pFieldMT->IsValueType()); |
3221 | |
3222 | // Activate any dependent modules if necessary |
3223 | pFieldMT->EnsureInstanceActive(); |
3224 | |
3225 | OBJECTREF obj = AllocateObject(pFieldMT); |
3226 | |
3227 | // Pin the object if necessary |
3228 | if (fPinned) |
3229 | { |
LOG((LF_CLASSLOADER, LL_INFO10000, "\tSTATICS:Pinning static (VT fixed address attribute) of type %s\n", pFieldMT->GetDebugClassName()));
3231 | OBJECTHANDLE oh = GetAppDomain()->CreatePinningHandle(obj); |
3232 | if (pHandle) |
3233 | { |
3234 | *pHandle = oh; |
3235 | } |
3236 | } |
3237 | else |
3238 | { |
3239 | if (pHandle) |
3240 | { |
3241 | *pHandle = NULL; |
3242 | } |
3243 | } |
3244 | |
3245 | return obj; |
3246 | } |
3247 | |
3248 | //========================================================================================== |
3249 | BOOL MethodTable::RunClassInitEx(OBJECTREF *pThrowable) |
3250 | { |
3251 | CONTRACTL |
3252 | { |
3253 | THROWS; |
3254 | GC_TRIGGERS; |
3255 | MODE_COOPERATIVE; |
3256 | PRECONDITION(IsFullyLoaded()); |
3257 | PRECONDITION(IsProtectedByGCFrame(pThrowable)); |
3258 | } |
3259 | CONTRACTL_END; |
3260 | |
// A somewhat unusual function: it can both return a throwable and throw.
// The difference is, we throw on restartable operations and just return the
// throwable on exceptions fatal for the .cctor
// (of course in the latter case the caller is supposed to throw pThrowable).
// Doing the opposite (i.e. throwing on fatal and returning on nonfatal)
// would be more intuitive, but it's more convenient the way it is.
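//
// Illustrative caller pattern (a sketch, not the only caller; pMT stands for
// the MethodTable whose .cctor is being run):
//
//     OBJECTREF throwable = NULL;
//     GCPROTECT_BEGIN(throwable);
//     if (!pMT->RunClassInitEx(&throwable))
//         COMPlusThrow(throwable);   // .cctor failed; surface the saved exception
//     GCPROTECT_END();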
3267 | |
3268 | BOOL fRet = FALSE; |
3269 | |
3270 | // During the <clinit>, this thread must not be asynchronously |
3271 | // stopped or interrupted. That would leave the class unavailable |
3272 | // and is therefore a security hole. We don't have to worry about |
3273 | // multithreading, since we only manipulate the current thread's count. |
3274 | ThreadPreventAsyncHolder preventAsync; |
3275 | |
// If the static initializer throws an exception that it doesn't catch, it has failed.
3277 | EX_TRY |
3278 | { |
3279 | // Activate our module if necessary |
3280 | EnsureInstanceActive(); |
3281 | |
STRESS_LOG1(LF_CLASSLOADER, LL_INFO1000, "RunClassInit: Calling class constructor for type %pT\n", this);
3283 | |
3284 | MethodTable * pCanonMT = GetCanonicalMethodTable(); |
3285 | |
3286 | // Call the code method without touching MethodDesc if possible |
3287 | PCODE pCctorCode = pCanonMT->GetSlot(pCanonMT->GetClassConstructorSlot()); |
3288 | |
3289 | if (pCanonMT->IsSharedByGenericInstantiations()) |
3290 | { |
3291 | PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode); |
3292 | DECLARE_ARGHOLDER_ARRAY(args, 1); |
3293 | args[ARGNUM_0] = PTR_TO_ARGHOLDER(this); |
3294 | CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE; |
3295 | CALL_MANAGED_METHOD_NORET(args); |
3296 | } |
3297 | else |
3298 | { |
3299 | PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(pCctorCode); |
3300 | DECLARE_ARGHOLDER_ARRAY(args, 0); |
3301 | CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE; |
3302 | CALL_MANAGED_METHOD_NORET(args); |
3303 | } |
3304 | |
STRESS_LOG1(LF_CLASSLOADER, LL_INFO100000, "RunClassInit: Returned successfully from class constructor for type %pT\n", this);
3306 | |
3307 | fRet = TRUE; |
3308 | } |
3309 | EX_CATCH |
3310 | { |
3311 | // Exception set by parent |
3312 | // <TODO>@TODO: We should make this an ExceptionInInitializerError if the exception thrown is not |
3313 | // a subclass of Error</TODO> |
3314 | *pThrowable = GET_THROWABLE(); |
3315 | _ASSERTE(fRet == FALSE); |
3316 | |
3317 | #ifdef FEATURE_CORRUPTING_EXCEPTIONS |
3318 | // If active thread state does not have a CorruptionSeverity set for the exception, |
3319 | // then set one up based upon the current exception code and/or the throwable. |
3320 | // |
// When can we be here with the current exception tracker not having corruption severity set?
// In case of a stack overflow (SO) in managed code: the SO is never seen by the CLR's exception
// handler for managed code, and if this happens in a .cctor, we can end up here without the
// corruption severity set.
3324 | Thread *pThread = GetThread(); |
3325 | _ASSERTE(pThread != NULL); |
3326 | ThreadExceptionState *pCurTES = pThread->GetExceptionState(); |
3327 | _ASSERTE(pCurTES != NULL); |
3328 | if (pCurTES->GetLastActiveExceptionCorruptionSeverity() == NotSet) |
3329 | { |
3330 | if (CEHelper::IsProcessCorruptedStateException(GetCurrentExceptionCode()) || |
3331 | CEHelper::IsProcessCorruptedStateException(*pThrowable)) |
3332 | { |
3333 | // Process Corrupting |
3334 | pCurTES->SetLastActiveExceptionCorruptionSeverity(ProcessCorrupting); |
LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception treated as ProcessCorrupting.\n"));
3336 | } |
3337 | else |
3338 | { |
3339 | // Not Corrupting |
3340 | pCurTES->SetLastActiveExceptionCorruptionSeverity(NotCorrupting); |
LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception treated as non-corrupting.\n"));
3342 | } |
3343 | } |
3344 | else |
3345 | { |
LOG((LF_EH, LL_INFO100, "MethodTable::RunClassInitEx - Exception already has corruption severity set.\n"));
3347 | } |
3348 | #endif // FEATURE_CORRUPTING_EXCEPTIONS |
3349 | } |
3350 | EX_END_CATCH(SwallowAllExceptions) |
3351 | |
3352 | return fRet; |
3353 | } |
3354 | |
3355 | //========================================================================================== |
3356 | void MethodTable::DoRunClassInitThrowing() |
3357 | { |
3358 | CONTRACTL |
3359 | { |
3360 | THROWS; |
3361 | GC_TRIGGERS; |
3362 | MODE_ANY; |
3363 | SO_TOLERANT; |
3364 | } |
3365 | CONTRACTL_END; |
3366 | |
3367 | GCX_COOP(); |
3368 | |
3369 | // This is a fairly aggressive policy. Merely asking that the class be initialized is grounds for kicking you out. |
3370 | // Alternately, we could simply NOP out the class initialization. Since the aggressive policy is also the more secure |
3371 | // policy, keep this unless it proves intractable to remove all premature classinits in the system. |
3372 | EnsureActive(); |
3373 | |
3374 | Thread *pThread; |
3375 | pThread = GetThread(); |
3376 | _ASSERTE(pThread); |
3377 | INTERIOR_STACK_PROBE_FOR(pThread, 8); |
3378 | |
3379 | AppDomain *pDomain = GetAppDomain(); |
3380 | |
3381 | HRESULT hrResult = E_FAIL; |
3382 | const char *description; |
STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: Request to init %pT in appdomain %p\n", this, pDomain);
3384 | |
3385 | // |
3386 | // Take the global lock |
3387 | // |
3388 | |
3389 | ListLock *_pLock = pDomain->GetClassInitLock(); |
3390 | |
3391 | ListLockHolder pInitLock(_pLock); |
3392 | |
3393 | // Check again |
3394 | if (IsClassInited()) |
3395 | goto Exit; |
3396 | |
3397 | // |
3398 | // Handle cases where the .cctor has already tried to run but failed. |
3399 | // |
3400 | |
3401 | |
3402 | if (IsInitError()) |
3403 | { |
3404 | // Some error occurred trying to init this class |
ListLockEntry* pEntry = (ListLockEntry *) _pLock->Find(this);
_ASSERTE(pEntry != NULL);
3407 | _ASSERTE(pEntry->m_pLoaderAllocator == (GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator())); |
3408 | |
3409 | // If this isn't a TypeInitializationException, then its creation failed |
3410 | // somehow previously, so we should make one last attempt to create it. If |
3411 | // that fails, just throw the exception that was originally thrown. |
3412 | // Primarily, this deals with the problem that the exception is a |
3413 | // ThreadAbortException, because this must be executing on a different |
3414 | // thread. If in fact this thread is also aborting, then rethrowing the |
3415 | // other thread's exception will not do any worse. |
3416 | |
3417 | // If we need to create the type init exception object, we'll need to |
3418 | // GC protect these, so might as well create the structure now. |
3419 | struct _gc { |
3420 | OBJECTREF pInitException; |
3421 | OBJECTREF pNewInitException; |
3422 | OBJECTREF pThrowable; |
3423 | } gc; |
3424 | |
3425 | gc.pInitException = pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException); |
3426 | gc.pNewInitException = NULL; |
3427 | gc.pThrowable = NULL; |
3428 | |
3429 | GCPROTECT_BEGIN(gc); |
3430 | |
3431 | // We need to release this lock because CreateTypeInitializationExceptionObject and fetching the TypeLoad exception can cause |
3432 | // managed code to re-enter into this codepath, causing a locking order violation. |
3433 | pInitLock.Release(); |
3434 | |
3435 | if (MscorlibBinder::GetException(kTypeInitializationException) != gc.pInitException->GetMethodTable()) |
3436 | { |
3437 | DefineFullyQualifiedNameForClassWOnStack(); |
3438 | LPCWSTR wszName = GetFullyQualifiedNameForClassW(this); |
3439 | |
3440 | CreateTypeInitializationExceptionObject(wszName, &gc.pInitException, &gc.pNewInitException, &gc.pThrowable); |
3441 | |
3442 | LOADERHANDLE hOrigInitException = pEntry->m_hInitException; |
3443 | if (!CLRException::IsPreallocatedExceptionObject(pEntry->m_pLoaderAllocator->GetHandleValue(hOrigInitException))) |
3444 | { |
3445 | // Now put the new init exception in the handle. If another thread beat us (because we released the |
3446 | // lock above), then we'll just let the extra init exception object get collected later. |
3447 | pEntry->m_pLoaderAllocator->CompareExchangeValueInHandle(pEntry->m_hInitException, gc.pNewInitException, gc.pInitException); |
}
else
{
// If the stored exception is a preallocated one, we cannot store the new exception object in it;
// we'll attempt to create a new handle for the new TypeInitializationException object.
3451 | LOADERHANDLE hNewInitException = NULL; |
3452 | // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the |
3453 | // init error. Whatever exception was thrown will be rethrown below, so no worries. |
3454 | EX_TRY { |
3455 | hNewInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pNewInitException); |
3456 | } EX_CATCH { |
3457 | // If we failed to create the handle we'll just leave the originally alloc'd one in place. |
3458 | } EX_END_CATCH(SwallowAllExceptions); |
3459 | |
3460 | // if two threads are racing to set m_hInitException, clear the handle created by the loser |
3461 | if (hNewInitException != NULL && |
3462 | InterlockedCompareExchangeT((&pEntry->m_hInitException), hNewInitException, hOrigInitException) != hOrigInitException) |
3463 | { |
3464 | pEntry->m_pLoaderAllocator->FreeHandle(hNewInitException); |
3465 | } |
3466 | } |
3467 | } |
else
{
3469 | gc.pThrowable = gc.pInitException; |
3470 | } |
3471 | |
3472 | GCPROTECT_END(); |
3473 | |
3474 | // Throw the saved exception. Since we may be rethrowing a previously cached exception, must clear the stack trace first. |
3475 | // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett. |
3476 | // |
3477 | // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path, |
3478 | // I prefer to be defensive here.) |
3479 | if (IsException(gc.pThrowable->GetMethodTable())) |
3480 | { |
3481 | ((EXCEPTIONREF)(gc.pThrowable))->ClearStackTraceForThrow(); |
3482 | } |
3483 | |
3484 | // <FEATURE_CORRUPTING_EXCEPTIONS> |
3485 | // Specify the corruption severity to be used to raise this exception in COMPlusThrow below. |
3486 | // This will ensure that when the exception is seen by the managed code personality routine, |
3487 | // it will setup the correct corruption severity in the exception tracker. |
3488 | // </FEATURE_CORRUPTING_EXCEPTIONS> |
3489 | |
3490 | COMPlusThrow(gc.pThrowable |
3491 | #ifdef FEATURE_CORRUPTING_EXCEPTIONS |
3492 | , pEntry->m_CorruptionSeverity |
3493 | #endif // FEATURE_CORRUPTING_EXCEPTIONS |
3494 | ); |
3495 | } |
3496 | |
description = ".cctor lock";
3498 | #ifdef _DEBUG |
3499 | description = GetDebugClassName(); |
3500 | #endif |
3501 | |
3502 | // Take the lock |
3503 | { |
// Nontrivial holder; might take a lock in destructor.
3505 | ListLockEntryHolder pEntry(ListLockEntry::Find(pInitLock, this, description)); |
3506 | |
3507 | ListLockEntryLockHolder pLock(pEntry, FALSE); |
3508 | |
3509 | // We have a list entry, we can release the global lock now |
3510 | pInitLock.Release(); |
3511 | |
3512 | if (pLock.DeadlockAwareAcquire()) |
3513 | { |
3514 | if (pEntry->m_hrResultCode == S_FALSE) |
3515 | { |
3516 | if (!NingenEnabled()) |
3517 | { |
3518 | if (HasBoxedRegularStatics()) |
3519 | { |
3520 | // First, instantiate any objects needed for value type statics |
3521 | AllocateRegularStaticBoxes(); |
3522 | } |
3523 | |
3524 | // Nobody has run the .cctor yet |
3525 | if (HasClassConstructor()) |
3526 | { |
3527 | struct _gc { |
3528 | OBJECTREF pInnerException; |
3529 | OBJECTREF pInitException; |
3530 | OBJECTREF pThrowable; |
3531 | } gc; |
3532 | gc.pInnerException = NULL; |
3533 | gc.pInitException = NULL; |
3534 | gc.pThrowable = NULL; |
3535 | GCPROTECT_BEGIN(gc); |
3536 | |
3537 | if (!RunClassInitEx(&gc.pInnerException)) |
3538 | { |
3539 | // The .cctor failed and we want to store the exception that resulted |
3540 | // in the entry. Increment the ref count to keep the entry alive for |
3541 | // subsequent attempts to run the .cctor. |
3542 | pEntry->AddRef(); |
3543 | // For collectible types, register the entry for cleanup. |
3544 | if (GetLoaderAllocator()->IsCollectible()) |
3545 | { |
3546 | GetLoaderAllocator()->RegisterFailedTypeInitForCleanup(pEntry); |
3547 | } |
3548 | |
3549 | _ASSERTE(g_pThreadAbortExceptionClass == MscorlibBinder::GetException(kThreadAbortException)); |
3550 | |
if (gc.pInnerException->GetMethodTable() == g_pThreadAbortExceptionClass)
3552 | { |
3553 | gc.pThrowable = gc.pInnerException; |
3554 | gc.pInitException = gc.pInnerException; |
3555 | gc.pInnerException = NULL; |
3556 | } |
3557 | else |
3558 | { |
3559 | DefineFullyQualifiedNameForClassWOnStack(); |
3560 | LPCWSTR wszName = GetFullyQualifiedNameForClassW(this); |
3561 | |
// Note that this may not succeed due to problems creating the exception
// object.
3564 | CreateTypeInitializationExceptionObject( |
3565 | wszName, &gc.pInnerException, &gc.pInitException, &gc.pThrowable); |
3566 | } |
3567 | |
3568 | pEntry->m_pLoaderAllocator = GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator(); |
3569 | |
3570 | // CreateHandle can throw due to OOM. We need to catch this so that we make sure to set the |
3571 | // init error. Whatever exception was thrown will be rethrown below, so no worries. |
3572 | EX_TRY { |
3573 | // Save the exception object, and return to caller as well. |
3574 | pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(gc.pInitException); |
3575 | } EX_CATCH { |
3576 | // If we failed to create the handle (due to OOM), we'll just store the preallocated OOM |
3577 | // handle here instead. |
3578 | pEntry->m_hInitException = pEntry->m_pLoaderAllocator->AllocateHandle(CLRException::GetPreallocatedOutOfMemoryException()); |
3579 | } EX_END_CATCH(SwallowAllExceptions); |
3580 | |
3581 | pEntry->m_hrResultCode = E_FAIL; |
3582 | SetClassInitError(); |
3583 | |
3584 | #ifdef FEATURE_CORRUPTING_EXCEPTIONS |
3585 | // Save the corruption severity of the exception so that if the type system |
3586 | // attempts to pick it up from its cache list and throw again, it should |
3587 | // treat the exception as corrupting, if applicable. |
3588 | pEntry->m_CorruptionSeverity = pThread->GetExceptionState()->GetLastActiveExceptionCorruptionSeverity(); |
3589 | |
3590 | // We should be having a valid corruption severity at this point |
3591 | _ASSERTE(pEntry->m_CorruptionSeverity != NotSet); |
3592 | #endif // FEATURE_CORRUPTING_EXCEPTIONS |
3593 | |
3594 | COMPlusThrow(gc.pThrowable |
3595 | #ifdef FEATURE_CORRUPTING_EXCEPTIONS |
3596 | , pEntry->m_CorruptionSeverity |
3597 | #endif // FEATURE_CORRUPTING_EXCEPTIONS |
3598 | ); |
3599 | } |
3600 | |
3601 | GCPROTECT_END(); |
3602 | } |
3603 | } |
3604 | |
3605 | pEntry->m_hrResultCode = S_OK; |
3606 | |
3607 | // Set the initialization flags in the DLS and on domain-specific types. |
3608 | // Note we also set the flag for dynamic statics, which use the DynamicStatics part |
3609 | // of the DLS irrespective of whether the type is domain neutral or not. |
3610 | SetClassInited(); |
3611 | |
3612 | } |
3613 | else |
3614 | { |
3615 | // Use previous result |
3616 | |
3617 | hrResult = pEntry->m_hrResultCode; |
if (FAILED(hrResult))
3619 | { |
3620 | // An exception may have occurred in the cctor. DoRunClassInit() should return FALSE in that |
3621 | // case. |
3622 | _ASSERTE(pEntry->m_hInitException); |
3623 | _ASSERTE(pEntry->m_pLoaderAllocator == (GetDomain()->IsSharedDomain() ? pDomain->GetLoaderAllocator() : GetLoaderAllocator())); |
3624 | _ASSERTE(IsInitError()); |
3625 | |
3626 | // Throw the saved exception. Since we are rethrowing a previously cached exception, must clear the stack trace first. |
3627 | // Rethrowing a previously cached exception is distasteful but is required for appcompat with Everett. |
3628 | // |
3629 | // (The IsException() is probably more appropriate as an assert but as this isn't a heavily tested code path, |
3630 | // I prefer to be defensive here.) |
3631 | if (IsException(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)->GetMethodTable())) |
3632 | { |
3633 | ((EXCEPTIONREF)(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)))->ClearStackTraceForThrow(); |
3634 | } |
3635 | COMPlusThrow(pEntry->m_pLoaderAllocator->GetHandleValue(pEntry->m_hInitException)); |
3636 | } |
3637 | } |
3638 | } |
3639 | } |
3640 | |
3641 | // |
3642 | // Notify any entries waiting on the current entry and wait for the required entries. |
3643 | // |
3644 | |
3645 | // We need to take the global lock before we play with the list of entries. |
3646 | |
STRESS_LOG2(LF_CLASSLOADER, LL_INFO100000, "DoRunClassInit: returning SUCCESS for init %pT in appdomain %p\n", this, pDomain);
3648 | // No need to set pThrowable in case of error it will already have been set. |
3649 | |
3650 | g_IBCLogger.LogMethodTableAccess(this); |
3651 | Exit: |
3652 | ; |
3653 | END_INTERIOR_STACK_PROBE; |
3654 | } |
3655 | |
3656 | //========================================================================================== |
3657 | void MethodTable::CheckRunClassInitThrowing() |
3658 | { |
3659 | CONTRACTL |
3660 | { |
3661 | THROWS; |
3662 | GC_TRIGGERS; |
3663 | SO_TOLERANT; |
3664 | INJECT_FAULT(COMPlusThrowOM()); |
3665 | PRECONDITION(IsFullyLoaded()); |
3666 | } |
3667 | CONTRACTL_END; |
3668 | |
{ // Debug-only code causes an SO violation, so add an exception.
3670 | CONTRACT_VIOLATION(SOToleranceViolation); |
3671 | CONSISTENCY_CHECK(CheckActivated()); |
3672 | } |
3673 | |
// To make GC holes easier to find...
3675 | TRIGGERSGC(); |
3676 | |
3677 | if (IsClassPreInited()) |
3678 | return; |
3679 | |
3680 | // Don't initialize shared generic instantiations (e.g. MyClass<__Canon>) |
3681 | if (IsSharedByGenericInstantiations()) |
3682 | return; |
3683 | |
3684 | DomainLocalModule *pLocalModule = GetDomainLocalModule(); |
3685 | _ASSERTE(pLocalModule); |
3686 | |
3687 | DWORD iClassIndex = GetClassIndex(); |
3688 | |
3689 | // Check to see if we have already run the .cctor for this class. |
3690 | if (!pLocalModule->IsClassAllocated(this, iClassIndex)) |
3691 | pLocalModule->PopulateClass(this); |
3692 | |
3693 | if (!pLocalModule->IsClassInitialized(this, iClassIndex)) |
3694 | DoRunClassInitThrowing(); |
3695 | } |
3696 | |
3697 | //========================================================================================== |
3698 | void MethodTable::CheckRunClassInitAsIfConstructingThrowing() |
3699 | { |
3700 | CONTRACTL |
3701 | { |
3702 | THROWS; |
3703 | GC_TRIGGERS; |
3704 | SO_TOLERANT; |
3705 | MODE_ANY; |
3706 | } |
3707 | CONTRACTL_END; |
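// Run the .cctor of this type and of every parent that is precise-init
// (i.e. not marked beforefieldinit), as if an instance were about to be
// constructed.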
3708 | if (HasPreciseInitCctors()) |
3709 | { |
3710 | MethodTable *pMTCur = this; |
3711 | while (pMTCur != NULL) |
3712 | { |
3713 | if (!pMTCur->GetClass()->IsBeforeFieldInit()) |
3714 | pMTCur->CheckRunClassInitThrowing(); |
3715 | |
3716 | pMTCur = pMTCur->GetParentMethodTable(); |
3717 | } |
3718 | } |
3719 | } |
3720 | |
3721 | //========================================================================================== |
3722 | OBJECTREF MethodTable::Allocate() |
3723 | { |
3724 | CONTRACTL |
3725 | { |
3726 | MODE_COOPERATIVE; |
3727 | GC_TRIGGERS; |
3728 | THROWS; |
3729 | } |
3730 | CONTRACTL_END; |
3731 | |
3732 | CONSISTENCY_CHECK(IsFullyLoaded()); |
3733 | |
3734 | EnsureInstanceActive(); |
3735 | |
3736 | if (HasPreciseInitCctors()) |
3737 | { |
3738 | CheckRunClassInitAsIfConstructingThrowing(); |
3739 | } |
3740 | |
3741 | return AllocateObject(this); |
3742 | } |
3743 | |
3744 | //========================================================================================== |
3745 | // box 'data' creating a new object and return it. This routine understands the special |
3746 | // handling needed for Nullable values. |
3747 | // see code:Nullable#NullableVerification |
3748 | |
3749 | OBJECTREF MethodTable::Box(void* data) |
3750 | { |
3751 | CONTRACTL |
3752 | { |
3753 | THROWS; |
3754 | GC_TRIGGERS; |
3755 | MODE_COOPERATIVE; |
3756 | PRECONDITION(IsValueType()); |
3757 | } |
3758 | CONTRACTL_END; |
3759 | |
3760 | OBJECTREF ref; |
3761 | |
3762 | GCPROTECT_BEGININTERIOR (data); |
3763 | |
3764 | if (IsByRefLike()) |
3765 | { |
3766 | // We should never box a type that contains stack pointers. |
COMPlusThrow(kInvalidOperationException, W("InvalidOperation_TypeCannotBeBoxed"));
3768 | } |
3769 | |
3770 | ref = FastBox(&data); |
3771 | GCPROTECT_END (); |
3772 | return ref; |
3773 | } |
3774 | |
3775 | OBJECTREF MethodTable::FastBox(void** data) |
3776 | { |
3777 | CONTRACTL |
3778 | { |
3779 | THROWS; |
3780 | GC_TRIGGERS; |
3781 | MODE_COOPERATIVE; |
3782 | PRECONDITION(IsValueType()); |
3783 | } |
3784 | CONTRACTL_END; |
3785 | |
3786 | // See code:Nullable#NullableArchitecture for more |
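// (Boxing a Nullable<T> with HasValue == false yields a null reference;
// with HasValue == true it yields a boxed T, not a boxed Nullable<T>.)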
3787 | if (IsNullable()) |
3788 | return Nullable::Box(*data, this); |
3789 | |
3790 | OBJECTREF ref = Allocate(); |
3791 | CopyValueClass(ref->UnBox(), *data, this, ref->GetAppDomain()); |
3792 | return ref; |
3793 | } |
3794 | |
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3796 | //========================================================================================== |
3797 | static void FastCallFinalize(Object *obj, PCODE funcPtr, BOOL fCriticalCall) |
3798 | { |
3799 | STATIC_CONTRACT_THROWS; |
3800 | STATIC_CONTRACT_GC_TRIGGERS; |
3801 | STATIC_CONTRACT_MODE_COOPERATIVE; |
3802 | STATIC_CONTRACT_SO_INTOLERANT; |
3803 | |
3804 | BEGIN_CALL_TO_MANAGEDEX(fCriticalCall ? EEToManagedCriticalCall : EEToManagedDefault); |
3805 | |
3806 | #if defined(_TARGET_X86_) |
3807 | |
3808 | __asm |
3809 | { |
3810 | mov ecx, [obj] |
3811 | call [funcPtr] |
3812 | INDEBUG(nop) // Mark the fact that we can call managed code |
3813 | } |
3814 | |
3815 | #else // _TARGET_X86_ |
3816 | |
3817 | FastCallFinalizeWorker(obj, funcPtr); |
3818 | |
3819 | #endif // _TARGET_X86_ |
3820 | |
3821 | END_CALL_TO_MANAGED(); |
3822 | } |
3823 | |
#endif // defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3825 | |
3826 | void CallFinalizerOnThreadObject(Object *obj) |
3827 | { |
3828 | STATIC_CONTRACT_MODE_COOPERATIVE; |
3829 | |
3830 | THREADBASEREF refThis = (THREADBASEREF)ObjectToOBJECTREF(obj); |
3831 | Thread* thread = refThis->GetInternal(); |
3832 | |
3833 | // Prevent multiple calls to Finalize |
3834 | // Objects can be resurrected after being finalized. However, there is no |
3835 | // race condition here. We always check whether an exposed thread object is |
3836 | // still attached to the internal Thread object, before proceeding. |
3837 | if (thread) |
3838 | { |
3839 | refThis->SetDelegate(NULL); |
3840 | |
3841 | // During process shutdown, we finalize even reachable objects. But if we break |
3842 | // the link between the System.Thread and the internal Thread object, the runtime |
3843 | // may not work correctly. In particular, we won't be able to transition between |
3844 | // contexts and domains to finalize other objects. Since the runtime doesn't |
3845 | // require that Threads finalize during shutdown, we need to disable this. If |
3846 | // we wait until phase 2 of shutdown finalization (when the EE is suspended and |
3847 | // will never resume) then we can simply skip the side effects of Thread |
3848 | // finalization. |
3849 | if ((g_fEEShutDown & ShutDown_Finalize2) == 0) |
3850 | { |
3851 | if (GetThread() != thread) |
3852 | { |
3853 | refThis->ClearInternal(); |
3854 | } |
3855 | |
3856 | FastInterlockOr ((ULONG *)&thread->m_State, Thread::TS_Finalized); |
3857 | Thread::SetCleanupNeededForFinalizedThread(); |
3858 | } |
3859 | } |
3860 | } |
3861 | |
3862 | //========================================================================================== |
3863 | // From the GC finalizer thread, invoke the Finalize() method on an object. |
3864 | void MethodTable::CallFinalizer(Object *obj) |
3865 | { |
3866 | CONTRACTL |
3867 | { |
3868 | THROWS; |
3869 | GC_TRIGGERS; |
3870 | MODE_COOPERATIVE; |
3871 | PRECONDITION(obj->GetMethodTable()->HasFinalizer()); |
3872 | } |
3873 | CONTRACTL_END; |
3874 | |
3875 | // Never call any finalizers under ngen for determinism |
3876 | if (IsCompilationProcess()) |
3877 | { |
3878 | return; |
3879 | } |
3880 | |
3881 | MethodTable *pMT = obj->GetMethodTable(); |
3882 | |
3883 | |
// Check for precise-init class constructors that have failed. If any have failed, then we didn't run the
// constructor for the object, and running the finalizer for the object would violate the CLI spec by running
// instance code without having successfully run the precise-init class constructor.
3887 | if (pMT->HasPreciseInitCctors()) |
3888 | { |
3889 | MethodTable *pMTCur = pMT; |
3890 | do |
3891 | { |
3892 | if ((!pMTCur->GetClass()->IsBeforeFieldInit()) && pMTCur->IsInitError()) |
3893 | { |
3894 | // Precise init Type Initializer for type failed... do not run finalizer |
3895 | return; |
3896 | } |
3897 | |
3898 | pMTCur = pMTCur->GetParentMethodTable(); |
3899 | } |
3900 | while (pMTCur != NULL); |
3901 | } |
3902 | |
3903 | if (pMT == g_pThreadClass) |
3904 | { |
// Finalizing a Thread object requires the ThreadStoreLock, which is expensive
// to take repeatedly. This is very bad when Thread objects are retired at a
// high rate. To avoid taking the ThreadStoreLock multiple times, we mark the
// Thread with TS_Finalized and clean up a batch of them the next time we take
// the ThreadStoreLock.
3910 | |
3911 | // To avoid possible hierarchy requirement between critical finalizers, we call cleanup |
3912 | // code directly. |
3913 | CallFinalizerOnThreadObject(obj); |
3914 | return; |
3915 | } |
3916 | |
3917 | |
3918 | // Determine if the object has a critical or normal finalizer. |
3919 | BOOL fCriticalFinalizer = pMT->HasCriticalFinalizer(); |
3920 | |
3921 | // There's no reason to actually set up a frame here. If we crawl out of the |
3922 | // Finalize() method on this thread, we will see FRAME_TOP which indicates |
3923 | // that the crawl should terminate. This is analogous to how KickOffThread() |
3924 | // starts new threads in the runtime. |
3925 | PCODE funcPtr = pMT->GetRestoredSlot(g_pObjectFinalizerMD->GetSlot()); |
3926 | |
3927 | #ifdef STRESS_LOG |
3928 | if (fCriticalFinalizer) |
3929 | { |
STRESS_LOG2(LF_GCALLOC, LL_INFO100, "Finalizing CriticalFinalizer %pM in domain %d\n",
3931 | pMT, GetAppDomain()->GetId().m_dwId); |
3932 | } |
3933 | #endif |
3934 | |
3935 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
3936 | |
3937 | #ifdef DEBUGGING_SUPPORTED |
3938 | if (CORDebuggerTraceCall()) |
3939 | g_pDebugInterface->TraceCall((const BYTE *) funcPtr); |
3940 | #endif // DEBUGGING_SUPPORTED |
3941 | |
3942 | FastCallFinalize(obj, funcPtr, fCriticalFinalizer); |
3943 | |
3944 | #else // defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
3945 | |
3946 | PREPARE_NONVIRTUAL_CALLSITE_USING_CODE(funcPtr); |
3947 | |
3948 | DECLARE_ARGHOLDER_ARRAY(args, 1); |
3949 | |
3950 | args[ARGNUM_0] = PTR_TO_ARGHOLDER(obj); |
3951 | |
3952 | if (fCriticalFinalizer) |
3953 | { |
3954 | CRITICAL_CALLSITE; |
3955 | } |
3956 | |
3957 | CALL_MANAGED_METHOD_NORET(args); |
3958 | |
#endif // defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
3960 | |
3961 | #ifdef STRESS_LOG |
3962 | if (fCriticalFinalizer) |
3963 | { |
STRESS_LOG2(LF_GCALLOC, LL_INFO100, "Finalized CriticalFinalizer %pM in domain %d without exception\n",
3965 | pMT, GetAppDomain()->GetId().m_dwId); |
3966 | } |
3967 | #endif |
3968 | } |
3969 | |
3970 | //========================================================================== |
3971 | // If the MethodTable doesn't yet know the Exposed class that represents it via |
3972 | // Reflection, acquire that class now. Regardless, return it to the caller. |
3973 | //========================================================================== |
3974 | OBJECTREF MethodTable::GetManagedClassObject() |
3975 | { |
3976 | CONTRACT(OBJECTREF) { |
3977 | |
3978 | THROWS; |
3979 | GC_TRIGGERS; |
3980 | MODE_COOPERATIVE; |
3981 | INJECT_FAULT(COMPlusThrowOM()); |
3982 | PRECONDITION(!IsArray()); // Arrays can't go through this path. |
3983 | POSTCONDITION(GetWriteableData()->m_hExposedClassObject != 0); |
3984 | //REENTRANT |
3985 | } |
3986 | CONTRACT_END; |
3987 | |
3988 | #ifdef _DEBUG |
// Force a GC here because GetManagedClassObject could trigger GC nondeterministically
3990 | GCStress<cfg_any, PulseGcTriggerPolicy>::MaybeTrigger(); |
3991 | #endif // _DEBUG |
3992 | |
3993 | if (GetWriteableData()->m_hExposedClassObject == NULL) |
3994 | { |
3995 | // Make sure that we have been restored |
3996 | CheckRestore(); |
3997 | |
3998 | REFLECTCLASSBASEREF refClass = NULL; |
3999 | GCPROTECT_BEGIN(refClass); |
4000 | refClass = (REFLECTCLASSBASEREF) AllocateObject(g_pRuntimeTypeClass); |
4001 | |
4002 | LoaderAllocator *pLoaderAllocator = GetLoaderAllocator(); |
4003 | |
4004 | ((ReflectClassBaseObject*)OBJECTREFToObject(refClass))->SetType(TypeHandle(this)); |
4005 | ((ReflectClassBaseObject*)OBJECTREFToObject(refClass))->SetKeepAlive(pLoaderAllocator->GetExposedObject()); |
4006 | |
4007 | // Let all threads fight over who wins using InterlockedCompareExchange. |
4008 | // Only the winner can set m_ExposedClassObject from NULL. |
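// The losing thread frees its handle below; the winning thread's handle keeps
// the exposed object alive via the loader allocator.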
4009 | LOADERHANDLE exposedClassObjectHandle = pLoaderAllocator->AllocateHandle(refClass); |
4010 | |
4011 | if (FastInterlockCompareExchangePointer(&(EnsureWritablePages(GetWriteableDataForWrite())->m_hExposedClassObject), exposedClassObjectHandle, static_cast<LOADERHANDLE>(NULL))) |
4012 | { |
4013 | pLoaderAllocator->FreeHandle(exposedClassObjectHandle); |
4014 | } |
4015 | |
4016 | GCPROTECT_END(); |
4017 | } |
4018 | RETURN(GetManagedClassObjectIfExists()); |
4019 | } |
4020 | |
4021 | #endif //!DACCESS_COMPILE && !CROSSGEN_COMPILE |
4022 | |
4023 | //========================================================================================== |
4024 | // This needs to stay consistent with AllocateNewMT() and MethodTable::Save() |
4025 | // |
4026 | // <TODO> protect this via some asserts as we've had one hard-to-track-down |
4027 | // bug already </TODO> |
4028 | // |
4029 | void MethodTable::GetSavedExtent(TADDR *pStart, TADDR *pEnd) |
4030 | { |
4031 | CONTRACTL |
4032 | { |
4033 | NOTHROW; |
4034 | GC_NOTRIGGER; |
4035 | } |
4036 | CONTRACTL_END; |
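// For types that contain pointers (or are collectible), the GCDesc is laid out
// immediately before the MethodTable, so the saved extent starts before 'this'.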
4037 | |
4038 | TADDR start; |
4039 | |
4040 | if (ContainsPointersOrCollectible()) |
4041 | start = dac_cast<TADDR>(this) - CGCDesc::GetCGCDescFromMT(this)->GetSize(); |
4042 | else |
4043 | start = dac_cast<TADDR>(this); |
4044 | |
4045 | TADDR end = dac_cast<TADDR>(this) + GetEndOffsetOfOptionalMembers(); |
4046 | |
4047 | _ASSERTE(start && end && (start < end)); |
4048 | *pStart = start; |
4049 | *pEnd = end; |
4050 | } |
4051 | |
4052 | #ifdef FEATURE_NATIVE_IMAGE_GENERATION |
4053 | |
4054 | #ifndef DACCESS_COMPILE |
4055 | |
4056 | BOOL MethodTable::CanInternVtableChunk(DataImage *image, VtableIndirectionSlotIterator it) |
4057 | { |
4058 | STANDARD_VM_CONTRACT; |
4059 | |
4060 | _ASSERTE(IsCompilationProcess()); |
4061 | |
4062 | BOOL canBeSharedWith = TRUE; |
4063 | |
4064 | // We allow full sharing except that which would break MethodTable::Fixup -- when the slots are Fixup'd |
4065 | // we need to ensure that regardless of who is doing the Fixup the same target is decided on. |
4066 | // Note that if this requirement is not met, an assert will fire in ZapStoredStructure::Save |
4067 | |
4068 | if (GetFlag(enum_flag_NotInPZM)) |
4069 | { |
4070 | canBeSharedWith = FALSE; |
4071 | } |
4072 | |
4073 | if (canBeSharedWith) |
4074 | { |
4075 | for (DWORD slotNumber = it.GetStartSlot(); slotNumber < it.GetEndSlot(); slotNumber++) |
4076 | { |
4077 | MethodDesc *pMD = GetMethodDescForSlot(slotNumber); |
4078 | _ASSERTE(pMD != NULL); |
4079 | pMD->CheckRestore(); |
4080 | |
4081 | if (!image->CanEagerBindToMethodDesc(pMD)) |
4082 | { |
4083 | canBeSharedWith = FALSE; |
4084 | break; |
4085 | } |
4086 | } |
4087 | } |
4088 | |
4089 | return canBeSharedWith; |
4090 | } |
4091 | |
4092 | //========================================================================================== |
4093 | void MethodTable::PrepopulateDictionary(DataImage * image, BOOL nonExpansive) |
4094 | { |
4095 | STANDARD_VM_CONTRACT; |
4096 | |
4097 | if (GetDictionary()) |
4098 | { |
4099 | // We can only save elements of the dictionary if we are sure of its |
4100 | // layout, which means we must be either tightly-knit to the EEClass |
4101 | // (i.e. be the owner of the EEClass) or else we can hard-bind to the EEClass. |
4102 | // There's no point in prepopulating the dictionary if we can't save the entries. |
4103 | // |
4104 | // This corresponds to the canSaveSlots which we pass to the Dictionary::Fixup |
4105 | |
4106 | if (!IsCanonicalMethodTable() && image->CanEagerBindToMethodTable(GetCanonicalMethodTable())) |
4107 | { |
LOG((LF_JIT, LL_INFO10000, "GENERICS: Prepopulating dictionary for MT %s\n", GetDebugClassName()));
4109 | GetDictionary()->PrepopulateDictionary(NULL, this, nonExpansive); |
4110 | } |
4111 | } |
4112 | } |
4113 | |
4114 | //========================================================================================== |
4115 | void ModuleCtorInfo::AddElement(MethodTable *pMethodTable) |
4116 | { |
4117 | STANDARD_VM_CONTRACT; |
4118 | |
4119 | // Get the values for the new entry before we update the |
4120 | // cache in the Module |
4121 | |
4122 | // Expand the table if needed. No lock is needed because this is at NGEN time |
4123 | if (numElements >= numLastAllocated) |
4124 | { |
4125 | _ASSERTE(numElements == numLastAllocated); |
4126 | |
4127 | RelativePointer<MethodTable *> *ppOldMTEntries = ppMT; |
4128 | |
4129 | #ifdef _PREFAST_ |
4130 | #pragma warning(push) |
4131 | #pragma warning(disable:22011) // Suppress PREFast warning about integer overflows or underflows |
4132 | #endif // _PREFAST_ |
4133 | DWORD numNewAllocated = max(2 * numLastAllocated, MODULE_CTOR_ELEMENTS); |
4134 | #ifdef _PREFAST_ |
4135 | #pragma warning(pop) |
4136 | #endif // _PREFAST_ |
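// The table starts at MODULE_CTOR_ELEMENTS entries and doubles thereafter,
// amortizing the copy cost across insertions.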
4137 | |
4138 | ppMT = new RelativePointer<MethodTable *> [numNewAllocated]; |
4139 | |
4140 | _ASSERTE(ppMT); |
4141 | |
4142 | for (unsigned index = 0; index < numLastAllocated; ++index) |
4143 | { |
4144 | ppMT[index].SetValueMaybeNull(ppOldMTEntries[index].GetValueMaybeNull()); |
4145 | } |
4146 | |
4147 | for (unsigned index = numLastAllocated; index < numNewAllocated; ++index) |
4148 | { |
4149 | ppMT[index].SetValueMaybeNull(NULL); |
4150 | } |
4151 | |
4152 | delete[] ppOldMTEntries; |
4153 | |
4154 | numLastAllocated = numNewAllocated; |
4155 | } |
4156 | |
4157 | // Assign the new entry |
4158 | // |
// Note the use of two "parallel" arrays. We do this to keep the working set smaller since we
// often search (in GetClassCtorInfoIfExists) for a MethodTable pointer but never actually find it.
4161 | |
4162 | ppMT[numElements].SetValue(pMethodTable); |
4163 | numElements++; |
4164 | } |
4165 | |
4166 | //========================================================================================== |
4167 | void MethodTable::Save(DataImage *image, DWORD profilingFlags) |
4168 | { |
4169 | CONTRACTL { |
4170 | STANDARD_VM_CHECK; |
4171 | PRECONDITION(IsRestored_NoLogging()); |
4172 | PRECONDITION(IsFullyLoaded()); |
4173 | PRECONDITION(image->GetModule()->GetAssembly() == |
4174 | GetAppDomain()->ToCompilationDomain()->GetTargetAssembly()); |
4175 | } CONTRACTL_END; |
4176 | |
LOG((LF_ZAP, LL_INFO10000, "MethodTable::Save %s (%p)\n", GetDebugClassName(), this));
4178 | |
4179 | // Be careful about calling DictionaryLayout::Trim - strict conditions apply. |
4180 | // See note on that method. |
4181 | if (GetDictionary() && |
4182 | GetClass()->GetDictionaryLayout() && |
4183 | image->CanEagerBindToMethodTable(GetCanonicalMethodTable())) |
4184 | { |
4185 | GetClass()->GetDictionaryLayout()->Trim(); |
4186 | } |
4187 | |
4188 | // Set the "restore" flags. They may not have been set yet. |
4189 | // We don't need the return value of this call. |
4190 | NeedsRestore(image); |
4191 | |
// Check if this is actually in the PZM (preferred zap module)
4193 | if (Module::GetPreferredZapModuleForMethodTable(this) != GetLoaderModule()) |
4194 | { |
4195 | _ASSERTE(!IsStringOrArray()); |
4196 | SetFlag(enum_flag_NotInPZM); |
4197 | } |
4198 | |
4199 | // Set the IsStructMarshallable Bit |
4200 | if (::IsStructMarshalable(this)) |
4201 | { |
4202 | SetStructMarshalable(); |
4203 | } |
4204 | |
4205 | TADDR start, end; |
4206 | |
4207 | GetSavedExtent(&start, &end); |
4208 | |
4209 | #ifdef FEATURE_COMINTEROP |
4210 | if (HasGuidInfo()) |
4211 | { |
4212 | // Make sure our GUID is computed |
4213 | |
4214 | // Generic WinRT types can have their GUID computed only if the instantiation is WinRT-legal |
4215 | if (IsLegalNonArrayWinRTType()) |
4216 | { |
4217 | GUID dummy; |
4218 | if (SUCCEEDED(GetGuidNoThrow(&dummy, TRUE, FALSE))) |
4219 | { |
4220 | GuidInfo* pGuidInfo = GetGuidInfo(); |
4221 | _ASSERTE(pGuidInfo != NULL); |
4222 | |
4223 | image->StoreStructure(pGuidInfo, |
4224 | sizeof(GuidInfo), |
4225 | DataImage::ITEM_GUID_INFO); |
4226 | |
4227 | Module *pModule = GetModule(); |
4228 | if (pModule->CanCacheWinRTTypeByGuid(this)) |
4229 | { |
4230 | pModule->CacheWinRTTypeByGuid(this, pGuidInfo); |
4231 | } |
4232 | } |
4233 | else |
4234 | { |
4235 | GuidInfo** ppGuidInfo = GetGuidInfoPtr(); |
4236 | *ppGuidInfo = NULL; |
4237 | } |
4238 | } |
4239 | } |
4240 | #endif // FEATURE_COMINTEROP |
4241 | |
4242 | |
4243 | #ifdef _DEBUG |
4244 | if (GetDebugClassName() != NULL && !image->IsStored(GetDebugClassName())) |
4245 | image->StoreStructure(debug_m_szClassName, (ULONG)(strlen(GetDebugClassName())+1), |
4246 | DataImage::ITEM_DEBUG, |
4247 | 1); |
4248 | #endif // _DEBUG |
4249 | |
4250 | DataImage::ItemKind kindBasic = DataImage::ITEM_METHOD_TABLE; |
4251 | if (IsWriteable()) |
4252 | kindBasic = DataImage::ITEM_METHOD_TABLE_SPECIAL_WRITEABLE; |
4253 | |
4254 | ZapStoredStructure * pMTNode = image->StoreStructure((void*) start, (ULONG)(end - start), kindBasic); |
4255 | |
4256 | if ((void *)this != (void *)start) |
4257 | image->BindPointer(this, pMTNode, (BYTE *)this - (BYTE *)start); |
4258 | |
4259 | // Store the vtable chunks |
4260 | VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots(); |
4261 | while (it.Next()) |
4262 | { |
4263 | if (!image->IsStored(it.GetIndirectionSlot())) |
4264 | { |
4265 | if (!MethodTable::VTableIndir2_t::isRelative |
4266 | && CanInternVtableChunk(image, it)) |
4267 | image->StoreInternedStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK); |
4268 | else |
4269 | image->StoreStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK); |
4270 | } |
4271 | else |
4272 | { |
4273 | // Tell the interning system that we have already shared this structure without its help |
4274 | image->NoteReusedStructure(it.GetIndirectionSlot()); |
4275 | } |
4276 | } |
4277 | |
4278 | if (HasNonVirtualSlotsArray()) |
4279 | { |
4280 | image->StoreStructure(GetNonVirtualSlotsArray(), GetNonVirtualSlotsArraySize(), DataImage::ITEM_VTABLE_CHUNK); |
4281 | } |
4282 | |
4283 | if (HasInterfaceMap()) |
4284 | { |
4285 | #ifdef FEATURE_COMINTEROP |
4286 | // Dynamic interface maps have an additional DWORD_PTR preceding the InterfaceInfo_t array |
4287 | if (HasDynamicInterfaceMap()) |
4288 | { |
4289 | ZapStoredStructure * pInterfaceMapNode; |
4290 | if (decltype(InterfaceInfo_t::m_pMethodTable)::isRelative) |
4291 | { |
4292 | pInterfaceMapNode = image->StoreStructure(((DWORD_PTR *)GetInterfaceMap()) - 1, |
4293 | GetInterfaceMapSize(), |
4294 | DataImage::ITEM_INTERFACE_MAP); |
4295 | } |
4296 | else |
4297 | { |
4298 | pInterfaceMapNode = image->StoreInternedStructure(((DWORD_PTR *)GetInterfaceMap()) - 1, |
4299 | GetInterfaceMapSize(), |
4300 | DataImage::ITEM_INTERFACE_MAP); |
4301 | } |
4302 | image->BindPointer(GetInterfaceMap(), pInterfaceMapNode, sizeof(DWORD_PTR)); |
4303 | } |
4304 | else |
4305 | #endif // FEATURE_COMINTEROP |
4306 | { |
4307 | if (decltype(InterfaceInfo_t::m_pMethodTable)::isRelative) |
4308 | { |
4309 | image->StoreStructure(GetInterfaceMap(), GetInterfaceMapSize(), DataImage::ITEM_INTERFACE_MAP); |
4310 | } |
4311 | else |
4312 | { |
4313 | image->StoreInternedStructure(GetInterfaceMap(), GetInterfaceMapSize(), DataImage::ITEM_INTERFACE_MAP); |
4314 | } |
4315 | } |
4316 | |
4317 | SaveExtraInterfaceInfo(image); |
4318 | } |
4319 | |
4320 | // If we have a dispatch map, save it. |
4321 | if (HasDispatchMapSlot()) |
4322 | { |
4323 | GetDispatchMap()->Save(image); |
4324 | } |
4325 | |
4326 | if (HasPerInstInfo()) |
4327 | { |
4328 | ZapStoredStructure * pPerInstInfoNode; |
4329 | if (CanEagerBindToParentDictionaries(image, NULL)) |
4330 | { |
4331 | if (PerInstInfoElem_t::isRelative) |
4332 | { |
4333 | pPerInstInfoNode = image->StoreStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY); |
4334 | } |
4335 | else |
4336 | { |
4337 | pPerInstInfoNode = image->StoreInternedStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY); |
4338 | } |
4339 | } |
4340 | else |
4341 | { |
4342 | pPerInstInfoNode = image->StoreStructure((BYTE *)GetPerInstInfo() - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo), DataImage::ITEM_DICTIONARY_WRITEABLE); |
4343 | } |
4344 | image->BindPointer(GetPerInstInfo(), pPerInstInfoNode, sizeof(GenericsDictInfo)); |
4345 | } |
4346 | |
4347 | Dictionary * pDictionary = GetDictionary(); |
4348 | if (pDictionary != NULL) |
4349 | { |
4350 | BOOL fIsWriteable; |
4351 | |
4352 | if (!IsCanonicalMethodTable()) |
4353 | { |
4354 | // CanEagerBindToMethodTable would not work for targeted patching here. The dictionary |
4355 | // layout is sensitive to compilation order that can be changed by TP compatible changes. |
4356 | BOOL canSaveSlots = (image->GetModule() == GetCanonicalMethodTable()->GetLoaderModule()); |
4357 | |
4358 | fIsWriteable = pDictionary->IsWriteable(image, canSaveSlots, |
4359 | GetNumGenericArgs(), |
4360 | GetModule(), |
4361 | GetClass()->GetDictionaryLayout()); |
4362 | } |
4363 | else |
4364 | { |
4365 | fIsWriteable = FALSE; |
4366 | } |
4367 | |
4368 | |
4369 | if (!fIsWriteable) |
4370 | { |
4371 | image->StoreInternedStructure(pDictionary, GetInstAndDictSize(), DataImage::ITEM_DICTIONARY); |
4372 | } |
4373 | else |
4374 | { |
4375 | image->StoreStructure(pDictionary, GetInstAndDictSize(), DataImage::ITEM_DICTIONARY_WRITEABLE); |
4376 | } |
4377 | } |
4378 | |
4379 | WORD numStaticFields = GetClass()->GetNumStaticFields(); |
4380 | |
4381 | if (!IsCanonicalMethodTable() && HasGenericsStaticsInfo() && numStaticFields != 0) |
4382 | { |
4383 | FieldDesc * pGenericsFieldDescs = GetGenericsStaticFieldDescs(); |
4384 | |
4385 | for (DWORD i = 0; i < numStaticFields; i++) |
4386 | { |
4387 | FieldDesc *pFld = pGenericsFieldDescs + i; |
4388 | pFld->PrecomputeNameHash(); |
4389 | } |
4390 | |
4391 | ZapStoredStructure * pFDNode = image->StoreStructure(pGenericsFieldDescs, sizeof(FieldDesc) * numStaticFields, |
4392 | DataImage::ITEM_GENERICS_STATIC_FIELDDESCS); |
4393 | |
4394 | for (DWORD i = 0; i < numStaticFields; i++) |
4395 | { |
4396 | FieldDesc *pFld = pGenericsFieldDescs + i; |
4397 | pFld->SaveContents(image); |
4398 | if (pFld != pGenericsFieldDescs) |
4399 | image->BindPointer(pFld, pFDNode, (BYTE *)pFld - (BYTE *)pGenericsFieldDescs); |
4400 | } |
4401 | } |
4402 | |
4403 | // Allocate a ModuleCtorInfo entry in the NGEN image if necessary |
4404 | if (HasBoxedRegularStatics()) |
4405 | { |
4406 | image->GetModule()->GetZapModuleCtorInfo()->AddElement(this); |
4407 | } |
4408 | |
4409 | // MethodTable WriteableData |
4410 | |
4411 | |
4412 | PTR_Const_MethodTableWriteableData pWriteableData = GetWriteableData_NoLogging(); |
4413 | _ASSERTE(pWriteableData != NULL); |
4414 | if (pWriteableData != NULL) |
4415 | { |
4416 | pWriteableData->Save(image, this, profilingFlags); |
4417 | } |
4418 | |
4419 | LOG((LF_ZAP, LL_INFO10000, "MethodTable::Save %s (%p) complete.\n" , GetDebugClassName(), this)); |
4420 | |
4421 | // Save the EEClass at the same time as the method table if this is the canonical method table |
4422 | if (IsCanonicalMethodTable()) |
4423 | GetClass()->Save(image, this); |
4424 | } // MethodTable::Save |
4425 | |
4426 | //========================================================================== |
4427 | // The NeedsRestore Computation. |
4428 | // |
4429 | // WARNING: The NeedsRestore predicate on MethodTable and EEClass |
4430 | // MUST be computable immediately after we have loaded a type. |
4431 | // It must NOT depend on any additions or changes made to the |
4432 | // MethodTable as a result of compiling code, or |
4433 | // later steps such as prepopulating dictionaries. |
4434 | //========================================================================== |
4435 | BOOL MethodTable::ComputeNeedsRestore(DataImage *image, TypeHandleList *pVisited) |
4436 | { |
4437 | CONTRACTL |
4438 | { |
4439 | STANDARD_VM_CHECK; |
4440 | // See comment in ComputeNeedsRestoreWorker |
4441 | PRECONDITION(GetLoaderModule()->HasNativeImage() || GetLoaderModule() == GetAppDomain()->ToCompilationDomain()->GetTargetModule()); |
4442 | } |
4443 | CONTRACTL_END; |
4444 | |
4445 | _ASSERTE(GetAppDomain()->IsCompilationDomain()); // only used at ngen time! |
4446 | |
4447 | if (GetWriteableData()->IsNeedsRestoreCached()) |
4448 | { |
4449 | return GetWriteableData()->GetCachedNeedsRestore(); |
4450 | } |
4451 | |
4452 | // We may speculatively assume that any types we've visited on this run of |
4453 | // the ComputeNeedsRestore algorithm don't need a restore. If they |
4454 | // do need a restore then we will check that when we first visit that method |
4455 | // table. |
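    // For example (illustrative only), a self-referential type such as
    //     class Node<T> : IComparable<Node<T>> { }        // C# source
    // would revisit Node<T> while walking its interface map; the speculative
    // FALSE below breaks the recursion, and the answer is then deliberately
    // not cached for types on the broken cycle (see HasBrokenCycleMark).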
4456 | if (TypeHandleList::Exists(pVisited, TypeHandle(this))) |
4457 | { |
4458 | pVisited->MarkBrokenCycle(this); |
4459 | return FALSE; |
4460 | } |
4461 | TypeHandleList newVisited(this, pVisited); |
4462 | |
4463 | BOOL needsRestore = ComputeNeedsRestoreWorker(image, &newVisited); |
4464 | |
4465 | // Cache the results of running the algorithm. |
4466 | // We can only cache the result if we have not speculatively assumed |
4467 | // that any types are not NeedsRestore |
4468 | if (!newVisited.HasBrokenCycleMark()) |
4469 | { |
4470 | GetWriteableDataForWrite()->SetCachedNeedsRestore(needsRestore); |
4471 | } |
4472 | else |
4473 | { |
4474 | _ASSERTE(pVisited != NULL); |
4475 | } |
4476 | return needsRestore; |
4477 | } |
4478 | |
4479 | //========================================================================================== |
4480 | BOOL MethodTable::ComputeNeedsRestoreWorker(DataImage *image, TypeHandleList *pVisited) |
4481 | { |
4482 | STANDARD_VM_CONTRACT; |
4483 | |
4484 | #ifdef _DEBUG |
4485 | // You should only call ComputeNeedsRestoreWorker on things being saved into |
4486 | // the current LoaderModule - the NeedsRestore flag should have been computed |
4487 | // for all items from NGEN images, and we should never compute NeedsRestore |
4488 | // on anything that is not related to an NGEN image. If this fails then |
4489 | // there is probably a CanEagerBindTo check missing as we trace through a |
4490 | // pointer from one data structure to another. |
4491 | // Trace back on the call stack and work out where this condition first fails. |
4492 | |
4493 | Module* myModule = GetLoaderModule(); |
4494 | AppDomain* myAppDomain = GetAppDomain(); |
4495 | CompilationDomain* myCompilationDomain = myAppDomain->ToCompilationDomain(); |
4496 | Module* myCompilationModule = myCompilationDomain->GetTargetModule(); |
4497 | |
4498 | if (myModule != myCompilationModule) |
4499 | { |
4500 | _ASSERTE(!"You should only call ComputeNeedsRestoreWorker on things being saved into the current LoaderModule" ); |
4501 | } |
4502 | #endif |
4503 | |
4504 | if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE) |
4505 | { |
4506 | DefineFullyQualifiedNameForClassW(); |
4507 | LPCWSTR name = GetFullyQualifiedNameForClassW(this); |
4508 | WszOutputDebugString(W("MethodTable " )); |
4509 | WszOutputDebugString(name); |
4510 | WszOutputDebugString(W(" needs restore? " )); |
4511 | } |
4512 | if (g_CorCompileVerboseLevel >= CORCOMPILE_STATS && GetModule()->GetNgenStats()) |
4513 | GetModule()->GetNgenStats()->MethodTableRestoreNumReasons[TotalMethodTables]++; |
4514 | |
4515 | #define UPDATE_RESTORE_REASON(ARG) \ |
4516 | if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE) \ |
4517 | { WszOutputDebugString(W("Yes, ")); WszOutputDebugString(W(#ARG "\n")); } \ |
4518 | if (g_CorCompileVerboseLevel >= CORCOMPILE_STATS && GetModule()->GetNgenStats()) \ |
4519 | GetModule()->GetNgenStats()->MethodTableRestoreNumReasons[ARG]++; |
4520 | |
    // The special method table for IL stubs has to be prerestored. Restore is not able to handle it
    // because it does not have a token. In particular, this is a problem for /profiling native images.
4523 | if (this == image->GetModule()->GetILStubCache()->GetStubMethodTable()) |
4524 | { |
4525 | return FALSE; |
4526 | } |
4527 | |
4528 | // When profiling, we always want to perform the restore. |
4529 | if (GetAppDomain()->ToCompilationDomain()->m_fForceProfiling) |
4530 | { |
4531 | UPDATE_RESTORE_REASON(ProfilingEnabled); |
4532 | return TRUE; |
4533 | } |
4534 | |
4535 | if (DependsOnEquivalentOrForwardedStructs()) |
4536 | { |
4537 | UPDATE_RESTORE_REASON(ComImportStructDependenciesNeedRestore); |
4538 | return TRUE; |
4539 | } |
4540 | |
4541 | if (!IsCanonicalMethodTable() && !image->CanPrerestoreEagerBindToMethodTable(GetCanonicalMethodTable(), pVisited)) |
4542 | { |
4543 | UPDATE_RESTORE_REASON(CanNotPreRestoreHardBindToCanonicalMethodTable); |
4544 | return TRUE; |
4545 | } |
4546 | |
4547 | if (!image->CanEagerBindToModule(GetModule())) |
4548 | { |
4549 | UPDATE_RESTORE_REASON(CrossAssembly); |
4550 | return TRUE; |
4551 | } |
4552 | |
4553 | if (GetParentMethodTable()) |
4554 | { |
4555 | if (!image->CanPrerestoreEagerBindToMethodTable(GetParentMethodTable(), pVisited)) |
4556 | { |
4557 | UPDATE_RESTORE_REASON(CanNotPreRestoreHardBindToParentMethodTable); |
4558 | return TRUE; |
4559 | } |
4560 | } |
4561 | |
4562 | // Check per-inst pointers-to-dictionaries. |
4563 | if (!CanEagerBindToParentDictionaries(image, pVisited)) |
4564 | { |
4565 | UPDATE_RESTORE_REASON(CanNotHardBindToInstanceMethodTableChain); |
4566 | return TRUE; |
4567 | } |
4568 | |
4569 | // Now check if the dictionary (if any) owned by this methodtable needs a restore. |
4570 | if (GetDictionary()) |
4571 | { |
4572 | if (GetDictionary()->ComputeNeedsRestore(image, pVisited, GetNumGenericArgs())) |
4573 | { |
4574 | UPDATE_RESTORE_REASON(GenericsDictionaryNeedsRestore); |
4575 | return TRUE; |
4576 | } |
4577 | } |
4578 | |
    // The interface chain is traversed without doing CheckRestore calls. Thus,
    // if any of the types in the inherited interface hierarchy need a restore
    // or are cross-module pointers, then this methodtable will also need a restore.
4582 | InterfaceMapIterator it = IterateInterfaceMap(); |
4583 | while (it.Next()) |
4584 | { |
4585 | if (!image->CanPrerestoreEagerBindToMethodTable(it.GetInterface(), pVisited)) |
4586 | { |
4587 | UPDATE_RESTORE_REASON(InterfaceIsGeneric); |
4588 | return TRUE; |
4589 | } |
4590 | } |
4591 | |
4592 | if (NeedsCrossModuleGenericsStaticsInfo()) |
4593 | { |
4594 | UPDATE_RESTORE_REASON(CrossModuleGenericsStatics); |
4595 | return TRUE; |
4596 | } |
4597 | |
4598 | if (IsArray()) |
4599 | { |
4600 | if(!image->CanPrerestoreEagerBindToTypeHandle(GetApproxArrayElementTypeHandle(), pVisited)) |
4601 | { |
4602 | UPDATE_RESTORE_REASON(ArrayElement); |
4603 | return TRUE; |
4604 | } |
4605 | } |
4606 | |
4607 | if (g_CorCompileVerboseLevel == CORCOMPILE_VERBOSE) |
4608 | { |
4609 | WszOutputDebugString(W("No\n" )); |
4610 | } |
4611 | return FALSE; |
4612 | } |
4613 | |
4614 | //========================================================================================== |
4615 | BOOL MethodTable::CanEagerBindToParentDictionaries(DataImage *image, TypeHandleList *pVisited) |
4616 | { |
4617 | STANDARD_VM_CONTRACT; |
4618 | |
4619 | MethodTable *pChain = GetParentMethodTable(); |
4620 | while (pChain != NULL) |
4621 | { |
        // This is for the case where the method table contains a pointer to
        // an inherited dictionary, e.g. given the case D : C, C : B<int>
        // where B<int> is in another module, then D contains a pointer to the
        // dictionary for B<int>. Note that in this case we might still be
        // able to hardbind to C.
4627 | if (pChain->HasInstantiation()) |
4628 | { |
4629 | if (!image->CanEagerBindToMethodTable(pChain, FALSE, pVisited) || |
4630 | !image->CanHardBindToZapModule(pChain->GetLoaderModule())) |
4631 | { |
4632 | return FALSE; |
4633 | } |
4634 | } |
4635 | pChain = pChain->GetParentMethodTable(); |
4636 | } |
4637 | return TRUE; |
4638 | } |
4639 | |
4640 | //========================================================================================== |
4641 | BOOL MethodTable::NeedsCrossModuleGenericsStaticsInfo() |
4642 | { |
4643 | STANDARD_VM_CONTRACT; |
4644 | |
4645 | return HasGenericsStaticsInfo() && !ContainsGenericVariables() && !IsSharedByGenericInstantiations() && |
4646 | (Module::GetPreferredZapModuleForMethodTable(this) != GetLoaderModule()); |
4647 | } |
4648 | |
4649 | //========================================================================================== |
4650 | BOOL MethodTable::IsWriteable() |
4651 | { |
4652 | STANDARD_VM_CONTRACT; |
4653 | |
4654 | #ifdef FEATURE_COMINTEROP |
4655 | // Dynamic expansion of interface map writes into method table |
4656 | // (see code:MethodTable::AddDynamicInterface) |
4657 | if (HasDynamicInterfaceMap()) |
4658 | return TRUE; |
4659 | |
4660 | // CCW template is created lazily and when that happens, the |
4661 | // pointer is written directly into the method table. |
4662 | if (HasCCWTemplate()) |
4663 | return TRUE; |
4664 | |
4665 | // RCW per-type data is created lazily at run-time. |
4666 | if (HasRCWPerTypeData()) |
4667 | return TRUE; |
4668 | #endif |
4669 | |
4670 | return FALSE; |
4671 | } |
4672 | |
4673 | //========================================================================================== |
4674 | // This is used when non-canonical (i.e. duplicated) method tables |
4675 | // attempt to bind to items logically belonging to an EEClass or MethodTable. |
4676 | // i.e. the contract map in the EEClass and the generic dictionary stored in the canonical |
4677 | // method table. |
4678 | // |
// We want to check if we can hard bind to the containing structure before
// deciding to hardbind to the inside of it. This is because we may not be able
// to hardbind to all EEClasses and/or MethodTables even if they live in a hardbindable
// target module. Thus we want to call CanEagerBindToMethodTable
// to check that we can hardbind to the containing structure.
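// If we cannot do both, the pointer field is zeroed in the image and the
// dictionary pointer is recovered at restore time instead (see the "copied down
// from the parent upon restore" note in MethodTable::Fixup).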
4684 | static |
4685 | void HardBindOrClearDictionaryPointer(DataImage *image, MethodTable *pMT, void * p, SSIZE_T offset, bool isRelative) |
4686 | { |
4687 | WRAPPER_NO_CONTRACT; |
4688 | |
4689 | if (image->CanEagerBindToMethodTable(pMT) && |
4690 | image->CanHardBindToZapModule(pMT->GetLoaderModule())) |
4691 | { |
4692 | if (isRelative) |
4693 | { |
4694 | image->FixupRelativePointerField(p, offset); |
4695 | } |
4696 | else |
4697 | { |
4698 | image->FixupPointerField(p, offset); |
4699 | } |
4700 | } |
4701 | else |
4702 | { |
4703 | image->ZeroPointerField(p, offset); |
4704 | } |
4705 | } |
4706 | |
4707 | //========================================================================================== |
4708 | void MethodTable::Fixup(DataImage *image) |
4709 | { |
4710 | CONTRACTL |
4711 | { |
4712 | STANDARD_VM_CHECK; |
4713 | PRECONDITION(IsFullyLoaded()); |
4714 | } |
4715 | CONTRACTL_END; |
4716 | |
4717 | LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s\n" , GetDebugClassName())); |
4718 | |
4719 | if (GetWriteableData()->IsFixedUp()) |
4720 | return; |
4721 | |
4722 | BOOL needsRestore = NeedsRestore(image); |
4723 | LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s (%p), needsRestore=%d\n" , GetDebugClassName(), this, needsRestore)); |
4724 | |
4725 | BOOL isCanonical = IsCanonicalMethodTable(); |
4726 | |
4727 | Module *pZapModule = image->GetModule(); |
4728 | |
4729 | MethodTable *pNewMT = (MethodTable *) image->GetImagePointer(this); |
4730 | |
4731 | // For canonical method tables, the pointer to the EEClass is never encoded as a fixup |
4732 | // even if this method table is not in its preferred zap module, i.e. the two are |
4733 | // "tightly-bound". |
4734 | if (IsCanonicalMethodTable()) |
4735 | { |
4736 | // Pointer to EEClass |
4737 | image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pEEClass); |
4738 | } |
4739 | else |
4740 | { |
4741 | // |
4742 | // Encode m_pEEClassOrCanonMT |
4743 | // |
4744 | MethodTable * pCanonMT = GetCanonicalMethodTable(); |
4745 | |
4746 | ZapNode * pImport = NULL; |
4747 | if (image->CanEagerBindToMethodTable(pCanonMT)) |
4748 | { |
4749 | if (image->CanHardBindToZapModule(pCanonMT->GetLoaderModule())) |
4750 | { |
4751 | // Pointer to canonical methodtable |
4752 | image->FixupPlainOrRelativeField(this, &MethodTable::m_pCanonMT, pCanonMT, UNION_METHODTABLE); |
4753 | } |
4754 | else |
4755 | { |
4756 | // Pointer to lazy bound indirection cell to canonical methodtable |
4757 | pImport = image->GetTypeHandleImport(pCanonMT); |
4758 | } |
4759 | } |
4760 | else |
4761 | { |
4762 | // Pointer to eager bound indirection cell to canonical methodtable |
4763 | _ASSERTE(pCanonMT->IsTypicalTypeDefinition() || |
4764 | !pCanonMT->ContainsGenericVariables()); |
4765 | pImport = image->GetTypeHandleImport(pCanonMT); |
4766 | } |
4767 | |
4768 | if (pImport != NULL) |
4769 | { |
4770 | image->FixupPlainOrRelativeFieldToNode(this, &MethodTable::m_pCanonMT, pImport, UNION_INDIRECTION); |
4771 | } |
4772 | } |
4773 | |
4774 | image->FixupField(this, offsetof(MethodTable, m_pLoaderModule), pZapModule, 0, IMAGE_REL_BASED_RELPTR); |
4775 | |
4776 | #ifdef _DEBUG |
4777 | image->FixupPointerField(this, offsetof(MethodTable, debug_m_szClassName)); |
4778 | #endif // _DEBUG |
4779 | |
4780 | MethodTable * pParentMT = GetParentMethodTable(); |
4781 | _ASSERTE(!pNewMT->IsParentMethodTableIndirectPointerMaybeNull()); |
4782 | |
4783 | ZapRelocationType relocType; |
4784 | if (decltype(MethodTable::m_pParentMethodTable)::isRelative) |
4785 | { |
4786 | relocType = IMAGE_REL_BASED_RELPTR; |
4787 | } |
4788 | else |
4789 | { |
4790 | relocType = IMAGE_REL_BASED_PTR; |
4791 | } |
4792 | |
4793 | if (pParentMT != NULL) |
4794 | { |
4795 | // |
4796 | // Encode m_pParentMethodTable |
4797 | // |
4798 | ZapNode * pImport = NULL; |
4799 | if (image->CanEagerBindToMethodTable(pParentMT)) |
4800 | { |
4801 | if (image->CanHardBindToZapModule(pParentMT->GetLoaderModule())) |
4802 | { |
4803 | _ASSERTE(!IsParentMethodTableIndirectPointer()); |
4804 | image->FixupField(this, offsetof(MethodTable, m_pParentMethodTable), pParentMT, 0, relocType); |
4805 | } |
4806 | else |
4807 | { |
4808 | pImport = image->GetTypeHandleImport(pParentMT); |
4809 | } |
4810 | } |
4811 | else |
4812 | { |
4813 | if (!pParentMT->IsCanonicalMethodTable()) |
4814 | { |
4815 | #ifdef _DEBUG |
4816 | IMDInternalImport *pInternalImport = GetModule()->GetMDImport(); |
4817 | |
4818 | mdToken crExtends; |
4819 | pInternalImport->GetTypeDefProps(GetCl(), |
4820 | NULL, |
4821 | &crExtends); |
4822 | |
4823 | _ASSERTE(TypeFromToken(crExtends) == mdtTypeSpec); |
4824 | #endif |
4825 | |
4826 | // Use unique cell for now since we are first going to set the parent method table to |
4827 | // approx one first, and then to the exact one later. This would mess up the shared cell. |
4828 | // It would be nice to clean it up to use the shared cell - we should set the parent method table |
4829 | // just once at the end. |
4830 | pImport = image->GetTypeHandleImport(pParentMT, this /* pUniqueId */); |
4831 | } |
4832 | else |
4833 | { |
4834 | pImport = image->GetTypeHandleImport(pParentMT); |
4835 | } |
4836 | } |
4837 | |
4838 | if (pImport != NULL) |
4839 | { |
4840 | image->FixupFieldToNode(this, offsetof(MethodTable, m_pParentMethodTable), pImport, -PARENT_MT_FIXUP_OFFSET, relocType); |
4841 | pNewMT->SetFlag(enum_flag_HasIndirectParent); |
4842 | } |
4843 | } |
4844 | |
4845 | if (HasNonVirtualSlotsArray()) |
4846 | { |
4847 | TADDR ppNonVirtualSlots = GetNonVirtualSlotsPtr(); |
4848 | PREFIX_ASSUME(ppNonVirtualSlots != NULL); |
4849 | image->FixupRelativePointerField(this, (BYTE *)ppNonVirtualSlots - (BYTE *)this); |
4850 | } |
4851 | |
4852 | if (HasInterfaceMap()) |
4853 | { |
4854 | image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pInterfaceMap); |
4855 | |
4856 | FixupExtraInterfaceInfo(image); |
4857 | } |
4858 | |
4859 | _ASSERTE(GetWriteableData()); |
4860 | image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pWriteableData); |
4861 | m_pWriteableData.GetValue()->Fixup(image, this, needsRestore); |
4862 | |
4863 | #ifdef FEATURE_COMINTEROP |
4864 | if (HasGuidInfo()) |
4865 | { |
4866 | GuidInfo **ppGuidInfo = GetGuidInfoPtr(); |
4867 | if (*ppGuidInfo != NULL) |
4868 | { |
4869 | image->FixupPointerField(this, (BYTE *)ppGuidInfo - (BYTE *)this); |
4870 | } |
4871 | else |
4872 | { |
4873 | image->ZeroPointerField(this, (BYTE *)ppGuidInfo - (BYTE *)this); |
4874 | } |
4875 | } |
4876 | |
4877 | if (HasCCWTemplate()) |
4878 | { |
4879 | ComCallWrapperTemplate **ppTemplate = GetCCWTemplatePtr(); |
4880 | image->ZeroPointerField(this, (BYTE *)ppTemplate - (BYTE *)this); |
4881 | } |
4882 | |
4883 | if (HasRCWPerTypeData()) |
4884 | { |
        // It would be nice to save these, but the impact on mscorlib.ni size is prohibitive.
4886 | RCWPerTypeData **ppData = GetRCWPerTypeDataPtr(); |
4887 | image->ZeroPointerField(this, (BYTE *)ppData - (BYTE *)this); |
4888 | } |
4889 | #endif // FEATURE_COMINTEROP |
4890 | |
4891 | |
4892 | // |
4893 | // Fix flags |
4894 | // |
4895 | |
4896 | _ASSERTE((pNewMT->GetFlag(enum_flag_IsZapped) == 0)); |
4897 | pNewMT->SetFlag(enum_flag_IsZapped); |
4898 | |
4899 | _ASSERTE((pNewMT->GetFlag(enum_flag_IsPreRestored) == 0)); |
4900 | if (!needsRestore) |
4901 | pNewMT->SetFlag(enum_flag_IsPreRestored); |
4902 | |
4903 | // |
4904 | // Fixup vtable |
4905 | // If the canonical method table lives in a different loader module |
4906 | // then just zero out the entries and copy them across from the canonical |
4907 | // vtable on restore. |
4908 | // |
4909 | // Note the canonical method table will be the same as the current method table |
4910 | // if the method table is not a generic instantiation. |
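    // (For example - illustrative only - for an instantiation whose canonical form
    // lives in another module, inherited slots that cannot be hard bound are zeroed
    // or routed through import thunks in the loop below.)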
4911 | |
4912 | if (HasDispatchMapSlot()) |
4913 | { |
4914 | TADDR pSlot = GetMultipurposeSlotPtr(enum_flag_HasDispatchMapSlot, c_DispatchMapSlotOffsets); |
4915 | DispatchMap * pDispatchMap = RelativePointer<PTR_DispatchMap>::GetValueAtPtr(pSlot); |
4916 | image->FixupField(this, pSlot - (TADDR)this, pDispatchMap, 0, IMAGE_REL_BASED_RelativePointer); |
4917 | pDispatchMap->Fixup(image); |
4918 | } |
4919 | |
4920 | if (HasModuleOverride()) |
4921 | { |
4922 | image->FixupModulePointer(this, GetModuleOverridePtr()); |
4923 | } |
4924 | |
4925 | { |
4926 | VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots(); |
4927 | while (it.Next()) |
4928 | { |
4929 | if (VTableIndir_t::isRelative) |
4930 | { |
4931 | image->FixupRelativePointerField(this, it.GetOffsetFromMethodTable()); |
4932 | } |
4933 | else |
4934 | { |
4935 | image->FixupPointerField(this, it.GetOffsetFromMethodTable()); |
4936 | } |
4937 | } |
4938 | } |
4939 | |
4940 | unsigned numVTableSlots = GetNumVtableSlots(); |
4941 | for (unsigned slotNumber = 0; slotNumber < numVTableSlots; slotNumber++) |
4942 | { |
4943 | // |
4944 | // Find the method desc from the slot. |
4945 | // |
4946 | MethodDesc *pMD = GetMethodDescForSlot(slotNumber); |
4947 | _ASSERTE(pMD != NULL); |
4948 | pMD->CheckRestore(); |
4949 | |
4950 | PVOID slotBase; |
4951 | SSIZE_T slotOffset; |
4952 | |
4953 | if (slotNumber < GetNumVirtuals()) |
4954 | { |
4955 | // Virtual slots live in chunks pointed to by vtable indirections |
4956 | |
4957 | slotBase = (PVOID) GetVtableIndirections()[GetIndexOfVtableIndirection(slotNumber)].GetValueMaybeNull(); |
4958 | slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(MethodTable::VTableIndir2_t); |
4959 | } |
4960 | else if (HasSingleNonVirtualSlot()) |
4961 | { |
            // Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member,
            // except when there is only one, in which case it lives in the optional member itself
4964 | |
4965 | _ASSERTE(slotNumber == GetNumVirtuals()); |
4966 | slotBase = (PVOID) this; |
4967 | slotOffset = (BYTE *)GetSlotPtr(slotNumber) - (BYTE *)this; |
4968 | } |
4969 | else |
4970 | { |
4971 | // Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member |
4972 | |
4973 | _ASSERTE(HasNonVirtualSlotsArray()); |
4974 | slotBase = (PVOID) GetNonVirtualSlotsArray(); |
4975 | slotOffset = (slotNumber - GetNumVirtuals()) * sizeof(PCODE); |
4976 | } |
4977 | |
4978 | // Attempt to make the slot point directly at the prejitted code. |
4979 | // Note that changes to this logic may require or enable an update to CanInternVtableChunk. |
4980 | // If a necessary update is not made, an assert will fire in ZapStoredStructure::Save. |
4981 | |
4982 | if (pMD->GetMethodTable() == this) |
4983 | { |
4984 | ZapRelocationType relocType; |
4985 | if (slotNumber >= GetNumVirtuals() || MethodTable::VTableIndir2_t::isRelative) |
4986 | relocType = IMAGE_REL_BASED_RelativePointer; |
4987 | else |
4988 | relocType = IMAGE_REL_BASED_PTR; |
4989 | |
4990 | pMD->FixupSlot(image, slotBase, slotOffset, relocType); |
4991 | } |
4992 | else |
4993 | { |
4994 | |
4995 | #ifdef _DEBUG |
4996 | |
            // A static method should be in the owning methodtable only.
4998 | _ASSERTE(!pMD->IsStatic()); |
4999 | |
5000 | MethodTable *pSourceMT = isCanonical |
5001 | ? GetParentMethodTable() |
5002 | : GetCanonicalMethodTable(); |
5003 | |
5004 | // It must be inherited from the parent or copied from the canonical |
5005 | _ASSERTE(pSourceMT->GetMethodDescForSlot(slotNumber) == pMD); |
5006 | #endif |
5007 | |
5008 | ZapRelocationType relocType; |
5009 | if (MethodTable::VTableIndir2_t::isRelative) |
5010 | relocType = IMAGE_REL_BASED_RELPTR; |
5011 | else |
5012 | relocType = IMAGE_REL_BASED_PTR; |
5013 | |
5014 | if (image->CanEagerBindToMethodDesc(pMD) && pMD->GetLoaderModule() == pZapModule) |
5015 | { |
5016 | pMD->FixupSlot(image, slotBase, slotOffset, relocType); |
5017 | } |
5018 | else |
5019 | { |
5020 | if (!pMD->IsGenericMethodDefinition()) |
5021 | { |
5022 | ZapNode * importThunk = image->GetVirtualImportThunk(pMD->GetMethodTable(), pMD, slotNumber); |
                    // On ARM, make sure that the address of the virtual thunk that we write into the
                    // vtable "chunk" has the Thumb bit set.
5025 | image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE) NOT_ARM_ARG(0), relocType); |
5026 | } |
5027 | else |
5028 | { |
5029 | // Virtual generic methods don't/can't use their vtable slot |
5030 | image->ZeroPointerField(slotBase, slotOffset); |
5031 | } |
5032 | } |
5033 | } |
5034 | } |
5035 | |
5036 | // |
5037 | // Fixup Interface map |
5038 | // |
5039 | |
5040 | InterfaceMapIterator it = IterateInterfaceMap(); |
5041 | while (it.Next()) |
5042 | { |
5043 | image->FixupMethodTablePointer(GetInterfaceMap(), &it.GetInterfaceInfo()->m_pMethodTable); |
5044 | } |
5045 | |
5046 | if (IsArray()) |
5047 | { |
5048 | image->HardBindTypeHandlePointer(this, offsetof(MethodTable, m_ElementTypeHnd)); |
5049 | } |
5050 | |
5051 | // |
5052 | // Fixup per-inst pointers for this method table |
5053 | // |
5054 | |
5055 | if (HasPerInstInfo()) |
5056 | { |
5057 | // Fixup the pointer to the per-inst table |
5058 | image->FixupPlainOrRelativePointerField(this, &MethodTable::m_pPerInstInfo); |
5059 | |
5060 | for (MethodTable *pChain = this; pChain != NULL; pChain = pChain->GetParentMethodTable()) |
5061 | { |
5062 | if (pChain->HasInstantiation()) |
5063 | { |
5064 | DWORD dictNum = pChain->GetNumDicts()-1; |
5065 | |
5066 | // If we can't hardbind then the value will be copied down from |
5067 | // the parent upon restore. |
5068 | |
5069 | // We special-case the dictionary for this method table because we must always |
5070 | // hard bind to it even if it's not in its preferred zap module |
5071 | size_t sizeDict = sizeof(PerInstInfoElem_t); |
5072 | |
5073 | if (pChain == this) |
5074 | { |
5075 | if (PerInstInfoElem_t::isRelative) |
5076 | { |
5077 | image->FixupRelativePointerField(GetPerInstInfo(), dictNum * sizeDict); |
5078 | } |
5079 | else |
5080 | { |
5081 | image->FixupPointerField(GetPerInstInfo(), dictNum * sizeDict); |
5082 | } |
5083 | } |
5084 | else |
5085 | { |
5086 | HardBindOrClearDictionaryPointer(image, pChain, GetPerInstInfo(), dictNum * sizeDict, PerInstInfoElem_t::isRelative); |
5087 | } |
5088 | } |
5089 | } |
5090 | } |
5091 | // |
5092 | // Fixup instantiation+dictionary for this method table (if any) |
5093 | // |
5094 | if (GetDictionary()) |
5095 | { |
5096 | LOG((LF_JIT, LL_INFO10000, "GENERICS: Fixup dictionary for MT %s\n" , GetDebugClassName())); |
5097 | |
        // CanEagerBindToMethodTable would not work for targeted patching here. The dictionary
        // layout is sensitive to compilation order, which can be changed by TP-compatible changes.
5100 | BOOL canSaveSlots = !IsCanonicalMethodTable() && (image->GetModule() == GetCanonicalMethodTable()->GetLoaderModule()); |
5101 | |
5102 | // See comment on Dictionary::Fixup |
5103 | GetDictionary()->Fixup(image, |
5104 | TRUE, |
5105 | canSaveSlots, |
5106 | GetNumGenericArgs(), |
5107 | GetModule(), |
5108 | GetClass()->GetDictionaryLayout()); |
5109 | } |
5110 | |
5111 | // Fixup per-inst statics info |
5112 | if (HasGenericsStaticsInfo()) |
5113 | { |
5114 | GenericsStaticsInfo *pInfo = GetGenericsStaticsInfo(); |
5115 | |
5116 | image->FixupRelativePointerField(this, (BYTE *)&pInfo->m_pFieldDescs - (BYTE *)this); |
5117 | if (!isCanonical) |
5118 | { |
5119 | for (DWORD i = 0; i < GetClass()->GetNumStaticFields(); i++) |
5120 | { |
5121 | FieldDesc *pFld = GetGenericsStaticFieldDescs() + i; |
5122 | pFld->Fixup(image); |
5123 | } |
5124 | } |
5125 | |
5126 | if (NeedsCrossModuleGenericsStaticsInfo()) |
5127 | { |
5128 | MethodTableWriteableData * pNewWriteableData = (MethodTableWriteableData *)image->GetImagePointer(m_pWriteableData.GetValue()); |
5129 | CrossModuleGenericsStaticsInfo * pNewCrossModuleGenericsStaticsInfo = pNewWriteableData->GetCrossModuleGenericsStaticsInfo(); |
5130 | |
5131 | pNewCrossModuleGenericsStaticsInfo->m_DynamicTypeID = pInfo->m_DynamicTypeID; |
5132 | |
5133 | image->ZeroPointerField(m_pWriteableData.GetValue(), sizeof(MethodTableWriteableData) + offsetof(CrossModuleGenericsStaticsInfo, m_pModuleForStatics)); |
5134 | |
5135 | pNewMT->SetFlag(enum_flag_StaticsMask_IfGenericsThenCrossModule); |
5136 | } |
5137 | } |
5138 | else |
5139 | { |
5140 | _ASSERTE(!NeedsCrossModuleGenericsStaticsInfo()); |
5141 | } |
5142 | |
5143 | |
5144 | LOG((LF_ZAP, LL_INFO10000, "MethodTable::Fixup %s (%p) complete\n" , GetDebugClassName(), this)); |
5145 | |
5146 | // If this method table is canonical (one-to-one with EEClass) then fix up the EEClass also |
5147 | if (isCanonical) |
5148 | GetClass()->Fixup(image, this); |
5149 | |
5150 | // Mark method table as fixed-up |
5151 | GetWriteableDataForWrite()->SetFixedUp(); |
5152 | |
5153 | } // MethodTable::Fixup |
5154 | |
5155 | //========================================================================================== |
5156 | void MethodTableWriteableData::Save(DataImage *image, MethodTable *pMT, DWORD profilingFlags) const |
5157 | { |
5158 | STANDARD_VM_CONTRACT; |
5159 | |
5160 | SIZE_T size = sizeof(MethodTableWriteableData); |
5161 | |
5162 | // MethodTableWriteableData is followed by optional CrossModuleGenericsStaticsInfo in NGen images |
5163 | if (pMT->NeedsCrossModuleGenericsStaticsInfo()) |
5164 | size += sizeof(CrossModuleGenericsStaticsInfo); |
5165 | |
5166 | DataImage::ItemKind kindWriteable = DataImage::ITEM_METHOD_TABLE_DATA_COLD_WRITEABLE; |
5167 | if ((profilingFlags & (1 << WriteMethodTableWriteableData)) != 0) |
5168 | kindWriteable = DataImage::ITEM_METHOD_TABLE_DATA_HOT_WRITEABLE; |
5169 | |
5170 | ZapStoredStructure * pNode = image->StoreStructure(NULL, size, kindWriteable); |
5171 | image->BindPointer(this, pNode, 0); |
5172 | image->CopyData(pNode, this, sizeof(MethodTableWriteableData)); |
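    // Note that only the base struct is copied above; any trailing
    // CrossModuleGenericsStaticsInfo stays zero-filled in the saved image and is
    // populated later during MethodTable::Fixup.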
5173 | } |
5174 | |
5175 | //========================================================================================== |
5176 | void MethodTableWriteableData::Fixup(DataImage *image, MethodTable *pMT, BOOL needsRestore) |
5177 | { |
5178 | STANDARD_VM_CONTRACT; |
5179 | |
5180 | image->ZeroField(this, offsetof(MethodTableWriteableData, m_hExposedClassObject), sizeof(m_hExposedClassObject)); |
5181 | |
5182 | MethodTableWriteableData *pNewNgenPrivateMT = (MethodTableWriteableData*) image->GetImagePointer(this); |
5183 | _ASSERTE(pNewNgenPrivateMT != NULL); |
5184 | |
5185 | if (needsRestore) |
5186 | pNewNgenPrivateMT->m_dwFlags |= (enum_flag_UnrestoredTypeKey | |
5187 | enum_flag_Unrestored | |
5188 | enum_flag_HasApproxParent | |
5189 | enum_flag_IsNotFullyLoaded); |
5190 | |
5191 | #ifdef _DEBUG |
5192 | pNewNgenPrivateMT->m_dwLastVerifedGCCnt = (DWORD)-1; |
5193 | #endif |
5194 | } |
5195 | |
5196 | #endif // !DACCESS_COMPILE |
5197 | |
5198 | #endif // FEATURE_NATIVE_IMAGE_GENERATION |
5199 | |
5200 | #ifdef FEATURE_PREJIT |
5201 | |
5202 | //========================================================================================== |
5203 | void MethodTable::CheckRestore() |
5204 | { |
5205 | CONTRACTL |
5206 | { |
5207 | if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS; |
5208 | if (FORBIDGC_LOADER_USE_ENABLED()) GC_NOTRIGGER; else GC_TRIGGERS; |
5209 | } |
5210 | CONTRACTL_END |
5211 | |
5212 | if (!IsFullyLoaded()) |
5213 | { |
5214 | ClassLoader::EnsureLoaded(this); |
5215 | _ASSERTE(IsFullyLoaded()); |
5216 | } |
5217 | |
5218 | g_IBCLogger.LogMethodTableAccess(this); |
5219 | } |
5220 | |
5221 | #else // !FEATURE_PREJIT |
5222 | //========================================================================================== |
5223 | void MethodTable::CheckRestore() |
5224 | { |
5225 | LIMITED_METHOD_CONTRACT; |
5226 | } |
5227 | #endif // !FEATURE_PREJIT |
5228 | |
5229 | |
5230 | #ifndef DACCESS_COMPILE |
5231 | |
5232 | BOOL SatisfiesClassConstraints(TypeHandle instanceTypeHnd, TypeHandle typicalTypeHnd, |
5233 | const InstantiationContext *pInstContext); |
5234 | |
5235 | static VOID DoAccessibilityCheck(MethodTable *pAskingMT, MethodTable *pTargetMT, UINT resIDWhy) |
5236 | { |
5237 | CONTRACTL |
5238 | { |
5239 | THROWS; |
5240 | GC_TRIGGERS; |
5241 | } |
5242 | CONTRACTL_END; |
5243 | |
5244 | StaticAccessCheckContext accessContext(NULL, pAskingMT); |
5245 | |
5246 | if (!ClassLoader::CanAccessClass(&accessContext, |
5247 | pTargetMT, //the desired class |
5248 | pTargetMT->GetAssembly(), //the desired class's assembly |
5249 | *AccessCheckOptions::s_pNormalAccessChecks |
5250 | )) |
5251 | { |
5252 | SString displayName; |
5253 | pAskingMT->GetAssembly()->GetDisplayName(displayName); |
5254 | SString targetName; |
5255 | |
5256 | // Error string is either E_ACCESSDENIED which requires the type name of the target, vs |
5257 | // a more normal TypeLoadException which displays the requesting type. |
5258 | _ASSERTE((resIDWhy == (UINT)E_ACCESSDENIED) || (resIDWhy == (UINT)IDS_CLASSLOAD_INTERFACE_NO_ACCESS)); |
5259 | TypeString::AppendType(targetName, TypeHandle((resIDWhy == (UINT)E_ACCESSDENIED) ? pTargetMT : pAskingMT)); |
5260 | |
5261 | COMPlusThrow(kTypeLoadException, resIDWhy, targetName.GetUnicode(), displayName.GetUnicode()); |
5262 | } |
5263 | |
5264 | } |
5265 | |
5266 | VOID DoAccessibilityCheckForConstraint(MethodTable *pAskingMT, TypeHandle thConstraint, UINT resIDWhy) |
5267 | { |
5268 | CONTRACTL |
5269 | { |
5270 | THROWS; |
5271 | GC_TRIGGERS; |
5272 | } |
5273 | CONTRACTL_END; |
5274 | |
5275 | if (thConstraint.IsTypeDesc()) |
5276 | { |
5277 | TypeDesc *pTypeDesc = thConstraint.AsTypeDesc(); |
5278 | |
5279 | if (pTypeDesc->IsGenericVariable()) |
5280 | { |
            // Since the metadata represents a generic type param constraint as an index into
            // the declaring type's list of generic params, it is structurally impossible
            // to express a violation this way. So there's no check to be done here.
5284 | } |
5285 | else |
5286 | if (pTypeDesc->HasTypeParam()) |
5287 | { |
5288 | DoAccessibilityCheckForConstraint(pAskingMT, pTypeDesc->GetTypeParam(), resIDWhy); |
5289 | } |
5290 | else |
5291 | { |
5292 | COMPlusThrow(kTypeLoadException, E_ACCESSDENIED); |
5293 | } |
5294 | |
5295 | } |
5296 | else |
5297 | { |
5298 | DoAccessibilityCheck(pAskingMT, thConstraint.GetMethodTable(), resIDWhy); |
5299 | } |
5300 | |
5301 | } |
5302 | |
5303 | VOID DoAccessibilityCheckForConstraints(MethodTable *pAskingMT, TypeVarTypeDesc *pTyVar, UINT resIDWhy) |
5304 | { |
5305 | CONTRACTL |
5306 | { |
5307 | THROWS; |
5308 | GC_TRIGGERS; |
5309 | } |
5310 | CONTRACTL_END; |
5311 | |
5312 | DWORD numConstraints; |
5313 | TypeHandle *pthConstraints = pTyVar->GetCachedConstraints(&numConstraints); |
5314 | for (DWORD cidx = 0; cidx < numConstraints; cidx++) |
5315 | { |
5316 | TypeHandle thConstraint = pthConstraints[cidx]; |
5317 | |
5318 | DoAccessibilityCheckForConstraint(pAskingMT, thConstraint, resIDWhy); |
5319 | } |
5320 | } |
5321 | |
5322 | |
5323 | // Recursive worker that pumps the transitive closure of a type's dependencies to the specified target level. |
5324 | // Dependencies include: |
5325 | // |
5326 | // - parent |
5327 | // - interfaces |
5328 | // - canonical type, for non-canonical instantiations |
5329 | // - typical type, for non-typical instantiations |
5330 | // |
5331 | // Parameters: |
5332 | // |
5333 | // pVisited - used to prevent endless recursion in the case of cyclic dependencies |
5334 | // |
5335 | // level - target level to pump to - must be CLASS_DEPENDENCIES_LOADED or CLASS_LOADED |
5336 | // |
5337 | // if CLASS_DEPENDENCIES_LOADED, all transitive dependencies are resolved to their |
5338 | // exact types. |
5339 | // |
5340 | // if CLASS_LOADED, all type-safety checks are done on the type and all its transitive |
5341 | // dependencies. Note that for the CLASS_LOADED case, some types may be left |
5342 | // on the pending list rather that pushed to CLASS_LOADED in the case of cyclic |
5343 | // dependencies - the root caller must handle this. |
5344 | // |
5345 | // pfBailed - if we or one of our depedencies bails early due to cyclic dependencies, we |
5346 | // must set *pfBailed to TRUE. Otherwise, we must *leave it unchanged* (thus, the |
5347 | // boolean acts as a cumulative OR.) |
5348 | // |
5349 | // pPending - if one of our dependencies bailed, the type cannot yet be promoted to CLASS_LOADED |
5350 | // as the dependencies will be checked later and may fail a security check then. |
5351 | // Instead, DoFullyLoad() will add the type to the pending list - the root caller |
5352 | // is responsible for promoting the type after the full transitive closure has been |
5353 | // walked. Note that it would be just as correct to always defer to the pending list - |
5354 | // however, that is a little less performant. |
5355 | // |
5356 | |
5357 | |
5358 | // Closure of locals necessary for implementing CheckForEquivalenceAndFullyLoadType. |
5359 | // Used so that we can have one valuetype walking algorithm used for type equivalence walking of the parameters of the method. |
5360 | struct DoFullyLoadLocals |
5361 | { |
5362 | DoFullyLoadLocals(DFLPendingList *pPendingParam, ClassLoadLevel levelParam, MethodTable *pMT, Generics::RecursionGraph *pVisited) |
5363 | : newVisited(pVisited, TypeHandle(pMT)) |
5364 | , pPending(pPendingParam) |
5365 | , level(levelParam) |
5366 | , fBailed(FALSE) |
5367 | #ifdef FEATURE_TYPEEQUIVALENCE |
5368 | , fHasEquivalentStructParameter(FALSE) |
5369 | #endif |
5370 | , fHasTypeForwarderDependentStructParameter(FALSE) |
5371 | , fDependsOnEquivalentOrForwardedStructs(FALSE) |
5372 | { |
5373 | LIMITED_METHOD_CONTRACT; |
5374 | } |
5375 | |
5376 | Generics::RecursionGraph newVisited; |
5377 | DFLPendingList * const pPending; |
5378 | const ClassLoadLevel level; |
5379 | BOOL fBailed; |
5380 | #ifdef FEATURE_TYPEEQUIVALENCE |
5381 | BOOL fHasEquivalentStructParameter; |
5382 | #endif |
5383 | BOOL fHasTypeForwarderDependentStructParameter; |
5384 | BOOL fDependsOnEquivalentOrForwardedStructs; |
5385 | }; |
5386 | |
5387 | #if defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE) |
5388 | static void CheckForEquivalenceAndFullyLoadType(Module *pModule, mdToken token, Module *pDefModule, mdToken defToken, const SigParser *ptr, SigTypeContext *pTypeContext, void *pData) |
5389 | { |
5390 | CONTRACTL |
5391 | { |
5392 | THROWS; |
5393 | GC_TRIGGERS; |
5394 | SO_INTOLERANT; |
5395 | } |
5396 | CONTRACTL_END; |
5397 | |
5398 | SigPointer sigPtr(*ptr); |
5399 | |
5400 | DoFullyLoadLocals *pLocals = (DoFullyLoadLocals *)pData; |
5401 | |
5402 | if (IsTypeDefEquivalent(defToken, pDefModule)) |
5403 | { |
5404 | TypeHandle th = sigPtr.GetTypeHandleThrowing(pModule, pTypeContext, ClassLoader::LoadTypes, (ClassLoadLevel)(pLocals->level - 1)); |
5405 | CONSISTENCY_CHECK(!th.IsNull()); |
5406 | |
5407 | th.DoFullyLoad(&pLocals->newVisited, pLocals->level, pLocals->pPending, &pLocals->fBailed, NULL); |
5408 | pLocals->fDependsOnEquivalentOrForwardedStructs = TRUE; |
5409 | pLocals->fHasEquivalentStructParameter = TRUE; |
5410 | } |
5411 | } |
5412 | |
5413 | #endif // defined(FEATURE_TYPEEQUIVALENCE) && !defined(DACCESS_COMPILE) |
5414 | |
5415 | struct CheckForTypeForwardedTypeRefParameterLocals |
5416 | { |
5417 | Module * pModule; |
5418 | BOOL * pfTypeForwarderFound; |
5419 | }; |
5420 | |
5421 | // Callback for code:WalkValueTypeTypeDefOrRefs of type code:PFN_WalkValueTypeTypeDefOrRefs |
5422 | static void CheckForTypeForwardedTypeRef( |
5423 | mdToken tkTypeDefOrRef, |
5424 | void * pData) |
5425 | { |
5426 | STANDARD_VM_CONTRACT; |
5427 | |
5428 | CheckForTypeForwardedTypeRefParameterLocals * pLocals = (CheckForTypeForwardedTypeRefParameterLocals *)pData; |
5429 | |
5430 | // If a type forwarder was found, return - we're done |
5431 | if ((pLocals->pfTypeForwarderFound != NULL) && (*(pLocals->pfTypeForwarderFound))) |
5432 | return; |
5433 | |
    // Only type refs are interesting
5435 | if (TypeFromToken(tkTypeDefOrRef) == mdtTypeRef) |
5436 | { |
5437 | Module * pDummyModule; |
5438 | mdToken tkDummy; |
5439 | ClassLoader::ResolveTokenToTypeDefThrowing( |
5440 | pLocals->pModule, |
5441 | tkTypeDefOrRef, |
5442 | &pDummyModule, |
5443 | &tkDummy, |
5444 | Loader::Load, |
5445 | pLocals->pfTypeForwarderFound); |
5446 | } |
5447 | } |
5448 | |
5449 | typedef void (* PFN_WalkValueTypeTypeDefOrRefs)(mdToken tkTypeDefOrRef, void * pData); |
5450 | |
5451 | // Call 'function' for ValueType in the signature. |
5452 | void WalkValueTypeTypeDefOrRefs( |
5453 | const SigParser * pSig, |
5454 | PFN_WalkValueTypeTypeDefOrRefs function, |
5455 | void * pData) |
5456 | { |
5457 | STANDARD_VM_CONTRACT; |
5458 | |
5459 | SigParser sig(*pSig); |
5460 | |
5461 | CorElementType typ; |
5462 | IfFailThrow(sig.GetElemType(&typ)); |
5463 | |
5464 | switch (typ) |
5465 | { |
5466 | case ELEMENT_TYPE_VALUETYPE: |
5467 | mdToken token; |
5468 | IfFailThrow(sig.GetToken(&token)); |
5469 | function(token, pData); |
5470 | break; |
5471 | |
5472 | case ELEMENT_TYPE_GENERICINST: |
5473 | // Process and skip generic type |
5474 | WalkValueTypeTypeDefOrRefs(&sig, function, pData); |
5475 | IfFailThrow(sig.SkipExactlyOne()); |
5476 | |
5477 | // Get number of parameters |
5478 | ULONG argCnt; |
5479 | IfFailThrow(sig.GetData(&argCnt)); |
5480 | while (argCnt-- != 0) |
5481 | { // Process and skip generic parameter |
5482 | WalkValueTypeTypeDefOrRefs(&sig, function, pData); |
5483 | IfFailThrow(sig.SkipExactlyOne()); |
5484 | } |
5485 | break; |
5486 | default: |
5487 | break; |
5488 | } |
5489 | } |
5490 | |
5491 | // Callback for code:MethodDesc::WalkValueTypeParameters (of type code:WalkValueTypeParameterFnPtr) |
5492 | static void CheckForTypeForwardedTypeRefParameter( |
5493 | Module * pModule, |
5494 | mdToken token, |
5495 | Module * pDefModule, |
5496 | mdToken defToken, |
5497 | const SigParser *ptr, |
5498 | SigTypeContext * pTypeContext, |
5499 | void * pData) |
5500 | { |
5501 | STANDARD_VM_CONTRACT; |
5502 | |
5503 | DoFullyLoadLocals * pLocals = (DoFullyLoadLocals *)pData; |
5504 | |
5505 | // If a type forwarder was found, return - we're done |
5506 | if (pLocals->fHasTypeForwarderDependentStructParameter) |
5507 | return; |
5508 | |
5509 | CheckForTypeForwardedTypeRefParameterLocals locals; |
5510 | locals.pModule = pModule; |
5511 | locals.pfTypeForwarderFound = &pLocals->fHasTypeForwarderDependentStructParameter; // By not passing NULL here, we determine if there is a type forwarder involved. |
5512 | |
5513 | WalkValueTypeTypeDefOrRefs(ptr, CheckForTypeForwardedTypeRef, &locals); |
5514 | |
5515 | if (pLocals->fHasTypeForwarderDependentStructParameter) |
5516 | pLocals->fDependsOnEquivalentOrForwardedStructs = TRUE; |
5517 | } |
5518 | |
5519 | // Callback for code:MethodDesc::WalkValueTypeParameters (of type code:WalkValueTypeParameterFnPtr) |
5520 | static void LoadTypeDefOrRefAssembly( |
5521 | Module * pModule, |
5522 | mdToken token, |
5523 | Module * pDefModule, |
5524 | mdToken defToken, |
5525 | const SigParser *ptr, |
5526 | SigTypeContext * pTypeContext, |
5527 | void * pData) |
5528 | { |
5529 | STANDARD_VM_CONTRACT; |
5530 | |
5531 | DoFullyLoadLocals * pLocals = (DoFullyLoadLocals *)pData; |
5532 | |
5533 | CheckForTypeForwardedTypeRefParameterLocals locals; |
5534 | locals.pModule = pModule; |
5535 | locals.pfTypeForwarderFound = NULL; // By passing NULL here, we simply resolve the token to TypeDef. |
5536 | |
5537 | WalkValueTypeTypeDefOrRefs(ptr, CheckForTypeForwardedTypeRef, &locals); |
5538 | } |
5539 | |
5540 | #endif //!DACCESS_COMPILE |
5541 | |
5542 | void MethodTable::DoFullyLoad(Generics::RecursionGraph * const pVisited, const ClassLoadLevel level, DFLPendingList * const pPending, |
5543 | BOOL * const pfBailed, const InstantiationContext * const pInstContext) |
5544 | { |
5545 | STANDARD_VM_CONTRACT; |
5546 | |
5547 | _ASSERTE(level == CLASS_LOADED || level == CLASS_DEPENDENCIES_LOADED); |
5548 | _ASSERTE(pfBailed != NULL); |
5549 | _ASSERTE(!(level == CLASS_LOADED && pPending == NULL)); |
5550 | |
5551 | |
5552 | #ifndef DACCESS_COMPILE |
5553 | |
5554 | if (Generics::RecursionGraph::HasSeenType(pVisited, TypeHandle(this))) |
5555 | { |
5556 | *pfBailed = TRUE; |
5557 | return; |
5558 | } |
5559 | |
5560 | if (GetLoadLevel() >= level) |
5561 | { |
5562 | return; |
5563 | } |
5564 | |
5565 | if (level == CLASS_LOADED) |
5566 | { |
5567 | UINT numTH = pPending->Count(); |
5568 | TypeHandle *pTypeHndPending = pPending->Table(); |
5569 | for (UINT idxPending = 0; idxPending < numTH; idxPending++) |
5570 | { |
5571 | if (pTypeHndPending[idxPending] == this) |
5572 | { |
5573 | *pfBailed = TRUE; |
5574 | return; |
5575 | } |
5576 | } |
5577 | |
5578 | } |
5579 | |
5580 | BEGIN_SO_INTOLERANT_CODE(GetThread()); |
5581 | // First ensure that we're loaded to just below CLASS_DEPENDENCIES_LOADED |
5582 | ClassLoader::EnsureLoaded(this, (ClassLoadLevel) (level-1)); |
5583 | |
5584 | CONSISTENCY_CHECK(IsRestored_NoLogging()); |
5585 | CONSISTENCY_CHECK(!HasApproxParent()); |
5586 | |
5587 | |
5588 | DoFullyLoadLocals locals(pPending, level, this, pVisited); |
5589 | |
5590 | bool fNeedsSanityChecks = !IsZapped(); // Validation has been performed for NGened classes already |
5591 | |
5592 | #ifdef FEATURE_READYTORUN |
5593 | if (fNeedsSanityChecks) |
5594 | { |
5595 | Module * pModule = GetModule(); |
5596 | |
        // Skip sanity checks for ready-to-run compiled images when possible
5598 | if (pModule->IsReadyToRun() && pModule->GetReadyToRunInfo()->SkipTypeValidation()) |
5599 | fNeedsSanityChecks = false; |
5600 | } |
5601 | #endif |
5602 | |
5603 | bool fNeedAccessChecks = (level == CLASS_LOADED) && |
5604 | fNeedsSanityChecks && |
5605 | IsTypicalTypeDefinition(); |
5606 | |
5607 | TypeHandle typicalTypeHnd; |
5608 | |
5609 | if (!IsZapped()) // Validation has been performed for NGened classes already |
5610 | { |
5611 | // Fully load the typical instantiation. Make sure that this is done before loading other dependencies |
5612 | // as the recursive generics detection algorithm needs to examine typical instantiations of the types |
5613 | // in the closure. |
5614 | if (!IsTypicalTypeDefinition()) |
5615 | { |
5616 | typicalTypeHnd = ClassLoader::LoadTypeDefThrowing(GetModule(), GetCl(), |
5617 | ClassLoader::ThrowIfNotFound, ClassLoader::PermitUninstDefOrRef, tdNoTypes, |
5618 | (ClassLoadLevel) (level - 1)); |
5619 | CONSISTENCY_CHECK(!typicalTypeHnd.IsNull()); |
5620 | typicalTypeHnd.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5621 | } |
5622 | else if (level == CLASS_DEPENDENCIES_LOADED && HasInstantiation()) |
5623 | { |
5624 | // This is a typical instantiation of a generic type. When attaining CLASS_DEPENDENCIES_LOADED, the |
5625 | // recursive inheritance graph (ECMA part.II Section 9.2) will be constructed and checked for "expanding |
5626 | // cycles" to detect infinite recursion, e.g. A<T> : B<A<A<T>>>. |
5627 | // |
5628 | // The dependencies loaded by this method (parent type, implemented interfaces, generic arguments) |
5629 | // ensure that we will generate the finite instantiation closure as defined in ECMA. This load level |
5630 | // is not being attained under lock so it's not possible to use TypeVarTypeDesc to represent graph |
5631 | // nodes because multiple threads trying to fully load types from the closure at the same time would |
            // interfere with each other. In addition, the graph is only used for loading and can be discarded
            // when the closure is fully loaded (TypeVarTypeDescs need to stay).
5634 | // |
5635 | // The graph is represented by Generics::RecursionGraph instances organized in a linked list with |
5636 | // each of them holding part of the graph. They live on the stack and are cleaned up automatically |
5637 | // before returning from DoFullyLoad. |
5638 | |
5639 | if (locals.newVisited.CheckForIllegalRecursion()) |
5640 | { |
5641 | // An expanding cycle was detected, this type is part of a closure that is defined recursively. |
5642 | IMDInternalImport* pInternalImport = GetModule()->GetMDImport(); |
5643 | GetModule()->GetAssembly()->ThrowTypeLoadException(pInternalImport, GetCl(), IDS_CLASSLOAD_GENERICTYPE_RECURSIVE); |
5644 | } |
5645 | } |
5646 | } |
5647 | |
5648 | // Fully load the parent |
5649 | MethodTable *pParentMT = GetParentMethodTable(); |
5650 | |
5651 | if (pParentMT) |
5652 | { |
5653 | pParentMT->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5654 | |
5655 | if (fNeedAccessChecks) |
5656 | { |
            if (!IsComObjectType()) // RCWs are special - they are manufactured by the runtime and derive from the non-public type System.__ComObject
5658 | { |
                // A transparent type should not be allowed to derive from a critical type.
                // However, since this has never been enforced before, we have many classes that
                // violate this rule. Enforcing it now would be a breaking change.
5662 | DoAccessibilityCheck(this, pParentMT, E_ACCESSDENIED); |
5663 | } |
5664 | } |
5665 | } |
5666 | |
5667 | // Fully load the interfaces |
5668 | MethodTable::InterfaceMapIterator it = IterateInterfaceMap(); |
5669 | while (it.Next()) |
5670 | { |
5671 | it.GetInterface()->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5672 | |
5673 | if (fNeedAccessChecks) |
5674 | { |
5675 | if (IsInterfaceDeclaredOnClass(it.GetIndex())) // only test directly implemented interfaces (it's |
5676 | // legal for an inherited interface to be private.) |
5677 | { |
                // A transparent type should not be allowed to implement a critical interface.
                // However, since this has never been enforced before, we have many classes that
                // violate this rule. Enforcing it now would be a breaking change.
5681 | DoAccessibilityCheck(this, it.GetInterface(), IDS_CLASSLOAD_INTERFACE_NO_ACCESS); |
5682 | } |
5683 | } |
5684 | } |
5685 | |
5686 | // Fully load the generic arguments |
5687 | Instantiation inst = GetInstantiation(); |
5688 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
5689 | { |
5690 | inst[i].DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5691 | } |
5692 | |
5693 | // Fully load the canonical methodtable |
5694 | if (!IsCanonicalMethodTable()) |
5695 | { |
5696 | GetCanonicalMethodTable()->DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, NULL); |
5697 | } |
5698 | |
5699 | if (fNeedsSanityChecks) |
5700 | { |
5701 | // Fully load the exact field types for value type fields |
        // Note that MethodTableBuilder::InitializeFieldDescs() loads the type of the
        // field only up to level CLASS_LOAD_APPROXPARENTS.
5704 | FieldDesc *pField = GetApproxFieldDescListRaw(); |
5705 | FieldDesc *pFieldEnd = pField + GetNumStaticFields() + GetNumIntroducedInstanceFields(); |
5706 | |
5707 | while (pField < pFieldEnd) |
5708 | { |
5709 | g_IBCLogger.LogFieldDescsAccess(pField); |
5710 | |
5711 | if (pField->GetFieldType() == ELEMENT_TYPE_VALUETYPE) |
5712 | { |
5713 | TypeHandle th = pField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1)); |
5714 | CONSISTENCY_CHECK(!th.IsNull()); |
5715 | |
5716 | th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5717 | |
5718 | if (fNeedAccessChecks) |
5719 | { |
5720 | DoAccessibilityCheck(this, th.GetMethodTable(), E_ACCESSDENIED); |
5721 | } |
5722 | |
5723 | } |
5724 | pField++; |
5725 | } |
5726 | |
5727 | // Fully load the exact field types for generic value type fields |
5728 | if (HasGenericsStaticsInfo()) |
5729 | { |
5730 | FieldDesc *pGenStaticField = GetGenericsStaticFieldDescs(); |
5731 | FieldDesc *pGenStaticFieldEnd = pGenStaticField + GetNumStaticFields(); |
5732 | while (pGenStaticField < pGenStaticFieldEnd) |
5733 | { |
5734 | if (pGenStaticField->GetFieldType() == ELEMENT_TYPE_VALUETYPE) |
5735 | { |
5736 | TypeHandle th = pGenStaticField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level - 1)); |
5737 | CONSISTENCY_CHECK(!th.IsNull()); |
5738 | |
5739 | th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5740 | |
5741 | // The accessibility check is not necessary for generic fields. The generic fields are copy |
5742 | // of the regular fields, the only difference is that they have the exact type. |
5743 | } |
5744 | pGenStaticField++; |
5745 | } |
5746 | } |
5747 | } |
5748 | |
5749 | #ifdef FEATURE_NATIVE_IMAGE_GENERATION |
5750 | // Fully load the types of fields associated with a field marshaler when ngenning |
5751 | if (HasLayout() && GetAppDomain()->IsCompilationDomain() && !IsZapped()) |
5752 | { |
5753 | FieldMarshaler* pFM = this->GetLayoutInfo()->GetFieldMarshalers(); |
5754 | UINT numReferenceFields = this->GetLayoutInfo()->GetNumCTMFields(); |
5755 | |
5756 | while (numReferenceFields--) |
5757 | { |
5758 | |
5759 | FieldDesc *pMarshalerField = pFM->GetFieldDesc(); |
5760 | |
            // If the fielddesc pointer here is a token-tagged pointer, then the field marshaler that we are
            // working with will not need to be saved into this ngen image. Since that was the reason we
            // needed to load this type, we will not need to fully load the type associated with this field desc.
5764 | // |
5765 | if (!CORCOMPILE_IS_POINTER_TAGGED(pMarshalerField)) |
5766 | { |
5767 | TypeHandle th = pMarshalerField->GetFieldTypeHandleThrowing((ClassLoadLevel) (level-1)); |
5768 | CONSISTENCY_CHECK(!th.IsNull()); |
5769 | |
5770 | th.DoFullyLoad(&locals.newVisited, level, pPending, &locals.fBailed, pInstContext); |
5771 | } |
5772 | // The accessibility check is not used here to prevent functional differences between ngen and non-ngen scenarios. |
5773 | ((BYTE*&)pFM) += MAXFIELDMARSHALERSIZE; |
5774 | } |
5775 | } |
5776 | #endif //FEATURE_NATIVE_IMAGE_GENERATION |
5777 | |
    // Fully load exact parameter types for value type parameters opted into equivalence. This is required in case GC is
    // triggered during the prestub. The GC needs to know where references are on the stack, and if a parameter (as read from
    // the method signature) is a structure, it relies on the loaded type for the layout information. For ordinary
    // structures we are guaranteed to have loaded the type before entering the prestub - the caller must have loaded it.
    // However, due to type equivalence, the caller may work with a different type than what's in the method signature.
    //
    // We deal with this situation by eagerly loading types that may cause these problems, i.e. value types in signatures of
    // methods introduced by this type. To avoid the perf hit for scenarios without type equivalence, we only preload
    // structures that are marked as type-equivalent. In the no-PIA world
    // these structures are called "local types" and are usually generated automatically by the compiler. Note that there
    // is related logic in code:CompareTypeDefsForEquivalence that declares two tokens corresponding to structures as
    // equivalent based on an extensive set of equivalency checks.
5790 | // |
5791 | // To address this situation for NGENed types and methods, we prevent pre-restoring them - see code:ComputeNeedsRestoreWorker |
5792 | // for details. That forces them to go through the final stages of loading at run-time and hit the same code below. |
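    // For illustration (C# source; the attribute arguments are hypothetical): a no-PIA
    // "local type" is a struct marked with TypeIdentifierAttribute, e.g.
    //     [TypeIdentifier("scope-guid", "MyLib.MyEquivalentStruct")]
    //     public struct MyEquivalentStruct { public int Value; }
    // Two such definitions in different assemblies may be treated as equivalent, so
    // a caller can legitimately pass a type other than the one in the method signature.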
5793 | |
5794 | if ((level == CLASS_LOADED) |
5795 | && (GetCl() != mdTypeDefNil) |
5796 | && !ContainsGenericVariables() |
5797 | && (!IsZapped() |
5798 | || DependsOnEquivalentOrForwardedStructs() |
5799 | #ifdef DEBUG |
5800 | || TRUE // Always load types in debug builds so that we calculate fDependsOnEquivalentOrForwardedStructs all of the time |
5801 | #endif |
5802 | ) |
5803 | ) |
5804 | { |
5805 | MethodTable::IntroducedMethodIterator itMethods(this, FALSE); |
5806 | for (; itMethods.IsValid(); itMethods.Next()) |
5807 | { |
5808 | MethodDesc * pMD = itMethods.GetMethodDesc(); |
5809 | |
5810 | if (IsCompilationProcess()) |
5811 | { |
5812 | locals.fHasTypeForwarderDependentStructParameter = FALSE; |
5813 | EX_TRY |
5814 | { |
5815 | pMD->WalkValueTypeParameters(this, CheckForTypeForwardedTypeRefParameter, &locals); |
5816 | } |
5817 | EX_CATCH |
5818 | { |
5819 | } |
5820 | EX_END_CATCH(RethrowTerminalExceptions); |
5821 | |
5822 | // This marks the class as needing restore. |
5823 | if (locals.fHasTypeForwarderDependentStructParameter && !pMD->IsZapped()) |
5824 | pMD->SetHasForwardedValuetypeParameter(); |
5825 | } |
5826 | else if (pMD->IsZapped() && pMD->HasForwardedValuetypeParameter()) |
5827 | { |
5828 | pMD->WalkValueTypeParameters(this, LoadTypeDefOrRefAssembly, NULL); |
5829 | locals.fDependsOnEquivalentOrForwardedStructs = TRUE; |
5830 | } |
5831 | |
5832 | #ifdef FEATURE_TYPEEQUIVALENCE |
5833 | if (!pMD->DoesNotHaveEquivalentValuetypeParameters() && pMD->IsVirtual()) |
5834 | { |
5835 | locals.fHasEquivalentStructParameter = FALSE; |
5836 | pMD->WalkValueTypeParameters(this, CheckForEquivalenceAndFullyLoadType, &locals); |
5837 | if (!locals.fHasEquivalentStructParameter && !IsZapped()) |
5838 | pMD->SetDoesNotHaveEquivalentValuetypeParameters(); |
5839 | } |
5840 | #else |
5841 | #ifdef FEATURE_PREJIT |
5842 | if (!IsZapped() && pMD->IsVirtual() && !IsCompilationProcess() ) |
5843 | { |
5844 | pMD->PrepareForUseAsADependencyOfANativeImage(); |
5845 | } |
5846 | #endif |
5847 | #endif //FEATURE_TYPEEQUIVALENCE |
5848 | } |
5849 | } |
5850 | |
5851 | _ASSERTE(!IsZapped() || !IsCanonicalMethodTable() || (level != CLASS_LOADED) || ((!!locals.fDependsOnEquivalentOrForwardedStructs) == (!!DependsOnEquivalentOrForwardedStructs()))); |
5852 | if (locals.fDependsOnEquivalentOrForwardedStructs) |
5853 | { |
5854 | if (!IsZapped()) |
5855 | { |
5856 | // if this type declares a method that has an equivalent or type forwarded structure as a parameter type, |
5857 | // make sure we come here and pre-load these structure types in NGENed cases as well |
5858 | SetDependsOnEquivalentOrForwardedStructs(); |
5859 | } |
5860 | } |
5861 | |
// The rules for constraint cycles are the same as the rules for access checks
5863 | if (fNeedAccessChecks) |
5864 | { |
5865 | // Check for cyclical class constraints |
5866 | { |
5867 | Instantiation formalParams = GetInstantiation(); |
5868 | |
5869 | for (DWORD i = 0; i < formalParams.GetNumArgs(); i++) |
5870 | { |
5871 | BOOL Bounded(TypeVarTypeDesc *tyvar, DWORD depth); |
5872 | |
5873 | TypeVarTypeDesc *pTyVar = formalParams[i].AsGenericVariable(); |
5874 | pTyVar->LoadConstraints(CLASS_DEPENDENCIES_LOADED); |
5875 | if (!Bounded(pTyVar, formalParams.GetNumArgs())) |
5876 | { |
5877 | COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS); |
5878 | } |
5879 | |
5880 | DoAccessibilityCheckForConstraints(this, pTyVar, E_ACCESSDENIED); |
5881 | } |
5882 | } |
5883 | |
5884 | // Check for cyclical method constraints |
5885 | { |
5886 | if (GetCl() != mdTypeDefNil) // Make sure this is actually a metadata type! |
5887 | { |
5888 | MethodTable::IntroducedMethodIterator itMethods(this, FALSE); |
5889 | for (; itMethods.IsValid(); itMethods.Next()) |
5890 | { |
5891 | MethodDesc * pMD = itMethods.GetMethodDesc(); |
5892 | |
5893 | if (pMD->IsGenericMethodDefinition() && pMD->IsTypicalMethodDefinition()) |
5894 | { |
5895 | BOOL fHasCircularClassConstraints = TRUE; |
5896 | BOOL fHasCircularMethodConstraints = TRUE; |
5897 | |
5898 | pMD->LoadConstraintsForTypicalMethodDefinition(&fHasCircularClassConstraints, &fHasCircularMethodConstraints, CLASS_DEPENDENCIES_LOADED); |
5899 | |
5900 | if (fHasCircularClassConstraints) |
5901 | { |
5902 | COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_VAR_CONSTRAINTS); |
5903 | } |
5904 | if (fHasCircularMethodConstraints) |
5905 | { |
5906 | COMPlusThrow(kTypeLoadException, VER_E_CIRCULAR_MVAR_CONSTRAINTS); |
5907 | } |
5908 | } |
5909 | } |
5910 | } |
5911 | } |
5912 | |
5913 | } |
5914 | |
5915 | |
5916 | #ifdef _DEBUG |
5917 | if (LoggingOn(LF_CLASSLOADER, LL_INFO10000)) |
5918 | { |
5919 | SString name; |
5920 | TypeString::AppendTypeDebug(name, this); |
5921 | LOG((LF_CLASSLOADER, LL_INFO10000, "PHASEDLOAD: Completed full dependency load of type %S\n" , name.GetUnicode())); |
5922 | } |
5923 | #endif |
5924 | |
5925 | switch (level) |
5926 | { |
5927 | case CLASS_DEPENDENCIES_LOADED: |
5928 | SetIsDependenciesLoaded(); |
5929 | |
5930 | #if defined(FEATURE_COMINTEROP) && !defined(DACCESS_COMPILE) |
5931 | if (WinRTSupported() && g_fEEStarted) |
5932 | { |
5933 | _ASSERTE(GetAppDomain() != NULL); |
5934 | |
5935 | AppDomain* pAppDomain = GetAppDomain(); |
5936 | if (pAppDomain->CanCacheWinRTTypeByGuid(this)) |
5937 | { |
5938 | pAppDomain->CacheWinRTTypeByGuid(this); |
5939 | } |
5940 | } |
5941 | #endif // FEATURE_COMINTEROP && !DACCESS_COMPILE |
5942 | |
5943 | break; |
5944 | |
5945 | case CLASS_LOADED: |
if (!IsZapped() && // Constraint checks have been performed for NGENed classes already
5947 | !IsTypicalTypeDefinition() && |
5948 | !IsSharedByGenericInstantiations()) |
5949 | { |
5950 | TypeHandle thThis = TypeHandle(this); |
5951 | |
// If we got here, we are about to mark a generic instantiation as fully loaded. Before we do so,
// check to see if it has constraints that aren't being satisfied.
5954 | SatisfiesClassConstraints(thThis, typicalTypeHnd, pInstContext); |
5955 | |
5956 | } |
5957 | |
5958 | if (locals.fBailed) |
5959 | { |
// We couldn't complete security checks on some dependency because it is already being processed by one of our callers.
// Do not mark this class fully loaded yet. Put it on the pending list and it will be marked fully loaded when
// everything unwinds.
5963 | |
5964 | *pfBailed = TRUE; |
5965 | |
5966 | TypeHandle *pTHPending = pPending->AppendThrowing(); |
5967 | *pTHPending = TypeHandle(this); |
5968 | } |
5969 | else |
5970 | { |
5971 | // Finally, mark this method table as fully loaded |
5972 | SetIsFullyLoaded(); |
5973 | } |
5974 | break; |
5975 | |
5976 | default: |
5977 | _ASSERTE(!"Can't get here." ); |
5978 | break; |
5979 | |
5980 | } |
5981 | |
5982 | if (level >= CLASS_DEPENDENCIES_LOADED && IsArray()) |
5983 | { |
// The array type should be loaded if the template method table is loaded
5985 | // See also: ArrayBase::SetArrayMethodTable, ArrayBase::SetArrayMethodTableForLargeObject |
5986 | TypeHandle th = ClassLoader::LoadArrayTypeThrowing(GetApproxArrayElementTypeHandle(), |
5987 | GetInternalCorElementType(), |
5988 | GetRank(), |
5989 | ClassLoader::LoadTypes, |
5990 | level); |
5991 | _ASSERTE(th.IsTypeDesc() && th.IsArray()); |
5992 | _ASSERTE(!(level == CLASS_LOADED && !th.IsFullyLoaded())); |
5993 | } |
5994 | |
5995 | END_SO_INTOLERANT_CODE; |
5996 | |
5997 | #endif //!DACCESS_COMPILE |
5998 | } //MethodTable::DoFullyLoad |
5999 | |
6000 | |
6001 | #ifndef DACCESS_COMPILE |
6002 | |
6003 | #ifdef FEATURE_PREJIT |
6004 | |
6005 | // For a MethodTable in a native image, decode sufficient encoded pointers |
6006 | // that the TypeKey for this type is recoverable. |
6007 | // |
6008 | // For instantiated generic types, we need the generic type arguments, |
6009 | // the EEClass pointer, and its Module pointer. |
6010 | // (For non-generic types, the EEClass and Module are always hard bound). |
6011 | // |
6012 | // The process is applied recursively e.g. consider C<D<string>[]>. |
6013 | // It is guaranteed to terminate because types cannot contain cycles in their structure. |
6014 | // |
6015 | // Also note that no lock is required; the process of restoring this information is idempotent. |
6016 | // (Note the atomic action at the end though) |
6017 | // |
6018 | void MethodTable::DoRestoreTypeKey() |
6019 | { |
6020 | CONTRACTL |
6021 | { |
6022 | THROWS; |
6023 | GC_TRIGGERS; |
6024 | } |
6025 | CONTRACTL_END; |
6026 | |
6027 | // If we have an indirection cell then restore the m_pCanonMT and its module pointer |
6028 | // |
6029 | if (union_getLowBits(m_pCanonMT.GetValue()) == UNION_INDIRECTION) |
6030 | { |
6031 | Module::RestoreMethodTablePointerRaw((MethodTable **)(union_getPointer(m_pCanonMT.GetValue())), |
6032 | GetLoaderModule(), CLASS_LOAD_UNRESTORED); |
6033 | } |
6034 | |
6035 | MethodTable * pMTForModule = IsArray() ? this : GetCanonicalMethodTable(); |
6036 | if (pMTForModule->HasModuleOverride()) |
6037 | { |
6038 | Module::RestoreModulePointer(pMTForModule->GetModuleOverridePtr(), pMTForModule->GetLoaderModule()); |
6039 | } |
6040 | |
6041 | if (IsArray()) |
6042 | { |
6043 | // |
6044 | // Restore array element type handle |
6045 | // |
6046 | Module::RestoreTypeHandlePointerRaw(GetApproxArrayElementTypeHandlePtr(), |
6047 | GetLoaderModule(), CLASS_LOAD_UNRESTORED); |
6048 | } |
6049 | |
6050 | // Next restore the instantiation and recurse |
6051 | Instantiation inst = GetInstantiation(); |
6052 | for (DWORD j = 0; j < inst.GetNumArgs(); j++) |
6053 | { |
6054 | Module::RestoreTypeHandlePointer(&inst.GetRawArgs()[j], GetLoaderModule(), CLASS_LOAD_UNRESTORED); |
6055 | } |
6056 | |
6057 | FastInterlockAnd(&(EnsureWritablePages(GetWriteableDataForWrite())->m_dwFlags), ~MethodTableWriteableData::enum_flag_UnrestoredTypeKey); |
6058 | } |
6059 | |
6060 | //========================================================================================== |
6061 | // For a MethodTable in a native image, apply Restore actions |
6062 | // * Decode any encoded pointers |
6063 | // * Instantiate static handles |
6064 | // * Propagate Restore to EEClass |
6065 | // For array method tables, Restore MUST BE IDEMPOTENT as it can be entered from multiple threads |
6066 | // For other classes, restore cannot be entered twice because the loader maintains locks |
6067 | // |
6068 | // When you actually restore the MethodTable for a generic type, the generic |
6069 | // dictionary is restored. That means: |
6070 | // * Parent slots in the PerInstInfo are restored by this method eagerly. They are copied down from the |
6071 | // parent in code:ClassLoader.LoadExactParentAndInterfacesTransitively |
6072 | // * Instantiation parameters in the dictionary are restored eagerly when the type is restored. These are |
6073 | // either hard bound pointers, or tagged tokens (fixups). |
6074 | // * All other dictionary entries are either hard bound pointers or they are NULL (they are cleared when we |
6075 | // freeze the Ngen image). They are *never* tagged tokens. |
6076 | void MethodTable::Restore() |
6077 | { |
6078 | CONTRACTL |
6079 | { |
6080 | THROWS; |
6081 | GC_TRIGGERS; |
6082 | PRECONDITION(IsZapped()); |
6083 | PRECONDITION(!IsRestored_NoLogging()); |
6084 | PRECONDITION(!HasUnrestoredTypeKey()); |
6085 | } |
6086 | CONTRACTL_END; |
6087 | |
6088 | g_IBCLogger.LogMethodTableAccess(this); |
6089 | |
6090 | STRESS_LOG1(LF_ZAP, LL_INFO10000, "MethodTable::Restore: Restoring type %pT\n" , this); |
6091 | LOG((LF_ZAP, LL_INFO10000, |
6092 | "Restoring methodtable %s at " FMT_ADDR ".\n" , GetDebugClassName(), DBG_ADDR(this))); |
6093 | |
6094 | // Class pointer should be restored already (in DoRestoreTypeKey) |
6095 | CONSISTENCY_CHECK(IsClassPointerValid()); |
6096 | |
6097 | // If this isn't the canonical method table itself, then restore the canonical method table |
6098 | // We will load the canonical method table to level EXACTPARENTS in LoadExactParents |
6099 | if (!IsCanonicalMethodTable()) |
6100 | { |
6101 | ClassLoader::EnsureLoaded(GetCanonicalMethodTable(), CLASS_LOAD_APPROXPARENTS); |
6102 | } |
6103 | |
6104 | // |
6105 | // Restore parent method table |
6106 | // |
6107 | if (IsParentMethodTableIndirectPointerMaybeNull()) |
6108 | { |
6109 | Module::RestoreMethodTablePointerRaw(GetParentMethodTableValuePtr(), GetLoaderModule(), CLASS_LOAD_APPROXPARENTS); |
6110 | } |
6111 | else |
6112 | { |
6113 | ClassLoader::EnsureLoaded(ReadPointer(this, &MethodTable::m_pParentMethodTable, GetFlagHasIndirectParent()), |
6114 | CLASS_LOAD_APPROXPARENTS); |
6115 | } |
6116 | |
6117 | // |
6118 | // Restore interface classes |
6119 | // |
6120 | InterfaceMapIterator it = IterateInterfaceMap(); |
6121 | while (it.Next()) |
6122 | { |
// Just make sure that the approximate interface is loaded. LoadExactParents fills in the exact interface later.
6124 | MethodTable * pIftMT; |
6125 | pIftMT = it.GetInterfaceInfo()->GetApproxMethodTable(GetLoaderModule()); |
6126 | _ASSERTE(pIftMT != NULL); |
6127 | } |
6128 | |
6129 | if (HasCrossModuleGenericStaticsInfo()) |
6130 | { |
6131 | MethodTableWriteableData * pWriteableData = GetWriteableDataForWrite(); |
6132 | CrossModuleGenericsStaticsInfo * pInfo = pWriteableData->GetCrossModuleGenericsStaticsInfo(); |
6133 | |
6134 | EnsureWritablePages(pWriteableData, sizeof(MethodTableWriteableData) + sizeof(CrossModuleGenericsStaticsInfo)); |
6135 | |
6136 | pInfo->m_pModuleForStatics = GetLoaderModule(); |
6137 | } |
6138 | |
6139 | LOG((LF_ZAP, LL_INFO10000, |
6140 | "Restored methodtable %s at " FMT_ADDR ".\n" , GetDebugClassName(), DBG_ADDR(this))); |
6141 | |
6142 | // This has to be last! |
6143 | SetIsRestored(); |
6144 | } |
6145 | #endif // FEATURE_PREJIT |
6146 | |
6147 | #ifdef FEATURE_COMINTEROP |
6148 | |
6149 | //========================================================================================== |
6150 | BOOL MethodTable::IsExtensibleRCW() |
6151 | { |
6152 | WRAPPER_NO_CONTRACT; |
6153 | _ASSERTE(GetClass()); |
6154 | return IsComObjectType() && !GetClass()->IsComImport(); |
6155 | } |
6156 | |
6157 | //========================================================================================== |
6158 | OBJECTHANDLE MethodTable::GetOHDelegate() |
6159 | { |
6160 | WRAPPER_NO_CONTRACT; |
6161 | _ASSERTE(GetClass()); |
6162 | return GetClass()->GetOHDelegate(); |
6163 | } |
6164 | |
6165 | //========================================================================================== |
6166 | void MethodTable::SetOHDelegate (OBJECTHANDLE _ohDelegate) |
6167 | { |
6168 | LIMITED_METHOD_CONTRACT; |
6169 | _ASSERTE(GetClass()); |
6170 | g_IBCLogger.LogEEClassCOWTableAccess(this); |
6171 | GetClass_NoLogging()->SetOHDelegate(_ohDelegate); |
6172 | } |
6173 | |
6174 | //========================================================================================== |
6175 | // Helper to skip over COM class in the hierarchy |
6176 | MethodTable* MethodTable::GetComPlusParentMethodTable() |
6177 | { |
6178 | CONTRACTL |
6179 | { |
6180 | THROWS; |
6181 | GC_TRIGGERS; |
6182 | MODE_ANY; |
6183 | } |
6184 | CONTRACTL_END |
6185 | |
6186 | MethodTable* pParent = GetParentMethodTable(); |
6187 | |
6188 | if (pParent && pParent->IsComImport()) |
6189 | { |
6190 | if (pParent->IsProjectedFromWinRT()) |
6191 | { |
// skip all ComImport classes
6193 | do |
6194 | { |
6195 | pParent = pParent->GetParentMethodTable(); |
6196 | _ASSERTE(pParent != NULL); |
} while (pParent->IsComImport());
6198 | |
6199 | // Now we have either System.__ComObject or WindowsRuntime.RuntimeClass |
6200 | if (pParent != g_pBaseCOMObject) |
6201 | { |
6202 | return pParent; |
6203 | } |
6204 | } |
6205 | else |
6206 | { |
6207 | // Skip the single ComImport class we expect |
6208 | _ASSERTE(pParent->GetParentMethodTable() != NULL); |
6209 | pParent = pParent->GetParentMethodTable(); |
6210 | } |
6211 | _ASSERTE(!pParent->IsComImport()); |
6212 | |
6213 | // Skip over System.__ComObject, expect System.MarshalByRefObject |
6214 | pParent=pParent->GetParentMethodTable(); |
6215 | _ASSERTE(pParent != NULL); |
6216 | _ASSERTE(pParent->GetParentMethodTable() != NULL); |
6217 | _ASSERTE(pParent->GetParentMethodTable() == g_pObjectClass); |
6218 | } |
6219 | |
6220 | return pParent; |
6221 | } |
6222 | |
6223 | BOOL MethodTable::IsWinRTObjectType() |
6224 | { |
6225 | LIMITED_METHOD_CONTRACT; |
6226 | |
6227 | // Try to determine if this object represents a WindowsRuntime object - i.e. is either |
6228 | // ProjectedFromWinRT or derived from a class that is |
6229 | |
6230 | if (!IsComObjectType()) |
6231 | return FALSE; |
6232 | |
6233 | // Ideally we'd compute this once in BuildMethodTable and track it with another |
6234 | // flag, but we're now out of bits on m_dwFlags, and this is used very rarely |
6235 | // so for now we'll just recompute it when necessary. |
6236 | MethodTable* pMT = this; |
6237 | do |
6238 | { |
6239 | if (pMT->IsProjectedFromWinRT()) |
6240 | { |
6241 | // Found a WinRT COM object |
6242 | return TRUE; |
6243 | } |
6244 | if (pMT->IsComImport()) |
6245 | { |
6246 | // Found a class that is actually imported from COM but not WinRT |
6247 | // this is definitely a non-WinRT COM object |
6248 | return FALSE; |
6249 | } |
6250 | pMT = pMT->GetParentMethodTable(); |
} while (pMT != NULL);
6252 | |
6253 | return FALSE; |
6254 | } |
6255 | |
6256 | #endif // FEATURE_COMINTEROP |
6257 | |
6258 | #endif // !DACCESS_COMPILE |
6259 | |
6260 | //========================================================================================== |
6261 | // Return a pointer to the dictionary for an instantiated type |
6262 | // Return NULL if not instantiated |
6263 | PTR_Dictionary MethodTable::GetDictionary() |
6264 | { |
6265 | LIMITED_METHOD_DAC_CONTRACT; |
6266 | |
6267 | if (HasInstantiation()) |
6268 | { |
6269 | // The instantiation for this class is stored in the type slots table |
6270 | // *after* any inherited slots |
6271 | TADDR base = dac_cast<TADDR>(&(GetPerInstInfo()[GetNumDicts()-1])); |
6272 | return PerInstInfoElem_t::GetValueMaybeNullAtPtr(base); |
6273 | } |
6274 | else |
6275 | { |
6276 | return NULL; |
6277 | } |
6278 | } |
6279 | |
6280 | //========================================================================================== |
6281 | // As above, but assert if an instantiated type is not restored |
6282 | Instantiation MethodTable::GetInstantiation() |
6283 | { |
6284 | LIMITED_METHOD_CONTRACT; |
6285 | SUPPORTS_DAC; |
6286 | if (HasInstantiation()) |
6287 | { |
6288 | PTR_GenericsDictInfo pDictInfo = GetGenericsDictInfo(); |
6289 | TADDR base = dac_cast<TADDR>(&(GetPerInstInfo()[pDictInfo->m_wNumDicts-1])); |
6290 | return Instantiation(PerInstInfoElem_t::GetValueMaybeNullAtPtr(base)->GetInstantiation(), pDictInfo->m_wNumTyPars); |
6291 | } |
6292 | else |
6293 | { |
6294 | return Instantiation(); |
6295 | } |
6296 | } |
6297 | |
6298 | //========================================================================================== |
6299 | // Obtain instantiation from an instantiated type or a pointer to the |
6300 | // element type of an array |
6301 | Instantiation MethodTable::GetClassOrArrayInstantiation() |
6302 | { |
6303 | LIMITED_METHOD_CONTRACT; |
6304 | SUPPORTS_DAC; |
6305 | if (IsArray()) { |
6306 | return GetArrayInstantiation(); |
6307 | } |
6308 | else { |
6309 | return GetInstantiation(); |
6310 | } |
6311 | } |
6312 | |
6313 | //========================================================================================== |
6314 | Instantiation MethodTable::GetArrayInstantiation() |
6315 | { |
6316 | LIMITED_METHOD_CONTRACT; |
6317 | SUPPORTS_DAC; |
6318 | _ASSERTE(IsArray()); |
6319 | return Instantiation((TypeHandle *)&m_ElementTypeHnd, 1); |
6320 | } |
6321 | |
6322 | //========================================================================================== |
6323 | CorElementType MethodTable::GetInternalCorElementType() |
6324 | { |
6325 | LIMITED_METHOD_CONTRACT; |
6326 | SUPPORTS_DAC; |
6327 | |
6328 | // This should not touch the EEClass, at least not in the |
6329 | // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE. |
6330 | |
6331 | g_IBCLogger.LogMethodTableAccess(this); |
6332 | |
6333 | CorElementType ret; |
6334 | |
6335 | switch (GetFlag(enum_flag_Category_ElementTypeMask)) |
6336 | { |
6337 | case enum_flag_Category_Array: |
6338 | ret = ELEMENT_TYPE_ARRAY; |
6339 | break; |
6340 | |
6341 | case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray: |
6342 | ret = ELEMENT_TYPE_SZARRAY; |
6343 | break; |
6344 | |
6345 | case enum_flag_Category_ValueType: |
6346 | ret = ELEMENT_TYPE_VALUETYPE; |
6347 | break; |
6348 | |
6349 | case enum_flag_Category_PrimitiveValueType: |
6350 | // This path should only be taken for the builtin mscorlib types |
6351 | // and primitive valuetypes |
6352 | ret = GetClass()->GetInternalCorElementType(); |
6353 | _ASSERTE((ret != ELEMENT_TYPE_CLASS) && |
6354 | (ret != ELEMENT_TYPE_VALUETYPE)); |
6355 | break; |
6356 | |
6357 | default: |
6358 | ret = ELEMENT_TYPE_CLASS; |
6359 | break; |
6360 | } |
6361 | |
// DAC may be targeting a dump; dumps do not guarantee you can retrieve the EEClass from
// the MethodTable so this is not expected to work in a DAC build.
6364 | #if defined(_DEBUG) && !defined(DACCESS_COMPILE) |
6365 | if (IsRestored_NoLogging()) |
6366 | { |
6367 | PTR_EEClass pClass = GetClass_NoLogging(); |
6368 | if (ret != pClass->GetInternalCorElementType()) |
6369 | { |
6370 | _ASSERTE(!"Mismatched results in MethodTable::GetInternalCorElementType" ); |
6371 | } |
6372 | } |
6373 | #endif // defined(_DEBUG) && !defined(DACCESS_COMPILE) |
6374 | return ret; |
6375 | } |
6376 | |
6377 | //========================================================================================== |
6378 | CorElementType MethodTable::GetVerifierCorElementType() |
6379 | { |
6380 | LIMITED_METHOD_CONTRACT; |
6381 | SUPPORTS_DAC; |
6382 | |
6383 | // This should not touch the EEClass, at least not in the |
6384 | // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE. |
6385 | |
6386 | g_IBCLogger.LogMethodTableAccess(this); |
6387 | |
6388 | CorElementType ret; |
6389 | |
6390 | switch (GetFlag(enum_flag_Category_ElementTypeMask)) |
6391 | { |
6392 | case enum_flag_Category_Array: |
6393 | ret = ELEMENT_TYPE_ARRAY; |
6394 | break; |
6395 | |
6396 | case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray: |
6397 | ret = ELEMENT_TYPE_SZARRAY; |
6398 | break; |
6399 | |
6400 | case enum_flag_Category_ValueType: |
6401 | ret = ELEMENT_TYPE_VALUETYPE; |
6402 | break; |
6403 | |
6404 | case enum_flag_Category_PrimitiveValueType: |
6405 | // |
6406 | // This is the only difference from MethodTable::GetInternalCorElementType() |
6407 | // |
6408 | if (IsTruePrimitive() || IsEnum()) |
6409 | ret = GetClass()->GetInternalCorElementType(); |
6410 | else |
6411 | ret = ELEMENT_TYPE_VALUETYPE; |
6412 | break; |
6413 | |
6414 | default: |
6415 | ret = ELEMENT_TYPE_CLASS; |
6416 | break; |
6417 | } |
6418 | |
6419 | return ret; |
6420 | } |
6421 | |
6422 | //========================================================================================== |
6423 | CorElementType MethodTable::GetSignatureCorElementType() |
6424 | { |
6425 | LIMITED_METHOD_CONTRACT; |
6426 | SUPPORTS_DAC; |
6427 | |
6428 | // This should not touch the EEClass, at least not in the |
6429 | // common cases of ELEMENT_TYPE_CLASS and ELEMENT_TYPE_VALUETYPE. |
6430 | |
6431 | g_IBCLogger.LogMethodTableAccess(this); |
6432 | |
6433 | CorElementType ret; |
6434 | |
6435 | switch (GetFlag(enum_flag_Category_ElementTypeMask)) |
6436 | { |
6437 | case enum_flag_Category_Array: |
6438 | ret = ELEMENT_TYPE_ARRAY; |
6439 | break; |
6440 | |
6441 | case enum_flag_Category_Array | enum_flag_Category_IfArrayThenSzArray: |
6442 | ret = ELEMENT_TYPE_SZARRAY; |
6443 | break; |
6444 | |
6445 | case enum_flag_Category_ValueType: |
6446 | ret = ELEMENT_TYPE_VALUETYPE; |
6447 | break; |
6448 | |
6449 | case enum_flag_Category_PrimitiveValueType: |
6450 | // |
6451 | // This is the only difference from MethodTable::GetInternalCorElementType() |
6452 | // |
6453 | if (IsTruePrimitive()) |
6454 | ret = GetClass()->GetInternalCorElementType(); |
6455 | else |
6456 | ret = ELEMENT_TYPE_VALUETYPE; |
6457 | break; |
6458 | |
6459 | default: |
6460 | ret = ELEMENT_TYPE_CLASS; |
6461 | break; |
6462 | } |
6463 | |
6464 | return ret; |
6465 | } |
6466 | |
6467 | #ifndef DACCESS_COMPILE |
6468 | |
6469 | //========================================================================================== |
6470 | void MethodTable::SetInternalCorElementType (CorElementType _NormType) |
6471 | { |
6472 | WRAPPER_NO_CONTRACT; |
6473 | |
6474 | switch (_NormType) |
6475 | { |
6476 | case ELEMENT_TYPE_CLASS: |
6477 | _ASSERTE(!IsArray()); |
6478 | // Nothing to do |
6479 | break; |
6480 | case ELEMENT_TYPE_VALUETYPE: |
6481 | SetFlag(enum_flag_Category_ValueType); |
6482 | _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_ValueType); |
6483 | break; |
6484 | default: |
6485 | SetFlag(enum_flag_Category_PrimitiveValueType); |
6486 | _ASSERTE(GetFlag(enum_flag_Category_Mask) == enum_flag_Category_PrimitiveValueType); |
6487 | break; |
6488 | } |
6489 | |
6490 | GetClass_NoLogging()->SetInternalCorElementType(_NormType); |
6491 | _ASSERTE(GetInternalCorElementType() == _NormType); |
6492 | } |
6493 | |
6494 | #endif // !DACCESS_COMPILE |
6495 | |
6496 | #ifdef FEATURE_COMINTEROP |
6497 | #ifndef DACCESS_COMPILE |
6498 | |
6499 | #ifndef CROSSGEN_COMPILE |
6500 | BOOL MethodTable::IsLegalWinRTType(OBJECTREF *poref) |
6501 | { |
6502 | CONTRACTL |
6503 | { |
6504 | THROWS; |
6505 | GC_TRIGGERS; |
6506 | MODE_COOPERATIVE; |
6507 | PRECONDITION(IsProtectedByGCFrame(poref)); |
6508 | PRECONDITION(CheckPointer(poref)); |
6509 | PRECONDITION((*poref) != NULL); |
6510 | } |
6511 | CONTRACTL_END |
6512 | |
6513 | if (IsArray()) |
6514 | { |
6515 | BASEARRAYREF arrayRef = (BASEARRAYREF)(*poref); |
6516 | |
// A WinRT array must be a one-dimensional array with a zero lower bound
6518 | if (arrayRef->GetRank() == 1 && arrayRef->GetLowerBoundsPtr()[0] == 0) |
6519 | { |
6520 | MethodTable *pElementMT = ((BASEARRAYREF)(*poref))->GetArrayElementTypeHandle().GetMethodTable(); |
6521 | |
6522 | // Element must be a legal WinRT type and not an array |
6523 | if (!pElementMT->IsArray() && pElementMT->IsLegalNonArrayWinRTType()) |
6524 | return TRUE; |
6525 | } |
6526 | |
6527 | return FALSE; |
6528 | } |
6529 | else |
6530 | { |
6531 | // Non-Array version of IsLegalNonArrayWinRTType |
6532 | return IsLegalNonArrayWinRTType(); |
6533 | } |
6534 | } |
6535 | #endif //#ifndef CROSSGEN_COMPILE |
6536 | |
6537 | BOOL MethodTable::IsLegalNonArrayWinRTType() |
6538 | { |
6539 | CONTRACTL |
6540 | { |
6541 | THROWS; |
6542 | GC_TRIGGERS; |
6543 | MODE_ANY; |
6544 | PRECONDITION(!IsArray()); // arrays are not fully described by MethodTable |
6545 | } |
6546 | CONTRACTL_END |
6547 | |
6548 | if (WinRTTypeNameConverter::IsWinRTPrimitiveType(this)) |
6549 | return TRUE; |
6550 | |
6551 | // Attributes are not legal |
6552 | MethodTable *pParentMT = GetParentMethodTable(); |
6553 | if (pParentMT == MscorlibBinder::GetExistingClass(CLASS__ATTRIBUTE)) |
6554 | { |
6555 | return FALSE; |
6556 | } |
6557 | |
6558 | bool fIsRedirected = false; |
6559 | if (!IsProjectedFromWinRT() && !IsExportedToWinRT()) |
6560 | { |
6561 | // If the type is not primitive and not coming from .winmd, it can still be legal if |
6562 | // it's one of the redirected types (e.g. IEnumerable<T>). |
6563 | if (!WinRTTypeNameConverter::IsRedirectedType(this)) |
6564 | return FALSE; |
6565 | |
6566 | fIsRedirected = true; |
6567 | } |
6568 | |
6569 | if (IsValueType()) |
6570 | { |
6571 | if (!fIsRedirected) |
6572 | { |
6573 | // check fields |
6574 | ApproxFieldDescIterator fieldIterator(this, ApproxFieldDescIterator::INSTANCE_FIELDS); |
6575 | for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next()) |
6576 | { |
6577 | TypeHandle thField = pFD->GetFieldTypeHandleThrowing(CLASS_LOAD_EXACTPARENTS); |
6578 | |
6579 | if (thField.IsTypeDesc()) |
6580 | return FALSE; |
6581 | |
6582 | MethodTable *pFieldMT = thField.GetMethodTable(); |
6583 | |
6584 | // the only allowed reference types are System.String and types projected from WinRT value types |
6585 | if (!pFieldMT->IsValueType() && !pFieldMT->IsString()) |
6586 | { |
6587 | WinMDAdapter::RedirectedTypeIndex index; |
6588 | if (!WinRTTypeNameConverter::ResolveRedirectedType(pFieldMT, &index)) |
6589 | return FALSE; |
6590 | |
6591 | WinMDAdapter::WinMDTypeKind typeKind; |
6592 | WinMDAdapter::GetRedirectedTypeInfo(index, NULL, NULL, NULL, NULL, NULL, &typeKind); |
6593 | if (typeKind != WinMDAdapter::WinMDTypeKind_Struct && typeKind != WinMDAdapter::WinMDTypeKind_Enum) |
6594 | return FALSE; |
6595 | } |
6596 | |
6597 | if (!pFieldMT->IsLegalNonArrayWinRTType()) |
6598 | return FALSE; |
6599 | } |
6600 | } |
6601 | } |
6602 | |
6603 | if (IsInterface() || IsDelegate() || (IsValueType() && fIsRedirected)) |
6604 | { |
6605 | // interfaces, delegates, and redirected structures can be generic - check the instantiation |
6606 | if (HasInstantiation()) |
6607 | { |
6608 | Instantiation inst = GetInstantiation(); |
6609 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
6610 | { |
6611 | // arrays are not allowed as generic arguments |
6612 | if (inst[i].IsArrayType()) |
6613 | return FALSE; |
6614 | |
6615 | if (inst[i].IsTypeDesc()) |
6616 | return FALSE; |
6617 | |
6618 | if (!inst[i].AsMethodTable()->IsLegalNonArrayWinRTType()) |
6619 | return FALSE; |
6620 | } |
6621 | } |
6622 | } |
6623 | else |
6624 | { |
// generic structures and runtime classes are not supported
6626 | if (HasInstantiation()) |
6627 | return FALSE; |
6628 | } |
6629 | |
6630 | return TRUE; |
6631 | } |
6632 | |
6633 | //========================================================================================== |
6634 | // Returns the default WinRT interface if this is a WinRT class, NULL otherwise. |
6635 | MethodTable *MethodTable::GetDefaultWinRTInterface() |
6636 | { |
6637 | CONTRACTL |
6638 | { |
6639 | THROWS; |
6640 | GC_TRIGGERS; |
6641 | MODE_ANY; |
6642 | } |
6643 | CONTRACTL_END |
6644 | |
6645 | if (!IsProjectedFromWinRT() && !IsExportedToWinRT()) |
6646 | return NULL; |
6647 | |
6648 | if (IsInterface()) |
6649 | return NULL; |
6650 | |
6651 | // System.Runtime.InteropServices.WindowsRuntime.RuntimeClass is weird |
6652 | // It is ProjectedFromWinRT but isn't really a WinRT class |
6653 | if (this == g_pBaseRuntimeClass) |
6654 | return NULL; |
6655 | |
6656 | WinRTClassFactory *pFactory = ::GetComClassFactory(this)->AsWinRTClassFactory(); |
6657 | return pFactory->GetDefaultInterface(); |
6658 | } |
6659 | |
6660 | #endif // !DACCESS_COMPILE |
6661 | #endif // FEATURE_COMINTEROP |
6662 | |
6663 | #ifdef FEATURE_TYPEEQUIVALENCE |
6664 | #ifndef DACCESS_COMPILE |
6665 | |
6666 | WORD GetEquivalentMethodSlot(MethodTable * pOldMT, MethodTable * pNewMT, WORD wMTslot, BOOL *pfFound) |
6667 | { |
6668 | CONTRACTL { |
6669 | THROWS; |
6670 | GC_NOTRIGGER; |
6671 | } CONTRACTL_END; |
6672 | |
6673 | *pfFound = FALSE; |
6674 | |
6675 | WORD wVTslot = wMTslot; |
6676 | |
6677 | #ifdef FEATURE_COMINTEROP |
6678 | // Get the COM vtable slot corresponding to the given MT slot |
6679 | if (pOldMT->IsSparseForCOMInterop()) |
6680 | wVTslot = pOldMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wMTslot); |
6681 | |
6682 | // If the other MT is not sparse, we can return the COM slot directly |
6683 | if (!pNewMT->IsSparseForCOMInterop()) |
6684 | { |
6685 | if (wVTslot < pNewMT->GetNumVirtuals()) |
6686 | *pfFound = TRUE; |
6687 | |
6688 | return wVTslot; |
6689 | } |
6690 | |
6691 | // Otherwise we iterate over all virtuals in the other MT trying to find a match |
6692 | for (WORD wSlot = 0; wSlot < pNewMT->GetNumVirtuals(); wSlot++) |
6693 | { |
6694 | if (wVTslot == pNewMT->GetClass()->GetSparseCOMInteropVTableMap()->LookupVTSlot(wSlot)) |
6695 | { |
6696 | *pfFound = TRUE; |
6697 | return wSlot; |
6698 | } |
6699 | } |
6700 | |
6701 | _ASSERTE(!*pfFound); |
6702 | return 0; |
6703 | |
6704 | #else |
6705 | // No COM means there is no sparse interface |
6706 | if (wVTslot < pNewMT->GetNumVirtuals()) |
6707 | *pfFound = TRUE; |
6708 | |
6709 | return wVTslot; |
6710 | |
6711 | #endif // FEATURE_COMINTEROP |
6712 | } |
#endif // #ifndef DACCESS_COMPILE
6714 | #endif // #ifdef FEATURE_TYPEEQUIVALENCE |
6715 | |
6716 | //========================================================================================== |
6717 | BOOL |
6718 | MethodTable::FindEncodedMapDispatchEntry( |
6719 | UINT32 typeID, |
6720 | UINT32 slotNumber, |
6721 | DispatchMapEntry * pEntry) |
6722 | { |
6723 | CONTRACTL { |
6724 | // NOTE: LookupDispatchMapType may or may not throw. Currently, it |
6725 | // should never throw because lazy interface restore is disabled. |
6726 | THROWS; |
6727 | GC_TRIGGERS; |
6728 | INSTANCE_CHECK; |
6729 | PRECONDITION(CheckPointer(pEntry)); |
6730 | PRECONDITION(typeID != TYPE_ID_THIS_CLASS); |
6731 | } CONTRACTL_END; |
6732 | |
6733 | CONSISTENCY_CHECK(HasDispatchMap()); |
6734 | |
6735 | MethodTable * dispatchTokenType = GetThread()->GetDomain()->LookupType(typeID); |
6736 | |
6737 | // Search for an exact type match. |
6738 | { |
6739 | DispatchMap::EncodedMapIterator it(this); |
6740 | for (; it.IsValid(); it.Next()) |
6741 | { |
6742 | DispatchMapEntry * pCurEntry = it.Entry(); |
6743 | if (pCurEntry->GetSlotNumber() == slotNumber) |
6744 | { |
6745 | MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID()); |
6746 | if (pCurEntryType == dispatchTokenType) |
6747 | { |
6748 | *pEntry = *pCurEntry; |
6749 | return TRUE; |
6750 | } |
6751 | } |
6752 | } |
6753 | } |
6754 | |
6755 | // Repeat the search if any variance is involved, allowing a CanCastTo match. (We do |
6756 | // this in a separate pass because we want to avoid touching the type |
6757 | // to see if it has variance or not) |
6758 | // |
6759 | // NOTE: CERs are not guaranteed for interfaces with co- and contra-variance involved. |
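// (For example, an entry recorded for IEnumerable<string> can satisfy a dispatch token for
// IEnumerable<object> through covariance.)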
6760 | if (dispatchTokenType->HasVariance() || dispatchTokenType->HasTypeEquivalence()) |
6761 | { |
6762 | DispatchMap::EncodedMapIterator it(this); |
6763 | for (; it.IsValid(); it.Next()) |
6764 | { |
6765 | DispatchMapEntry * pCurEntry = it.Entry(); |
6766 | if (pCurEntry->GetSlotNumber() == slotNumber) |
6767 | { |
6768 | #ifndef DACCESS_COMPILE |
6769 | MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID()); |
6770 | //@TODO: This is currently not guaranteed to work without throwing, |
6771 | //@TODO: even with lazy interface restore disabled. |
6772 | if (dispatchTokenType->HasVariance() && |
6773 | pCurEntryType->CanCastByVarianceToInterfaceOrDelegate(dispatchTokenType, NULL)) |
6774 | { |
6775 | *pEntry = *pCurEntry; |
6776 | return TRUE; |
6777 | } |
6778 | |
6779 | if (dispatchTokenType->HasInstantiation() && dispatchTokenType->HasTypeEquivalence()) |
6780 | { |
6781 | if (dispatchTokenType->IsEquivalentTo(pCurEntryType)) |
6782 | { |
6783 | *pEntry = *pCurEntry; |
6784 | return TRUE; |
6785 | } |
6786 | } |
6787 | #endif // !DACCESS_COMPILE |
6788 | } |
6789 | #if !defined(DACCESS_COMPILE) && defined(FEATURE_TYPEEQUIVALENCE) |
6790 | if (this->HasTypeEquivalence() && |
6791 | !dispatchTokenType->HasInstantiation() && |
6792 | dispatchTokenType->HasTypeEquivalence() && |
6793 | dispatchTokenType->GetClass()->IsEquivalentType()) |
6794 | { |
6795 | _ASSERTE(dispatchTokenType->IsInterface()); |
6796 | MethodTable * pCurEntryType = LookupDispatchMapType(pCurEntry->GetTypeID()); |
6797 | |
6798 | if (pCurEntryType->IsEquivalentTo(dispatchTokenType)) |
6799 | { |
6800 | MethodDesc * pMD = dispatchTokenType->GetMethodDescForSlot(slotNumber); |
6801 | _ASSERTE(FitsIn<WORD>(slotNumber)); |
6802 | BOOL fNewSlotFound = FALSE; |
6803 | DWORD newSlot = GetEquivalentMethodSlot( |
6804 | dispatchTokenType, |
6805 | pCurEntryType, |
6806 | static_cast<WORD>(slotNumber), |
6807 | &fNewSlotFound); |
6808 | if (fNewSlotFound && (newSlot == pCurEntry->GetSlotNumber())) |
6809 | { |
6810 | MethodDesc * pNewMD = pCurEntryType->GetMethodDescForSlot(newSlot); |
6811 | |
6812 | MetaSig msig(pMD); |
6813 | MetaSig msignew(pNewMD); |
6814 | |
6815 | if (MetaSig::CompareMethodSigs(msig, msignew, FALSE)) |
6816 | { |
6817 | *pEntry = *pCurEntry; |
6818 | return TRUE; |
6819 | } |
6820 | } |
6821 | } |
6822 | } |
6823 | #endif |
6824 | } |
6825 | } |
6826 | return FALSE; |
6827 | } // MethodTable::FindEncodedMapDispatchEntry |
6828 | |
6829 | //========================================================================================== |
6830 | BOOL MethodTable::FindDispatchEntryForCurrentType(UINT32 typeID, |
6831 | UINT32 slotNumber, |
6832 | DispatchMapEntry *pEntry) |
6833 | { |
6834 | CONTRACTL { |
6835 | THROWS; |
6836 | GC_TRIGGERS; |
6837 | INSTANCE_CHECK; |
6838 | PRECONDITION(CheckPointer(pEntry)); |
6839 | PRECONDITION(typeID != TYPE_ID_THIS_CLASS); |
6840 | } CONTRACTL_END; |
6841 | |
6842 | BOOL fRes = FALSE; |
6843 | |
6844 | if (HasDispatchMap()) |
6845 | { |
6846 | fRes = FindEncodedMapDispatchEntry( |
6847 | typeID, slotNumber, pEntry); |
6848 | } |
6849 | |
6850 | return fRes; |
6851 | } |
6852 | |
6853 | //========================================================================================== |
6854 | BOOL MethodTable::FindDispatchEntry(UINT32 typeID, |
6855 | UINT32 slotNumber, |
6856 | DispatchMapEntry *pEntry) |
6857 | { |
6858 | CONTRACT (BOOL) { |
6859 | INSTANCE_CHECK; |
6860 | MODE_ANY; |
6861 | THROWS; |
6862 | GC_TRIGGERS; |
6863 | POSTCONDITION(!RETVAL || pEntry->IsValid()); |
6864 | PRECONDITION(typeID != TYPE_ID_THIS_CLASS); |
6865 | } CONTRACT_END; |
6866 | |
6867 | // Start at the current type and work up the inheritance chain |
6868 | MethodTable *pCurMT = this; |
6869 | UINT32 iCurInheritanceChainDelta = 0; |
6870 | while (pCurMT != NULL) |
6871 | { |
6872 | g_IBCLogger.LogMethodTableAccess(pCurMT); |
6873 | if (pCurMT->FindDispatchEntryForCurrentType( |
6874 | typeID, slotNumber, pEntry)) |
6875 | { |
6876 | RETURN (TRUE); |
6877 | } |
6878 | pCurMT = pCurMT->GetParentMethodTable(); |
6879 | iCurInheritanceChainDelta++; |
6880 | } |
6881 | RETURN (FALSE); |
6882 | } |
6883 | |
6884 | //========================================================================================== |
6885 | // Possible cases: |
6886 | // 1. Typed (interface) contract |
6887 | // a. To non-virtual implementation (NYI). Just |
6888 | // return the DispatchSlot as the implementation |
6889 | // b. Mapped virtually to virtual slot on 'this'. Need to |
6890 | // further resolve the new 'this' virtual slot. |
6891 | // 2. 'this' contract |
6892 | // a. To non-virtual implementation. Return the DispatchSlot |
6893 | // as the implementation. |
6894 | // b. Mapped virtually to another virtual slot. Need to further |
6895 | // resolve the new slot on 'this'. |
6896 | BOOL |
6897 | MethodTable::FindDispatchImpl( |
6898 | UINT32 typeID, |
6899 | UINT32 slotNumber, |
6900 | DispatchSlot * pImplSlot, |
6901 | BOOL throwOnConflict) |
6902 | { |
6903 | CONTRACT (BOOL) { |
6904 | INSTANCE_CHECK; |
6905 | MODE_ANY; |
6906 | THROWS; |
6907 | GC_TRIGGERS; |
6908 | PRECONDITION(CheckPointer(pImplSlot)); |
6909 | POSTCONDITION(!RETVAL || !pImplSlot->IsNull() || IsComObjectType()); |
6910 | } CONTRACT_END; |
6911 | |
6912 | LOG((LF_LOADER, LL_INFO10000, "SD: MT::FindDispatchImpl: searching %s.\n" , GetClass()->GetDebugClassName())); |
6913 | |
6914 | /////////////////////////////////// |
6915 | // 1. Typed (interface) contract |
6916 | |
6917 | INDEBUG(MethodTable *dbg_pMTTok = NULL; dbg_pMTTok = this;) |
6918 | DispatchMapEntry declEntry; |
6919 | DispatchMapEntry implEntry; |
6920 | |
6921 | #ifndef DACCESS_COMPILE |
6922 | if (typeID != TYPE_ID_THIS_CLASS) |
6923 | { |
6924 | INDEBUG(dbg_pMTTok = GetThread()->GetDomain()->LookupType(typeID)); |
6925 | DispatchMapEntry e; |
6926 | if (!FindDispatchEntry(typeID, slotNumber, &e)) |
6927 | { |
6928 | // Figure out the interface being called |
6929 | MethodTable *pIfcMT = GetThread()->GetDomain()->LookupType(typeID); |
6930 | |
6931 | // Figure out which method of the interface the caller requested. |
6932 | MethodDesc * pIfcMD = pIfcMT->GetMethodDescForSlot(slotNumber); |
6933 | |
// A call to an array thru IList<T> (or IEnumerable<T> or ICollection<T>) has to be handled specially.
// These interfaces are "magic" (mostly due to working set concerns - they are created on demand internally
// even though semantically, these are static interfaces.)
6937 | // |
6938 | // NOTE: CERs are not currently supported with generic array interfaces. |
6939 | if (IsArray()) |
6940 | { |
// At this point, we know that we're trying to cast an array to an interface and that the normal static lookup failed.
6942 | |
6943 | // FindDispatchImpl assumes that the cast is legal so we should be able to assume now that it is a valid |
6944 | // IList<T> call thru an array. |
6945 | |
6946 | // Get the MT of IList<T> or IReadOnlyList<T> |
6947 | |
6948 | |
6949 | // Quick sanity check |
6950 | if (!(pIfcMT->HasInstantiation())) |
6951 | { |
6952 | _ASSERTE(!"Should not have gotten here. If you did, it's probably because multiple interface instantiation hasn't been checked in yet. This code only works on top of that." ); |
6953 | RETURN(FALSE); |
6954 | } |
6955 | |
6956 | // Get the type of T (as in IList<T>) |
6957 | TypeHandle theT = pIfcMT->GetInstantiation()[0]; |
6958 | |
// Retrieve the corresponding method of SZArrayHelper. This is the method that will actually execute.
// This method will be an instantiation of a generic method. I.e. if the caller requested
// IList<T>.Meth(), the call will actually be diverted to SZArrayHelper.Meth<T>().
6962 | MethodDesc * pActualImplementor = GetActualImplementationForArrayGenericIListOrIReadOnlyListMethod(pIfcMD, theT); |
6963 | |
6964 | // Now, construct a DispatchSlot to return in *pImplSlot |
6965 | DispatchSlot ds(pActualImplementor->GetMethodEntryPoint()); |
6966 | |
6967 | if (pImplSlot != NULL) |
6968 | { |
6969 | *pImplSlot = ds; |
6970 | } |
6971 | |
6972 | RETURN(TRUE); |
6973 | |
6974 | } |
6975 | else |
6976 | { |
6977 | // |
6978 | // See if we can find a default method from one of the implemented interfaces |
6979 | // |
6980 | |
6981 | // Try exact match first |
6982 | MethodDesc *pDefaultMethod = NULL; |
6983 | BOOL foundDefaultInterfaceImplementation = FindDefaultInterfaceImplementation( |
6984 | pIfcMD, // the interface method being resolved |
6985 | pIfcMT, // the interface being resolved |
6986 | &pDefaultMethod, |
6987 | FALSE, // allowVariance |
6988 | throwOnConflict); |
6989 | |
6990 | // If there's no exact match, try a variant match |
6991 | if (!foundDefaultInterfaceImplementation && pIfcMT->HasVariance()) |
6992 | { |
6993 | foundDefaultInterfaceImplementation = FindDefaultInterfaceImplementation( |
6994 | pIfcMD, // the interface method being resolved |
6995 | pIfcMT, // the interface being resolved |
6996 | &pDefaultMethod, |
6997 | TRUE, // allowVariance |
6998 | throwOnConflict); |
6999 | } |
7000 | |
7001 | if (foundDefaultInterfaceImplementation) |
7002 | { |
7003 | // Now, construct a DispatchSlot to return in *pImplSlot |
7004 | DispatchSlot ds(pDefaultMethod->GetMethodEntryPoint()); |
7005 | |
7006 | if (pImplSlot != NULL) |
7007 | { |
7008 | *pImplSlot = ds; |
7009 | } |
7010 | |
7011 | RETURN(TRUE); |
7012 | } |
7013 | } |
7014 | |
7015 | // This contract is not implemented by this class or any parent class. |
7016 | RETURN(FALSE); |
7017 | } |
7018 | |
7019 | |
7020 | ///////////////////////////////// |
// 1.1. Update the typeID and slotNumber so that the full search can commence below
7022 | typeID = TYPE_ID_THIS_CLASS; |
7023 | slotNumber = e.GetTargetSlotNumber(); |
7024 | } |
7025 | #endif // !DACCESS_COMPILE |
7026 | |
7027 | ////////////////////////////////// |
7028 | // 2. 'this' contract |
7029 | |
7030 | // Just grab the target out of the vtable |
7031 | *pImplSlot = GetRestoredSlot(slotNumber); |
7032 | |
7033 | // Successfully determined the target for the given target |
7034 | RETURN (TRUE); |
7035 | } |
7036 | |
7037 | #ifndef DACCESS_COMPILE |
7038 | |
7039 | struct MatchCandidate |
7040 | { |
7041 | MethodTable *pMT; |
7042 | MethodDesc *pMD; |
7043 | }; |
7044 | |
7045 | void ThrowExceptionForConflictingOverride( |
7046 | MethodTable *pTargetClass, |
7047 | MethodTable *pInterfaceMT, |
7048 | MethodDesc *pInterfaceMD) |
7049 | { |
7050 | LIMITED_METHOD_CONTRACT; |
7051 | |
7052 | SString assemblyName; |
7053 | |
7054 | pTargetClass->GetAssembly()->GetDisplayName(assemblyName); |
7055 | |
7056 | SString strInterfaceName; |
7057 | TypeString::AppendType(strInterfaceName, TypeHandle(pInterfaceMT)); |
7058 | |
7059 | SString strMethodName; |
7060 | TypeString::AppendMethod(strMethodName, pInterfaceMD, pInterfaceMD->GetMethodInstantiation()); |
7061 | |
7062 | SString strTargetClassName; |
7063 | TypeString::AppendType(strTargetClassName, pTargetClass); |
7064 | |
7065 | COMPlusThrow( |
7066 | kNotSupportedException, |
7067 | IDS_CLASSLOAD_AMBIGUOUS_OVERRIDE, |
7068 | strMethodName, |
7069 | strInterfaceName, |
7070 | strTargetClassName, |
7071 | assemblyName); |
7072 | } |
7073 | |
// Find the default interface implementation method for interface dispatch.
// It is either the interface method with a default interface method implementation,
// or the most specific interface with an explicit methodimpl overriding the method.
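//
// Illustrative diamond (a sketch): given interface IA with method M, and interfaces IB : IA and
// IC : IA that each provide a methodimpl overriding IA.M, a class implementing both IB and IC ends
// up with two incompatible most-specific candidates; depending on throwOnConflict we either throw
// or report failure. If instead IC : IB, then IC's override is the single most specific candidate.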
7077 | BOOL MethodTable::FindDefaultInterfaceImplementation( |
7078 | MethodDesc *pInterfaceMD, |
7079 | MethodTable *pInterfaceMT, |
7080 | MethodDesc **ppDefaultMethod, |
7081 | BOOL allowVariance, |
7082 | BOOL throwOnConflict |
7083 | ) |
7084 | { |
7085 | CONTRACT(BOOL) { |
7086 | INSTANCE_CHECK; |
7087 | MODE_ANY; |
7088 | THROWS; |
7089 | GC_TRIGGERS; |
7090 | PRECONDITION(CheckPointer(pInterfaceMD)); |
7091 | PRECONDITION(CheckPointer(pInterfaceMT)); |
7092 | PRECONDITION(CheckPointer(ppDefaultMethod)); |
7093 | POSTCONDITION(!RETVAL || (*ppDefaultMethod) != nullptr); |
7094 | } CONTRACT_END; |
7095 | |
7096 | #ifdef FEATURE_DEFAULT_INTERFACES |
7097 | InterfaceMapIterator it = this->IterateInterfaceMap(); |
7098 | |
7099 | CQuickArray<MatchCandidate> candidates; |
7100 | unsigned candidatesCount = 0; |
7101 | candidates.AllocThrows(this->GetNumInterfaces()); |
7102 | |
7103 | // |
// Walk interfaces from the derived class to the parent class.
// We went with a straightforward implementation as in most cases the number of interfaces is small
// and the results of the interface dispatch are already cached. If there is significant usage of default
// interface methods in highly complex interface hierarchies we can revisit this.
7108 | // |
7109 | MethodTable *pMT = this; |
7110 | while (pMT != NULL) |
7111 | { |
7112 | MethodTable *pParentMT = pMT->GetParentMethodTable(); |
7113 | unsigned dwParentInterfaces = 0; |
7114 | if (pParentMT) |
7115 | dwParentInterfaces = pParentMT->GetNumInterfaces(); |
7116 | |
// Scan only the current class if it has more interfaces than its parent
// (parent interfaces are laid out first in the interface map)
7119 | if (pMT->GetNumInterfaces() > dwParentInterfaces) |
7120 | { |
7121 | // Only iterate the interfaceimpls on current class |
7122 | MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMapFrom(dwParentInterfaces); |
7123 | while (!it.Finished()) |
7124 | { |
7125 | MethodTable *pCurMT = it.GetInterface(); |
7126 | |
7127 | MethodDesc *pCurMD = NULL; |
7128 | if (pCurMT == pInterfaceMT) |
7129 | { |
7130 | if (!pInterfaceMD->IsAbstract()) |
7131 | { |
7132 | // exact match |
7133 | pCurMD = pInterfaceMD; |
7134 | } |
7135 | } |
7136 | else if (pCurMT->CanCastToInterface(pInterfaceMT)) |
7137 | { |
7138 | if (pCurMT->HasSameTypeDefAs(pInterfaceMT)) |
7139 | { |
7140 | if (allowVariance && !pInterfaceMD->IsAbstract()) |
7141 | { |
7142 | // Generic variance match - we'll instantiate pCurMD with the right type arguments later |
7143 | pCurMD = pInterfaceMD; |
7144 | } |
7145 | } |
7146 | else |
7147 | { |
7148 | // |
// A more specific interface - search for a methodimpl with an explicit override.
// Implicit overrides in default interface methods are not allowed.
7151 | // |
7152 | MethodIterator methodIt(pCurMT); |
7153 | for (; methodIt.IsValid() && pCurMD == NULL; methodIt.Next()) |
7154 | { |
7155 | MethodDesc *pMD = methodIt.GetMethodDesc(); |
7156 | int targetSlot = pInterfaceMD->GetSlot(); |
7157 | |
7158 | // If this is not a MethodImpl, it can't be implementing the method we're looking for |
7159 | if (!pMD->IsMethodImpl()) |
7160 | continue; |
7161 | |
7162 | // We have a MethodImpl - iterate over all the declarations it's implementing, |
7163 | // looking for the interface method we need. |
7164 | MethodImpl::Iterator it(pMD); |
7165 | for (; it.IsValid() && pCurMD == NULL; it.Next()) |
7166 | { |
7167 | MethodDesc *pDeclMD = it.GetMethodDesc(); |
7168 | |
7169 | // Is this the right slot? |
7170 | if (pDeclMD->GetSlot() != targetSlot) |
7171 | continue; |
7172 | |
7173 | // Is this the right interface? |
7174 | if (!pDeclMD->HasSameMethodDefAs(pInterfaceMD)) |
7175 | continue; |
7176 | |
7177 | if (pInterfaceMD->HasClassInstantiation()) |
7178 | { |
7179 | // pInterfaceMD will be in the canonical form, so we need to check the specific |
7180 | // instantiation against pInterfaceMT. |
7181 | // |
7182 | // The parent of pDeclMD is unreliable for this purpose because it may or |
7183 | // may not be canonicalized. Let's go from the metadata. |
7184 | |
7185 | SigTypeContext typeContext = SigTypeContext(pCurMT); |
7186 | |
7187 | mdTypeRef tkParent; |
7188 | IfFailThrow(pMD->GetModule()->GetMDImport()->GetParentToken(it.GetToken(), &tkParent)); |
7189 | |
7190 | MethodTable* pDeclMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing( |
7191 | pMD->GetModule(), |
7192 | tkParent, |
7193 | &typeContext).AsMethodTable(); |
7194 | |
7195 | // We do CanCastToInterface to also cover variance. |
7196 | // We already know this is a method on the same type definition as the (generic) |
7197 | // interface but we need to make sure the instantiations match. |
7198 | if ((allowVariance && pDeclMT->CanCastToInterface(pInterfaceMT)) |
7199 | || pDeclMT == pInterfaceMT) |
7200 | { |
7201 | // We have a match |
7202 | pCurMD = pMD; |
7203 | } |
7204 | } |
7205 | else |
7206 | { |
7207 | // No generics involved. If the method definitions match, it's a match. |
7208 | pCurMD = pMD; |
7209 | } |
7210 | } |
7211 | } |
7212 | } |
7213 | } |
7214 | |
7215 | if (pCurMD != NULL) |
7216 | { |
7217 | // |
7218 | // Found a match. But is it a more specific match (we want most specific interfaces) |
7219 | // |
7220 | if (pCurMD->HasClassOrMethodInstantiation()) |
7221 | { |
7222 | // Instantiate the MethodDesc |
// We don't want the generic dictionary from the this pointer - we need to pass the secret type argument
// from instantiating stubs to resolve the ambiguity
7225 | pCurMD = MethodDesc::FindOrCreateAssociatedMethodDesc( |
7226 | pCurMD, |
7227 | pCurMT, |
7228 | FALSE, // forceBoxedEntryPoint |
7229 | pCurMD->HasMethodInstantiation() ? |
7230 | pCurMD->AsInstantiatedMethodDesc()->IMD_GetMethodInstantiation() : |
7231 | Instantiation(), // for method themselves that are generic |
7232 | FALSE, // allowInstParam |
7233 | TRUE // forceRemoteableMethod |
7234 | ); |
7235 | } |
7236 | |
7237 | bool needToInsert = true; |
7238 | bool seenMoreSpecific = false; |
7239 | |
// We need to maintain the invariant that the candidates are always the most specific
// in all paths scanned so far. There might be multiple incompatible candidates
7242 | for (unsigned i = 0; i < candidatesCount; ++i) |
7243 | { |
7244 | MethodTable *pCandidateMT = candidates[i].pMT; |
7245 | if (pCandidateMT == NULL) |
7246 | continue; |
7247 | |
7248 | if (pCandidateMT == pCurMT) |
7249 | { |
7250 | // A dup - we are done |
7251 | needToInsert = false; |
7252 | break; |
7253 | } |
7254 | |
7255 | if (allowVariance && pCandidateMT->HasSameTypeDefAs(pCurMT)) |
7256 | { |
7257 | // Variant match on the same type - this is a tie |
7258 | } |
7259 | else if (pCurMT->CanCastToInterface(pCandidateMT)) |
7260 | { |
// pCurMT is a more specific choice than the current candidate (e.g. when IFoo and IBar both override IBlah):
7262 | if (!seenMoreSpecific) |
7263 | { |
7264 | seenMoreSpecific = true; |
7265 | candidates[i].pMT = pCurMT; |
7266 | candidates[i].pMD = pCurMD; |
7267 | } |
7268 | else |
7269 | { |
7270 | candidates[i].pMT = NULL; |
7271 | candidates[i].pMD = NULL; |
7272 | } |
7273 | |
7274 | needToInsert = false; |
7275 | } |
7276 | else if (pCandidateMT->CanCastToInterface(pCurMT)) |
7277 | { |
7278 | // pCurMT is less specific - we don't need to scan more entries as this entry can |
7279 | // represent pCurMT (other entries are incompatible with pCurMT) |
7280 | needToInsert = false; |
7281 | break; |
7282 | } |
7283 | else |
7284 | { |
7285 | // pCurMT is incompatible - keep scanning |
7286 | } |
7287 | } |
7288 | |
7289 | if (needToInsert) |
7290 | { |
7291 | ASSERT(candidatesCount < candidates.Size()); |
7292 | candidates[candidatesCount].pMT = pCurMT; |
7293 | candidates[candidatesCount].pMD = pCurMD; |
7294 | candidatesCount++; |
7295 | } |
7296 | } |
7297 | |
7298 | it.Next(); |
7299 | } |
7300 | } |
7301 | |
7302 | pMT = pParentMT; |
7303 | } |
7304 | |
7305 | // scan to see if there are any conflicts |
// If we are doing the second pass (allowing variance), we don't actually look for
// a conflict anymore, but pick the first match.
7308 | MethodTable *pBestCandidateMT = NULL; |
7309 | MethodDesc *pBestCandidateMD = NULL; |
7310 | for (unsigned i = 0; i < candidatesCount; ++i) |
7311 | { |
7312 | if (candidates[i].pMT == NULL) |
7313 | continue; |
7314 | |
7315 | if (pBestCandidateMT == NULL) |
7316 | { |
7317 | pBestCandidateMT = candidates[i].pMT; |
7318 | pBestCandidateMD = candidates[i].pMD; |
7319 | |
7320 | // If this is a second pass lookup, we know this is a variant match. As such |
7321 | // we pick the first result as the winner and don't look for a conflict. |
7322 | if (allowVariance) |
7323 | break; |
7324 | } |
7325 | else if (pBestCandidateMT != candidates[i].pMT) |
7326 | { |
7327 | if (throwOnConflict) |
7328 | ThrowExceptionForConflictingOverride(this, pInterfaceMT, pInterfaceMD); |
7329 | |
7330 | *ppDefaultMethod = NULL; |
7331 | RETURN(FALSE); |
7332 | } |
7333 | } |
7334 | |
7335 | if (pBestCandidateMD != NULL) |
7336 | { |
7337 | *ppDefaultMethod = pBestCandidateMD; |
7338 | RETURN(TRUE); |
7339 | } |
7340 | #else |
7341 | *ppDefaultMethod = NULL; |
7342 | #endif // FEATURE_DEFAULT_INTERFACES |
7343 | |
7344 | RETURN(FALSE); |
7345 | } |
7346 | #endif // DACCESS_COMPILE |
7347 | |
7348 | //========================================================================================== |
7349 | DispatchSlot MethodTable::FindDispatchSlot(UINT32 typeID, UINT32 slotNumber, BOOL throwOnConflict) |
7350 | { |
7351 | WRAPPER_NO_CONTRACT; |
7352 | STATIC_CONTRACT_SO_TOLERANT; |
7353 | DispatchSlot implSlot(NULL); |
7354 | FindDispatchImpl(typeID, slotNumber, &implSlot, throwOnConflict); |
7355 | return implSlot; |
7356 | } |
7357 | |
7358 | //========================================================================================== |
7359 | DispatchSlot MethodTable::FindDispatchSlot(DispatchToken tok, BOOL throwOnConflict) |
7360 | { |
7361 | CONTRACTL |
7362 | { |
7363 | THROWS; |
7364 | GC_TRIGGERS; |
7365 | SO_TOLERANT; |
7366 | MODE_ANY; |
7367 | } |
7368 | CONTRACTL_END; |
7369 | return FindDispatchSlot(tok.GetTypeID(), tok.GetSlotNumber(), throwOnConflict); |
7370 | } |
7371 | |
7372 | #ifndef DACCESS_COMPILE |
7373 | |
7374 | //========================================================================================== |
7375 | DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(MethodDesc *pMD, BOOL throwOnConflict) |
7376 | { |
7377 | WRAPPER_NO_CONTRACT; |
7378 | CONSISTENCY_CHECK(CheckPointer(pMD)); |
7379 | CONSISTENCY_CHECK(pMD->IsInterface()); |
7380 | return FindDispatchSlotForInterfaceMD(TypeHandle(pMD->GetMethodTable()), pMD, throwOnConflict); |
7381 | } |
7382 | |
7383 | //========================================================================================== |
7384 | DispatchSlot MethodTable::FindDispatchSlotForInterfaceMD(TypeHandle ownerType, MethodDesc *pMD, BOOL throwOnConflict) |
7385 | { |
7386 | WRAPPER_NO_CONTRACT; |
7387 | CONSISTENCY_CHECK(!ownerType.IsNull()); |
7388 | CONSISTENCY_CHECK(CheckPointer(pMD)); |
7389 | CONSISTENCY_CHECK(pMD->IsInterface()); |
7390 | return FindDispatchSlot(ownerType.GetMethodTable()->GetTypeID(), pMD->GetSlot(), throwOnConflict); |
7391 | } |
7392 | |
7393 | //========================================================================================== |
7394 | // This is used for reverse methodimpl lookups by ComPlusMethodCall MDs. |
7395 | // This assumes the following: |
7396 | // The methodimpl is for an interfaceToken->slotNumber |
7397 | // There is ONLY ONE such mapping for this slot number |
7398 | // The mapping exists in this type, not a parent type. |
7399 | MethodDesc * MethodTable::ReverseInterfaceMDLookup(UINT32 slotNumber) |
7400 | { |
7401 | CONTRACTL { |
7402 | THROWS; |
7403 | GC_TRIGGERS; |
7404 | } CONTRACTL_END; |
7405 | DispatchMap::Iterator it(this); |
7406 | for (; it.IsValid(); it.Next()) |
7407 | { |
7408 | if (it.Entry()->GetTargetSlotNumber() == slotNumber) |
7409 | { |
7410 | DispatchMapTypeID typeID = it.Entry()->GetTypeID(); |
7411 | _ASSERTE(!typeID.IsThisClass()); |
7412 | UINT32 slotNum = it.Entry()->GetSlotNumber(); |
7413 | MethodTable * pMTItf = LookupDispatchMapType(typeID); |
7414 | CONSISTENCY_CHECK(CheckPointer(pMTItf)); |
7415 | |
7416 | MethodDesc *pCanonMD = pMTItf->GetMethodDescForSlot((DWORD)slotNum); |
7417 | return MethodDesc::FindOrCreateAssociatedMethodDesc( |
7418 | pCanonMD, |
7419 | pMTItf, |
7420 | FALSE, // forceBoxedEntryPoint |
7421 | Instantiation(), // methodInst |
7422 | FALSE, // allowInstParam |
7423 | TRUE); // forceRemotableMethod |
7424 | } |
7425 | } |
7426 | return NULL; |
7427 | } |
7428 | |
7429 | //========================================================================================== |
7430 | UINT32 MethodTable::GetTypeID() |
7431 | { |
7432 | CONTRACTL { |
7433 | THROWS; |
7434 | GC_TRIGGERS; |
7435 | } CONTRACTL_END; |
7436 | |
7437 | PTR_MethodTable pMT = PTR_MethodTable(this); |
7438 | |
7439 | return GetDomain()->GetTypeID(pMT); |
7440 | } |
7441 | |
7442 | //========================================================================================== |
7443 | UINT32 MethodTable::LookupTypeID() |
7444 | { |
7445 | CONTRACTL |
7446 | { |
7447 | NOTHROW; |
7448 | GC_NOTRIGGER; |
7449 | SO_TOLERANT; |
7450 | MODE_ANY; |
7451 | } |
7452 | CONTRACTL_END; |
7453 | PTR_MethodTable pMT = PTR_MethodTable(this); |
7454 | |
7455 | return GetDomain()->LookupTypeID(pMT); |
7456 | } |
7457 | |
7458 | //========================================================================================== |
7459 | BOOL MethodTable::ImplementsInterfaceWithSameSlotsAsParent(MethodTable *pItfMT, MethodTable *pParentMT) |
7460 | { |
7461 | CONTRACTL |
7462 | { |
7463 | THROWS; |
7464 | GC_TRIGGERS; |
7465 | PRECONDITION(!IsInterface() && !pParentMT->IsInterface()); |
7466 | PRECONDITION(pItfMT->IsInterface()); |
7467 | } CONTRACTL_END; |
7468 | |
7469 | MethodTable *pMT = this; |
7470 | do |
7471 | { |
7472 | DispatchMap::EncodedMapIterator it(pMT); |
7473 | for (; it.IsValid(); it.Next()) |
7474 | { |
7475 | DispatchMapEntry *pCurEntry = it.Entry(); |
7476 | if (LookupDispatchMapType(pCurEntry->GetTypeID()) == pItfMT) |
7477 | { |
                // This class, or a parent below pParentMT, provides its own mapping
                // for the interface, so the slots are not inherited unchanged.
7479 | return FALSE; |
7480 | } |
7481 | } |
7482 | |
7483 | pMT = pMT->GetParentMethodTable(); |
7484 | _ASSERTE(pMT != NULL); |
7485 | } |
7486 | while (pMT != pParentMT); |
7487 | |
7488 | return TRUE; |
7489 | } |
7490 | |
7491 | //========================================================================================== |
7492 | BOOL MethodTable::HasSameInterfaceImplementationAsParent(MethodTable *pItfMT, MethodTable *pParentMT) |
7493 | { |
7494 | CONTRACTL |
7495 | { |
7496 | THROWS; |
7497 | GC_TRIGGERS; |
7498 | PRECONDITION(!IsInterface() && !pParentMT->IsInterface()); |
7499 | PRECONDITION(pItfMT->IsInterface()); |
7500 | } CONTRACTL_END; |
7501 | |
7502 | if (!ImplementsInterfaceWithSameSlotsAsParent(pItfMT, pParentMT)) |
7503 | { |
        // if the slots are not the same, this class reimplements the interface
7505 | return FALSE; |
7506 | } |
7507 | |
    // The target slots are the same, but they can still be overridden. We'll iterate
7509 | // the dispatch map beginning with pParentMT up the hierarchy and for each pItfMT |
7510 | // entry check the target slot contents (pParentMT vs. this class). A mismatch |
7511 | // means that there is an override. We'll keep track of source (interface) slots |
7512 | // we have seen so that we can ignore entries higher in the hierarchy that are no |
7513 | // longer in effect at pParentMT level. |
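    //
    // For example (illustrative): if this class overrides the virtual method that
    // backs one of pItfMT's slots, then for that entry GetRestoredSlot(targetSlot)
    // on this class differs from pParentMT->GetRestoredSlot(targetSlot), and we
    // return FALSE below.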
7514 | BitMask bitMask; |
7515 | |
7516 | WORD wSeenSlots = 0; |
7517 | WORD wTotalSlots = pItfMT->GetNumVtableSlots(); |
7518 | |
7519 | MethodTable *pMT = pParentMT; |
7520 | do |
7521 | { |
7522 | DispatchMap::EncodedMapIterator it(pMT); |
7523 | for (; it.IsValid(); it.Next()) |
7524 | { |
7525 | DispatchMapEntry *pCurEntry = it.Entry(); |
7526 | if (LookupDispatchMapType(pCurEntry->GetTypeID()) == pItfMT) |
7527 | { |
7528 | UINT32 ifaceSlot = pCurEntry->GetSlotNumber(); |
7529 | if (!bitMask.TestBit(ifaceSlot)) |
7530 | { |
7531 | bitMask.SetBit(ifaceSlot); |
7532 | |
7533 | UINT32 targetSlot = pCurEntry->GetTargetSlotNumber(); |
7534 | if (GetRestoredSlot(targetSlot) != pParentMT->GetRestoredSlot(targetSlot)) |
7535 | { |
                        // the target slot is overridden
7537 | return FALSE; |
7538 | } |
7539 | |
7540 | if (++wSeenSlots == wTotalSlots) |
7541 | { |
7542 | // we've resolved all slots, no reason to continue |
7543 | break; |
7544 | } |
7545 | } |
7546 | } |
7547 | } |
7548 | pMT = pMT->GetParentMethodTable(); |
7549 | } |
7550 | while (pMT != NULL); |
7551 | |
7552 | return TRUE; |
7553 | } |
7554 | |
7555 | #endif // !DACCESS_COMPILE |
7556 | |
7557 | //========================================================================================== |
7558 | MethodTable * MethodTable::LookupDispatchMapType(DispatchMapTypeID typeID) |
7559 | { |
7560 | CONTRACTL { |
7561 | WRAPPER(THROWS); |
7562 | GC_TRIGGERS; |
7563 | } CONTRACTL_END; |
7564 | |
7565 | _ASSERTE(!typeID.IsThisClass()); |
7566 | |
7567 | InterfaceMapIterator intIt = IterateInterfaceMapFrom(typeID.GetInterfaceNum()); |
7568 | return intIt.GetInterface(); |
7569 | } |
7570 | |
7571 | //========================================================================================== |
7572 | MethodDesc * MethodTable::GetIntroducingMethodDesc(DWORD slotNumber) |
7573 | { |
7574 | CONTRACTL |
7575 | { |
7576 | NOTHROW; |
7577 | GC_NOTRIGGER; |
7578 | SO_TOLERANT; |
7579 | MODE_ANY; |
7580 | } |
7581 | CONTRACTL_END; |
7582 | |
7583 | MethodDesc * pCurrentMD = GetMethodDescForSlot(slotNumber); |
7584 | DWORD dwSlot = pCurrentMD->GetSlot(); |
7585 | MethodDesc * pIntroducingMD = NULL; |
7586 | |
7587 | MethodTable * pParentType = GetParentMethodTable(); |
7588 | MethodTable * pPrevParentType = NULL; |
7589 | |
7590 | // Find this method in the parent. |
7591 | // If it does exist in the parent, it would be at the same vtable slot. |
7592 | while ((pParentType != NULL) && |
7593 | (dwSlot < pParentType->GetNumVirtuals())) |
7594 | { |
7595 | pPrevParentType = pParentType; |
7596 | pParentType = pParentType->GetParentMethodTable(); |
7597 | } |
7598 | |
7599 | if (pPrevParentType != NULL) |
7600 | { |
7601 | pIntroducingMD = pPrevParentType->GetMethodDescForSlot(dwSlot); |
7602 | } |
7603 | |
7604 | return pIntroducingMD; |
7605 | } |
7606 | |
7607 | //========================================================================================== |
7608 | // There is a case where a method declared in a type can be explicitly |
7609 | // overridden by a methodImpl on another method within the same type. In |
7610 | // this case, we need to call the methodImpl target, and this will map |
7611 | // things appropriately for us. |
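//
// Illustrative IL sketch (hypothetical type C; not from the runtime) of such a
// same-type methodimpl, where a call bound to Foo's Decl MD is redirected to
// Bar's Impl MD:
//
//     .method public newslot virtual instance void Foo() cil managed { ... }
//     .method public virtual instance void Bar() cil managed
//     {
//         .override C::Foo
//         ...
//     }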
7612 | MethodDesc * MethodTable::MapMethodDeclToMethodImpl(MethodDesc * pMDDecl) |
7613 | { |
7614 | STATIC_CONTRACT_THROWS; |
7615 | STATIC_CONTRACT_GC_TRIGGERS; |
7616 | |
7617 | MethodTable * pMT = pMDDecl->GetMethodTable(); |
7618 | |
7619 | // |
7620 | // Fast negative case check |
7621 | // |
7622 | |
7623 | // If it's not virtual, then it could not have been methodImpl'd. |
7624 | if (!pMDDecl->IsVirtual() || |
7625 | // Is it a non-virtual call to the instantiating stub |
7626 | (pMT->IsValueType() && !pMDDecl->IsUnboxingStub())) |
7627 | { |
7628 | return pMDDecl; |
7629 | } |
7630 | |
7631 | MethodDesc * pMDImpl = pMT->GetParallelMethodDesc(pMDDecl); |
7632 | |
7633 | // If the method is instantiated, then we need to resolve to the corresponding |
7634 | // instantiated MD for the new slot number. |
7635 | if (pMDDecl->HasMethodInstantiation()) |
7636 | { |
7637 | if (pMDDecl->GetSlot() != pMDImpl->GetSlot()) |
7638 | { |
7639 | if (!pMDDecl->IsGenericMethodDefinition()) |
7640 | { |
7641 | #ifndef DACCESS_COMPILE |
7642 | pMDImpl = pMDDecl->FindOrCreateAssociatedMethodDesc( |
7643 | pMDImpl, |
7644 | pMT, |
7645 | pMDDecl->IsUnboxingStub(), |
7646 | pMDDecl->GetMethodInstantiation(), |
7647 | pMDDecl->IsInstantiatingStub()); |
7648 | #else |
7649 | DacNotImpl(); |
7650 | #endif |
7651 | } |
7652 | } |
7653 | else |
7654 | { |
7655 | // Since the generic method definition is always in the actual |
7656 | // slot for the method table, and since the slot numbers for |
7657 | // the Decl and Impl MDs are the same, then the call to |
7658 | // FindOrCreateAssociatedMethodDesc would just result in the |
7659 | // same pMDDecl being returned. In this case, we can skip all |
7660 | // the work. |
7661 | pMDImpl = pMDDecl; |
7662 | } |
7663 | } |
7664 | |
7665 | CONSISTENCY_CHECK(CheckPointer(pMDImpl)); |
7666 | CONSISTENCY_CHECK(!pMDImpl->IsGenericMethodDefinition()); |
7667 | return pMDImpl; |
7668 | } // MethodTable::MapMethodDeclToMethodImpl |
7669 | |
7670 | |
7671 | //========================================================================================== |
7672 | HRESULT MethodTable::GetGuidNoThrow(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*= TRUE*/) |
7673 | { |
7674 | CONTRACTL { |
7675 | NOTHROW; |
7676 | GC_TRIGGERS; |
7677 | MODE_ANY; |
7678 | SUPPORTS_DAC; |
7679 | } CONTRACTL_END; |
7680 | |
7681 | HRESULT hr = S_OK; |
7682 | EX_TRY |
7683 | { |
7684 | GetGuid(pGuid, bGenerateIfNotFound, bClassic); |
7685 | } |
7686 | EX_CATCH_HRESULT(hr); |
7687 | |
7688 | // ensure we return a failure hr when pGuid is not filled in |
7689 | if (SUCCEEDED(hr) && (*pGuid == GUID_NULL)) |
7690 | hr = E_FAIL; |
7691 | |
7692 | return hr; |
7693 | } |
7694 | |
7695 | //========================================================================================== |
7696 | // Returns the GUID of this MethodTable. |
// If metadata does not specify a GUID for the type, GUID_NULL is returned (if bGenerateIfNotFound
// is FALSE) or a GUID is auto-generated on the fly from the name and members of the type
// (if bGenerateIfNotFound is TRUE).
7700 | // |
7701 | // Redirected WinRT types may have two GUIDs, the "classic" one which matches the return value |
7702 | // of Type.Guid, and the new one which is the GUID of the WinRT type to which it is redirected. |
7703 | // The bClassic parameter controls which one is returned from this method. Note that the parameter |
7704 | // is ignored for genuine WinRT types, i.e. types loaded from .winmd files, those always return |
7705 | // the new GUID. |
7706 | // |
7707 | void MethodTable::GetGuid(GUID *pGuid, BOOL bGenerateIfNotFound, BOOL bClassic /*=TRUE*/) |
7708 | { |
7709 | CONTRACTL { |
7710 | THROWS; |
7711 | GC_TRIGGERS; |
7712 | MODE_ANY; |
7713 | SUPPORTS_DAC; |
7714 | } CONTRACTL_END; |
7715 | |
7716 | |
7717 | #ifdef DACCESS_COMPILE |
7718 | |
7719 | _ASSERTE(pGuid != NULL); |
7720 | PTR_GuidInfo pGuidInfo = (bClassic ? GetClass()->GetGuidInfo() : GetGuidInfo()); |
7721 | if (pGuidInfo != NULL) |
7722 | *pGuid = pGuidInfo->m_Guid; |
7723 | else |
7724 | *pGuid = GUID_NULL; |
7725 | |
7726 | #else // DACCESS_COMPILE |
7727 | |
7728 | SIZE_T cchName = 0; // Length of the name (possibly after decoration). |
7729 | SIZE_T cbCur; // Current offset. |
7730 | LPCWSTR szName = NULL; // Name to turn to a guid. |
7731 | CQuickArray<BYTE> rName; // Buffer to accumulate signatures. |
7732 | BOOL bGenerated = FALSE; // A flag indicating if we generated the GUID from name. |
7733 | |
7734 | _ASSERTE(pGuid != NULL); |
7735 | |
    // Use the per-EEClass GuidInfo if we are asked for the "classic" non-WinRT GUID of a non-WinRT type
7737 | GuidInfo *pInfo = ((bClassic && !IsProjectedFromWinRT()) ? GetClass()->GetGuidInfo() : GetGuidInfo()); |
7738 | |
7739 | // First check to see if we have already cached the guid for this type. |
7740 | // We currently only cache guids on interfaces and WinRT delegates. |
7741 | // In classic mode, though, ensure we don't retrieve the GuidInfo for redirected interfaces |
7742 | if ((IsInterface() || IsWinRTDelegate()) && pInfo != NULL |
7743 | && (!bClassic || !SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected))) |
7744 | { |
7745 | if (pInfo->m_bGeneratedFromName) |
7746 | { |
7747 | // If the GUID was generated from the name then only return it |
7748 | // if bGenerateIfNotFound is set. |
7749 | if (bGenerateIfNotFound) |
7750 | *pGuid = pInfo->m_Guid; |
7751 | else |
7752 | *pGuid = GUID_NULL; |
7753 | } |
7754 | else |
7755 | { |
7756 | *pGuid = pInfo->m_Guid; |
7757 | } |
7758 | return; |
7759 | } |
7760 | |
7761 | #ifdef FEATURE_COMINTEROP |
7762 | if ((SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeProjected)) |
7763 | || (!bClassic |
7764 | && SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected) |
7765 | && IsLegalNonArrayWinRTType())) |
7766 | { |
7767 | // Closed generic WinRT interfaces/delegates have their GUID computed |
7768 | // based on the "PIID" in metadata and the instantiation. |
7769 | // Note that we explicitly do this computation for redirected mscorlib |
7770 | // interfaces only if !bClassic, so typeof(Enumerable<T>).GUID |
7771 | // for example still returns the same result as pre-v4.5 runtimes. |
7772 | // ComputeGuidForGenericType() may throw for generics nested beyond 64 levels. |
7773 | WinRTGuidGenerator::ComputeGuidForGenericType(this, pGuid); |
7774 | |
7775 | // This GUID is per-instantiation so make sure that the cache |
7776 | // where we are going to keep it is per-instantiation as well. |
7777 | _ASSERTE(IsCanonicalMethodTable() || HasGuidInfo()); |
7778 | } |
7779 | else |
7780 | #endif // FEATURE_COMINTEROP |
7781 | if (GetClass()->HasNoGuid()) |
7782 | { |
7783 | *pGuid = GUID_NULL; |
7784 | } |
7785 | else |
7786 | { |
7787 | // If there is a GUID in the metadata then return that. |
7788 | IfFailThrow(GetMDImport()->GetItemGuid(GetCl(), pGuid)); |
7789 | |
7790 | if (*pGuid == GUID_NULL) |
7791 | { |
7792 | // Remember that we didn't find the GUID, so we can skip looking during |
7793 | // future checks. (Note that this is a very important optimization in the |
7794 | // prejit case.) |
7795 | g_IBCLogger.LogEEClassCOWTableAccess(this); |
7796 | GetClass_NoLogging()->SetHasNoGuid(); |
7797 | } |
7798 | } |
7799 | |
7800 | if (*pGuid == GUID_NULL && bGenerateIfNotFound) |
7801 | { |
7802 | // For interfaces, concatenate the signatures of the methods and fields. |
7803 | if (!IsNilToken(GetCl()) && IsInterface()) |
7804 | { |
7805 | // Retrieve the stringized interface definition. |
7806 | cbCur = GetStringizedItfDef(TypeHandle(this), rName); |
7807 | |
7808 | // Pad up to a whole WCHAR. |
7809 | if (cbCur % sizeof(WCHAR)) |
7810 | { |
7811 | SIZE_T cbDelta = sizeof(WCHAR) - (cbCur % sizeof(WCHAR)); |
7812 | rName.ReSizeThrows(cbCur + cbDelta); |
7813 | memset(rName.Ptr() + cbCur, 0, cbDelta); |
7814 | cbCur += cbDelta; |
7815 | } |
7816 | |
7817 | // Point to the new buffer. |
7818 | cchName = cbCur / sizeof(WCHAR); |
7819 | szName = reinterpret_cast<LPWSTR>(rName.Ptr()); |
7820 | } |
7821 | else |
7822 | { |
7823 | // Get the name of the class. |
7824 | DefineFullyQualifiedNameForClassW(); |
7825 | szName = GetFullyQualifiedNameForClassNestedAwareW(this); |
7826 | if (szName == NULL) |
7827 | return; |
7828 | cchName = wcslen(szName); |
7829 | |
7830 | // Enlarge buffer for class name. |
7831 | cbCur = cchName * sizeof(WCHAR); |
7832 | rName.ReSizeThrows(cbCur + sizeof(WCHAR)); |
7833 | wcscpy_s(reinterpret_cast<LPWSTR>(rName.Ptr()), cchName + 1, szName); |
7834 | |
7835 | // Add the assembly guid string to the class name. |
7836 | ULONG cbCurOUT = (ULONG)cbCur; |
7837 | IfFailThrow(GetStringizedTypeLibGuidForAssembly(GetAssembly(), rName, (ULONG)cbCur, &cbCurOUT)); |
7838 | cbCur = (SIZE_T) cbCurOUT; |
7839 | |
7840 | // Pad to a whole WCHAR. |
7841 | if (cbCur % sizeof(WCHAR)) |
7842 | { |
7843 | rName.ReSizeThrows(cbCur + sizeof(WCHAR)-(cbCur%sizeof(WCHAR))); |
7844 | while (cbCur % sizeof(WCHAR)) |
7845 | rName[cbCur++] = 0; |
7846 | } |
7847 | |
7848 | // Point to the new buffer. |
7849 | szName = reinterpret_cast<LPWSTR>(rName.Ptr()); |
7850 | cchName = cbCur / sizeof(WCHAR); |
            // Don't want to have to pad.
7852 | _ASSERTE((sizeof(GUID) % sizeof(WCHAR)) == 0); |
7853 | } |
7854 | |
7855 | // Generate guid from name. |
7856 | CorGuidFromNameW(pGuid, szName, cchName); |
7857 | |
        // Remember that we generated the GUID from the type name.
7859 | bGenerated = TRUE; |
7860 | } |
7861 | |
7862 | // Cache the guid in the type, if not already cached. |
7863 | // We currently only do this for interfaces. |
7864 | // Also, in classic mode do NOT cache GUID for redirected interfaces. |
7865 | if ((IsInterface() || IsWinRTDelegate()) && (pInfo == NULL) && (*pGuid != GUID_NULL) |
7866 | #ifdef FEATURE_COMINTEROP |
7867 | && !(bClassic |
7868 | && SupportsGenericInterop(TypeHandle::Interop_NativeToManaged, modeRedirected) |
7869 | && IsLegalNonArrayWinRTType()) |
7870 | #endif // FEATURE_COMINTEROP |
7871 | ) |
7872 | { |
7873 | AllocMemTracker amTracker; |
        BOOL bStoreGuidInfoOnEEClass = FALSE;
7875 | PTR_LoaderAllocator pLoaderAllocator; |
7876 | |
#ifdef FEATURE_COMINTEROP
7878 | if ((bClassic && !IsProjectedFromWinRT()) || !HasGuidInfo()) |
7879 | { |
7880 | bStoreGuidInfoOnEEClass = true; |
7881 | } |
7882 | #else |
7883 | // We will always store the GuidInfo on the methodTable. |
7884 | bStoreGuidInfoOnEEClass = true; |
7885 | #endif |
        if (bStoreGuidInfoOnEEClass)
        {
            // Since the GuidInfo will be stored on the EEClass,
            // the memory should be allocated on the LoaderAllocator of the class.
            // The defining module and the loaded module can differ in some scenarios.
            // For example, in the case of shared generic instantiations,
            // a shared generic type such as System.__Canon is loaded into the shared
            // domain, but this->GetLoaderAllocator would be the loader allocator of
            // the defining module, which can get unloaded at any time.
7895 | _ASSERTE(GetClass()); |
7896 | _ASSERTE(GetClass()->GetMethodTable()); |
7897 | pLoaderAllocator = GetClass()->GetMethodTable()->GetLoaderAllocator(); |
7898 | } |
7899 | else |
7900 | { |
7901 | pLoaderAllocator = GetLoaderAllocator(); |
7902 | } |
7903 | |
7904 | _ASSERTE(pLoaderAllocator); |
7905 | |
7906 | // Allocate the guid information. |
7907 | pInfo = (GuidInfo *)amTracker.Track( |
7908 | pLoaderAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(GuidInfo)))); |
7909 | pInfo->m_Guid = *pGuid; |
7910 | pInfo->m_bGeneratedFromName = bGenerated; |
7911 | |
        // Store the GuidInfo in the appropriate location.
7913 | if (bClassic && !IsProjectedFromWinRT()) |
7914 | { |
7915 | // Set the per-EEClass GuidInfo if we are asked for the "classic" non-WinRT GUID. |
7916 | // The MethodTable may be NGENed and read-only - and there's no point in saving |
7917 | // classic GUIDs in non-WinRT MethodTables anyway. |
7918 | _ASSERTE(bStoreGuidInfoOnEEClass); |
7919 | GetClass()->SetGuidInfo(pInfo); |
7920 | } |
7921 | else |
7922 | { |
#ifdef FEATURE_COMINTEROP
7924 | _ASSERTE(bStoreGuidInfoOnEEClass || HasGuidInfo()); |
7925 | #else |
7926 | _ASSERTE(bStoreGuidInfoOnEEClass); |
7927 | #endif |
7928 | SetGuidInfo(pInfo); |
7929 | } |
7930 | |
7931 | amTracker.SuppressRelease(); |
7932 | } |
7933 | #endif // !DACCESS_COMPILE |
7934 | } |
7935 | |
7936 | |
7937 | //========================================================================================== |
7938 | MethodDesc* MethodTable::GetMethodDescForSlotAddress(PCODE addr, BOOL fSpeculative /*=FALSE*/) |
7939 | { |
7940 | CONTRACT(MethodDesc *) |
7941 | { |
7942 | GC_NOTRIGGER; |
7943 | NOTHROW; |
7944 | SO_TOLERANT; |
7945 | POSTCONDITION(CheckPointer(RETVAL, NULL_NOT_OK)); |
        POSTCONDITION(RETVAL->m_pDebugMethodTable.IsNull() || // We must be in BuildMethodTableThrowing()
7947 | RETVAL->SanityCheck()); |
7948 | } |
7949 | CONTRACT_END; |
7950 | |
7951 | // If we see shared fcall implementation as an argument to this |
7952 | // function, it means that a vtable slot for the shared fcall |
7953 | // got backpatched when it shouldn't have. The reason we can't |
7954 | // backpatch this method is that it is an FCall that has many |
7955 | // MethodDescs for one implementation. If we backpatch delegate |
7956 | // constructors, this function will not be able to recover the |
7957 | // MethodDesc for the method. |
7958 | // |
7959 | _ASSERTE_IMPL(!ECall::IsSharedFCallImpl(addr) && |
7960 | "someone backpatched shared fcall implementation -- " |
7961 | "see comment in code" ); |
7962 | |
7963 | MethodDesc* pMethodDesc = ExecutionManager::GetCodeMethodDesc(addr); |
7964 | if (NULL != pMethodDesc) |
7965 | { |
7966 | goto lExit; |
7967 | } |
7968 | |
7969 | #ifdef FEATURE_INTERPRETER |
    // TODO: It is not clear why this check helps; figure it out.
7971 | #ifndef DACCESS_COMPILE |
7972 | // If we didn't find it above, try as an Interpretation stub... |
7973 | pMethodDesc = Interpreter::InterpretationStubToMethodInfo(addr); |
7974 | |
7975 | if (NULL != pMethodDesc) |
7976 | { |
7977 | goto lExit; |
7978 | } |
7979 | #endif |
7980 | #endif // FEATURE_INTERPRETER |
7981 | |
7982 | // Is it an FCALL? |
7983 | pMethodDesc = ECall::MapTargetBackToMethod(addr); |
7984 | if (pMethodDesc != 0) |
7985 | { |
7986 | goto lExit; |
7987 | } |
7988 | |
7989 | pMethodDesc = MethodDesc::GetMethodDescFromStubAddr(addr, fSpeculative); |
7990 | |
7991 | lExit: |
7992 | |
7993 | RETURN(pMethodDesc); |
7994 | } |
7995 | |
7996 | //========================================================================================== |
7997 | /* static*/ |
7998 | BOOL MethodTable::ComputeContainsGenericVariables(Instantiation inst) |
7999 | { |
8000 | CONTRACTL |
8001 | { |
8002 | NOTHROW; |
8003 | GC_NOTRIGGER; |
8004 | SO_TOLERANT; |
8005 | MODE_ANY; |
8006 | } |
8007 | CONTRACTL_END; |
8008 | |
8009 | for (DWORD j = 0; j < inst.GetNumArgs(); j++) |
8010 | { |
8011 | if (inst[j].ContainsGenericVariables()) |
8012 | { |
8013 | return TRUE; |
8014 | } |
8015 | } |
8016 | return FALSE; |
8017 | } |
8018 | |
8019 | //========================================================================================== |
8020 | BOOL MethodTable::SanityCheck() |
8021 | { |
8022 | LIMITED_METHOD_CONTRACT; |
8023 | SUPPORTS_DAC; |
8024 | |
    // Strings have component size 2; all other non-arrays should have component size 0.
8026 | _ASSERTE((GetComponentSize() <= 2) || IsArray()); |
8027 | |
8028 | if (m_pEEClass.IsNull()) |
8029 | { |
8030 | return FALSE; |
8031 | } |
8032 | |
8033 | EEClass * pClass = GetClass(); |
8034 | MethodTable * pCanonMT = pClass->GetMethodTable(); |
8035 | |
8036 | // Let's try to make sure we have a valid EEClass pointer. |
8037 | if (pCanonMT == NULL) |
8038 | return FALSE; |
8039 | |
8040 | if (GetNumGenericArgs() != 0) |
8041 | return (pCanonMT->GetClass() == pClass); |
8042 | else |
8043 | return (pCanonMT == this) || IsArray(); |
8044 | } |
8045 | |
8046 | //========================================================================================== |
8047 | |
8048 | // Structs containing GC pointers whose size is at most this are always stack-allocated. |
const unsigned MaxStructBytesForLocalVarRetBuffBytes = 2 * sizeof(void*); // 2 pointer-widths.
8050 | |
8051 | BOOL MethodTable::IsStructRequiringStackAllocRetBuf() |
8052 | { |
8053 | LIMITED_METHOD_DAC_CONTRACT; |
8054 | |
8055 | // Disable this optimization. It has limited value (only kicks in on x86, and only for less common structs), |
8056 | // causes bugs and introduces odd ABI differences not compatible with ReadyToRun. |
8057 | return FALSE; |
8058 | } |
8059 | |
8060 | //========================================================================================== |
8061 | unsigned MethodTable::GetTypeDefRid() |
8062 | { |
8063 | LIMITED_METHOD_DAC_CONTRACT; |
8064 | |
8065 | g_IBCLogger.LogMethodTableAccess(this); |
8066 | return GetTypeDefRid_NoLogging(); |
8067 | } |
8068 | |
8069 | //========================================================================================== |
8070 | unsigned MethodTable::GetTypeDefRid_NoLogging() |
8071 | { |
8072 | LIMITED_METHOD_DAC_CONTRACT; |
8073 | |
8074 | WORD token = m_wToken; |
8075 | |
8076 | if (token == METHODTABLE_TOKEN_OVERFLOW) |
8077 | return (unsigned)*GetTokenOverflowPtr(); |
8078 | |
8079 | return token; |
8080 | } |
8081 | |
8082 | //========================================================================================== |
8083 | void MethodTable::SetCl(mdTypeDef token) |
8084 | { |
8085 | LIMITED_METHOD_CONTRACT; |
8086 | |
8087 | unsigned rid = RidFromToken(token); |
8088 | if (rid >= METHODTABLE_TOKEN_OVERFLOW) |
8089 | { |
8090 | m_wToken = METHODTABLE_TOKEN_OVERFLOW; |
8091 | *GetTokenOverflowPtr() = rid; |
8092 | } |
8093 | else |
8094 | { |
8095 | _ASSERTE(FitsIn<U2>(rid)); |
8096 | m_wToken = (WORD)rid; |
8097 | } |
8098 | |
8099 | _ASSERTE(GetCl() == token); |
8100 | } |
8101 | |
8102 | //========================================================================================== |
8103 | MethodDesc * MethodTable::GetClassConstructor() |
8104 | { |
8105 | CONTRACTL |
8106 | { |
8107 | NOTHROW; |
8108 | GC_NOTRIGGER; |
8109 | SO_TOLERANT; |
8110 | MODE_ANY; |
8111 | } |
8112 | CONTRACTL_END; |
8113 | return GetMethodDescForSlot(GetClassConstructorSlot()); |
8114 | } |
8115 | |
8116 | //========================================================================================== |
8117 | DWORD MethodTable::HasFixedAddressVTStatics() |
8118 | { |
8119 | LIMITED_METHOD_CONTRACT; |
8120 | |
8121 | return GetClass()->HasFixedAddressVTStatics(); |
8122 | } |
8123 | |
8124 | //========================================================================================== |
8125 | WORD MethodTable::GetNumHandleRegularStatics() |
8126 | { |
8127 | LIMITED_METHOD_CONTRACT; |
8128 | |
8129 | return GetClass()->GetNumHandleRegularStatics(); |
8130 | } |
8131 | |
8132 | //========================================================================================== |
8133 | WORD MethodTable::GetNumBoxedRegularStatics() |
8134 | { |
8135 | LIMITED_METHOD_CONTRACT; |
8136 | |
8137 | return GetClass()->GetNumBoxedRegularStatics(); |
8138 | } |
8139 | |
8140 | //========================================================================================== |
8141 | WORD MethodTable::GetNumBoxedThreadStatics () |
8142 | { |
8143 | LIMITED_METHOD_CONTRACT; |
8144 | |
8145 | return GetClass()->GetNumBoxedThreadStatics(); |
8146 | } |
8147 | |
8148 | //========================================================================================== |
8149 | ClassCtorInfoEntry* MethodTable::GetClassCtorInfoIfExists() |
8150 | { |
8151 | LIMITED_METHOD_CONTRACT; |
8152 | |
8153 | if (!IsZapped()) |
8154 | return NULL; |
8155 | |
8156 | g_IBCLogger.LogCCtorInfoReadAccess(this); |
8157 | |
8158 | if (HasBoxedRegularStatics()) |
8159 | { |
8160 | ModuleCtorInfo *pModuleCtorInfo = GetZapModule()->GetZapModuleCtorInfo(); |
8161 | DPTR(RelativePointer<PTR_MethodTable>) ppMT = pModuleCtorInfo->ppMT; |
8162 | PTR_DWORD hotHashOffsets = pModuleCtorInfo->hotHashOffsets; |
8163 | PTR_DWORD coldHashOffsets = pModuleCtorInfo->coldHashOffsets; |
8164 | |
8165 | if (pModuleCtorInfo->numHotHashes) |
8166 | { |
8167 | DWORD hash = pModuleCtorInfo->GenerateHash(PTR_MethodTable(this), ModuleCtorInfo::HOT); |
8168 | _ASSERTE(hash < pModuleCtorInfo->numHotHashes); |
8169 | |
8170 | for (DWORD i = hotHashOffsets[hash]; i != hotHashOffsets[hash + 1]; i++) |
8171 | { |
8172 | _ASSERTE(!ppMT[i].IsNull()); |
8173 | if (dac_cast<TADDR>(pModuleCtorInfo->GetMT(i)) == dac_cast<TADDR>(this)) |
8174 | { |
8175 | return pModuleCtorInfo->cctorInfoHot + i; |
8176 | } |
8177 | } |
8178 | } |
8179 | |
8180 | if (pModuleCtorInfo->numColdHashes) |
8181 | { |
8182 | DWORD hash = pModuleCtorInfo->GenerateHash(PTR_MethodTable(this), ModuleCtorInfo::COLD); |
8183 | _ASSERTE(hash < pModuleCtorInfo->numColdHashes); |
8184 | |
8185 | for (DWORD i = coldHashOffsets[hash]; i != coldHashOffsets[hash + 1]; i++) |
8186 | { |
8187 | _ASSERTE(!ppMT[i].IsNull()); |
8188 | if (dac_cast<TADDR>(pModuleCtorInfo->GetMT(i)) == dac_cast<TADDR>(this)) |
8189 | { |
8190 | return pModuleCtorInfo->cctorInfoCold + (i - pModuleCtorInfo->numElementsHot); |
8191 | } |
8192 | } |
8193 | } |
8194 | } |
8195 | |
8196 | return NULL; |
8197 | } |
8198 | |
8199 | #ifdef _DEBUG |
8200 | //========================================================================================== |
8201 | // Returns true if pointer to the parent method table has been initialized/restored already. |
8202 | BOOL MethodTable::IsParentMethodTablePointerValid() |
8203 | { |
8204 | LIMITED_METHOD_CONTRACT; |
8205 | SUPPORTS_DAC; |
8206 | |
    // Workaround: the type loader accesses partially initialized data structures, which
    // interferes with IBC logging. Once the type loader is fixed to no longer access
    // partially initialized data structures, this can go away.
8209 | if (!GetWriteableData_NoLogging()->IsParentMethodTablePointerValid()) |
8210 | return FALSE; |
8211 | |
8212 | return !IsParentMethodTableTagged(dac_cast<PTR_MethodTable>(this)); |
8213 | } |
8214 | #endif |
8215 | |
8216 | |
8217 | //--------------------------------------------------------------------------------------- |
8218 | // |
8219 | // Ascends the parent class chain of "this", until a MethodTable is found whose typeDef |
8220 | // matches that of the specified pWhichParent. Why is this useful? See |
8221 | // code:MethodTable::GetInstantiationOfParentClass below and |
8222 | // code:Generics::GetExactInstantiationsOfMethodAndItsClassFromCallInformation for use |
8223 | // cases. |
8224 | // |
8225 | // Arguments: |
8226 | // pWhichParent - MethodTable whose typeDef we're trying to match as we go up |
8227 | // "this"'s parent chain. |
8228 | // |
8229 | // Return Value: |
8230 | // If a matching parent MethodTable is found, it is returned. Else, NULL is |
8231 | // returned. |
8232 | // |
8233 | |
8234 | MethodTable * MethodTable::GetMethodTableMatchingParentClass(MethodTable * pWhichParent) |
8235 | { |
8236 | CONTRACTL |
8237 | { |
8238 | NOTHROW; |
8239 | GC_NOTRIGGER; |
8240 | SO_TOLERANT; |
8241 | PRECONDITION(CheckPointer(pWhichParent)); |
8242 | PRECONDITION(IsRestored_NoLogging()); |
8243 | PRECONDITION(pWhichParent->IsRestored_NoLogging()); |
8244 | SUPPORTS_DAC; |
8245 | } CONTRACTL_END; |
8246 | |
8247 | MethodTable *pMethodTableSearch = this; |
8248 | |
8249 | #ifdef DACCESS_COMPILE |
8250 | unsigned parentCount = 0; |
8251 | MethodTable *pOldMethodTable = NULL; |
8252 | #endif // DACCESS_COMPILE |
8253 | |
8254 | while (pMethodTableSearch != NULL) |
8255 | { |
8256 | #ifdef DACCESS_COMPILE |
8257 | if (pMethodTableSearch == pOldMethodTable || |
8258 | parentCount > 1000) |
8259 | { |
8260 | break; |
8261 | } |
8262 | pOldMethodTable = pMethodTableSearch; |
8263 | parentCount++; |
8264 | #endif // DACCESS_COMPILE |
8265 | |
8266 | if (pMethodTableSearch->HasSameTypeDefAs(pWhichParent)) |
8267 | { |
8268 | return pMethodTableSearch; |
8269 | } |
8270 | |
8271 | pMethodTableSearch = pMethodTableSearch->GetParentMethodTable(); |
8272 | } |
8273 | |
8274 | return NULL; |
8275 | } |
8276 | |
8277 | |
8278 | //========================================================================================== |
8279 | // Given D<T> : C<List<T>> and a type handle D<string> we sometimes |
8280 | // need to find the corresponding type handle |
8281 | // C<List<string>> (C may also be some type |
8282 | // further up the inheritance hierarchy). GetInstantiationOfParentClass |
8283 | // helps us do this by getting the corresponding instantiation of C, i.e. |
8284 | // <List<string>>. |
8285 | // |
// pWhichParent: this is used to identify which parent type we're interested in.
8287 | // It must be a canonical EEClass, e.g. for C<ref>. This is used as a token for |
8288 | // C<List<T>>. This method can also be called with the minimal methodtable used |
8289 | // for dynamic methods. In that case, we need to return an empty instantiation. |
8290 | // |
8291 | // Note this only works for parent classes, not parent interfaces. |
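//
// Illustrative usage sketch (hypothetical caller; names are assumptions):
//
//     // pDString is the MethodTable for D<string>, pCCanon is C's canonical MT
//     Instantiation inst = pDString->GetInstantiationOfParentClass(pCCanon);
//     // inst is <List<string>>; for the dynamic-method MT it would be empty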
8292 | Instantiation MethodTable::GetInstantiationOfParentClass(MethodTable *pWhichParent) |
8293 | { |
8294 | CONTRACTL { |
8295 | NOTHROW; |
8296 | GC_NOTRIGGER; |
8297 | SO_TOLERANT; |
8298 | PRECONDITION(CheckPointer(pWhichParent)); |
8299 | PRECONDITION(IsRestored_NoLogging()); |
8300 | PRECONDITION(pWhichParent->IsRestored_NoLogging()); |
8301 | SUPPORTS_DAC; |
8302 | } CONTRACTL_END; |
8303 | |
8304 | |
8305 | MethodTable * pMatchingParent = GetMethodTableMatchingParentClass(pWhichParent); |
8306 | if (pMatchingParent != NULL) |
8307 | { |
8308 | return pMatchingParent->GetInstantiation(); |
8309 | } |
8310 | |
8311 | // The parameter should always be a parent class or the dynamic method |
8312 | // class. Since there is no bit on the dynamicclass methodtable to indicate |
    // that it is the dynamic method methodtable, we simply check the debug name.
    // This is good enough for an assert.
8315 | _ASSERTE(strcmp(pWhichParent->GetDebugClassName(), "dynamicClass" ) == 0); |
8316 | return Instantiation(); |
8317 | } |
8318 | |
8319 | #ifndef DACCESS_COMPILE |
8320 | |
8321 | #ifdef FEATURE_COMINTEROP |
8322 | |
8323 | // |
8324 | // This is for COM Interop backwards compatibility |
8325 | // |
8326 | |
8327 | //========================================================================================== |
8328 | // Returns the data pointer if present, NULL otherwise |
8329 | InteropMethodTableData *MethodTable::LookupComInteropData() |
8330 | { |
8331 | WRAPPER_NO_CONTRACT; |
8332 | |
8333 | return GetLoaderAllocator()->LookupComInteropData(this); |
8334 | } |
8335 | |
8336 | //========================================================================================== |
8337 | // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry |
8338 | BOOL MethodTable::InsertComInteropData(InteropMethodTableData *pData) |
8339 | { |
8340 | WRAPPER_NO_CONTRACT; |
8341 | |
8342 | return GetLoaderAllocator()->InsertComInteropData(this, pData); |
8343 | } |
8344 | |
8345 | //========================================================================================== |
8346 | InteropMethodTableData *MethodTable::CreateComInteropData(AllocMemTracker *pamTracker) |
8347 | { |
8348 | CONTRACTL { |
8349 | STANDARD_VM_CHECK; |
8350 | PRECONDITION(GetParentMethodTable() == NULL || GetParentMethodTable()->LookupComInteropData() != NULL); |
8351 | } CONTRACTL_END; |
8352 | |
8353 | ClassCompat::MethodTableBuilder builder(this); |
8354 | |
8355 | InteropMethodTableData *pData = builder.BuildInteropVTable(pamTracker); |
8356 | _ASSERTE(pData); |
8357 | return (pData); |
8358 | } |
8359 | |
8360 | //========================================================================================== |
8361 | InteropMethodTableData *MethodTable::GetComInteropData() |
8362 | { |
8363 | CONTRACTL { |
8364 | THROWS; |
8365 | GC_TRIGGERS; |
8366 | } CONTRACTL_END; |
8367 | |
8368 | InteropMethodTableData *pData = LookupComInteropData(); |
8369 | |
8370 | if (!pData) |
8371 | { |
8372 | GCX_PREEMP(); |
8373 | |
8374 | // Make sure that the parent's interop data has been created |
8375 | MethodTable *pParentMT = GetParentMethodTable(); |
8376 | if (pParentMT) |
8377 | pParentMT->GetComInteropData(); |
8378 | |
8379 | AllocMemTracker amTracker; |
8380 | |
8381 | pData = CreateComInteropData(&amTracker); |
8382 | if (InsertComInteropData(pData)) |
8383 | { |
8384 | amTracker.SuppressRelease(); |
8385 | } |
8386 | else |
8387 | { |
8388 | pData = LookupComInteropData(); |
8389 | } |
8390 | } |
8391 | |
8392 | _ASSERTE(pData); |
8393 | return (pData); |
8394 | } |
8395 | |
8396 | #endif // FEATURE_COMINTEROP |
8397 | |
8398 | //========================================================================================== |
8399 | ULONG MethodTable::MethodData::Release() |
8400 | { |
8401 | LIMITED_METHOD_CONTRACT; |
8402 | //@TODO: Must adjust this to use an alternate allocator so that we don't |
8403 | //@TODO: potentially cause deadlocks on the debug thread. |
8404 | SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE; |
8405 | ULONG cRef = (ULONG) InterlockedDecrement((LONG*)&m_cRef); |
8406 | if (cRef == 0) { |
8407 | delete this; |
8408 | } |
8409 | return (cRef); |
8410 | } |
8411 | |
8412 | //========================================================================================== |
8413 | void |
8414 | MethodTable::MethodData::ProcessMap( |
8415 | const DispatchMapTypeID * rgTypeIDs, |
8416 | UINT32 cTypeIDs, |
8417 | MethodTable * pMT, |
8418 | UINT32 iCurrentChainDepth, |
8419 | MethodDataEntry * rgWorkingData) |
8420 | { |
8421 | LIMITED_METHOD_CONTRACT; |
8422 | |
8423 | for (DispatchMap::EncodedMapIterator it(pMT); it.IsValid(); it.Next()) |
8424 | { |
8425 | for (UINT32 nTypeIDIndex = 0; nTypeIDIndex < cTypeIDs; nTypeIDIndex++) |
8426 | { |
8427 | if (it.Entry()->GetTypeID() == rgTypeIDs[nTypeIDIndex]) |
8428 | { |
8429 | UINT32 curSlot = it.Entry()->GetSlotNumber(); |
8430 | // If we're processing an interface, or it's for a virtual, or it's for a non-virtual |
8431 | // for the most derived type, we want to process the entry. In other words, we |
8432 | // want to ignore non-virtuals for parent classes. |
8433 | if ((curSlot < pMT->GetNumVirtuals()) || (iCurrentChainDepth == 0)) |
8434 | { |
8435 | MethodDataEntry * pCurEntry = &rgWorkingData[curSlot]; |
8436 | if (!pCurEntry->IsDeclInit() && !pCurEntry->IsImplInit()) |
8437 | { |
8438 | pCurEntry->SetImplData(it.Entry()->GetTargetSlotNumber()); |
8439 | } |
8440 | } |
8441 | } |
8442 | } |
8443 | } |
8444 | } // MethodTable::MethodData::ProcessMap |
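
// Illustrative note: rgWorkingData is indexed by the declaring (interface) slot
// number, and since derived types are processed at lower chain depths first, the
// first impl slot recorded for an entry wins; later (ancestor) mappings are ignored.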
8445 | |
8446 | //========================================================================================== |
8447 | UINT32 MethodTable::MethodDataObject::GetObjectSize(MethodTable *pMT) |
8448 | { |
8449 | WRAPPER_NO_CONTRACT; |
8450 | UINT32 cb = sizeof(MethodTable::MethodDataObject); |
8451 | cb += pMT->GetCanonicalMethodTable()->GetNumMethods() * sizeof(MethodDataObjectEntry); |
8452 | return cb; |
8453 | } |
8454 | |
8455 | //========================================================================================== |
8456 | // This will fill in all the MethodEntry slots present in the current MethodTable |
8457 | void MethodTable::MethodDataObject::Init(MethodTable *pMT, MethodData *pParentData) |
8458 | { |
8459 | CONTRACTL { |
8460 | THROWS; |
8461 | WRAPPER(GC_TRIGGERS); |
8462 | PRECONDITION(CheckPointer(pMT)); |
8463 | PRECONDITION(CheckPointer(pParentData, NULL_OK)); |
8464 | PRECONDITION(!pMT->IsInterface()); |
8465 | PRECONDITION(pParentData == NULL || |
8466 | (pMT->ParentEquals(pParentData->GetDeclMethodTable()) && |
8467 | pMT->ParentEquals(pParentData->GetImplMethodTable()))); |
8468 | } CONTRACTL_END; |
8469 | |
8470 | m_pMT = pMT; |
8471 | m_iNextChainDepth = 0; |
8472 | m_containsMethodImpl = FALSE; |
8473 | |
8474 | ZeroMemory(GetEntryData(), sizeof(MethodDataObjectEntry) * GetNumMethods()); |
8475 | } // MethodTable::MethodDataObject::Init |
8476 | |
8477 | //========================================================================================== |
8478 | BOOL MethodTable::MethodDataObject::PopulateNextLevel() |
8479 | { |
8480 | LIMITED_METHOD_CONTRACT; |
8481 | |
8482 | // Get the chain depth to next decode. |
8483 | UINT32 iChainDepth = GetNextChainDepth(); |
8484 | |
8485 | // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent. |
8486 | if (iChainDepth == MAX_CHAIN_DEPTH) { |
8487 | return FALSE; |
8488 | } |
8489 | // Now move up the chain to the target. |
8490 | MethodTable *pMTCur = m_pMT; |
8491 | for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) { |
8492 | pMTCur = pMTCur->GetParentMethodTable(); |
8493 | } |
8494 | |
8495 | // If we reached the end, then we're done. |
8496 | if (pMTCur == NULL) { |
8497 | SetNextChainDepth(MAX_CHAIN_DEPTH); |
8498 | return FALSE; |
8499 | } |
8500 | |
8501 | FillEntryDataForAncestor(pMTCur); |
8502 | |
8503 | SetNextChainDepth(iChainDepth + 1); |
8504 | |
8505 | return TRUE; |
8506 | } // MethodTable::MethodDataObject::PopulateNextLevel |
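
// Illustrative walk (hypothetical hierarchy D : C : B): successive calls decode D's
// methods at chain depth 0, then C's virtuals at depth 1, then B's at depth 2; the
// next call runs off the top of the hierarchy, records MAX_CHAIN_DEPTH, and returns
// FALSE so callers stop probing.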
8507 | |
8508 | //========================================================================================== |
8509 | void MethodTable::MethodDataObject::FillEntryDataForAncestor(MethodTable * pMT) |
8510 | { |
8511 | LIMITED_METHOD_CONTRACT; |
8512 | |
8513 | // Since we traverse ancestors from lowest in the inheritance hierarchy |
8514 | // to highest, the first method we come across for a slot is normally |
8515 | // both the declaring and implementing method desc. |
8516 | // |
8517 | // However if this slot is the target of a methodImpl, pMD is not |
8518 | // necessarily either. Rather than track this on a per-slot basis, |
8519 | // we conservatively avoid filling out virtual methods once we |
8520 | // have found that this inheritance chain contains a methodImpl. |
8521 | // |
8522 | // Note that there may be a methodImpl higher in the inheritance chain |
8523 | // that we have not seen yet, and so we will fill out virtual methods |
8524 | // until we reach that level. We are safe doing that because the slots |
8525 | // we fill have been introduced/overridden by a subclass and so take |
8526 | // precedence over any inherited methodImpl. |
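    //
    // For example (illustrative): if B declares a methodImpl and we are gathering
    // data for C : B, then at chain depth 0 we fill everything C introduces, but
    // at depth 1 the methodImpl in B is detected and we stop filling inherited
    // virtual slots, leaving them to be resolved on demand.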
8527 | |
8528 | // Before we fill the entry data, find if the current ancestor has any methodImpls |
8529 | |
8530 | if (pMT->GetClass()->ContainsMethodImpls()) |
8531 | m_containsMethodImpl = TRUE; |
8532 | |
8533 | if (m_containsMethodImpl && pMT != m_pMT) |
8534 | return; |
8535 | |
8536 | unsigned nVirtuals = pMT->GetNumVirtuals(); |
8537 | |
8538 | MethodTable::IntroducedMethodIterator it(pMT, FALSE); |
8539 | for (; it.IsValid(); it.Next()) |
8540 | { |
8541 | MethodDesc * pMD = it.GetMethodDesc(); |
8542 | g_IBCLogger.LogMethodDescAccess(pMD); |
8543 | |
8544 | unsigned slot = pMD->GetSlot(); |
8545 | if (slot == MethodTable::NO_SLOT) |
8546 | continue; |
8547 | |
8548 | // We want to fill all methods introduced by the actual type we're gathering |
8549 | // data for, and the virtual methods of the parent and above |
8550 | if (pMT == m_pMT) |
8551 | { |
8552 | if (m_containsMethodImpl && slot < nVirtuals) |
8553 | continue; |
8554 | } |
8555 | else |
8556 | { |
8557 | if (slot >= nVirtuals) |
8558 | continue; |
8559 | } |
8560 | |
8561 | MethodDataObjectEntry * pEntry = GetEntry(slot); |
8562 | |
8563 | if (pEntry->GetDeclMethodDesc() == NULL) |
8564 | { |
8565 | pEntry->SetDeclMethodDesc(pMD); |
8566 | } |
8567 | |
8568 | if (pEntry->GetImplMethodDesc() == NULL) |
8569 | { |
8570 | pEntry->SetImplMethodDesc(pMD); |
8571 | } |
8572 | } |
8573 | } // MethodTable::MethodDataObject::FillEntryDataForAncestor |
8574 | |
8575 | //========================================================================================== |
8576 | MethodDesc * MethodTable::MethodDataObject::GetDeclMethodDesc(UINT32 slotNumber) |
8577 | { |
8578 | WRAPPER_NO_CONTRACT; |
8579 | _ASSERTE(slotNumber < GetNumMethods()); |
8580 | |
8581 | MethodDataObjectEntry * pEntry = GetEntry(slotNumber); |
8582 | |
8583 | // Fill the entries one level of inheritance at a time, |
8584 | // stopping when we have filled the MD we are looking for. |
8585 | while (!pEntry->GetDeclMethodDesc() && PopulateNextLevel()); |
8586 | |
8587 | MethodDesc * pMDRet = pEntry->GetDeclMethodDesc(); |
8588 | if (pMDRet == NULL) |
8589 | { |
8590 | pMDRet = GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber); |
8591 | _ASSERTE(CheckPointer(pMDRet)); |
8592 | pEntry->SetDeclMethodDesc(pMDRet); |
8593 | } |
8594 | else |
8595 | { |
8596 | _ASSERTE(pMDRet == GetImplMethodDesc(slotNumber)->GetDeclMethodDesc(slotNumber)); |
8597 | } |
8598 | return pMDRet; |
8599 | } |
8600 | |
8601 | //========================================================================================== |
8602 | DispatchSlot MethodTable::MethodDataObject::GetImplSlot(UINT32 slotNumber) |
8603 | { |
8604 | WRAPPER_NO_CONTRACT; |
8605 | _ASSERTE(slotNumber < GetNumMethods()); |
8606 | return DispatchSlot(m_pMT->GetRestoredSlot(slotNumber)); |
8607 | } |
8608 | |
8609 | //========================================================================================== |
8610 | UINT32 MethodTable::MethodDataObject::GetImplSlotNumber(UINT32 slotNumber) |
8611 | { |
8612 | WRAPPER_NO_CONTRACT; |
8613 | _ASSERTE(slotNumber < GetNumMethods()); |
8614 | return slotNumber; |
8615 | } |
8616 | |
8617 | //========================================================================================== |
8618 | MethodDesc *MethodTable::MethodDataObject::GetImplMethodDesc(UINT32 slotNumber) |
8619 | { |
8620 | CONTRACTL |
8621 | { |
8622 | NOTHROW; |
8623 | GC_NOTRIGGER; |
8624 | MODE_ANY; |
8625 | } |
8626 | CONTRACTL_END; |
8627 | |
8628 | _ASSERTE(slotNumber < GetNumMethods()); |
8629 | MethodDataObjectEntry *pEntry = GetEntry(slotNumber); |
8630 | |
8631 | // Fill the entries one level of inheritance at a time, |
8632 | // stopping when we have filled the MD we are looking for. |
8633 | while (!pEntry->GetImplMethodDesc() && PopulateNextLevel()); |
8634 | |
8635 | MethodDesc *pMDRet = pEntry->GetImplMethodDesc(); |
8636 | |
8637 | if (pMDRet == NULL) |
8638 | { |
8639 | _ASSERTE(slotNumber < GetNumVirtuals()); |
8640 | pMDRet = m_pMT->GetMethodDescForSlot(slotNumber); |
8641 | _ASSERTE(CheckPointer(pMDRet)); |
8642 | pEntry->SetImplMethodDesc(pMDRet); |
8643 | } |
8644 | else |
8645 | { |
8646 | _ASSERTE(slotNumber >= GetNumVirtuals() || pMDRet == m_pMT->GetMethodDescForSlot(slotNumber)); |
8647 | } |
8648 | |
8649 | return pMDRet; |
8650 | } |
8651 | |
8652 | //========================================================================================== |
8653 | void MethodTable::MethodDataObject::InvalidateCachedVirtualSlot(UINT32 slotNumber) |
8654 | { |
8655 | WRAPPER_NO_CONTRACT; |
8656 | _ASSERTE(slotNumber < GetNumVirtuals()); |
8657 | |
8658 | MethodDataObjectEntry *pEntry = GetEntry(slotNumber); |
8659 | pEntry->SetImplMethodDesc(NULL); |
8660 | } |
8661 | |
8662 | //========================================================================================== |
8663 | MethodDesc *MethodTable::MethodDataInterface::GetDeclMethodDesc(UINT32 slotNumber) |
8664 | { |
8665 | WRAPPER_NO_CONTRACT; |
8666 | return m_pMT->GetMethodDescForSlot(slotNumber); |
8667 | } |
8668 | |
8669 | //========================================================================================== |
8670 | MethodDesc *MethodTable::MethodDataInterface::GetImplMethodDesc(UINT32 slotNumber) |
8671 | { |
8672 | WRAPPER_NO_CONTRACT; |
8673 | return MethodTable::MethodDataInterface::GetDeclMethodDesc(slotNumber); |
8674 | } |
8675 | |
8676 | //========================================================================================== |
8677 | void MethodTable::MethodDataInterface::InvalidateCachedVirtualSlot(UINT32 slotNumber) |
8678 | { |
8679 | LIMITED_METHOD_CONTRACT; |
8680 | |
8681 | // MethodDataInterface does not store any cached MethodDesc values |
8682 | return; |
8683 | } |
8684 | |
8685 | //========================================================================================== |
8686 | UINT32 MethodTable::MethodDataInterfaceImpl::GetObjectSize(MethodTable *pMTDecl) |
8687 | { |
8688 | WRAPPER_NO_CONTRACT; |
8689 | UINT32 cb = sizeof(MethodDataInterfaceImpl); |
8690 | cb += pMTDecl->GetNumMethods() * sizeof(MethodDataEntry); |
8691 | return cb; |
8692 | } |
8693 | |
8694 | //========================================================================================== |
8695 | // This will fill in all the MethodEntry slots present in the current MethodTable |
8696 | void |
8697 | MethodTable::MethodDataInterfaceImpl::Init( |
8698 | const DispatchMapTypeID * rgDeclTypeIDs, |
8699 | UINT32 cDeclTypeIDs, |
8700 | MethodData * pDecl, |
8701 | MethodData * pImpl) |
8702 | { |
8703 | CONTRACTL { |
8704 | THROWS; |
8705 | WRAPPER(GC_TRIGGERS); |
8706 | PRECONDITION(CheckPointer(pDecl)); |
8707 | PRECONDITION(CheckPointer(pImpl)); |
8708 | PRECONDITION(pDecl->GetDeclMethodTable()->IsInterface()); |
8709 | PRECONDITION(!pImpl->GetDeclMethodTable()->IsInterface()); |
8710 | PRECONDITION(pDecl->GetDeclMethodTable() == pDecl->GetImplMethodTable()); |
8711 | PRECONDITION(pImpl->GetDeclMethodTable() == pImpl->GetImplMethodTable()); |
8712 | PRECONDITION(pDecl != pImpl); |
8713 | } CONTRACTL_END; |
8714 | |
8715 | // Store and AddRef the decl and impl data. |
8716 | m_pDecl = pDecl; |
8717 | m_pDecl->AddRef(); |
8718 | m_pImpl = pImpl; |
8719 | m_pImpl->AddRef(); |
8720 | |
8721 | m_iNextChainDepth = 0; |
8722 | // Need side effects of the calls, but not the result. |
8723 | /* MethodTable *pDeclMT = */ pDecl->GetDeclMethodTable(); |
8724 | /* MethodTable *pImplMT = */ pImpl->GetImplMethodTable(); |
8725 | m_rgDeclTypeIDs = rgDeclTypeIDs; |
8726 | m_cDeclTypeIDs = cDeclTypeIDs; |
8727 | |
8728 | // Initialize each entry. |
8729 | for (UINT32 i = 0; i < GetNumMethods(); i++) { |
8730 | // Initialize the entry |
8731 | GetEntry(i)->Init(); |
8732 | } |
8733 | } // MethodTable::MethodDataInterfaceImpl::Init |
8734 | |
8735 | //========================================================================================== |
8736 | MethodTable::MethodDataInterfaceImpl::MethodDataInterfaceImpl( |
8737 | const DispatchMapTypeID * rgDeclTypeIDs, |
8738 | UINT32 cDeclTypeIDs, |
8739 | MethodData * pDecl, |
8740 | MethodData * pImpl) |
8741 | { |
8742 | WRAPPER_NO_CONTRACT; |
8743 | Init(rgDeclTypeIDs, cDeclTypeIDs, pDecl, pImpl); |
8744 | } |
8745 | |
8746 | //========================================================================================== |
8747 | MethodTable::MethodDataInterfaceImpl::~MethodDataInterfaceImpl() |
8748 | { |
8749 | WRAPPER_NO_CONTRACT; |
8750 | CONSISTENCY_CHECK(CheckPointer(m_pDecl)); |
8751 | CONSISTENCY_CHECK(CheckPointer(m_pImpl)); |
8752 | m_pDecl->Release(); |
8753 | m_pImpl->Release(); |
8754 | } |
8755 | |
8756 | //========================================================================================== |
8757 | BOOL |
8758 | MethodTable::MethodDataInterfaceImpl::PopulateNextLevel() |
8759 | { |
8760 | LIMITED_METHOD_CONTRACT; |
8761 | |
8762 | // Get the chain depth to next decode. |
8763 | UINT32 iChainDepth = GetNextChainDepth(); |
8764 | |
8765 | // If the chain depth is MAX_CHAIN_DEPTH, then we've already parsed every parent. |
8766 | if (iChainDepth == MAX_CHAIN_DEPTH) { |
8767 | return FALSE; |
8768 | } |
8769 | |
8770 | // Now move up the chain to the target. |
8771 | MethodTable *pMTCur = m_pImpl->GetImplMethodTable(); |
8772 | for (UINT32 i = 0; pMTCur != NULL && i < iChainDepth; i++) { |
8773 | pMTCur = pMTCur->GetParentMethodTable(); |
8774 | } |
8775 | |
8776 | // If we reached the end, then we're done. |
8777 | if (pMTCur == NULL) { |
8778 | SetNextChainDepth(MAX_CHAIN_DEPTH); |
8779 | return FALSE; |
8780 | } |
8781 | |
8782 | if (m_cDeclTypeIDs != 0) |
8783 | { // We got the TypeIDs from TypeLoader, use them |
8784 | ProcessMap(m_rgDeclTypeIDs, m_cDeclTypeIDs, pMTCur, iChainDepth, GetEntryData()); |
8785 | } |
8786 | else |
8787 | { // We should decode all interface duplicates of code:m_pDecl |
8788 | MethodTable * pDeclMT = m_pDecl->GetImplMethodTable(); |
8789 | INDEBUG(BOOL dbg_fInterfaceFound = FALSE); |
8790 | |
        // Call code:ProcessMap for every (duplicate) occurrence of interface code:pDeclMT in the interface
8792 | // map of code:m_pImpl |
8793 | MethodTable::InterfaceMapIterator it = m_pImpl->GetImplMethodTable()->IterateInterfaceMap(); |
8794 | while (it.Next()) |
8795 | { |
8796 | if (pDeclMT == it.GetInterface()) |
8797 | { // We found the interface |
8798 | INDEBUG(dbg_fInterfaceFound = TRUE); |
8799 | DispatchMapTypeID declTypeID = DispatchMapTypeID::InterfaceClassID(it.GetIndex()); |
8800 | |
8801 | ProcessMap(&declTypeID, 1, pMTCur, iChainDepth, GetEntryData()); |
8802 | } |
8803 | } |
        // The interface code:m_pDecl should be found at least once in the interface map of
        // code:m_pImpl; otherwise the caller passed in wrong information.
8806 | _ASSERTE(dbg_fInterfaceFound); |
8807 | } |
8808 | |
8809 | SetNextChainDepth(iChainDepth + 1); |
8810 | |
8811 | return TRUE; |
8812 | } // MethodTable::MethodDataInterfaceImpl::PopulateNextLevel |
8813 | |
8814 | //========================================================================================== |
8815 | UINT32 MethodTable::MethodDataInterfaceImpl::MapToImplSlotNumber(UINT32 slotNumber) |
8816 | { |
8817 | LIMITED_METHOD_CONTRACT; |
8818 | |
8819 | _ASSERTE(slotNumber < GetNumMethods()); |
8820 | |
8821 | MethodDataEntry *pEntry = GetEntry(slotNumber); |
8822 | while (!pEntry->IsImplInit() && PopulateNextLevel()) {} |
8823 | if (pEntry->IsImplInit()) { |
8824 | return pEntry->GetImplSlotNum(); |
8825 | } |
8826 | else { |
8827 | return INVALID_SLOT_NUMBER; |
8828 | } |
8829 | } |
8830 | |
8831 | //========================================================================================== |
8832 | DispatchSlot MethodTable::MethodDataInterfaceImpl::GetImplSlot(UINT32 slotNumber) |
8833 | { |
8834 | WRAPPER_NO_CONTRACT; |
8835 | UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber); |
8836 | if (implSlotNumber == INVALID_SLOT_NUMBER) { |
8837 | return DispatchSlot(NULL); |
8838 | } |
8839 | return m_pImpl->GetImplSlot(implSlotNumber); |
8840 | } |
8841 | |
8842 | //========================================================================================== |
8843 | UINT32 MethodTable::MethodDataInterfaceImpl::GetImplSlotNumber(UINT32 slotNumber) |
8844 | { |
8845 | WRAPPER_NO_CONTRACT; |
8846 | return MapToImplSlotNumber(slotNumber); |
8847 | } |
8848 | |
8849 | //========================================================================================== |
8850 | MethodDesc *MethodTable::MethodDataInterfaceImpl::GetImplMethodDesc(UINT32 slotNumber) |
8851 | { |
8852 | WRAPPER_NO_CONTRACT; |
8853 | UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber); |
8854 | if (implSlotNumber == INVALID_SLOT_NUMBER) { |
8855 | return NULL; |
8856 | } |
    return m_pImpl->GetImplMethodDesc(implSlotNumber);
8858 | } |
8859 | |
8860 | //========================================================================================== |
8861 | void MethodTable::MethodDataInterfaceImpl::InvalidateCachedVirtualSlot(UINT32 slotNumber) |
8862 | { |
8863 | WRAPPER_NO_CONTRACT; |
8864 | UINT32 implSlotNumber = MapToImplSlotNumber(slotNumber); |
8865 | if (implSlotNumber == INVALID_SLOT_NUMBER) { |
8866 | return; |
8867 | } |
    return m_pImpl->InvalidateCachedVirtualSlot(implSlotNumber);
8869 | } |
8870 | |
8871 | //========================================================================================== |
8872 | void MethodTable::CheckInitMethodDataCache() |
8873 | { |
8874 | CONTRACTL { |
8875 | THROWS; |
8876 | GC_NOTRIGGER; |
8877 | SO_TOLERANT; |
8878 | } CONTRACTL_END; |
8879 | if (s_pMethodDataCache == NULL) |
8880 | { |
8881 | UINT32 cb = MethodDataCache::GetObjectSize(8); |
8882 | NewHolder<BYTE> hb(new BYTE[cb]); |
8883 | MethodDataCache *pCache = new (hb.GetValue()) MethodDataCache(8); |
8884 | if (InterlockedCompareExchangeT( |
8885 | &s_pMethodDataCache, pCache, NULL) == NULL) |
8886 | { |
8887 | hb.SuppressRelease(); |
8888 | } |
8889 | // If somebody beat us, return and allow the holders to take care of cleanup. |
8890 | else |
8891 | { |
8892 | return; |
8893 | } |
8894 | } |
8895 | } |
8896 | |
8897 | //========================================================================================== |
8898 | void MethodTable::ClearMethodDataCache() |
8899 | { |
8900 | LIMITED_METHOD_CONTRACT; |
8901 | if (s_pMethodDataCache != NULL) { |
8902 | s_pMethodDataCache->Clear(); |
8903 | } |
8904 | } |
8905 | |
8906 | //========================================================================================== |
8907 | MethodTable::MethodData *MethodTable::FindMethodDataHelper(MethodTable *pMTDecl, MethodTable *pMTImpl) |
8908 | { |
8909 | CONTRACTL { |
8910 | NOTHROW; |
8911 | GC_NOTRIGGER; |
8912 | CONSISTENCY_CHECK(s_fUseMethodDataCache); |
8913 | } CONTRACTL_END; |
8914 | |
8915 | return s_pMethodDataCache->Find(pMTDecl, pMTImpl); |
8916 | } |
8917 | |
8918 | //========================================================================================== |
8919 | MethodTable::MethodData *MethodTable::FindParentMethodDataHelper(MethodTable *pMT) |
8920 | { |
8921 | CONTRACTL |
8922 | { |
8923 | NOTHROW; |
8924 | GC_NOTRIGGER; |
8925 | MODE_ANY; |
8926 | } |
8927 | CONTRACTL_END; |
8928 | MethodData *pData = NULL; |
8929 | if (s_fUseMethodDataCache && s_fUseParentMethodData) { |
8930 | if (!pMT->IsInterface()) { |
8931 | //@todo : this won't be correct for non-shared code |
8932 | MethodTable *pMTParent = pMT->GetParentMethodTable(); |
8933 | if (pMTParent != NULL) { |
8934 | pData = FindMethodDataHelper(pMTParent, pMTParent); |
8935 | } |
8936 | } |
8937 | } |
8938 | return pData; |
8939 | } |
8940 | |
8941 | //========================================================================================== |
8942 | // This method does not cache the resulting MethodData object in the global MethodDataCache. |
8943 | // The TypeIDs (rgDeclTypeIDs with cDeclTypeIDs items) have to be sorted. |
8944 | MethodTable::MethodData * |
8945 | MethodTable::GetMethodDataHelper( |
8946 | const DispatchMapTypeID * rgDeclTypeIDs, |
8947 | UINT32 cDeclTypeIDs, |
8948 | MethodTable * pMTDecl, |
8949 | MethodTable * pMTImpl) |
8950 | { |
8951 | CONTRACTL { |
8952 | THROWS; |
8953 | WRAPPER(GC_TRIGGERS); |
8954 | PRECONDITION(CheckPointer(pMTDecl)); |
8955 | PRECONDITION(CheckPointer(pMTImpl)); |
8956 | } CONTRACTL_END; |
8957 | |
8958 | //@TODO: Must adjust this to use an alternate allocator so that we don't |
8959 | //@TODO: potentially cause deadlocks on the debug thread. |
8960 | SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE; |
8961 | |
8962 | CONSISTENCY_CHECK(pMTDecl->IsInterface() && !pMTImpl->IsInterface()); |
8963 | |
8964 | #ifdef _DEBUG |
8965 | // Check that rgDeclTypeIDs are sorted, are valid interface indexes and reference only pMTDecl interface |
8966 | { |
8967 | InterfaceInfo_t * rgImplInterfaceMap = pMTImpl->GetInterfaceMap(); |
8968 | UINT32 cImplInterfaceMap = pMTImpl->GetNumInterfaces(); |
8969 | // Verify that all types referenced by code:rgDeclTypeIDs are code:pMTDecl (declared interface) |
8970 | for (UINT32 nDeclTypeIDIndex = 0; nDeclTypeIDIndex < cDeclTypeIDs; nDeclTypeIDIndex++) |
8971 | { |
8972 | if (nDeclTypeIDIndex > 0) |
8973 | { // Verify that interface indexes are sorted |
8974 | _ASSERTE(rgDeclTypeIDs[nDeclTypeIDIndex - 1].GetInterfaceNum() < rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum()); |
8975 | } |
8976 | UINT32 nInterfaceIndex = rgDeclTypeIDs[nDeclTypeIDIndex].GetInterfaceNum(); |
            _ASSERTE(nInterfaceIndex < cImplInterfaceMap);
8978 | { |
8979 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); |
8980 | _ASSERTE(rgImplInterfaceMap[nInterfaceIndex].GetApproxMethodTable(pMTImpl->GetLoaderModule())->HasSameTypeDefAs(pMTDecl)); |
8981 | } |
8982 | } |
8983 | } |
8984 | #endif //_DEBUG |
8985 | |
    // Can't cache, since this is a custom MethodData view used during BuildMethodTable
8987 | MethodDataWrapper hDecl(GetMethodData(pMTDecl, FALSE)); |
8988 | MethodDataWrapper hImpl(GetMethodData(pMTImpl, FALSE)); |
8989 | |
8990 | UINT32 cb = MethodDataInterfaceImpl::GetObjectSize(pMTDecl); |
8991 | NewHolder<BYTE> pb(new BYTE[cb]); |
8992 | MethodDataInterfaceImpl * pData = new (pb.GetValue()) MethodDataInterfaceImpl(rgDeclTypeIDs, cDeclTypeIDs, hDecl, hImpl); |
8993 | pb.SuppressRelease(); |
8994 | |
8995 | return pData; |
8996 | } // MethodTable::GetMethodDataHelper |
8997 | |
8998 | //========================================================================================== |
8999 | // The fCanCache argument determines if the resulting MethodData object can |
9000 | // be added to the global MethodDataCache. This is used when requesting a |
9001 | // MethodData object for a type currently being built. |
9002 | MethodTable::MethodData *MethodTable::GetMethodDataHelper(MethodTable *pMTDecl, |
9003 | MethodTable *pMTImpl, |
9004 | BOOL fCanCache) |
9005 | { |
9006 | CONTRACTL { |
9007 | THROWS; |
9008 | WRAPPER(GC_TRIGGERS); |
9009 | PRECONDITION(CheckPointer(pMTDecl)); |
9010 | PRECONDITION(CheckPointer(pMTImpl)); |
9011 | PRECONDITION(pMTDecl == pMTImpl || |
9012 | (pMTDecl->IsInterface() && !pMTImpl->IsInterface())); |
9013 | } CONTRACTL_END; |
9014 | |
9015 | //@TODO: Must adjust this to use an alternate allocator so that we don't |
9016 | //@TODO: potentially cause deadlocks on the debug thread. |
9017 | SUPPRESS_ALLOCATION_ASSERTS_IN_THIS_SCOPE; |
9018 | |
9019 | if (s_fUseMethodDataCache) { |
9020 | MethodData *pData = FindMethodDataHelper(pMTDecl, pMTImpl); |
9021 | if (pData != NULL) { |
9022 | return pData; |
9023 | } |
9024 | } |
9025 | |
    // If we get here, the cache contains no entry for this decl/impl pair.
9027 | MethodData *pData = NULL; |
9028 | if (pMTDecl == pMTImpl) { |
9029 | if (pMTDecl->IsInterface()) { |
9030 | pData = new MethodDataInterface(pMTDecl); |
9031 | } |
9032 | else { |
9033 | UINT32 cb = MethodDataObject::GetObjectSize(pMTDecl); |
9034 | NewHolder<BYTE> pb(new BYTE[cb]); |
9035 | MethodDataHolder h(FindParentMethodDataHelper(pMTDecl)); |
9036 | pData = new (pb.GetValue()) MethodDataObject(pMTDecl, h.GetValue()); |
9037 | pb.SuppressRelease(); |
9038 | } |
9039 | } |
9040 | else { |
9041 | pData = GetMethodDataHelper( |
9042 | NULL, |
9043 | 0, |
9044 | pMTDecl, |
9045 | pMTImpl); |
9046 | } |
9047 | |
9048 | // Insert in the cache if it is active. |
9049 | if (fCanCache && s_fUseMethodDataCache) { |
9050 | s_pMethodDataCache->Insert(pData); |
9051 | } |
9052 | |
9053 | // Do not AddRef, already initialized to 1. |
9054 | return pData; |
9055 | } |
9056 | |
9057 | //========================================================================================== |
9058 | // The fCanCache argument determines if the resulting MethodData object can |
9059 | // be added to the global MethodDataCache. This is used when requesting a |
9060 | // MethodData object for a type currently being built. |
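// Illustrative usage sketch (not code from this file; it assumes, as the calls
// elsewhere in this file suggest, that MethodDataWrapper releases the reference
// when it goes out of scope):
//
//     MethodTable::MethodDataWrapper hData(MethodTable::GetMethodData(pMT, TRUE));
//     for (UINT32 i = 0; i < hData->GetNumMethods(); i++)
//     {
//         MethodDesc * pMD = hData->GetImplMethodDesc(i);
//         // ...
//     }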
9061 | MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMTDecl, |
9062 | MethodTable *pMTImpl, |
9063 | BOOL fCanCache) |
9064 | { |
9065 | CONTRACTL { |
9066 | THROWS; |
9067 | WRAPPER(GC_TRIGGERS); |
9068 | } CONTRACTL_END; |
9069 | |
9070 | MethodDataWrapper hData(GetMethodDataHelper(pMTDecl, pMTImpl, fCanCache)); |
9071 | hData.SuppressRelease(); |
9072 | return hData; |
9073 | } |
9074 | |
9075 | //========================================================================================== |
9076 | // This method does not cache the resulting MethodData object in the global MethodDataCache. |
9077 | MethodTable::MethodData * |
9078 | MethodTable::GetMethodData( |
9079 | const DispatchMapTypeID * rgDeclTypeIDs, |
9080 | UINT32 cDeclTypeIDs, |
9081 | MethodTable * pMTDecl, |
9082 | MethodTable * pMTImpl) |
9083 | { |
9084 | CONTRACTL { |
9085 | THROWS; |
9086 | WRAPPER(GC_TRIGGERS); |
9087 | PRECONDITION(pMTDecl != pMTImpl); |
9088 | PRECONDITION(pMTDecl->IsInterface()); |
9089 | PRECONDITION(!pMTImpl->IsInterface()); |
9090 | } CONTRACTL_END; |
9091 | |
9092 | MethodDataWrapper hData(GetMethodDataHelper(rgDeclTypeIDs, cDeclTypeIDs, pMTDecl, pMTImpl)); |
9093 | hData.SuppressRelease(); |
9094 | return hData; |
9095 | } |
9096 | |
9097 | //========================================================================================== |
9098 | // The fCanCache argument determines if the resulting MethodData object can |
9099 | // be added to the global MethodDataCache. This is used when requesting a |
9100 | // MethodData object for a type currently being built. |
9101 | MethodTable::MethodData *MethodTable::GetMethodData(MethodTable *pMT, |
9102 | BOOL fCanCache) |
9103 | { |
9104 | WRAPPER_NO_CONTRACT; |
9105 | return GetMethodData(pMT, pMT, fCanCache); |
9106 | } |
9107 | |
9108 | //========================================================================================== |
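// MethodIterator enumerates the slots of a MethodData view. A minimal usage
// sketch (assuming the IsValid/Next/GetMethodDesc accessors declared in
// methodtable.h):
//
//     MethodTable::MethodIterator it(pMT);
//     for (; it.IsValid(); it.Next())
//     {
//         MethodDesc * pMD = it.GetMethodDesc();
//         // ...
//     }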
9109 | MethodTable::MethodIterator::MethodIterator(MethodTable *pMTDecl, MethodTable *pMTImpl) |
9110 | { |
9111 | WRAPPER_NO_CONTRACT; |
9112 | Init(pMTDecl, pMTImpl); |
9113 | } |
9114 | |
9115 | //========================================================================================== |
9116 | MethodTable::MethodIterator::MethodIterator(MethodTable *pMT) |
9117 | { |
9118 | WRAPPER_NO_CONTRACT; |
9119 | Init(pMT, pMT); |
9120 | } |
9121 | |
9122 | //========================================================================================== |
9123 | MethodTable::MethodIterator::MethodIterator(MethodData *pMethodData) |
9124 | { |
9125 | CONTRACTL { |
9126 | NOTHROW; |
9127 | GC_NOTRIGGER; |
9128 | PRECONDITION(CheckPointer(pMethodData)); |
9129 | } CONTRACTL_END; |
9130 | |
9131 | m_pMethodData = pMethodData; |
9132 | m_pMethodData->AddRef(); |
9133 | m_iCur = 0; |
9134 | m_iMethods = (INT32)m_pMethodData->GetNumMethods(); |
9135 | } |
9136 | |
9137 | //========================================================================================== |
9138 | MethodTable::MethodIterator::MethodIterator(const MethodIterator &it) |
9139 | { |
9140 | WRAPPER_NO_CONTRACT; |
9141 | m_pMethodData = it.m_pMethodData; |
9142 | m_pMethodData->AddRef(); |
9143 | m_iCur = it.m_iCur; |
9144 | m_iMethods = it.m_iMethods; |
9145 | } |
9146 | |
9147 | //========================================================================================== |
9148 | void MethodTable::MethodIterator::Init(MethodTable *pMTDecl, MethodTable *pMTImpl) |
9149 | { |
9150 | CONTRACTL { |
9151 | THROWS; |
9152 | WRAPPER(GC_TRIGGERS); |
9153 | INJECT_FAULT(COMPlusThrowOM()); |
9154 | PRECONDITION(CheckPointer(pMTDecl)); |
9155 | PRECONDITION(CheckPointer(pMTImpl)); |
9156 | } CONTRACTL_END; |
9157 | |
    LOG((LF_LOADER, LL_INFO10000, "SD: MT::MethodIterator created for %s.\n", pMTDecl->GetDebugClassName()));
9159 | |
9160 | m_pMethodData = MethodTable::GetMethodData(pMTDecl, pMTImpl); |
9161 | CONSISTENCY_CHECK(CheckPointer(m_pMethodData)); |
9162 | m_iCur = 0; |
9163 | m_iMethods = (INT32)m_pMethodData->GetNumMethods(); |
9164 | } |
9165 | #endif // !DACCESS_COMPILE |
9166 | |
9167 | //========================================================================================== |
9168 | |
9169 | void MethodTable::IntroducedMethodIterator::SetChunk(MethodDescChunk * pChunk) |
9170 | { |
9171 | LIMITED_METHOD_CONTRACT; |
9172 | |
9173 | if (pChunk) |
9174 | { |
9175 | m_pMethodDesc = pChunk->GetFirstMethodDesc(); |
9176 | |
9177 | m_pChunk = pChunk; |
9178 | m_pChunkEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf(); |
9179 | } |
9180 | else |
9181 | { |
9182 | m_pMethodDesc = NULL; |
9183 | } |
9184 | } |
9185 | |
9186 | //========================================================================================== |
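// GetFirst/GetNext implement the chunk-by-chunk walk that backs
// IntroducedMethodIterator. A hedged usage sketch, analogous to the
// MethodIterator sketch above:
//
//     MethodTable::IntroducedMethodIterator it(pMT);
//     for (; it.IsValid(); it.Next())
//     {
//         MethodDesc * pMD = it.GetMethodDesc();
//         // ...
//     }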
9187 | |
9188 | MethodDesc * MethodTable::IntroducedMethodIterator::GetFirst(MethodTable *pMT) |
9189 | { |
9190 | LIMITED_METHOD_CONTRACT; |
9191 | MethodDescChunk * pChunk = pMT->GetClass()->GetChunks(); |
9192 | return (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL; |
9193 | } |
9194 | |
9195 | //========================================================================================== |
9196 | MethodDesc * MethodTable::IntroducedMethodIterator::GetNext(MethodDesc * pMD) |
9197 | { |
9198 | WRAPPER_NO_CONTRACT; |
9199 | |
9200 | MethodDescChunk * pChunk = pMD->GetMethodDescChunk(); |
9201 | |
9202 | // Check whether the next MethodDesc is still within the bounds of the current chunk |
9203 | TADDR pNext = dac_cast<TADDR>(pMD) + pMD->SizeOf(); |
9204 | TADDR pEnd = dac_cast<TADDR>(pChunk) + pChunk->SizeOf(); |
9205 | |
9206 | if (pNext < pEnd) |
9207 | { |
9208 | // Just skip to the next method in the same chunk |
9209 | pMD = PTR_MethodDesc(pNext); |
9210 | } |
9211 | else |
9212 | { |
9213 | _ASSERTE(pNext == pEnd); |
9214 | |
9215 | // We have walked all the methods in the current chunk. Move on |
9216 | // to the next chunk. |
9217 | pChunk = pChunk->GetNextChunk(); |
9218 | |
9219 | pMD = (pChunk != NULL) ? pChunk->GetFirstMethodDesc() : NULL; |
9220 | } |
9221 | |
9222 | return pMD; |
9223 | } |
9224 | |
9225 | //========================================================================================== |
9226 | PTR_GuidInfo MethodTable::GetGuidInfo() |
9227 | { |
9228 | CONTRACTL |
9229 | { |
9230 | THROWS; |
9231 | GC_NOTRIGGER; |
9232 | MODE_ANY; |
9233 | } |
9234 | CONTRACTL_END; |
9235 | |
9236 | #ifdef FEATURE_COMINTEROP |
9237 | if (HasGuidInfo()) |
9238 | { |
9239 | return *GetGuidInfoPtr(); |
9240 | } |
9241 | #endif // FEATURE_COMINTEROP |
9242 | _ASSERTE(GetClass()); |
9243 | return GetClass()->GetGuidInfo(); |
9244 | } |
9245 | |
9246 | //========================================================================================== |
9247 | void MethodTable::SetGuidInfo(GuidInfo* pGuidInfo) |
9248 | { |
9249 | CONTRACTL |
9250 | { |
9251 | THROWS; |
9252 | GC_NOTRIGGER; |
9253 | MODE_ANY; |
9254 | } |
9255 | CONTRACTL_END; |
9256 | |
9257 | #ifndef DACCESS_COMPILE |
9258 | |
9259 | #ifdef FEATURE_COMINTEROP |
9260 | if (HasGuidInfo()) |
9261 | { |
9262 | *EnsureWritablePages(GetGuidInfoPtr()) = pGuidInfo; |
9263 | return; |
9264 | } |
9265 | #endif // FEATURE_COMINTEROP |
9266 | _ASSERTE(GetClass()); |
9267 | GetClass()->SetGuidInfo (pGuidInfo); |
9268 | |
9269 | #endif // DACCESS_COMPILE |
9270 | } |
9271 | |
9272 | #if defined(FEATURE_COMINTEROP) && !defined(DACCESS_COMPILE) |
9273 | |
9274 | //========================================================================================== |
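// Allocates this type's RCWPerTypeData from the low-frequency loader heap and
// publishes it with a compare-exchange. If another thread publishes first, the
// AllocMemTracker releases the losing allocation and the already-published
// pointer is returned instead.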
9275 | RCWPerTypeData *MethodTable::CreateRCWPerTypeData(bool bThrowOnOOM) |
9276 | { |
9277 | CONTRACTL |
9278 | { |
9279 | if (bThrowOnOOM) THROWS; else NOTHROW; |
9280 | GC_NOTRIGGER; |
9281 | MODE_ANY; |
9282 | PRECONDITION(HasRCWPerTypeData()); |
9283 | } |
9284 | CONTRACTL_END; |
9285 | |
9286 | AllocMemTracker amTracker; |
9287 | |
9288 | RCWPerTypeData *pData; |
9289 | if (bThrowOnOOM) |
9290 | { |
9291 | TaggedMemAllocPtr ptr = GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(RCWPerTypeData))); |
9292 | pData = (RCWPerTypeData *)amTracker.Track(ptr); |
9293 | } |
9294 | else |
9295 | { |
9296 | TaggedMemAllocPtr ptr = GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem_NoThrow(S_SIZE_T(sizeof(RCWPerTypeData))); |
9297 | pData = (RCWPerTypeData *)amTracker.Track_NoThrow(ptr); |
9298 | if (pData == NULL) |
9299 | { |
9300 | return NULL; |
9301 | } |
9302 | } |
9303 | |
    // Memory is zero-initialized, which means that nothing has been computed yet
9305 | _ASSERTE(pData->m_dwFlags == 0); |
9306 | |
9307 | RCWPerTypeData **pDataPtr = GetRCWPerTypeDataPtr(); |
9308 | |
9309 | if (bThrowOnOOM) |
9310 | { |
9311 | EnsureWritablePages(pDataPtr); |
9312 | } |
9313 | else |
9314 | { |
9315 | if (!EnsureWritablePagesNoThrow(pDataPtr, sizeof(*pDataPtr))) |
9316 | { |
9317 | return NULL; |
9318 | } |
9319 | } |
9320 | |
9321 | if (InterlockedCompareExchangeT(pDataPtr, pData, NULL) == NULL) |
9322 | { |
9323 | amTracker.SuppressRelease(); |
9324 | } |
9325 | else |
9326 | { |
9327 | // another thread already published the pointer |
9328 | pData = *pDataPtr; |
9329 | } |
9330 | |
9331 | return pData; |
9332 | } |
9333 | |
9334 | //========================================================================================== |
9335 | RCWPerTypeData *MethodTable::GetRCWPerTypeData(bool bThrowOnOOM /*= true*/) |
9336 | { |
9337 | CONTRACTL |
9338 | { |
9339 | if (bThrowOnOOM) THROWS; else NOTHROW; |
9340 | GC_NOTRIGGER; |
9341 | MODE_ANY; |
9342 | } |
9343 | CONTRACTL_END; |
9344 | |
9345 | if (!HasRCWPerTypeData()) |
9346 | return NULL; |
9347 | |
9348 | RCWPerTypeData *pData = *GetRCWPerTypeDataPtr(); |
9349 | if (pData == NULL) |
9350 | { |
9351 | // creation is factored out into a separate routine to avoid paying the EH cost here |
9352 | pData = CreateRCWPerTypeData(bThrowOnOOM); |
9353 | } |
9354 | |
9355 | return pData; |
9356 | } |
9357 | |
9358 | #endif // FEATURE_COMINTEROP && !DACCESS_COMPILE |
9359 | |
9360 | //========================================================================================== |
9361 | CHECK MethodTable::CheckActivated() |
9362 | { |
9363 | WRAPPER_NO_CONTRACT; |
9364 | |
9365 | if (!IsArray()) |
9366 | { |
9367 | CHECK(GetModule()->CheckActivated()); |
9368 | } |
9369 | |
9370 | // <TODO> Check all generic type parameters as well </TODO> |
9371 | |
9372 | CHECK_OK; |
9373 | } |
9374 | |
9375 | #ifdef _MSC_VER |
9376 | // Optimization intended for EnsureInstanceActive, EnsureActive only |
9377 | #pragma optimize("t", on) |
9378 | #endif // _MSC_VER |
9379 | //========================================================================================== |
9380 | |
9381 | #ifndef DACCESS_COMPILE |
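// Activates everything needed before instances of this type can be created:
// this type's module, the module of every parent it has module dependencies on,
// and (recursively) the modules of all non-TypeDesc instantiation arguments.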
9382 | VOID MethodTable::EnsureInstanceActive() |
9383 | { |
9384 | CONTRACTL |
9385 | { |
9386 | GC_TRIGGERS; |
9387 | THROWS; |
9388 | MODE_ANY; |
9389 | } |
9390 | CONTRACTL_END; |
9391 | |
9392 | Module * pModule = GetModule(); |
9393 | pModule->EnsureActive(); |
9394 | |
9395 | MethodTable * pMT = this; |
9396 | while (pMT->HasModuleDependencies()) |
9397 | { |
9398 | pMT = pMT->GetParentMethodTable(); |
9399 | _ASSERTE(pMT != NULL); |
9400 | |
9401 | Module * pParentModule = pMT->GetModule(); |
9402 | if (pParentModule != pModule) |
9403 | { |
9404 | pModule = pParentModule; |
9405 | pModule->EnsureActive(); |
9406 | } |
9407 | } |
9408 | |
9409 | if (HasInstantiation()) |
9410 | { |
9411 | // This is going to go recursive, so we need to use an interior stack probe |
9412 | |
9413 | INTERIOR_STACK_PROBE(GetThread()); |
9414 | { |
9415 | Instantiation inst = GetInstantiation(); |
9416 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
9417 | { |
9418 | TypeHandle thArg = inst[i]; |
9419 | if (!thArg.IsTypeDesc()) |
9420 | { |
9421 | thArg.AsMethodTable()->EnsureInstanceActive(); |
9422 | } |
9423 | } |
9424 | } |
9425 | END_INTERIOR_STACK_PROBE; |
9426 | } |
9427 | |
9428 | } |
9429 | #endif //!DACCESS_COMPILE |
9430 | |
9431 | //========================================================================================== |
9432 | #ifndef DACCESS_COMPILE |
9433 | VOID MethodTable::EnsureActive() |
9434 | { |
9435 | WRAPPER_NO_CONTRACT; |
9436 | |
9437 | GetModule()->EnsureActive(); |
9438 | } |
9439 | #endif |
9440 | |
9441 | #ifdef _MSC_VER |
9442 | #pragma optimize("", on) |
9443 | #endif // _MSC_VER |
9444 | |
9445 | //========================================================================================== |
9446 | CHECK MethodTable::CheckInstanceActivated() |
9447 | { |
9448 | WRAPPER_NO_CONTRACT; |
9449 | |
9450 | if (IsArray()) |
9451 | CHECK_OK; |
9452 | |
9453 | Module * pModule = GetModule(); |
9454 | CHECK(pModule->CheckActivated()); |
9455 | |
9456 | MethodTable * pMT = this; |
9457 | while (pMT->HasModuleDependencies()) |
9458 | { |
9459 | pMT = pMT->GetParentMethodTable(); |
9460 | _ASSERTE(pMT != NULL); |
9461 | |
9462 | Module * pParentModule = pMT->GetModule(); |
9463 | if (pParentModule != pModule) |
9464 | { |
9465 | pModule = pParentModule; |
9466 | CHECK(pModule->CheckActivated()); |
9467 | } |
9468 | } |
9469 | |
9470 | CHECK_OK; |
9471 | } |
9472 | |
9473 | #ifdef DACCESS_COMPILE |
9474 | |
9475 | //========================================================================================== |
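// Reports every memory region DAC needs to materialize this MethodTable from a
// dump: the MethodTable itself (through the end of its optional members), the
// canonical MethodTable or EEClass, the parent chain, the non-virtual slots
// array, the interface map, generic dictionaries, vtable indirection chunks,
// the writeable data, and (outside of mini/triage dumps) the dispatch map.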
9476 | void |
9477 | MethodTable::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) |
9478 | { |
9479 | WRAPPER_NO_CONTRACT; |
9480 | |
9481 | DAC_CHECK_ENUM_THIS(); |
    EMEM_OUT(("MEM: %p MethodTable\n", dac_cast<TADDR>(this)));
9483 | |
9484 | DWORD size = GetEndOffsetOfOptionalMembers(); |
9485 | DacEnumMemoryRegion(dac_cast<TADDR>(this), size); |
9486 | |
9487 | if (!IsCanonicalMethodTable()) |
9488 | { |
9489 | PTR_MethodTable pMTCanonical = GetCanonicalMethodTable(); |
9490 | |
9491 | if (pMTCanonical.IsValid()) |
9492 | { |
9493 | pMTCanonical->EnumMemoryRegions(flags); |
9494 | } |
9495 | } |
9496 | else |
9497 | { |
9498 | PTR_EEClass pClass = GetClass(); |
9499 | |
9500 | if (pClass.IsValid()) |
9501 | { |
9502 | if (IsArray()) |
9503 | { |
                // This is something of a workaround: ArrayClass is derived from EEClass,
                // but not virtually, so we only perform the cast when the IsArray()
                // predicate above holds. For minidumps, DAC will choke if the full size
                // given by ArrayClass is not available. If ArrayClass becomes more
                // complex, it should get its own EnumMemoryRegions().
9509 | DacEnumMemoryRegion(dac_cast<TADDR>(pClass), sizeof(ArrayClass)); |
9510 | } |
9511 | pClass->EnumMemoryRegions(flags, this); |
9512 | } |
9513 | } |
9514 | |
9515 | PTR_MethodTable pMTParent = GetParentMethodTable(); |
9516 | |
9517 | if (pMTParent.IsValid()) |
9518 | { |
9519 | pMTParent->EnumMemoryRegions(flags); |
9520 | } |
9521 | |
9522 | if (HasNonVirtualSlotsArray()) |
9523 | { |
9524 | DacEnumMemoryRegion(dac_cast<TADDR>(GetNonVirtualSlotsArray()), GetNonVirtualSlotsArraySize()); |
9525 | } |
9526 | |
9527 | if (HasInterfaceMap()) |
9528 | { |
9529 | #ifdef FEATURE_COMINTEROP |
9530 | if (HasDynamicInterfaceMap()) |
9531 | DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()) - sizeof(DWORD_PTR), GetInterfaceMapSize()); |
9532 | else |
9533 | #endif // FEATURE_COMINTEROP |
9534 | DacEnumMemoryRegion(dac_cast<TADDR>(GetInterfaceMap()), GetInterfaceMapSize()); |
9535 | |
9536 | EnumMemoryRegionsForExtraInterfaceInfo(); |
9537 | } |
9538 | |
    if (HasPerInstInfo())
9540 | { |
9541 | DacEnumMemoryRegion(dac_cast<TADDR>(GetPerInstInfo()) - sizeof(GenericsDictInfo), GetPerInstInfoSize() + sizeof(GenericsDictInfo)); |
9542 | } |
9543 | |
9544 | if (GetDictionary() != NULL) |
9545 | { |
9546 | DacEnumMemoryRegion(dac_cast<TADDR>(GetDictionary()), GetInstAndDictSize()); |
9547 | } |
9548 | |
9549 | VtableIndirectionSlotIterator it = IterateVtableIndirectionSlots(); |
9550 | while (it.Next()) |
9551 | { |
9552 | DacEnumMemoryRegion(dac_cast<TADDR>(it.GetIndirectionSlot()), it.GetSize()); |
9553 | } |
9554 | |
9555 | PTR_MethodTableWriteableData pWriteableData = ReadPointer(this, &MethodTable::m_pWriteableData); |
9556 | if (pWriteableData.IsValid()) |
9557 | { |
9558 | pWriteableData.EnumMem(); |
9559 | } |
9560 | |
9561 | if (flags != CLRDATA_ENUM_MEM_MINI && flags != CLRDATA_ENUM_MEM_TRIAGE) |
9562 | { |
9563 | DispatchMap * pMap = GetDispatchMap(); |
9564 | if (pMap != NULL) |
9565 | { |
9566 | pMap->EnumMemoryRegions(flags); |
9567 | } |
9568 | } |
9569 | } // MethodTable::EnumMemoryRegions |
9570 | |
9571 | #endif // DACCESS_COMPILE |
9572 | |
9573 | //========================================================================================== |
9574 | BOOL MethodTable::ContainsGenericMethodVariables() |
9575 | { |
9576 | CONTRACTL |
9577 | { |
9578 | NOTHROW; |
9579 | GC_NOTRIGGER; |
9580 | FORBID_FAULT; |
9581 | SUPPORTS_DAC; |
9582 | } |
9583 | CONTRACTL_END |
9584 | |
9585 | Instantiation inst = GetInstantiation(); |
9586 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
9587 | { |
9588 | CONSISTENCY_CHECK(!inst[i].IsEncodedFixup()); |
9589 | if (inst[i].ContainsGenericVariables(TRUE)) |
9590 | return TRUE; |
9591 | } |
9592 | |
9593 | return FALSE; |
9594 | } |
9595 | |
9596 | //========================================================================================== |
9597 | Module *MethodTable::GetDefiningModuleForOpenType() |
9598 | { |
9599 | CONTRACT(Module*) |
9600 | { |
9601 | NOTHROW; |
9602 | GC_NOTRIGGER; |
9603 | FORBID_FAULT; |
9604 | POSTCONDITION((ContainsGenericVariables() != 0) == (RETVAL != NULL)); |
9605 | SUPPORTS_DAC; |
9606 | } |
9607 | CONTRACT_END |
9608 | |
9609 | if (ContainsGenericVariables()) |
9610 | { |
9611 | Instantiation inst = GetInstantiation(); |
9612 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
9613 | { |
9614 | // Encoded fixups are never open types |
9615 | if (!inst[i].IsEncodedFixup()) |
9616 | { |
9617 | Module *pModule = inst[i].GetDefiningModuleForOpenType(); |
9618 | if (pModule != NULL) |
9619 | RETURN pModule; |
9620 | } |
9621 | } |
9622 | } |
9623 | |
9624 | RETURN NULL; |
9625 | } |
9626 | |
9627 | //========================================================================================== |
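// Returns the restored (directly usable) code pointer for the given slot. A
// slot may still be NULL (or, with FEATURE_PREJIT, a virtual import thunk) if
// it was inherited and not yet fixed up; in that case the search walks up the
// chain of canonical method tables and parents until a restored value is found:
//
//     this -> canonical MT -> parent -> canonical MT -> ...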
9628 | PCODE MethodTable::GetRestoredSlot(DWORD slotNumber) |
9629 | { |
9630 | CONTRACTL { |
9631 | NOTHROW; |
9632 | GC_NOTRIGGER; |
9633 | MODE_ANY; |
9634 | SO_TOLERANT; |
9635 | SUPPORTS_DAC; |
9636 | } CONTRACTL_END; |
9637 | |
9638 | // |
9639 | // Keep in sync with code:MethodTable::GetRestoredSlotMT |
9640 | // |
9641 | |
9642 | MethodTable * pMT = this; |
9643 | while (true) |
9644 | { |
9645 | g_IBCLogger.LogMethodTableAccess(pMT); |
9646 | |
9647 | pMT = pMT->GetCanonicalMethodTable(); |
9648 | |
9649 | _ASSERTE(pMT != NULL); |
9650 | |
9651 | PCODE slot = pMT->GetSlot(slotNumber); |
9652 | |
9653 | if ((slot != NULL) |
9654 | #ifdef FEATURE_PREJIT |
9655 | && !pMT->GetLoaderModule()->IsVirtualImportThunk(slot) |
9656 | #endif |
9657 | ) |
9658 | { |
9659 | return slot; |
9660 | } |
9661 | |
        // This is an inherited slot that has not been fixed up yet. Find
        // the value by walking up the inheritance chain.
9664 | pMT = pMT->GetParentMethodTable(); |
9665 | } |
9666 | } |
9667 | |
9668 | //========================================================================================== |
9669 | MethodTable * MethodTable::GetRestoredSlotMT(DWORD slotNumber) |
9670 | { |
9671 | CONTRACTL { |
9672 | NOTHROW; |
9673 | GC_NOTRIGGER; |
9674 | MODE_ANY; |
9675 | SO_TOLERANT; |
9676 | SUPPORTS_DAC; |
9677 | } CONTRACTL_END; |
9678 | |
9679 | // |
9680 | // Keep in sync with code:MethodTable::GetRestoredSlot |
9681 | // |
9682 | |
9683 | MethodTable * pMT = this; |
9684 | while (true) |
9685 | { |
9686 | g_IBCLogger.LogMethodTableAccess(pMT); |
9687 | |
9688 | pMT = pMT->GetCanonicalMethodTable(); |
9689 | |
9690 | _ASSERTE(pMT != NULL); |
9691 | |
9692 | PCODE slot = pMT->GetSlot(slotNumber); |
9693 | |
9694 | if ((slot != NULL) |
9695 | #ifdef FEATURE_PREJIT |
9696 | && !pMT->GetLoaderModule()->IsVirtualImportThunk(slot) |
9697 | #endif |
9698 | ) |
9699 | { |
9700 | return pMT; |
9701 | } |
9702 | |
        // This is an inherited slot that has not been fixed up yet. Find
        // the value by walking up the inheritance chain.
9705 | pMT = pMT->GetParentMethodTable(); |
9706 | } |
9707 | } |
9708 | |
9709 | //========================================================================================== |
9710 | MethodDesc * MethodTable::GetParallelMethodDesc(MethodDesc * pDefMD) |
9711 | { |
9712 | CONTRACTL |
9713 | { |
9714 | NOTHROW; |
9715 | GC_NOTRIGGER; |
9716 | SO_TOLERANT; |
9717 | MODE_ANY; |
9718 | } |
9719 | CONTRACTL_END; |
9720 | return GetMethodDescForSlot(pDefMD->GetSlot()); |
9721 | } |
9722 | |
9723 | #ifndef DACCESS_COMPILE |
9724 | |
9725 | //========================================================================================== |
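// Stores slotCode into the given slot. Note the two storage forms handled at
// the end of this method: virtual slots live in vtable indirection chunks as
// (possibly relative) VTableIndir2_t values, while non-virtual slots are plain
// PCODE entries.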
9726 | void MethodTable::SetSlot(UINT32 slotNumber, PCODE slotCode) |
9727 | { |
9728 | CONTRACTL { |
9729 | NOTHROW; |
9730 | GC_NOTRIGGER; |
9731 | MODE_ANY; |
9732 | } CONTRACTL_END; |
9733 | |
9734 | #ifdef _DEBUG |
9735 | if (slotNumber < GetNumVirtuals()) |
9736 | { |
9737 | // |
9738 | // Verify that slots in shared vtable chunks not owned by this methodtable are only ever patched to stable entrypoint. |
9739 | // This invariant is required to prevent races with code:MethodDesc::SetStableEntryPointInterlocked. |
9740 | // |
9741 | BOOL fSharedVtableChunk = FALSE; |
9742 | DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(slotNumber); |
9743 | |
9744 | if (!IsCanonicalMethodTable()) |
9745 | { |
9746 | if (GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() == GetCanonicalMethodTable()->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull()) |
9747 | fSharedVtableChunk = TRUE; |
9748 | } |
9749 | |
9750 | if (slotNumber < GetNumParentVirtuals()) |
9751 | { |
9752 | if (GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() == GetParentMethodTable()->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull()) |
9753 | fSharedVtableChunk = TRUE; |
9754 | } |
9755 | |
9756 | if (fSharedVtableChunk) |
9757 | { |
9758 | MethodDesc* pMD = GetMethodDescForSlotAddress(slotCode); |
9759 | _ASSERTE(pMD->HasStableEntryPoint()); |
9760 | _ASSERTE(pMD->GetStableEntryPoint() == slotCode); |
9761 | } |
9762 | } |
9763 | #endif |
9764 | |
9765 | // IBC logging is not needed here - slots in ngen images are immutable. |
9766 | |
9767 | #ifdef _TARGET_ARM_ |
9768 | // Ensure on ARM that all target addresses are marked as thumb code. |
9769 | _ASSERTE(IsThumbCode(slotCode)); |
9770 | #endif |
9771 | |
9772 | TADDR slot = GetSlotPtrRaw(slotNumber); |
9773 | if (slotNumber < GetNumVirtuals()) |
9774 | { |
9775 | ((MethodTable::VTableIndir2_t *) slot)->SetValueMaybeNull(slotCode); |
9776 | } |
9777 | else |
9778 | { |
9779 | *((PCODE *)slot) = slotCode; |
9780 | } |
9781 | } |
9782 | |
9783 | //========================================================================================== |
9784 | BOOL MethodTable::HasExplicitOrImplicitPublicDefaultConstructor() |
9785 | { |
9786 | CONTRACTL |
9787 | { |
9788 | NOTHROW; |
9789 | GC_NOTRIGGER; |
9790 | } |
9791 | CONTRACTL_END |
9792 | |
9793 | if (IsValueType()) |
9794 | { |
9795 | // valuetypes have public default ctors implicitly |
9796 | return TRUE; |
9797 | } |
9798 | |
9799 | if (!HasDefaultConstructor()) |
9800 | { |
9801 | return FALSE; |
9802 | } |
9803 | |
9804 | MethodDesc * pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot()); |
9805 | return pCanonMD != NULL && pCanonMD->IsPublic(); |
9806 | } |
9807 | |
9808 | //========================================================================================== |
9809 | MethodDesc *MethodTable::GetDefaultConstructor() |
9810 | { |
9811 | WRAPPER_NO_CONTRACT; |
9812 | _ASSERTE(HasDefaultConstructor()); |
9813 | MethodDesc *pCanonMD = GetMethodDescForSlot(GetDefaultConstructorSlot()); |
9814 | // The default constructor for a value type is an instantiating stub. |
9815 | // The easiest way to find the right stub is to use the following function, |
9816 | // which in the simple case of the default constructor for a class simply |
9817 | // returns pCanonMD immediately. |
9818 | return MethodDesc::FindOrCreateAssociatedMethodDesc(pCanonMD, |
9819 | this, |
9820 | FALSE /* no BoxedEntryPointStub */, |
9821 | Instantiation(), /* no method instantiation */ |
9822 | FALSE /* no allowInstParam */); |
9823 | } |
9824 | |
9825 | //========================================================================================== |
9826 | // Finds the (non-unboxing) MethodDesc that implements the interface method pInterfaceMD. |
9827 | // |
9828 | // Note our ability to resolve constraint methods is affected by the degree of code sharing we are |
9829 | // performing for generic code. |
9830 | // |
9831 | // Return Value: |
9832 | // MethodDesc which can be used as unvirtualized call. Returns NULL if VSD has to be used. |
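// For reference, the IL shape this routine resolves is a constrained call such
// as (illustrative):
//
//     constrained. !!T
//     callvirt instance int32 System.Object::GetHashCode()
//
// where this MethodTable is the constraint type T.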
9833 | MethodDesc * |
9834 | MethodTable::TryResolveConstraintMethodApprox( |
9835 | TypeHandle thInterfaceType, |
9836 | MethodDesc * pInterfaceMD, |
9837 | BOOL * pfForceUseRuntimeLookup) // = NULL |
9838 | { |
9839 | CONTRACTL { |
9840 | THROWS; |
9841 | GC_TRIGGERS; |
9842 | } CONTRACTL_END; |
9843 | |
    // We can't resolve constraint calls effectively for reference types, and there's
    // not much perf benefit in doing so anyway.
9846 | // |
9847 | if (!IsValueType()) |
9848 | { |
        LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: not a value type %s\n", GetDebugClassName()));
9850 | return NULL; |
9851 | } |
9852 | |
9853 | // 1. Find the (possibly generic) method that would implement the |
9854 | // constraint if we were making a call on a boxed value type. |
9855 | |
9856 | MethodTable * pCanonMT = GetCanonicalMethodTable(); |
9857 | |
9858 | MethodDesc * pGenInterfaceMD = pInterfaceMD->StripMethodInstantiation(); |
9859 | MethodDesc * pMD = NULL; |
9860 | if (pGenInterfaceMD->IsInterface()) |
    {   // Sometimes (when compiling shared generic code)
        // we don't have enough exact type information at JIT time
        // even to decide whether we will be able to resolve to an unboxed entry point...
        // To cope with this case we always go via the helper function if there's any
        // chance of this happening: we check every interface which might possibly
        // be compatible with the call (verification will have ensured that
        // at least one of them is).
9868 | |
9869 | // Enumerate all potential interface instantiations |
9870 | MethodTable::InterfaceMapIterator it = pCanonMT->IterateInterfaceMap(); |
9871 | DWORD cPotentialMatchingInterfaces = 0; |
9872 | while (it.Next()) |
9873 | { |
9874 | TypeHandle thPotentialInterfaceType(it.GetInterface()); |
9875 | if (thPotentialInterfaceType.AsMethodTable()->GetCanonicalMethodTable() == |
9876 | thInterfaceType.AsMethodTable()->GetCanonicalMethodTable()) |
9877 | { |
9878 | cPotentialMatchingInterfaces++; |
9879 | pMD = pCanonMT->GetMethodDescForInterfaceMethod(thPotentialInterfaceType, pGenInterfaceMD, FALSE /* throwOnConflict */); |
9880 | |
9881 | // See code:#TryResolveConstraintMethodApprox_DoNotReturnParentMethod |
9882 | if ((pMD != NULL) && !pMD->GetMethodTable()->IsValueType()) |
9883 | { |
                    LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: %s::%s not a value type method\n",
                        pMD->m_pszDebugClassName, pMD->m_pszDebugMethodName));
9886 | return NULL; |
9887 | } |
9888 | } |
9889 | } |
9890 | |
    _ASSERTE_MSG((cPotentialMatchingInterfaces != 0),
        "At least one interface has to implement the method, otherwise there's a bug in JIT/verification.");
9893 | |
9894 | if (cPotentialMatchingInterfaces > 1) |
        {   // We have more than one potentially matching interface
9896 | MethodTable * pInterfaceMT = thInterfaceType.GetMethodTable(); |
9897 | _ASSERTE(pInterfaceMT->HasInstantiation()); |
9898 | |
9899 | BOOL fIsExactMethodResolved = FALSE; |
9900 | |
9901 | if (!pInterfaceMT->IsSharedByGenericInstantiations() && |
9902 | !pInterfaceMT->IsGenericTypeDefinition() && |
9903 | !this->IsSharedByGenericInstantiations() && |
9904 | !this->IsGenericTypeDefinition()) |
            {   // We have exact interface and type instantiations (no generic variables or __Canon used
                // anywhere)
9907 | if (this->CanCastToInterface(pInterfaceMT)) |
9908 | { |
9909 | // We can resolve to exact method |
9910 | pMD = this->GetMethodDescForInterfaceMethod(pInterfaceMT, pInterfaceMD, FALSE /* throwOnConflict */); |
9911 | fIsExactMethodResolved = pMD != NULL; |
9912 | } |
9913 | } |
9914 | |
9915 | if (!fIsExactMethodResolved) |
9916 | { // We couldn't resolve the interface statically |
9917 | _ASSERTE(pfForceUseRuntimeLookup != NULL); |
9918 | // Notify the caller that it should use runtime lookup |
9919 | // Note that we can leave pMD incorrect, because we will use runtime lookup |
9920 | *pfForceUseRuntimeLookup = TRUE; |
9921 | } |
9922 | } |
9923 | else |
9924 | { |
9925 | // If we can resolve the interface exactly then do so (e.g. when doing the exact |
9926 | // lookup at runtime, or when not sharing generic code). |
9927 | if (pCanonMT->CanCastToInterface(thInterfaceType.GetMethodTable())) |
9928 | { |
9929 | pMD = pCanonMT->GetMethodDescForInterfaceMethod(thInterfaceType, pGenInterfaceMD, FALSE /* throwOnConflict */); |
9930 | if (pMD == NULL) |
9931 | { |
                    LOG((LF_JIT, LL_INFO10000, "TryResolveConstraintMethodApprox: failed to find method desc for interface method\n"));
9933 | } |
9934 | } |
9935 | } |
9936 | } |
9937 | else if (pGenInterfaceMD->IsVirtual()) |
9938 | { |
9939 | if (pGenInterfaceMD->HasNonVtableSlot() && pGenInterfaceMD->GetMethodTable()->IsValueType()) |
9940 | { // GetMethodDescForSlot would AV for this slot |
9941 | // We can get here for (invalid and unverifiable) IL: |
9942 | // constrained. int32 |
9943 | // callvirt System.Int32::GetHashCode() |
9944 | pMD = pGenInterfaceMD; |
9945 | } |
9946 | else |
9947 | { |
9948 | pMD = GetMethodDescForSlot(pGenInterfaceMD->GetSlot()); |
9949 | } |
9950 | } |
9951 | else |
9952 | { |
        // pMD will be NULL when calling non-virtual instance
        // methods on System.Object, i.e. when these are used as a constraint.
9955 | pMD = NULL; |
9956 | } |
9957 | |
9958 | if (pMD == NULL) |
9959 | { // Fall back to VSD |
9960 | return NULL; |
9961 | } |
9962 | |
9963 | if (!pMD->GetMethodTable()->IsInterface()) |
9964 | { |
9965 | //#TryResolveConstraintMethodApprox_DoNotReturnParentMethod |
        // Only return a method if the value type itself declares the method;
        // otherwise we might get a method from Object or System.ValueType.
9968 | if (!pMD->GetMethodTable()->IsValueType()) |
9969 | { // Fall back to VSD |
9970 | return NULL; |
9971 | } |
9972 | |
9973 | // We've resolved the method, ignoring its generic method arguments |
9974 | // If the method is a generic method then go and get the instantiated descriptor |
9975 | pMD = MethodDesc::FindOrCreateAssociatedMethodDesc( |
9976 | pMD, |
9977 | this, |
9978 | FALSE /* no BoxedEntryPointStub */, |
9979 | pInterfaceMD->GetMethodInstantiation(), |
9980 | FALSE /* no allowInstParam */); |
9981 | |
        // FindOrCreateAssociatedMethodDesc won't return a BoxedEntryPointStub.
9983 | _ASSERTE(pMD != NULL); |
9984 | _ASSERTE(!pMD->IsUnboxingStub()); |
9985 | } |
9986 | |
9987 | return pMD; |
9988 | } // MethodTable::TryResolveConstraintMethodApprox |
9989 | |
9990 | //========================================================================================== |
// Make a best-effort attempt to obtain an image name for use in an error message.
//
// This routine must expect to be called before this object is fully loaded.
// It can return an empty string if the name isn't available or the object isn't
// initialized enough to get a name, but it mustn't crash.
9996 | //========================================================================================== |
9997 | LPCWSTR MethodTable::GetPathForErrorMessages() |
9998 | { |
9999 | CONTRACTL |
10000 | { |
10001 | THROWS; |
10002 | GC_TRIGGERS; |
10003 | INJECT_FAULT(COMPlusThrowOM();); |
10004 | } |
10005 | CONTRACTL_END |
10006 | |
10007 | Module *pModule = GetModule(); |
10008 | |
10009 | if (pModule) |
10010 | { |
10011 | return pModule->GetPathForErrorMessages(); |
10012 | } |
10013 | else |
10014 | { |
        return W("");
10016 | } |
10017 | } |
10018 | |
10019 | BOOL MethodTable::Validate() |
10020 | { |
10021 | LIMITED_METHOD_CONTRACT; |
10022 | |
10023 | ASSERT_AND_CHECK(SanityCheck()); |
10024 | |
10025 | #ifdef _DEBUG |
10026 | ASSERT_AND_CHECK(!m_pWriteableData.IsNull()); |
10027 | |
10028 | MethodTableWriteableData *pWriteableData = m_pWriteableData.GetValue(); |
10029 | DWORD dwLastVerifiedGCCnt = pWriteableData->m_dwLastVerifedGCCnt; |
    // Here we used to assert that (dwLastVerifiedGCCnt <= GCHeapUtilities::GetGCHeap()->GetGcCount()),
    // but this is no longer true with background GC. Since the purpose of
    // m_dwLastVerifedGCCnt is just to verify the same method table only once per GC,
    // the assert has been removed.
    if (g_pConfig->FastGCStressLevel() > 1 && dwLastVerifiedGCCnt == GCHeapUtilities::GetGCHeap()->GetGcCount())
10035 | return TRUE; |
10036 | #endif //_DEBUG |
10037 | |
10038 | if (IsArray()) |
10039 | { |
10040 | if (!SanityCheck()) |
10041 | { |
            ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
10043 | } |
10044 | } |
10045 | else if (!IsCanonicalMethodTable()) |
10046 | { |
        // Non-canonical method tables have to have a non-empty instantiation
        if (GetInstantiation().IsEmpty())
        {
            ASSERT_AND_CHECK(!"Detected use of a corrupted OBJECTREF. Possible GC hole.");
10051 | } |
10052 | } |
10053 | |
10054 | #ifdef _DEBUG |
    // It is not a fatal error to fail to update the counter. We will run slower
    // and retry next time, but the system will function properly.
10057 | if (EnsureWritablePagesNoThrow(pWriteableData, sizeof(MethodTableWriteableData))) |
10058 | pWriteableData->m_dwLastVerifedGCCnt = GCHeapUtilities::GetGCHeap()->GetGcCount(); |
10059 | #endif //_DEBUG |
10060 | |
10061 | return TRUE; |
10062 | } |
10063 | |
10064 | #endif // !DACCESS_COMPILE |
10065 | |
10066 | NOINLINE BYTE *MethodTable::GetLoaderAllocatorObjectForGC() |
10067 | { |
10068 | WRAPPER_NO_CONTRACT; |
10069 | if (!Collectible()) |
10070 | { |
10071 | return NULL; |
10072 | } |
10073 | BYTE * retVal = *(BYTE**)GetLoaderAllocatorObjectHandle(); |
10074 | return retVal; |
10075 | } |
10076 | |
10077 | #ifdef FEATURE_COMINTEROP |
10078 | //========================================================================================== |
10079 | BOOL MethodTable::IsWinRTRedirectedDelegate() |
10080 | { |
10081 | LIMITED_METHOD_DAC_CONTRACT; |
10082 | |
10083 | if (!IsDelegate()) |
10084 | { |
10085 | return FALSE; |
10086 | } |
10087 | |
10088 | return !!WinRTDelegateRedirector::ResolveRedirectedDelegate(this, nullptr); |
10089 | } |
10090 | |
10091 | //========================================================================================== |
10092 | BOOL MethodTable::IsWinRTRedirectedInterface(TypeHandle::InteropKind interopKind) |
10093 | { |
10094 | LIMITED_METHOD_CONTRACT; |
10095 | |
10096 | if (!IsInterface()) |
10097 | return FALSE; |
10098 | |
10099 | if (!HasRCWPerTypeData()) |
10100 | { |
10101 | // All redirected interfaces have per-type RCW data |
10102 | return FALSE; |
10103 | } |
10104 | |
10105 | #ifdef DACCESS_COMPILE |
10106 | RCWPerTypeData *pData = NULL; |
10107 | #else // DACCESS_COMPILE |
10108 | // We want to keep this function LIMITED_METHOD_CONTRACT so we call GetRCWPerTypeData with |
10109 | // the non-throwing flag. pData can be NULL if it could not be allocated. |
10110 | RCWPerTypeData *pData = GetRCWPerTypeData(false); |
10111 | #endif // DACCESS_COMPILE |
10112 | |
10113 | DWORD dwFlags = (pData != NULL ? pData->m_dwFlags : 0); |
10114 | if ((dwFlags & RCWPerTypeData::InterfaceFlagsInited) == 0) |
10115 | { |
10116 | dwFlags = RCWPerTypeData::InterfaceFlagsInited; |
10117 | |
10118 | if (WinRTInterfaceRedirector::ResolveRedirectedInterface(this, NULL)) |
10119 | { |
10120 | dwFlags |= RCWPerTypeData::IsRedirectedInterface; |
10121 | } |
10122 | else if (HasSameTypeDefAs(MscorlibBinder::GetExistingClass(CLASS__ICOLLECTIONGENERIC)) || |
10123 | HasSameTypeDefAs(MscorlibBinder::GetExistingClass(CLASS__IREADONLYCOLLECTIONGENERIC)) || |
10124 | this == MscorlibBinder::GetExistingClass(CLASS__ICOLLECTION)) |
10125 | { |
10126 | dwFlags |= RCWPerTypeData::IsICollectionGeneric; |
10127 | } |
10128 | |
10129 | if (pData != NULL) |
10130 | { |
10131 | FastInterlockOr(&pData->m_dwFlags, dwFlags); |
10132 | } |
10133 | } |
10134 | |
10135 | if ((dwFlags & RCWPerTypeData::IsRedirectedInterface) != 0) |
10136 | return TRUE; |
10137 | |
10138 | if (interopKind == TypeHandle::Interop_ManagedToNative) |
10139 | { |
10140 | // ICollection<T> is redirected in the managed->WinRT direction (i.e. we have stubs |
10141 | // that implement ICollection<T> methods in terms of IVector/IMap), but it is not |
10142 | // treated specially in the WinRT->managed direction (we don't build a WinRT vtable |
10143 | // for a class that only implements ICollection<T>). IReadOnlyCollection<T> is |
10144 | // treated similarly. |
10145 | if ((dwFlags & RCWPerTypeData::IsICollectionGeneric) != 0) |
10146 | return TRUE; |
10147 | } |
10148 | |
10149 | return FALSE; |
10150 | } |
10151 | |
10152 | #endif // FEATURE_COMINTEROP |
10153 | |
10154 | #ifdef FEATURE_READYTORUN_COMPILER |
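
// A type's layout is considered fixed when it is a primitive or an enum;
// otherwise it must either be defined inside the current version bubble or be a
// value type marked NonVersionable, and all of its value-type instance fields
// must recursively have fixed layout as well.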
10155 | |
10156 | static BOOL ComputeIsLayoutFixedInCurrentVersionBubble(MethodTable * pMT) |
10157 | { |
10158 | STANDARD_VM_CONTRACT; |
10159 | |
10160 | // Primitive types and enums have fixed layout |
10161 | if (pMT->IsTruePrimitive() || pMT->IsEnum()) |
10162 | return TRUE; |
10163 | |
10164 | if (!pMT->GetModule()->IsInCurrentVersionBubble()) |
10165 | { |
10166 | if (!pMT->IsValueType()) |
10167 | { |
            // Eventually, we may respect the non-versionable attribute for reference types too.
            // For now, we are going to play it safe and ignore it.
10170 | return FALSE; |
10171 | } |
10172 | |
        // Value types with the non-versionable attribute are candidates for fixed layout. Reject the rest.
10174 | if (pMT->GetModule()->GetMDImport()->GetCustomAttributeByName(pMT->GetCl(), |
10175 | NONVERSIONABLE_TYPE, NULL, NULL) != S_OK) |
10176 | { |
10177 | return FALSE; |
10178 | } |
10179 | } |
10180 | |
10181 | // If the above condition passed, check that all instance fields have fixed layout as well. In particular, |
10182 | // it is important for generic types with non-versionable layout (e.g. Nullable<T>) |
10183 | ApproxFieldDescIterator fieldIterator(pMT, ApproxFieldDescIterator::INSTANCE_FIELDS); |
10184 | for (FieldDesc *pFD = fieldIterator.Next(); pFD != NULL; pFD = fieldIterator.Next()) |
10185 | { |
10186 | if (pFD->GetFieldType() != ELEMENT_TYPE_VALUETYPE) |
10187 | continue; |
10188 | |
10189 | MethodTable * pFieldMT = pFD->GetApproxFieldTypeHandleThrowing().AsMethodTable(); |
10190 | if (!pFieldMT->IsLayoutFixedInCurrentVersionBubble()) |
10191 | return FALSE; |
10192 | } |
10193 | |
10194 | return TRUE; |
10195 | } |
10196 | |
10197 | // |
10198 | // Is field layout in this type fixed within the current version bubble? |
10199 | // This check does not take the inheritance chain into account. |
10200 | // |
10201 | BOOL MethodTable::IsLayoutFixedInCurrentVersionBubble() |
10202 | { |
10203 | STANDARD_VM_CONTRACT; |
10204 | |
10205 | const MethodTableWriteableData * pWriteableData = GetWriteableData(); |
10206 | if (!(pWriteableData->m_dwFlags & MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixedComputed)) |
10207 | { |
10208 | MethodTableWriteableData * pWriteableDataForWrite = GetWriteableDataForWrite(); |
10209 | if (ComputeIsLayoutFixedInCurrentVersionBubble(this)) |
10210 | *EnsureWritablePages(&pWriteableDataForWrite->m_dwFlags) |= MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixed; |
10211 | *EnsureWritablePages(&pWriteableDataForWrite->m_dwFlags) |= MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixedComputed; |
10212 | } |
10213 | |
10214 | return (pWriteableData->m_dwFlags & MethodTableWriteableData::enum_flag_NGEN_IsLayoutFixed) != 0; |
10215 | } |
10216 | |
10217 | // |
10218 | // Is field layout of the inheritance chain fixed within the current version bubble? |
10219 | // |
10220 | BOOL MethodTable::IsInheritanceChainLayoutFixedInCurrentVersionBubble() |
10221 | { |
10222 | STANDARD_VM_CONTRACT; |
10223 | |
10224 | // This method is not expected to be called for value types |
10225 | _ASSERTE(!IsValueType()); |
10226 | |
10227 | MethodTable * pMT = this; |
10228 | |
10229 | while ((pMT != g_pObjectClass) && (pMT != NULL)) |
10230 | { |
10231 | if (!pMT->IsLayoutFixedInCurrentVersionBubble()) |
10232 | return FALSE; |
10233 | |
10234 | pMT = pMT->GetParentMethodTable(); |
10235 | } |
10236 | |
10237 | return TRUE; |
10238 | } |
10239 | |
10240 | // |
10241 | // Is the inheritance chain fixed within the current version bubble? |
10242 | // |
10243 | BOOL MethodTable::IsInheritanceChainFixedInCurrentVersionBubble() |
10244 | { |
10245 | STANDARD_VM_CONTRACT; |
10246 | |
10247 | MethodTable * pMT = this; |
10248 | |
10249 | if (pMT->IsValueType()) |
10250 | { |
10251 | return pMT->GetModule()->IsInCurrentVersionBubble(); |
10252 | } |
10253 | |
10254 | while ((pMT != g_pObjectClass) && (pMT != NULL)) |
10255 | { |
10256 | if (!pMT->GetModule()->IsInCurrentVersionBubble()) |
10257 | return FALSE; |
10258 | |
10259 | pMT = pMT->GetParentMethodTable(); |
10260 | } |
10261 | |
10262 | return TRUE; |
10263 | } |
10264 | |
10265 | #endif // FEATURE_READYTORUN_COMPILER |
10266 | |