1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | // |
5 | // File: METHODTABLEBUILDER.CPP |
6 | // |
7 | |
8 | |
9 | // |
10 | |
11 | // |
12 | // ============================================================================ |
13 | |
14 | #include "common.h" |
15 | |
16 | #include "methodtablebuilder.h" |
17 | |
18 | #include "sigbuilder.h" |
19 | #include "dllimport.h" |
20 | #include "fieldmarshaler.h" |
21 | #include "encee.h" |
22 | #include "mdaassistants.h" |
23 | #include "ecmakey.h" |
24 | #include "customattribute.h" |
25 | #include "typestring.h" |
26 | |
27 | //******************************************************************************* |
// Helper function to sort GC descriptor series by offset (descending order)
29 | int __cdecl compareCGCDescSeries(const void *arg1, const void *arg2) |
30 | { |
31 | STATIC_CONTRACT_NOTHROW; |
32 | STATIC_CONTRACT_GC_NOTRIGGER; |
33 | STATIC_CONTRACT_FORBID_FAULT; |
34 | |
35 | CGCDescSeries* gcInfo1 = (CGCDescSeries*) arg1; |
36 | CGCDescSeries* gcInfo2 = (CGCDescSeries*) arg2; |
37 | |
38 | return (int)(gcInfo2->GetSeriesOffset() - gcInfo1->GetSeriesOffset()); |
39 | } |
40 | |
41 | //******************************************************************************* |
42 | |
43 | const char* FormatSig(MethodDesc* pMD, LoaderHeap *pHeap, AllocMemTracker *pamTracker); |
44 | |
45 | #ifdef _DEBUG |
46 | unsigned g_dupMethods = 0; |
47 | #endif // _DEBUG |
48 | |
49 | //========================================================================== |
// This function is very specific about how it constructs an EEClass. It first
// determines the necessary size of the vtable and the number of statics that
// this class requires. The necessary memory is then allocated for an EEClass
// and its vtable and statics. The class members are then initialized and
// the memory is returned to the caller.
55 | // |
56 | // LPEEClass CreateClass() |
57 | // |
58 | // Parameters : |
// [in] scope - scope of the current class, not the one requested to be opened
60 | // [in] cl - class token of the class to be created. |
61 | // [out] ppEEClass - pointer to pointer to hold the address of the EEClass |
62 | // allocated in this function. |
63 | // Return : returns an HRESULT indicating the success of this function. |
64 | // |
65 | // This parameter has been removed but might need to be reinstated if the |
66 | // global for the metadata loader is removed. |
67 | // [in] pIMLoad - MetaDataLoader class/object for the current scope. |
68 | |
69 | |
70 | //========================================================================== |
71 | /*static*/ EEClass * |
72 | MethodTableBuilder::CreateClass( Module *pModule, |
73 | mdTypeDef cl, |
74 | BOOL fHasLayout, |
75 | BOOL fDelegate, |
76 | BOOL fIsEnum, |
77 | const MethodTableBuilder::bmtGenericsInfo *bmtGenericsInfo, |
78 | LoaderAllocator * pAllocator, |
79 | AllocMemTracker *pamTracker) |
80 | { |
81 | CONTRACTL |
82 | { |
83 | STANDARD_VM_CHECK; |
84 | PRECONDITION(!(fHasLayout && fDelegate)); |
85 | PRECONDITION(!(fHasLayout && fIsEnum)); |
86 | PRECONDITION(CheckPointer(bmtGenericsInfo)); |
87 | } |
88 | CONTRACTL_END; |
89 | |
90 | EEClass *pEEClass = NULL; |
91 | IMDInternalImport *pInternalImport; |
92 | |
93 | //<TODO>============================================================================ |
// vtabsize and static size need to be converted from pointer sizes to numbers
// of bytes. This will be very important for 64-bit NT!
// We will need to call on IMetaDataLoad to get these sizes and fill out the
// tables.

// From the classref, call on metadata to resolve the classref and check the scope
// to make sure that this class is in the same scope; otherwise we need to open
// a new scope and possibly a new file.

// If the scopes are different, call the code to load a new file and get the new scope.

// If the scopes are the same, we can use the existing scope to get the class info.

// This method needs to be fleshed out more. It currently just returns enough
// space for the defined EEClass; the vtable and statics are not set.
109 | //=============================================================================</TODO> |
110 | |
111 | if (fHasLayout) |
112 | { |
113 | pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) LayoutEEClass(); |
114 | } |
115 | else if (fDelegate) |
116 | { |
117 | pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) DelegateEEClass(); |
118 | } |
119 | else |
120 | { |
121 | pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) EEClass(sizeof(EEClass)); |
122 | } |
123 | |
124 | DWORD dwAttrClass = 0; |
125 | mdToken tkExtends = mdTokenNil; |
126 | |
127 | // Set up variance info |
128 | if (bmtGenericsInfo->pVarianceInfo) |
129 | { |
130 | // Variance info is an optional field on EEClass, so ensure the optional field descriptor has been |
131 | // allocated. |
132 | EnsureOptionalFieldsAreAllocated(pEEClass, pamTracker, pAllocator->GetLowFrequencyHeap()); |
133 | pEEClass->SetVarianceInfo((BYTE*) pamTracker->Track( |
134 | pAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(bmtGenericsInfo->GetNumGenericArgs())))); |
135 | |
136 | memcpy(pEEClass->GetVarianceInfo(), bmtGenericsInfo->pVarianceInfo, bmtGenericsInfo->GetNumGenericArgs()); |
137 | } |
138 | |
139 | pInternalImport = pModule->GetMDImport(); |
140 | |
141 | if (pInternalImport == NULL) |
142 | COMPlusThrowHR(COR_E_TYPELOAD); |
143 | |
144 | IfFailThrow(pInternalImport->GetTypeDefProps( |
145 | cl, |
146 | &dwAttrClass, |
147 | &tkExtends)); |
148 | |
149 | pEEClass->m_dwAttrClass = dwAttrClass; |
150 | |
151 | // MDVal check: can't be both tdSequentialLayout and tdExplicitLayout |
152 | if((dwAttrClass & tdLayoutMask) == tdLayoutMask) |
153 | COMPlusThrowHR(COR_E_TYPELOAD); |
154 | |
155 | if (IsTdInterface(dwAttrClass)) |
156 | { |
157 | // MDVal check: must have nil tkExtends and must be tdAbstract |
158 | if((tkExtends & 0x00FFFFFF)||(!IsTdAbstract(dwAttrClass))) |
159 | COMPlusThrowHR(COR_E_TYPELOAD); |
160 | } |
161 | |
162 | if (fHasLayout) |
163 | pEEClass->SetHasLayout(); |
164 | |
165 | #ifdef FEATURE_COMINTEROP |
166 | if (IsTdWindowsRuntime(dwAttrClass)) |
167 | { |
168 | Assembly *pAssembly = pModule->GetAssembly(); |
169 | |
170 | // On the desktop CLR, we do not allow non-FX assemblies to use/define WindowsRuntimeImport attribute. |
171 | // |
172 | // On CoreCLR, however, we do allow non-FX assemblies to have this attribute. This enables scenarios where we can |
173 | // activate 3rd-party WinRT components outside AppContainer - 1st party WinRT components are already allowed |
174 | // to be activated outside AppContainer (on both Desktop and CoreCLR). |
175 | |
176 | pEEClass->SetProjectedFromWinRT(); |
177 | } |
178 | |
179 | if (pEEClass->IsProjectedFromWinRT()) |
180 | { |
181 | if (IsTdInterface(dwAttrClass)) |
182 | { |
183 | // |
184 | // Check for GuidAttribute |
185 | // |
186 | BOOL bHasGuid = FALSE; |
187 | |
188 | GUID guid; |
189 | HRESULT hr = pModule->GetMDImport()->GetItemGuid(cl, &guid); |
190 | IfFailThrow(hr); |
191 | |
192 | if (IsEqualGUID(guid, GUID_NULL)) |
193 | { |
194 | // A WinRT interface should have a GUID |
195 | pModule->GetAssembly()->ThrowTypeLoadException(pModule->GetMDImport(), cl, IDS_EE_WINRT_INTERFACE_WITHOUT_GUID); |
196 | } |
197 | } |
198 | } |
199 | |
200 | WinMDAdapter::RedirectedTypeIndex redirectedTypeIndex; |
201 | redirectedTypeIndex = WinRTTypeNameConverter::GetRedirectedTypeIndexByName(pModule, cl); |
202 | if (redirectedTypeIndex != WinMDAdapter::RedirectedTypeIndex_Invalid) |
203 | { |
204 | EnsureOptionalFieldsAreAllocated(pEEClass, pamTracker, pAllocator->GetLowFrequencyHeap()); |
205 | pEEClass->SetWinRTRedirectedTypeIndex(redirectedTypeIndex); |
206 | } |
#endif // FEATURE_COMINTEROP
208 | |
209 | #ifdef _DEBUG |
210 | pModule->GetClassLoader()->m_dwDebugClasses++; |
211 | #endif |
212 | |
213 | return pEEClass; |
214 | } |
215 | |
216 | //******************************************************************************* |
217 | // |
218 | // Create a hash of all methods in this class. The hash is from method name to MethodDesc. |
219 | // |
220 | MethodTableBuilder::MethodNameHash * |
221 | MethodTableBuilder::CreateMethodChainHash( |
222 | MethodTable *pMT) |
223 | { |
224 | STANDARD_VM_CONTRACT; |
225 | |
226 | MethodNameHash *pHash = new (GetStackingAllocator()) MethodNameHash(); |
227 | pHash->Init(pMT->GetNumVirtuals(), GetStackingAllocator()); |
228 | |
229 | unsigned numVirtuals = GetParentMethodTable()->GetNumVirtuals(); |
230 | for (unsigned i = 0; i < numVirtuals; ++i) |
231 | { |
232 | bmtMethodSlot &slot = (*bmtParent->pSlotTable)[i]; |
233 | bmtRTMethod * pMethod = slot.Decl().AsRTMethod(); |
234 | const MethodSignature &sig = pMethod->GetMethodSignature(); |
235 | pHash->Insert(sig.GetName(), pMethod); |
236 | } |
237 | |
238 | // Success |
239 | return pHash; |
240 | } |
241 | |
242 | //******************************************************************************* |
243 | // |
244 | // Find a method in this class hierarchy - used ONLY by the loader during layout. Do not use at runtime. |
245 | // |
// Returns the parent's bmtRTMethod for the matching method, or NULL if no matching method
// is found in the hierarchy.
//
// pMethodConstraintsMatch receives the result of comparing the method constraints of the
// matching method against those of methodSig.
253 | MethodTableBuilder::bmtRTMethod * |
254 | MethodTableBuilder::LoaderFindMethodInParentClass( |
255 | const MethodSignature & methodSig, |
256 | BOOL * pMethodConstraintsMatch) |
257 | { |
258 | CONTRACTL |
259 | { |
260 | STANDARD_VM_CHECK; |
261 | PRECONDITION(CheckPointer(this)); |
262 | PRECONDITION(CheckPointer(bmtParent)); |
263 | PRECONDITION(CheckPointer(methodSig.GetModule())); |
264 | PRECONDITION(CheckPointer(methodSig.GetSignature())); |
265 | PRECONDITION(HasParent()); |
266 | PRECONDITION(methodSig.GetSignatureLength() != 0); |
267 | } |
268 | CONTRACTL_END; |
269 | |
270 | //#if 0 |
271 | MethodNameHash::HashEntry * pEntry; |
272 | |
273 | // Have we created a hash of all the methods in the class chain? |
274 | if (bmtParent->pParentMethodHash == NULL) |
275 | { |
276 | // There may be such a method, so we will now create a hash table to reduce the pain for |
277 | // further lookups |
278 | |
279 | // <TODO> Are we really sure that this is worth doing? </TODO> |
280 | bmtParent->pParentMethodHash = CreateMethodChainHash(GetParentMethodTable()); |
281 | } |
282 | |
283 | // We have a hash table, so use it |
284 | pEntry = bmtParent->pParentMethodHash->Lookup(methodSig.GetName()); |
285 | |
286 | // Traverse the chain of all methods with this name |
287 | while (pEntry != NULL) |
288 | { |
289 | bmtRTMethod * pEntryMethod = pEntry->m_data; |
290 | const MethodSignature & entrySig = pEntryMethod->GetMethodSignature(); |
291 | |
292 | // Note instantiation info |
293 | { |
294 | if (methodSig.Equivalent(entrySig)) |
295 | { |
296 | if (pMethodConstraintsMatch != NULL) |
297 | { |
298 | // Check the constraints are consistent, |
299 | // and return the result to the caller. |
300 | // We do this here to avoid recalculating pSubst. |
301 | *pMethodConstraintsMatch = MetaSig::CompareMethodConstraints( |
302 | &methodSig.GetSubstitution(), methodSig.GetModule(), methodSig.GetToken(), |
303 | &entrySig.GetSubstitution(), entrySig.GetModule(), entrySig.GetToken()); |
304 | } |
305 | |
306 | return pEntryMethod; |
307 | } |
308 | } |
309 | |
310 | // Advance to next item in the hash chain which has the same name |
311 | pEntry = bmtParent->pParentMethodHash->FindNext(pEntry); |
312 | } |
313 | //#endif |
314 | |
//@TODO: Move to this code, as the use of a HashTable is broken; overriding semantics
//@TODO: require matching against the most-derived slot of a given name and signature
//@TODO: (which deals specifically with newslot methods with identical name and sig), but
//@TODO: HashTables are by definition unordered and so we've only been getting by with the
//@TODO: implementation being compatible with the order in which methods were added to
//@TODO: the HashTable in CreateMethodChainHash.
321 | #if 0 |
322 | bmtParentInfo::Iterator it(bmtParent->IterateSlots()); |
323 | it.MoveTo(static_cast<size_t>(GetParentMethodTable()->GetNumVirtuals())); |
324 | while (it.Prev()) |
325 | { |
326 | bmtMethodHandle decl(it->Decl()); |
327 | const MethodSignature &declSig(decl.GetMethodSignature()); |
328 | if (declSig == methodSig) |
329 | { |
330 | if (pMethodConstraintsMatch != NULL) |
331 | { |
332 | // Check the constraints are consistent, |
333 | // and return the result to the caller. |
334 | // We do this here to avoid recalculating pSubst. |
335 | *pMethodConstraintsMatch = MetaSig::CompareMethodConstraints( |
336 | &methodSig.GetSubstitution(), methodSig.GetModule(), methodSig.GetToken(), |
337 | &declSig.GetSubstitution(), declSig.GetModule(), declSig.GetToken()); |
338 | } |
339 | |
340 | return decl.AsRTMethod(); |
341 | } |
342 | } |
343 | #endif // 0 |
344 | |
345 | return NULL; |
346 | } |
347 | |
348 | //******************************************************************************* |
349 | // |
350 | // Given an interface map to fill out, expand pNewInterface (and its sub-interfaces) into it, increasing |
351 | // pdwInterfaceListSize as appropriate, and avoiding duplicates. |
352 | // |
353 | void |
354 | MethodTableBuilder::ExpandApproxInterface( |
355 | bmtInterfaceInfo * bmtInterface, // out parameter, various parts cumulatively written to. |
356 | const Substitution * pNewInterfaceSubstChain, |
357 | MethodTable * pNewInterface, |
358 | InterfaceDeclarationScope declScope |
359 | COMMA_INDEBUG(MethodTable * dbg_pClassMT)) |
360 | { |
361 | STANDARD_VM_CONTRACT; |
362 | |
363 | //#ExpandingInterfaces |
364 | // We expand the tree of inherited interfaces into a set by adding the |
365 | // current node BEFORE expanding the parents of the current node. |
366 | // ****** This must be consistent with code:ExpandExactInterface ******* |
367 | // ****** This must be consistent with code:ClassCompat::MethodTableBuilder::BuildInteropVTable_ExpandInterface ******* |
368 | |
// The interface list contains the fully expanded set of interfaces from the parent; then
// we start adding all the interfaces we declare. We need to know which interfaces
// we declare, but we do not need duplicates of the ones we declare. This means we can
// duplicate our parent entries.
373 | |
374 | // Is it already present in the list? |
375 | for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
376 | { |
377 | bmtInterfaceEntry * pItfEntry = &bmtInterface->pInterfaceMap[i]; |
378 | bmtRTType * pItfType = pItfEntry->GetInterfaceType(); |
379 | |
// Type Equivalence is not respected for this comparison, as you can have multiple type-equivalent interfaces on a class
381 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
382 | if (MetaSig::CompareTypeDefsUnderSubstitutions(pItfType->GetMethodTable(), |
383 | pNewInterface, |
384 | &pItfType->GetSubstitution(), |
385 | pNewInterfaceSubstChain, |
386 | &newVisited)) |
387 | { |
388 | if (declScope.fIsInterfaceDeclaredOnType) |
389 | { |
390 | pItfEntry->IsDeclaredOnType() = true; |
391 | } |
392 | #ifdef _DEBUG |
393 | //#InjectInterfaceDuplicates_ApproxInterfaces |
394 | // We can inject duplicate interfaces in check builds. |
395 | // Has to be in sync with code:#InjectInterfaceDuplicates_Main |
396 | if (((dbg_pClassMT == NULL) && bmtInterface->dbg_fShouldInjectInterfaceDuplicates) || |
397 | ((dbg_pClassMT != NULL) && dbg_pClassMT->Debug_HasInjectedInterfaceDuplicates())) |
398 | { |
399 | // The injected duplicate interface should have the same status 'ImplementedByParent' as |
400 | // the original interface (can be false if the interface is implemented indirectly twice) |
401 | declScope.fIsInterfaceDeclaredOnParent = pItfEntry->IsImplementedByParent(); |
402 | // Just pretend we didn't find this match, but mark all duplicates as 'DeclaredOnType' if |
403 | // needed |
404 | continue; |
405 | } |
406 | #endif //_DEBUG |
407 | return; // found it, don't add it again |
408 | } |
409 | } |
410 | |
411 | bmtRTType * pNewItfType = |
412 | new (GetStackingAllocator()) bmtRTType(*pNewInterfaceSubstChain, pNewInterface); |
413 | |
414 | if (bmtInterface->dwInterfaceMapSize >= bmtInterface->dwInterfaceMapAllocated) |
415 | { |
416 | // |
417 | // Grow the array of interfaces |
418 | // |
419 | S_UINT32 dwNewAllocated = S_UINT32(2) * S_UINT32(bmtInterface->dwInterfaceMapAllocated) + S_UINT32(5); |
420 | |
421 | if (dwNewAllocated.IsOverflow()) |
422 | { |
423 | BuildMethodTableThrowException(COR_E_OVERFLOW); |
424 | } |
425 | |
426 | S_SIZE_T safeSize = S_SIZE_T(sizeof(bmtInterfaceEntry)) * |
427 | S_SIZE_T(dwNewAllocated.Value()); |
428 | |
429 | if (safeSize.IsOverflow()) |
430 | { |
431 | BuildMethodTableThrowException(COR_E_OVERFLOW); |
432 | } |
433 | |
434 | bmtInterfaceEntry * pNewMap = (bmtInterfaceEntry *)new (GetStackingAllocator()) BYTE[safeSize.Value()]; |
435 | memcpy(pNewMap, bmtInterface->pInterfaceMap, sizeof(bmtInterfaceEntry) * bmtInterface->dwInterfaceMapAllocated); |
436 | |
437 | bmtInterface->pInterfaceMap = pNewMap; |
438 | bmtInterface->dwInterfaceMapAllocated = dwNewAllocated.Value(); |
439 | } |
440 | |
// The interface map memory was just allocated as an array of bytes, so we use
// placement new to initialize the new map entry. No need to do anything with the
// result, so just discard it.
444 | CONSISTENCY_CHECK(bmtInterface->dwInterfaceMapSize < bmtInterface->dwInterfaceMapAllocated); |
445 | new ((void *)&bmtInterface->pInterfaceMap[bmtInterface->dwInterfaceMapSize]) |
446 | bmtInterfaceEntry(pNewItfType, declScope); |
447 | |
448 | bmtInterface->dwInterfaceMapSize++; |
449 | |
450 | // Make sure to pass in the substitution from the new itf type created above as |
451 | // these methods assume that substitutions are allocated in the stacking heap, |
452 | // not the stack. |
453 | InterfaceDeclarationScope declaredItfScope(declScope.fIsInterfaceDeclaredOnParent, false); |
454 | ExpandApproxDeclaredInterfaces( |
455 | bmtInterface, |
456 | bmtTypeHandle(pNewItfType), |
457 | declaredItfScope |
458 | COMMA_INDEBUG(dbg_pClassMT)); |
459 | } // MethodTableBuilder::ExpandApproxInterface |
460 | |
461 | //******************************************************************************* |
462 | // Arguments: |
463 | // dbg_pClassMT - Class on which the interfaces are declared (either explicitly or implicitly). |
464 | // It will never be an interface. It may be NULL (if it is the type being built). |
465 | void |
466 | MethodTableBuilder::ExpandApproxDeclaredInterfaces( |
467 | bmtInterfaceInfo * bmtInterface, // out parameter, various parts cumulatively written to. |
468 | bmtTypeHandle thType, |
469 | InterfaceDeclarationScope declScope |
470 | COMMA_INDEBUG(MethodTable * dbg_pClassMT)) |
471 | { |
472 | STANDARD_VM_CONTRACT; |
473 | |
474 | _ASSERTE((dbg_pClassMT == NULL) || !dbg_pClassMT->IsInterface()); |
475 | |
476 | HRESULT hr; |
477 | // Iterate the list of interfaces declared by thType and add them to the map. |
478 | InterfaceImplEnum ie(thType.GetModule(), thType.GetTypeDefToken(), &thType.GetSubstitution()); |
479 | while ((hr = ie.Next()) == S_OK) |
480 | { |
481 | MethodTable *pGenericIntf = ClassLoader::LoadApproxTypeThrowing( |
482 | thType.GetModule(), ie.CurrentToken(), NULL, NULL).GetMethodTable(); |
483 | CONSISTENCY_CHECK(pGenericIntf->IsInterface()); |
484 | |
485 | ExpandApproxInterface(bmtInterface, |
486 | ie.CurrentSubst(), |
487 | pGenericIntf, |
488 | declScope |
489 | COMMA_INDEBUG(dbg_pClassMT)); |
490 | } |
491 | if (FAILED(hr)) |
492 | { |
493 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
494 | } |
495 | } // MethodTableBuilder::ExpandApproxDeclaredInterfaces |
496 | |
497 | //******************************************************************************* |
498 | void |
499 | MethodTableBuilder::ExpandApproxInheritedInterfaces( |
500 | bmtInterfaceInfo * bmtInterface, |
501 | bmtRTType * pParentType) |
502 | { |
503 | STANDARD_VM_CONTRACT; |
504 | |
505 | INTERIOR_STACK_PROBE(GetThread()); |
506 | |
507 | // Expand interfaces in superclasses first. Interfaces inherited from parents |
508 | // must have identical indexes as in the parent. |
509 | bmtRTType * pParentOfParent = pParentType->GetParentType(); |
510 | |
511 | //#InterfaceMap_SupersetOfParent |
512 | // We have to load parent's interface map the same way the parent did it (as open type). |
513 | // Further code depends on this: |
514 | // code:#InterfaceMap_UseParentInterfaceImplementations |
// We verify that this holds in:
516 | // code:#ApproxInterfaceMap_SupersetOfParent |
517 | // code:#ExactInterfaceMap_SupersetOfParent |
518 | // |
519 | //#InterfaceMap_CanonicalSupersetOfParent |
520 | // Note that canonical instantiation of parent can have different interface instantiations in the |
521 | // interface map than derived type: |
522 | // class MyClass<T> : MyBase<string, T>, I<T> |
523 | // class MyBase<U, V> : I<U> |
524 | // Type MyClass<_Canon> has MyBase<_Canon,_Canon> as parent. The interface maps are: |
525 | // MyBase<_Canon,_Canon> ... I<_Canon> |
526 | // MyClass<_Canon> ... I<string> (#1) |
527 | // I<_Canon> (#2) |
528 | // The I's instantiation I<string> (#1) in MyClass and I<_Canon> from MyBase are not the same |
529 | // instantiations. |
530 | |
531 | // Backup parent substitution |
532 | Substitution parentSubstitution = pParentType->GetSubstitution(); |
533 | // Make parent an open type |
534 | pParentType->SetSubstitution(Substitution()); |
535 | |
536 | if (pParentOfParent != NULL) |
537 | { |
538 | ExpandApproxInheritedInterfaces(bmtInterface, pParentOfParent); |
539 | } |
540 | |
541 | InterfaceDeclarationScope declScope(true, false); |
542 | ExpandApproxDeclaredInterfaces( |
543 | bmtInterface, |
544 | bmtTypeHandle(pParentType), |
545 | declScope |
546 | COMMA_INDEBUG(pParentType->GetMethodTable())); |
547 | |
548 | // Make sure we loaded the same number of interfaces as the parent type itself |
549 | CONSISTENCY_CHECK(pParentType->GetMethodTable()->GetNumInterfaces() == bmtInterface->dwInterfaceMapSize); |
550 | |
551 | // Restore parent's substitution |
552 | pParentType->SetSubstitution(parentSubstitution); |
553 | |
554 | END_INTERIOR_STACK_PROBE; |
555 | } // MethodTableBuilder::ExpandApproxInheritedInterfaces |
556 | |
557 | //******************************************************************************* |
558 | // Fill out a fully expanded interface map, such that if we are declared to |
559 | // implement I3, and I3 extends I1,I2, then I1,I2 are added to our list if |
560 | // they are not already present. |
561 | void |
562 | MethodTableBuilder::LoadApproxInterfaceMap() |
563 | { |
564 | STANDARD_VM_CONTRACT; |
565 | |
566 | bmtInterface->dwInterfaceMapSize = 0; |
567 | |
568 | #ifdef _DEBUG |
569 | //#InjectInterfaceDuplicates_Main |
// We will inject duplicate interfaces in check builds if the env. var.
// COMPLUS_INTERNAL_TypeLoader_InjectInterfaceDuplicates is set to TRUE, for all types (incl. non-generic
// types).
// This should give us better test coverage of duplicates in the interface map.
574 | // |
575 | // The duplicates are legal for some types: |
576 | // A<T> : I<T> |
577 | // B<U,V> : A<U>, I<V> |
578 | // C : B<int,int> |
579 | // where the interface maps are: |
580 | // A<T> ... 1 item: I<T> |
581 | // A<int> ... 1 item: I<int> |
582 | // B<U,V> ... 2 items: I<U>, I<V> |
583 | // B<int,int> ... 2 items: I<int>, I<int> |
584 | // B<_Canon,_Canon> ... 2 items: I<_Canon>, I<_Canon> |
585 | // B<string,string> ... 2 items: I<string>, I<string> |
586 | // C ... 2 items: I<int>, I<int> |
587 | // Note: C had only 1 item (I<int>) in CLR 2.0 RTM/SP1/SP2 and early in CLR 4.0. |
588 | // |
// We will create a duplicate from every re-implemented interface (incl. non-generic):
590 | // code:#InjectInterfaceDuplicates_ApproxInterfaces |
591 | // code:#InjectInterfaceDuplicates_LoadExactInterfaceMap |
592 | // code:#InjectInterfaceDuplicates_ExactInterfaces |
593 | // |
// Note that we don't have to do anything for COM, because COM has its own interface map
// (code:InteropMethodTableData), which is independent of the type's interface map and is created only from
// non-generic interfaces (see code:ClassCompat::MethodTableBuilder::BuildInteropVTable_InterfaceList)
597 | |
// We need to keep track of which interface duplicates were injected. Right now it's either all interfaces
// (declared on the type being built, not inherited) or none. In the future we could inject duplicates
// just for some of them.
601 | bmtInterface->dbg_fShouldInjectInterfaceDuplicates = |
602 | (CLRConfig::GetConfigValue(CLRConfig::INTERNAL_TypeLoader_InjectInterfaceDuplicates) != 0); |
603 | if (bmtGenerics->Debug_GetTypicalMethodTable() != NULL) |
604 | { // It's safer to require that all instantiations have the same injected interface duplicates. |
605 | // In future we could inject different duplicates for various non-shared instantiations. |
606 | |
607 | // Use the same injection status as typical instantiation |
608 | bmtInterface->dbg_fShouldInjectInterfaceDuplicates = |
609 | bmtGenerics->Debug_GetTypicalMethodTable()->Debug_HasInjectedInterfaceDuplicates(); |
610 | |
611 | if (GetModule() == g_pObjectClass->GetModule()) |
612 | { // mscorlib has some weird hardcoded information about interfaces (e.g. |
613 | // code:CEEPreloader::ApplyTypeDependencyForSZArrayHelper), so we don't inject duplicates into |
614 | // mscorlib types |
615 | bmtInterface->dbg_fShouldInjectInterfaceDuplicates = FALSE; |
616 | } |
617 | } |
618 | #endif //_DEBUG |
619 | |
620 | // First inherit all the parent's interfaces. This is important, because our interface map must |
621 | // list the interfaces in identical order to our parent. |
622 | // |
623 | // <NICE> we should document the reasons why. One reason is that DispatchMapTypeIDs can be indexes |
624 | // into the list </NICE> |
625 | if (HasParent()) |
626 | { |
627 | ExpandApproxInheritedInterfaces(bmtInterface, GetParentType()); |
628 | #ifdef _DEBUG |
629 | //#ApproxInterfaceMap_SupersetOfParent |
630 | // Check that parent's interface map is the same as what we just computed |
631 | // See code:#InterfaceMap_SupersetOfParent |
632 | { |
633 | MethodTable * pParentMT = GetParentMethodTable(); |
634 | _ASSERTE(pParentMT->GetNumInterfaces() == bmtInterface->dwInterfaceMapSize); |
635 | |
636 | MethodTable::InterfaceMapIterator parentInterfacesIterator = pParentMT->IterateInterfaceMap(); |
637 | UINT32 nInterfaceIndex = 0; |
638 | while (parentInterfacesIterator.Next()) |
639 | { |
640 | // Compare TypeDefs of the parent's interface and this interface (full MT comparison is in |
641 | // code:#ExactInterfaceMap_SupersetOfParent) |
642 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); |
643 | _ASSERTE(parentInterfacesIterator.GetInterfaceInfo()->GetApproxMethodTable(pParentMT->GetLoaderModule())->HasSameTypeDefAs( |
644 | bmtInterface->pInterfaceMap[nInterfaceIndex].GetInterfaceType()->GetMethodTable())); |
645 | nInterfaceIndex++; |
646 | } |
647 | _ASSERTE(nInterfaceIndex == bmtInterface->dwInterfaceMapSize); |
648 | } |
649 | #endif //_DEBUG |
650 | } |
651 | |
652 | // Now add in any freshly declared interfaces, possibly augmenting the flags |
653 | InterfaceDeclarationScope declScope(false, true); |
654 | ExpandApproxDeclaredInterfaces( |
655 | bmtInterface, |
656 | bmtInternal->pType, |
657 | declScope |
658 | COMMA_INDEBUG(NULL)); |
659 | } // MethodTableBuilder::LoadApproxInterfaceMap |
660 | |
661 | //******************************************************************************* |
// Fills the array of TypeIDs with all duplicate occurrences of pDeclIntfMT in the interface map.
663 | // |
664 | // Arguments: |
// rg/c DispatchMapTypeIDs - Array of TypeIDs and its count of elements.
// pcIfaceDuplicates - Number of duplicate occurrences of the interface in the interface map (ideally <=
// the count of elements in TypeIDs).
//
// Note: If the passed rgDispatchMapTypeIDs array is smaller than the number of duplicates, fills it
// with the duplicates that fit and returns the number of all existing duplicates (not just those filled in
// the array) in pcIfaceDuplicates.
672 | // |
673 | void |
674 | MethodTableBuilder::ComputeDispatchMapTypeIDs( |
675 | MethodTable * pDeclInftMT, |
676 | const Substitution * pDeclIntfSubst, |
677 | DispatchMapTypeID * rgDispatchMapTypeIDs, |
678 | UINT32 cDispatchMapTypeIDs, |
679 | UINT32 * pcIfaceDuplicates) |
680 | { |
681 | STANDARD_VM_CONTRACT; |
682 | |
683 | _ASSERTE(pDeclInftMT->IsInterface()); |
684 | |
685 | // Count of interface duplicates (also used as index into TypeIDs array) |
686 | *pcIfaceDuplicates = 0; |
687 | for (DWORD idx = 0; idx < bmtInterface->dwInterfaceMapSize; idx++) |
688 | { |
689 | bmtInterfaceEntry * pItfEntry = &bmtInterface->pInterfaceMap[idx]; |
690 | bmtRTType * pItfType = pItfEntry->GetInterfaceType(); |
691 | // Type Equivalence is forbidden in interface type ids. |
692 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
693 | if (MetaSig::CompareTypeDefsUnderSubstitutions(pItfType->GetMethodTable(), |
694 | pDeclInftMT, |
695 | &pItfType->GetSubstitution(), |
696 | pDeclIntfSubst, |
697 | &newVisited)) |
{ // We found another occurrence of this interface
699 | // Can we fit it into the TypeID array? |
700 | if (*pcIfaceDuplicates < cDispatchMapTypeIDs) |
701 | { |
702 | rgDispatchMapTypeIDs[*pcIfaceDuplicates] = DispatchMapTypeID::InterfaceClassID(idx); |
703 | } |
704 | // Increase number of duplicate interfaces |
705 | (*pcIfaceDuplicates)++; |
706 | } |
707 | } |
708 | } // MethodTableBuilder::ComputeDispatchMapTypeIDs |
709 | |
710 | //******************************************************************************* |
711 | /*static*/ |
712 | VOID DECLSPEC_NORETURN |
713 | MethodTableBuilder::BuildMethodTableThrowException( |
714 | HRESULT hr, |
715 | const bmtErrorInfo & bmtError) |
716 | { |
717 | CONTRACTL |
718 | { |
719 | THROWS; |
720 | GC_TRIGGERS; |
721 | INJECT_FAULT(COMPlusThrowOM();); |
722 | } |
723 | CONTRACTL_END |
724 | |
725 | LPCUTF8 pszClassName, pszNameSpace; |
726 | if (FAILED(bmtError.pModule->GetMDImport()->GetNameOfTypeDef(bmtError.cl, &pszClassName, &pszNameSpace))) |
727 | { |
728 | pszClassName = pszNameSpace = "Invalid TypeDef record" ; |
729 | } |
730 | |
731 | if (IsNilToken(bmtError.dMethodDefInError) && (bmtError.szMethodNameForError == NULL)) |
732 | { |
733 | if (hr == E_OUTOFMEMORY) |
734 | { |
735 | COMPlusThrowOM(); |
736 | } |
737 | else |
738 | bmtError.pModule->GetAssembly()->ThrowTypeLoadException( |
739 | pszNameSpace, pszClassName, bmtError.resIDWhy); |
740 | } |
741 | else |
742 | { |
743 | LPCUTF8 szMethodName; |
744 | if (bmtError.szMethodNameForError == NULL) |
745 | { |
746 | if (FAILED((bmtError.pModule->GetMDImport())->GetNameOfMethodDef(bmtError.dMethodDefInError, &szMethodName))) |
747 | { |
748 | szMethodName = "Invalid MethodDef record" ; |
749 | } |
750 | } |
751 | else |
752 | { |
753 | szMethodName = bmtError.szMethodNameForError; |
754 | } |
755 | |
756 | bmtError.pModule->GetAssembly()->ThrowTypeLoadException( |
757 | pszNameSpace, pszClassName, szMethodName, bmtError.resIDWhy); |
758 | } |
759 | } // MethodTableBuilder::BuildMethodTableThrowException |
760 | |
761 | //******************************************************************************* |
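// Stores the per-build bookkeeping structures (bmt*) on the builder instance so the rest of
// the build can reach them through member fields.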
762 | void MethodTableBuilder::SetBMTData( |
763 | LoaderAllocator *bmtAllocator, |
764 | bmtErrorInfo *bmtError, |
765 | bmtProperties *bmtProp, |
766 | bmtVtable *bmtVT, |
767 | bmtParentInfo *bmtParent, |
768 | bmtInterfaceInfo *bmtInterface, |
769 | bmtMetaDataInfo *bmtMetaData, |
770 | bmtMethodInfo *bmtMethod, |
771 | bmtMethAndFieldDescs *bmtMFDescs, |
772 | bmtFieldPlacement *bmtFP, |
773 | bmtInternalInfo *bmtInternal, |
774 | bmtGCSeriesInfo *bmtGCSeries, |
775 | bmtMethodImplInfo *bmtMethodImpl, |
776 | const bmtGenericsInfo *bmtGenerics, |
777 | bmtEnumFieldInfo *bmtEnumFields) |
778 | { |
779 | LIMITED_METHOD_CONTRACT; |
780 | this->bmtAllocator = bmtAllocator; |
781 | this->bmtError = bmtError; |
782 | this->bmtProp = bmtProp; |
783 | this->bmtVT = bmtVT; |
784 | this->bmtParent = bmtParent; |
785 | this->bmtInterface = bmtInterface; |
786 | this->bmtMetaData = bmtMetaData; |
787 | this->bmtMethod = bmtMethod; |
788 | this->bmtMFDescs = bmtMFDescs; |
789 | this->bmtFP = bmtFP; |
790 | this->bmtInternal = bmtInternal; |
791 | this->bmtGCSeries = bmtGCSeries; |
792 | this->bmtMethodImpl = bmtMethodImpl; |
793 | this->bmtGenerics = bmtGenerics; |
794 | this->bmtEnumFields = bmtEnumFields; |
795 | } |
796 | |
797 | //******************************************************************************* |
798 | // Used by MethodTableBuilder |
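// Builds a chain of bmtRTType nodes for pMT and all of its parents. Each MethodTable is
// canonicalized, and the substitution is propagated to the parent via
// GetSubstitutionForParent so signatures can later be compared under the right instantiation.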
799 | |
800 | MethodTableBuilder::bmtRTType * |
801 | MethodTableBuilder::CreateTypeChain( |
802 | MethodTable * pMT, |
803 | const Substitution & subst) |
804 | { |
805 | CONTRACTL |
806 | { |
807 | STANDARD_VM_CHECK; |
808 | INSTANCE_CHECK; |
809 | PRECONDITION(CheckPointer(GetStackingAllocator())); |
810 | PRECONDITION(CheckPointer(pMT)); |
811 | } CONTRACTL_END; |
812 | |
813 | pMT = pMT->GetCanonicalMethodTable(); |
814 | |
815 | bmtRTType * pType = new (GetStackingAllocator()) |
816 | bmtRTType(subst, pMT); |
817 | |
818 | MethodTable * pMTParent = pMT->GetParentMethodTable(); |
819 | if (pMTParent != NULL) |
820 | { |
821 | pType->SetParentType( |
822 | CreateTypeChain( |
823 | pMTParent, |
824 | pMT->GetSubstitutionForParent(&pType->GetSubstitution()))); |
825 | } |
826 | |
827 | return pType; |
828 | } |
829 | |
830 | //******************************************************************************* |
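// Walks the parent chain starting at pType and returns the node whose canonical MethodTable
// matches pTargetMT, or NULL if no such node exists.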
831 | /* static */ |
832 | MethodTableBuilder::bmtRTType * |
833 | MethodTableBuilder::bmtRTType::FindType( |
834 | bmtRTType * pType, |
835 | MethodTable * pTargetMT) |
836 | { |
837 | CONTRACTL { |
838 | STANDARD_VM_CHECK; |
839 | PRECONDITION(CheckPointer(pType)); |
840 | PRECONDITION(CheckPointer(pTargetMT)); |
841 | } CONTRACTL_END; |
842 | |
843 | pTargetMT = pTargetMT->GetCanonicalMethodTable(); |
844 | while (pType != NULL && |
845 | pType->GetMethodTable()->GetCanonicalMethodTable() != pTargetMT) |
846 | { |
847 | pType = pType->GetParentType(); |
848 | } |
849 | |
850 | return pType; |
851 | } |
852 | |
853 | //******************************************************************************* |
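// Returns the TypeDef token of the enclosing type if this type is nested, or mdTypeDefNil otherwise.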
854 | mdTypeDef |
855 | MethodTableBuilder::bmtRTType::GetEnclosingTypeToken() const |
856 | { |
857 | STANDARD_VM_CONTRACT; |
858 | |
859 | mdTypeDef tok = mdTypeDefNil; |
860 | |
861 | if (IsNested()) |
862 | { // This is guaranteed to succeed because the EEClass would not have been |
863 | // set as nested unless a valid token was stored in metadata. |
864 | if (FAILED(GetModule()->GetMDImport()->GetNestedClassProps( |
865 | GetTypeDefToken(), &tok))) |
866 | { |
867 | return mdTypeDefNil; |
868 | } |
869 | } |
870 | |
871 | return tok; |
872 | } |
873 | |
874 | //******************************************************************************* |
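// Returns true if both signatures have the same method name. The cached name hashes are
// compared first so the string comparison is only done when the hashes match.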
875 | /*static*/ bool |
876 | MethodTableBuilder::MethodSignature::NamesEqual( |
877 | const MethodSignature & sig1, |
878 | const MethodSignature & sig2) |
879 | { |
880 | STANDARD_VM_CONTRACT; |
881 | |
882 | if (sig1.GetNameHash() != sig2.GetNameHash()) |
883 | { |
884 | return false; |
885 | } |
886 | |
887 | if (strcmp(sig1.GetName(), sig2.GetName()) != 0) |
888 | { |
889 | return false; |
890 | } |
891 | |
892 | return true; |
893 | } |
894 | |
895 | //******************************************************************************* |
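// Compares the two method signatures under their respective substitutions using
// MetaSig::CompareMethodSigs.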
896 | /*static*/ bool |
897 | MethodTableBuilder::MethodSignature::SignaturesEquivalent( |
898 | const MethodSignature & sig1, |
899 | const MethodSignature & sig2) |
900 | { |
901 | STANDARD_VM_CONTRACT; |
902 | |
903 | return !!MetaSig::CompareMethodSigs( |
904 | sig1.GetSignature(), static_cast<DWORD>(sig1.GetSignatureLength()), sig1.GetModule(), &sig1.GetSubstitution(), |
905 | sig2.GetSignature(), static_cast<DWORD>(sig2.GetSignatureLength()), sig2.GetModule(), &sig2.GetSubstitution()); |
906 | } |
907 | |
908 | //******************************************************************************* |
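// Same comparison as SignaturesEquivalent, but with type equivalence forbidden via the
// TokenPairList, so the signatures must match exactly.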
909 | /*static*/ bool |
910 | MethodTableBuilder::MethodSignature::SignaturesExactlyEqual( |
911 | const MethodSignature & sig1, |
912 | const MethodSignature & sig2) |
913 | { |
914 | STANDARD_VM_CONTRACT; |
915 | |
916 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
917 | return !!MetaSig::CompareMethodSigs( |
918 | sig1.GetSignature(), static_cast<DWORD>(sig1.GetSignatureLength()), sig1.GetModule(), &sig1.GetSubstitution(), |
919 | sig2.GetSignature(), static_cast<DWORD>(sig2.GetSignatureLength()), sig2.GetModule(), &sig2.GetSubstitution(), |
920 | &newVisited); |
921 | } |
922 | |
923 | //******************************************************************************* |
924 | bool |
925 | MethodTableBuilder::MethodSignature::Equivalent( |
926 | const MethodSignature &rhs) const |
927 | { |
928 | STANDARD_VM_CONTRACT; |
929 | |
930 | return NamesEqual(*this, rhs) && SignaturesEquivalent(*this, rhs); |
931 | } |
932 | |
933 | //******************************************************************************* |
934 | bool |
935 | MethodTableBuilder::MethodSignature::ExactlyEqual( |
936 | const MethodSignature &rhs) const |
937 | { |
938 | STANDARD_VM_CONTRACT; |
939 | |
940 | return NamesEqual(*this, rhs) && SignaturesExactlyEqual(*this, rhs); |
941 | } |
942 | |
943 | //******************************************************************************* |
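// Lazily reads the method name and signature for the token (MethodDef or MemberRef) from
// metadata and caches them in m_szName, m_pSig, and m_cSig.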
944 | void |
945 | MethodTableBuilder::MethodSignature::GetMethodAttributes() const |
946 | { |
947 | STANDARD_VM_CONTRACT; |
948 | |
949 | IMDInternalImport * pIMD = GetModule()->GetMDImport(); |
950 | if (TypeFromToken(GetToken()) == mdtMethodDef) |
951 | { |
952 | DWORD cSig; |
953 | if (FAILED(pIMD->GetNameAndSigOfMethodDef(GetToken(), &m_pSig, &cSig, &m_szName))) |
954 | { // We have empty name or signature on error, do nothing |
955 | } |
956 | m_cSig = static_cast<size_t>(cSig); |
957 | } |
958 | else |
959 | { |
960 | CONSISTENCY_CHECK(TypeFromToken(m_tok) == mdtMemberRef); |
961 | DWORD cSig; |
962 | if (FAILED(pIMD->GetNameAndSigOfMemberRef(GetToken(), &m_pSig, &cSig, &m_szName))) |
963 | { // We have empty name or signature on error, do nothing |
964 | } |
965 | m_cSig = static_cast<size_t>(cSig); |
966 | } |
967 | } |
968 | |
969 | //******************************************************************************* |
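// Returns the hash of the method name, computing and caching it on first use. If the computed
// hash happens to equal the INVALID_NAME_HASH sentinel, it is adjusted so the cached value is
// never mistaken for "not yet computed".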
970 | UINT32 |
971 | MethodTableBuilder::MethodSignature::GetNameHash() const |
972 | { |
973 | STANDARD_VM_CONTRACT; |
974 | |
975 | CheckGetMethodAttributes(); |
976 | |
977 | if (m_nameHash == INVALID_NAME_HASH) |
978 | { |
979 | ULONG nameHash = HashStringA(GetName()); |
980 | if (nameHash == INVALID_NAME_HASH) |
981 | { |
982 | nameHash /= 2; |
983 | } |
984 | m_nameHash = nameHash; |
985 | } |
986 | |
987 | return m_nameHash; |
988 | } |
989 | |
990 | //******************************************************************************* |
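// Constructs the metadata-based description of the type being built: reads the TypeDef
// attributes and, if the type is nested, the enclosing TypeDef token (otherwise m_enclTok
// stays mdTypeDefNil).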
991 | MethodTableBuilder::bmtMDType::bmtMDType( |
992 | bmtRTType * pParentType, |
993 | Module * pModule, |
994 | mdTypeDef tok, |
995 | const SigTypeContext & sigContext) |
996 | : m_pParentType(pParentType), |
997 | m_pModule(pModule), |
998 | m_tok(tok), |
999 | m_enclTok(mdTypeDefNil), |
1000 | m_sigContext(sigContext), |
1001 | m_subst(), |
1002 | m_dwAttrs(0), |
1003 | m_pMT(NULL) |
1004 | { |
1005 | STANDARD_VM_CONTRACT; |
1006 | |
1007 | IfFailThrow(m_pModule->GetMDImport()->GetTypeDefProps(m_tok, &m_dwAttrs, NULL)); |
1008 | |
1009 | HRESULT hr = m_pModule->GetMDImport()->GetNestedClassProps(m_tok, &m_enclTok); |
1010 | if (FAILED(hr)) |
1011 | { |
1012 | if (hr != CLDB_E_RECORD_NOTFOUND) |
1013 | { |
1014 | ThrowHR(hr); |
1015 | } |
1016 | // Just in case GetNestedClassProps sets the out param to some other value |
1017 | m_enclTok = mdTypeDefNil; |
1018 | } |
1019 | } |
1020 | |
1021 | //******************************************************************************* |
1022 | MethodTableBuilder::bmtRTMethod::bmtRTMethod( |
1023 | bmtRTType * pOwningType, |
1024 | MethodDesc * pMD) |
1025 | : m_pOwningType(pOwningType), |
1026 | m_pMD(pMD), |
1027 | m_methodSig(pMD->GetModule(), |
1028 | pMD->GetMemberDef(), |
1029 | &pOwningType->GetSubstitution()) |
1030 | { |
1031 | CONTRACTL |
1032 | { |
1033 | THROWS; |
1034 | GC_TRIGGERS; |
1035 | MODE_ANY; |
1036 | } |
1037 | CONTRACTL_END; |
1038 | } |
1039 | |
1040 | //******************************************************************************* |
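// Captures the metadata-level description of a method on the type being built (declaration and
// implementation attributes, RVA, classification) together with its signature. The MethodDesc
// pointers and slot indices start out unset (NULL / INVALID_SLOT_INDEX).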
1041 | MethodTableBuilder::bmtMDMethod::bmtMDMethod( |
1042 | bmtMDType * pOwningType, |
1043 | mdMethodDef tok, |
1044 | DWORD dwDeclAttrs, |
1045 | DWORD dwImplAttrs, |
1046 | DWORD dwRVA, |
1047 | METHOD_TYPE type, |
1048 | METHOD_IMPL_TYPE implType) |
1049 | : m_pOwningType(pOwningType), |
1050 | m_dwDeclAttrs(dwDeclAttrs), |
1051 | m_dwImplAttrs(dwImplAttrs), |
1052 | m_dwRVA(dwRVA), |
1053 | m_type(type), |
1054 | m_implType(implType), |
1055 | m_methodSig(pOwningType->GetModule(), |
1056 | tok, |
1057 | &pOwningType->GetSubstitution()), |
1058 | m_pMD(NULL), |
1059 | m_pUnboxedMD(NULL), |
1060 | m_slotIndex(INVALID_SLOT_INDEX), |
1061 | m_unboxedSlotIndex(INVALID_SLOT_INDEX) |
1062 | { |
1063 | CONTRACTL |
1064 | { |
1065 | THROWS; |
1066 | GC_TRIGGERS; |
1067 | MODE_ANY; |
1068 | } |
1069 | CONTRACTL_END; |
1070 | } |
1071 | //******************************************************************************* |
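// Builds bmtParent->pSlotTable from the parent MethodTable. For every parent method a slot is
// recorded with its declaring and implementing bmtRTMethod; for non-virtual methods the two are
// the same.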
1072 | void |
1073 | MethodTableBuilder::ImportParentMethods() |
1074 | { |
1075 | STANDARD_VM_CONTRACT; |
1076 | |
1077 | if (!HasParent()) |
{ // If there's no parent, there are no methods to import
1079 | return; |
1080 | } |
1081 | |
1082 | SLOT_INDEX numMethods = static_cast<SLOT_INDEX> |
1083 | (GetParentMethodTable()->GetNumMethods()); |
1084 | |
1085 | bmtParent->pSlotTable = new (GetStackingAllocator()) |
1086 | bmtMethodSlotTable(numMethods, GetStackingAllocator()); |
1087 | |
1088 | MethodTable::MethodIterator it(GetParentMethodTable()); |
1089 | for (;it.IsValid(); it.Next()) |
1090 | { |
1091 | MethodDesc * pDeclDesc = NULL; |
1092 | MethodTable * pDeclMT = NULL; |
1093 | MethodDesc * pImplDesc = NULL; |
1094 | MethodTable * pImplMT = NULL; |
1095 | |
1096 | if (it.IsVirtual()) |
1097 | { |
1098 | pDeclDesc = it.GetDeclMethodDesc(); |
1099 | pDeclMT = pDeclDesc->GetMethodTable(); |
1100 | pImplDesc = it.GetMethodDesc(); |
1101 | pImplMT = pImplDesc->GetMethodTable(); |
1102 | } |
1103 | else |
1104 | { |
1105 | pDeclDesc = pImplDesc = it.GetMethodDesc(); |
1106 | pDeclMT = pImplMT = it.GetMethodDesc()->GetMethodTable(); |
1107 | } |
1108 | |
1109 | CONSISTENCY_CHECK(CheckPointer(pDeclDesc)); |
1110 | CONSISTENCY_CHECK(CheckPointer(pImplDesc)); |
1111 | |
1112 | // Create and assign to each slot |
1113 | bmtMethodSlot newSlot; |
1114 | newSlot.Decl() = new (GetStackingAllocator()) |
1115 | bmtRTMethod(bmtRTType::FindType(GetParentType(), pDeclMT), pDeclDesc); |
1116 | if (pDeclDesc == pImplDesc) |
1117 | { |
1118 | newSlot.Impl() = newSlot.Decl(); |
1119 | } |
1120 | else |
1121 | { |
1122 | newSlot.Impl() = new (GetStackingAllocator()) |
1123 | bmtRTMethod(bmtRTType::FindType(GetParentType(), pImplMT), pImplDesc); |
1124 | } |
1125 | |
1126 | if (!bmtParent->pSlotTable->AddMethodSlot(newSlot)) |
1127 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
1128 | } |
1129 | } |
1130 | |
1131 | //******************************************************************************* |
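// Copies the parent's virtual slots into the vtable being built, bumping the virtual and total
// slot counts for each slot copied.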
1132 | void |
1133 | MethodTableBuilder::CopyParentVtable() |
1134 | { |
1135 | STANDARD_VM_CONTRACT; |
1136 | |
1137 | if (!HasParent()) |
1138 | { |
1139 | return; |
1140 | } |
1141 | |
1142 | for (bmtParentInfo::Iterator it = bmtParent->IterateSlots(); |
1143 | !it.AtEnd() && it.CurrentIndex() < GetParentMethodTable()->GetNumVirtuals(); |
1144 | ++it) |
1145 | { |
1146 | if (!bmtVT->pSlotTable->AddMethodSlot(*it)) |
1147 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
1148 | ++bmtVT->cVirtualSlots; |
1149 | ++bmtVT->cTotalSlots; |
1150 | } |
1151 | } |
1152 | |
1153 | //******************************************************************************* |
1154 | // Determine if this is the special SIMD type System.Numerics.Vector<T>, whose |
1155 | // size is determined dynamically based on the hardware and the presence of JIT |
1156 | // support. |
1157 | // If so: |
1158 | // - Update the NumInstanceFieldBytes on the bmtFieldPlacement. |
1159 | // - Update the m_cbNativeSize and m_cbManagedSize if HasLayout() is true. |
1160 | // Return a BOOL result to indicate whether the size has been updated. |
1161 | // |
1162 | // Will throw IDS_EE_SIMD_NGEN_DISALLOWED if the type is System.Numerics.Vector`1 |
1163 | // and this is an ngen compilation process. |
1164 | // |
1165 | BOOL MethodTableBuilder::CheckIfSIMDAndUpdateSize() |
1166 | { |
1167 | STANDARD_VM_CONTRACT; |
1168 | |
1169 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
1170 | if (!(GetAssembly()->IsSIMDVectorAssembly() || bmtProp->fIsIntrinsicType)) |
1171 | return false; |
1172 | |
1173 | if (bmtFP->NumInstanceFieldBytes != 16) |
1174 | return false; |
1175 | |
1176 | LPCUTF8 className; |
1177 | LPCUTF8 nameSpace; |
1178 | if (FAILED(GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace))) |
1179 | return false; |
1180 | |
1181 | if (strcmp(className, "Vector`1" ) != 0 || strcmp(nameSpace, "System.Numerics" ) != 0) |
1182 | return false; |
1183 | |
1184 | if (IsCompilationProcess()) |
1185 | { |
1186 | COMPlusThrow(kTypeLoadException, IDS_EE_SIMD_NGEN_DISALLOWED); |
1187 | } |
1188 | |
1189 | #ifndef CROSSGEN_COMPILE |
1190 | if (!TargetHasAVXSupport()) |
1191 | return false; |
1192 | |
1193 | EEJitManager *jitMgr = ExecutionManager::GetEEJitManager(); |
1194 | if (jitMgr->LoadJIT()) |
1195 | { |
1196 | CORJIT_FLAGS cpuCompileFlags = jitMgr->GetCPUCompileFlags(); |
1197 | if (cpuCompileFlags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_FEATURE_SIMD)) |
1198 | { |
1199 | unsigned intrinsicSIMDVectorLength = jitMgr->m_jit->getMaxIntrinsicSIMDVectorLength(cpuCompileFlags); |
1200 | if (intrinsicSIMDVectorLength != 0) |
1201 | { |
1202 | bmtFP->NumInstanceFieldBytes = intrinsicSIMDVectorLength; |
1203 | if (HasLayout()) |
1204 | { |
1205 | GetLayoutInfo()->m_cbNativeSize = intrinsicSIMDVectorLength; |
1206 | GetLayoutInfo()->m_cbManagedSize = intrinsicSIMDVectorLength; |
1207 | } |
1208 | return true; |
1209 | } |
1210 | } |
1211 | } |
1212 | #endif // !CROSSGEN_COMPILE |
1213 | #endif // defined(_TARGET_X86_) || defined(_TARGET_AMD64_) |
1214 | return false; |
1215 | } |
1216 | |
1217 | //******************************************************************************* |
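// Allocates the slot table for this interface entry and fills it with one bmtInterfaceSlotImpl
// per virtual method of the interface; each entry starts with INVALID_SLOT_INDEX as its
// implementation slot.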
1218 | void |
1219 | MethodTableBuilder::bmtInterfaceEntry::CreateSlotTable( |
1220 | StackingAllocator * pStackingAllocator) |
1221 | { |
1222 | STANDARD_VM_CONTRACT; |
1223 | |
1224 | CONSISTENCY_CHECK(m_pImplTable == NULL); |
1225 | |
1226 | SLOT_INDEX cSlots = (SLOT_INDEX)GetInterfaceType()->GetMethodTable()->GetNumVirtuals(); |
1227 | bmtInterfaceSlotImpl * pST = new (pStackingAllocator) bmtInterfaceSlotImpl[cSlots]; |
1228 | |
1229 | MethodTable::MethodIterator it(GetInterfaceType()->GetMethodTable()); |
1230 | for (; it.IsValid(); it.Next()) |
1231 | { |
1232 | if (!it.IsVirtual()) |
1233 | { |
1234 | break; |
1235 | } |
1236 | |
1237 | bmtRTMethod * pCurMethod = new (pStackingAllocator) |
1238 | bmtRTMethod(GetInterfaceType(), it.GetDeclMethodDesc()); |
1239 | |
1240 | CONSISTENCY_CHECK(m_cImplTable == it.GetSlotNumber()); |
1241 | pST[m_cImplTable++] = bmtInterfaceSlotImpl(pCurMethod, INVALID_SLOT_INDEX); |
1242 | } |
1243 | |
1244 | m_pImplTable = pST; |
1245 | } |
1246 | |
1247 | #ifdef _PREFAST_ |
1248 | #pragma warning(push) |
1249 | #pragma warning(disable:21000) // Suppress PREFast warning about overly large function |
1250 | #endif // _PREFAST_ |
1251 | //--------------------------------------------------------------------------------------- |
1252 | // |
1253 | // Builds the method table, allocates MethodDesc, handles overloaded members, attempts to compress |
1254 | // interface storage. All dependent classes must already be resolved! |
1255 | // |
1256 | MethodTable * |
1257 | MethodTableBuilder::BuildMethodTableThrowing( |
1258 | LoaderAllocator * pAllocator, |
1259 | Module * pLoaderModule, |
1260 | Module * pModule, |
1261 | mdToken cl, |
1262 | BuildingInterfaceInfo_t * pBuildingInterfaceList, |
1263 | const LayoutRawFieldInfo * pLayoutRawFieldInfos, |
1264 | MethodTable * pParentMethodTable, |
1265 | const bmtGenericsInfo * bmtGenericsInfo, |
1266 | SigPointer parentInst, |
1267 | WORD cBuildingInterfaceList) |
1268 | { |
1269 | CONTRACTL |
1270 | { |
1271 | STANDARD_VM_CHECK; |
1272 | PRECONDITION(CheckPointer(GetHalfBakedClass())); |
1273 | PRECONDITION(CheckPointer(bmtGenericsInfo)); |
1274 | } |
1275 | CONTRACTL_END; |
1276 | |
1277 | pModule->EnsureLibraryLoaded(); |
1278 | |
1279 | // The following structs, defined as private members of MethodTableBuilder, contain the necessary local |
// parameters needed for BuildMethodTable. Look at the struct definitions for a detailed list of all
1281 | // parameters available to BuildMethodTableThrowing. |
1282 | |
1283 | SetBMTData( |
1284 | pAllocator, |
1285 | new (GetStackingAllocator()) bmtErrorInfo(), |
1286 | new (GetStackingAllocator()) bmtProperties(), |
1287 | new (GetStackingAllocator()) bmtVtable(), |
1288 | new (GetStackingAllocator()) bmtParentInfo(), |
1289 | new (GetStackingAllocator()) bmtInterfaceInfo(), |
1290 | new (GetStackingAllocator()) bmtMetaDataInfo(), |
1291 | new (GetStackingAllocator()) bmtMethodInfo(), |
1292 | new (GetStackingAllocator()) bmtMethAndFieldDescs(), |
1293 | new (GetStackingAllocator()) bmtFieldPlacement(), |
1294 | new (GetStackingAllocator()) bmtInternalInfo(), |
1295 | new (GetStackingAllocator()) bmtGCSeriesInfo(), |
1296 | new (GetStackingAllocator()) bmtMethodImplInfo(), |
1297 | bmtGenericsInfo, |
1298 | new (GetStackingAllocator()) bmtEnumFieldInfo(pModule->GetMDImport())); |
1299 | |
1300 | //Initialize structs |
1301 | |
1302 | bmtError->resIDWhy = IDS_CLASSLOAD_GENERAL; // Set the reason and the offending method def. If the method information |
1303 | bmtError->pThrowable = NULL; |
1304 | bmtError->pModule = pModule; |
1305 | bmtError->cl = cl; |
1306 | |
1307 | bmtInternal->pInternalImport = pModule->GetMDImport(); |
1308 | bmtInternal->pModule = pModule; |
1309 | |
1310 | bmtInternal->pParentMT = pParentMethodTable; |
1311 | |
1312 | // Create the chain of bmtRTType for the parent types. This allows all imported |
1313 | // parent methods to be associated with their declaring types, and as such it is |
1314 | // easy to access the appropriate Substitution when comparing signatures. |
1315 | bmtRTType * pParent = NULL; |
1316 | if (pParentMethodTable != NULL) |
1317 | { |
1318 | Substitution * pParentSubst = |
1319 | new (GetStackingAllocator()) Substitution(pModule, parentInst, NULL); |
1320 | pParent = CreateTypeChain(pParentMethodTable, *pParentSubst); |
1321 | } |
1322 | |
1323 | // Now create the bmtMDType for the type being built. |
1324 | bmtInternal->pType = new (GetStackingAllocator()) |
1325 | bmtMDType(pParent, pModule, cl, bmtGenericsInfo->typeContext); |
1326 | |
// Put the interior stack probe after all the stack-allocated goop above. We compare our 'this' pointer
// to the SP in the dtor to determine whether we are being called on an EH path or not.
1329 | INTERIOR_STACK_PROBE_FOR(GetThread(), 8); |
1330 | |
1331 | // If not NULL, it means there are some by-value fields, and this contains an entry for each inst |
1332 | |
1333 | #ifdef _DEBUG |
1334 | // Set debug class name string for easier debugging. |
1335 | LPCUTF8 className; |
1336 | LPCUTF8 nameSpace; |
1337 | if (FAILED(GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace))) |
1338 | { |
1339 | className = nameSpace = "Invalid TypeDef record" ; |
1340 | } |
1341 | |
1342 | { |
1343 | S_SIZE_T safeLen = S_SIZE_T(sizeof(char))*(S_SIZE_T(strlen(className)) + S_SIZE_T(strlen(nameSpace)) + S_SIZE_T(2)); |
1344 | if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW); |
1345 | |
1346 | size_t len = safeLen.Value(); |
1347 | char *name = (char*) AllocateFromHighFrequencyHeap(safeLen); |
1348 | strcpy_s(name, len, nameSpace); |
1349 | if (strlen(nameSpace) > 0) { |
1350 | name[strlen(nameSpace)] = '.'; |
1351 | name[strlen(nameSpace) + 1] = '\0'; |
1352 | } |
1353 | strcat_s(name, len, className); |
1354 | |
1355 | GetHalfBakedClass()->SetDebugClassName(name); |
1356 | } |
1357 | |
1358 | if (g_pConfig->ShouldBreakOnClassBuild(className)) |
1359 | { |
1360 | CONSISTENCY_CHECK_MSGF(false, ("BreakOnClassBuild: typename '%s' " , className)); |
1361 | GetHalfBakedClass()->m_fDebuggingClass = TRUE; |
1362 | } |
1363 | |
1364 | LPCUTF8 pszDebugName,pszDebugNamespace; |
1365 | if (FAILED(pModule->GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &pszDebugName, &pszDebugNamespace))) |
1366 | { |
1367 | pszDebugName = pszDebugNamespace = "Invalid TypeDef record" ; |
1368 | } |
1369 | |
1370 | StackSString debugName(SString::Utf8, pszDebugName); |
1371 | |
1372 | // If there is an instantiation, update the debug name to include instantiation type names. |
1373 | if (bmtGenerics->HasInstantiation()) |
1374 | { |
1375 | StackSString debugName(SString::Utf8, GetDebugClassName()); |
1376 | TypeString::AppendInst(debugName, bmtGenerics->GetInstantiation(), TypeString::FormatBasic); |
1377 | StackScratchBuffer buff; |
1378 | const char* pDebugNameUTF8 = debugName.GetUTF8(buff); |
1379 | S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8)) + S_SIZE_T(1); |
1380 | if(safeLen.IsOverflow()) |
1381 | COMPlusThrowHR(COR_E_OVERFLOW); |
1382 | |
1383 | size_t len = safeLen.Value(); |
1384 | char *name = (char*) AllocateFromLowFrequencyHeap(safeLen); |
1385 | strcpy_s(name, len, pDebugNameUTF8); |
1386 | GetHalfBakedClass()->SetDebugClassName(name); |
1387 | pszDebugName = (LPCUTF8)name; |
1388 | } |
1389 | |
1390 | LOG((LF_CLASSLOADER, LL_INFO1000, "Loading class \"%s%s%S\" from module \"%ws\" in domain 0x%p %s\n" , |
1391 | *pszDebugNamespace ? pszDebugNamespace : "" , |
1392 | *pszDebugNamespace ? NAMESPACE_SEPARATOR_STR : "" , |
1393 | debugName.GetUnicode(), |
1394 | pModule->GetDebugName(), |
1395 | pModule->GetDomain(), |
1396 | (pModule->IsSystem()) ? "System Domain" : "" |
1397 | )); |
1398 | #endif // _DEBUG |
1399 | |
1400 | // If this is mscorlib, then don't perform some sanity checks on the layout |
1401 | bmtProp->fNoSanityChecks = ((g_pObjectClass == NULL) || pModule == g_pObjectClass->GetModule()) || |
1402 | #ifdef FEATURE_READYTORUN |
1403 | // No sanity checks for ready-to-run compiled images if possible |
1404 | (pModule->IsReadyToRun() && pModule->GetReadyToRunInfo()->SkipTypeValidation()) || |
1405 | #endif |
1406 | // No sanity checks for real generic instantiations |
1407 | !bmtGenerics->IsTypicalTypeDefinition(); |
1408 | |
1409 | // Interfaces have a parent class of Object, but we don't really want to inherit all of |
1410 | // Object's virtual methods, so pretend we don't have a parent class - at the bottom of this |
1411 | // function we reset the parent class |
1412 | if (IsInterface()) |
1413 | { |
1414 | bmtInternal->pType->SetParentType(NULL); |
1415 | bmtInternal->pParentMT = NULL; |
1416 | } |
1417 | |
1418 | unsigned totalDeclaredFieldSize=0; |
1419 | |
// Check to see if the class is a value type, but we don't want to mark System.Enum
1421 | // as a ValueType. To accomplish this, the check takes advantage of the fact |
1422 | // that System.ValueType and System.Enum are loaded one immediately after the |
1423 | // other in that order, and so if the parent MethodTable is System.ValueType and |
1424 | // the System.Enum MethodTable is unset, then we must be building System.Enum and |
1425 | // so we don't mark it as a ValueType. |
1426 | if(HasParent() && |
1427 | ((g_pEnumClass != NULL && GetParentMethodTable() == g_pValueTypeClass) || |
1428 | GetParentMethodTable() == g_pEnumClass)) |
1429 | { |
1430 | bmtProp->fIsValueClass = true; |
1431 | |
1432 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(), |
1433 | g_CompilerServicesUnsafeValueTypeAttribute, |
1434 | NULL, NULL); |
1435 | IfFailThrow(hr); |
1436 | if (hr == S_OK) |
1437 | { |
1438 | SetUnsafeValueClass(); |
1439 | } |
1440 | |
1441 | hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(), |
1442 | g_CompilerServicesIsByRefLikeAttribute, |
1443 | NULL, NULL); |
1444 | IfFailThrow(hr); |
1445 | if (hr == S_OK) |
1446 | { |
1447 | bmtFP->fIsByRefLikeType = true; |
1448 | } |
1449 | } |
1450 | |
1451 | // Check to see if the class is an enumeration. No fancy checks like the one immediately |
1452 | // above for value types are necessary here. |
1453 | if(HasParent() && GetParentMethodTable() == g_pEnumClass) |
1454 | { |
1455 | bmtProp->fIsEnum = true; |
1456 | |
1457 | // Ensure we don't have generic enums, or at least enums that have a |
1458 | // different number of type parameters from their enclosing class. |
1459 | // The goal is to ensure that the enum's values can't depend on the |
1460 | // type parameters in any way. And we don't see any need for an |
1461 | // enum to have additional type parameters. |
1462 | if (bmtGenerics->GetNumGenericArgs() != 0) |
1463 | { |
1464 | // Nested enums can have generic type parameters from their enclosing class. |
1465 | // CLS rules require type parameters to be propagated to nested types. |
1466 | // Note that class G<T> { enum E { } } will produce "G`1+E<T>". |
1467 | // We want to disallow class G<T> { enum E<T, U> { } } |
1468 | // Perhaps the IL equivalent of class G<T> { enum E { } } should be legal. |
1469 | if (!IsNested()) |
1470 | { |
1471 | BuildMethodTableThrowException(IDS_CLASSLOAD_ENUM_EXTRA_GENERIC_TYPE_PARAM); |
1472 | } |
1473 | |
1474 | mdTypeDef tdEnclosing = mdTypeDefNil; |
1475 | HRESULT hr = GetMDImport()->GetNestedClassProps(GetCl(), &tdEnclosing); |
1476 | if (FAILED(hr)) |
1477 | ThrowHR(hr, BFA_UNABLE_TO_GET_NESTED_PROPS); |
1478 | |
1479 | HENUMInternalHolder hEnumGenericPars(GetMDImport()); |
1480 | if (FAILED(hEnumGenericPars.EnumInitNoThrow(mdtGenericParam, tdEnclosing))) |
1481 | { |
1482 | GetAssembly()->ThrowTypeLoadException(GetMDImport(), tdEnclosing, IDS_CLASSLOAD_BADFORMAT); |
1483 | } |
1484 | |
1485 | if (hEnumGenericPars.EnumGetCount() != bmtGenerics->GetNumGenericArgs()) |
1486 | { |
1487 | BuildMethodTableThrowException(IDS_CLASSLOAD_ENUM_EXTRA_GENERIC_TYPE_PARAM); |
1488 | } |
1489 | } |
1490 | } |
1491 | |
1492 | #if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) || defined(_TARGET_ARM64_) |
1493 | if (GetModule()->IsSystem() && !bmtGenerics->HasInstantiation()) |
1494 | { |
1495 | LPCUTF8 className; |
1496 | LPCUTF8 nameSpace; |
1497 | HRESULT hr = GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace); |
1498 | |
1499 | #if defined(_TARGET_ARM64_) |
1500 | // All the functions in System.Runtime.Intrinsics.Arm.Arm64 are hardware intrinsics. |
1501 | if (hr == S_OK && strcmp(nameSpace, "System.Runtime.Intrinsics.Arm.Arm64") == 0) |
1502 | #else |
1503 | // All the functions in System.Runtime.Intrinsics.X86 are hardware intrinsics. |
1504 | if (bmtInternal->pType->IsNested()) |
1505 | { |
1506 | IfFailThrow(GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetEnclosingTypeToken(), NULL, &nameSpace)); |
1507 | } |
1508 | |
1509 | if (hr == S_OK && (strcmp(nameSpace, "System.Runtime.Intrinsics.X86") == 0)) |
1510 | #endif |
1511 | { |
1512 | if (IsCompilationProcess()) |
1513 | { |
1514 | // Disable AOT compiling for managed implementation of hardware intrinsics in mscorlib. |
1515 | // We specially treat them here to ensure correct ISA features are set during compilation |
1516 | COMPlusThrow(kTypeLoadException, IDS_EE_HWINTRINSIC_NGEN_DISALLOWED); |
1517 | } |
1518 | bmtProp->fIsHardwareIntrinsic = true; |
1519 | } |
1520 | } |
1521 | #endif |
1522 | |
1523 | // If this type is marked by [Intrinsic] attribute, it may be specially treated by the runtime/compiler |
1524 | // Currently, only SIMD types have [Intrinsic] attribute |
1525 | // |
1526 | // We check this here fairly early to ensure other downstream checks on these types can be slightly more efficient. |
1527 | if (GetModule()->IsSystem() || GetAssembly()->IsSIMDVectorAssembly()) |
1528 | { |
1529 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(), |
1530 | g_CompilerServicesIntrinsicAttribute, |
1531 | NULL, |
1532 | NULL); |
1533 | |
1534 | if (hr == S_OK) |
1535 | { |
1536 | bmtProp->fIsIntrinsicType = true; |
1537 | } |
1538 | } |
1539 | |
1540 | // COM Import classes are special. These types must derive from System.Object, |
1541 | // and we then substitute the parent with System.__ComObject. |
1542 | if (IsComImport() && !IsEnum() && !IsInterface() && !IsValueClass() && !IsDelegate()) |
1543 | { |
1544 | #ifdef FEATURE_COMINTEROP |
1545 | // ComImport classes must either extend from Object or be a WinRT class |
1546 | // that extends from another WinRT class (and so form a chain of WinRT classes |
1547 | // that ultimately extend from object). |
1548 | MethodTable* pMTParent = GetParentMethodTable(); |
1549 | if ((pMTParent == NULL) || !( |
1550 | // is the parent valid? |
1551 | (pMTParent == g_pObjectClass) || |
1552 | (GetHalfBakedClass()->IsProjectedFromWinRT() && pMTParent->IsProjectedFromWinRT()) |
1553 | )) |
1554 | { |
1555 | BuildMethodTableThrowException(IDS_CLASSLOAD_CANTEXTEND); |
1556 | } |
1557 | |
1558 | if (HasLayout()) |
1559 | { |
1560 | // ComImport classes cannot have layout information. |
1561 | BuildMethodTableThrowException(IDS_CLASSLOAD_COMIMPCANNOTHAVELAYOUT); |
1562 | } |
1563 | |
1564 | if (pMTParent == g_pObjectClass) |
1565 | { |
1566 | // ComImport classes ultimately extend from our __ComObject or RuntimeClass class |
1567 | MethodTable *pCOMMT = NULL; |
1568 | if (GetHalfBakedClass()->IsProjectedFromWinRT()) |
1569 | pCOMMT = g_pBaseRuntimeClass; |
1570 | else |
1571 | pCOMMT = g_pBaseCOMObject; |
1572 | |
1573 | _ASSERTE(pCOMMT); |
1574 | |
1575 | // We could have had COM interop classes derive from System.__ComObject, |
1576 | // but instead we have them derive from System.Object, have them set the |
1577 | // ComImport bit in the type attributes, and then we swap out the parent |
1578 | // type under the covers. |
1579 | bmtInternal->pType->SetParentType(CreateTypeChain(pCOMMT, Substitution())); |
1580 | bmtInternal->pParentMT = pCOMMT; |
1581 | } |
1582 | #endif |
1583 | // if the current class is imported |
1584 | bmtProp->fIsComObjectType = true; |
1585 | } |
1586 | |
1587 | #ifdef FEATURE_COMINTEROP |
1588 | if (GetHalfBakedClass()->IsProjectedFromWinRT() && IsValueClass() && !IsEnum()) |
1589 | { |
1590 | // WinRT structures must have sequential layout |
1591 | if (!GetHalfBakedClass()->HasSequentialLayout()) |
1592 | { |
1593 | BuildMethodTableThrowException(IDS_EE_STRUCTLAYOUT_WINRT); |
1594 | } |
1595 | } |
1596 | |
1597 | // Check for special COM interop types. |
1598 | CheckForSpecialTypes(); |
1599 | |
1600 | CheckForTypeEquivalence(cBuildingInterfaceList, pBuildingInterfaceList); |
1601 | |
1602 | if (HasParent()) |
1603 | { // Types that inherit from com object types are themselves com object types. |
1604 | if (GetParentMethodTable()->IsComObjectType()) |
1605 | { |
1606 | // if the parent class is of ComObjectType |
1607 | // so is the child |
1608 | bmtProp->fIsComObjectType = true; |
1609 | } |
1610 | |
1611 | #ifdef FEATURE_TYPEEQUIVALENCE |
1612 | // If your parent is type equivalent then so are you |
1613 | if (GetParentMethodTable()->HasTypeEquivalence()) |
1614 | { |
1615 | bmtProp->fHasTypeEquivalence = true; |
1616 | } |
1617 | #endif |
1618 | } |
1619 | |
1620 | #endif // FEATURE_COMINTEROP |
1621 | |
1622 | if (!HasParent() && !IsInterface()) |
1623 | { |
1624 | if(g_pObjectClass != NULL) |
1625 | { |
1626 | if(!IsGlobalClass()) |
1627 | { |
1628 | // Non object derived types that are not the global class are prohibited by spec |
1629 | BuildMethodTableThrowException(IDS_CLASSLOAD_PARENTNULL); |
1630 | } |
1631 | } |
1632 | } |
1633 | |
1634 | // NOTE: This appears to be the earliest point during class loading that other classes MUST be loaded. |
1635 | // Resolve unresolved interfaces, determine an upper bound on the size of the interface map, |
1636 | // and determine the size of the largest interface (in # slots). |
1637 | ResolveInterfaces(cBuildingInterfaceList, pBuildingInterfaceList); |
1638 | |
1639 | // Enumerate this class's methodImpls |
1640 | EnumerateMethodImpls(); |
1641 | |
1642 | // Enumerate this class's methods and fields |
1643 | EnumerateClassMethods(); |
1644 | ValidateMethods(); |
1645 | |
1646 | EnumerateClassFields(); |
1647 | |
1648 | // Import the slots of the parent for use in placing this type's methods. |
1649 | ImportParentMethods(); |
1650 | |
1651 | // This will allocate the working versions of the VTable and NonVTable in bmtVT |
1652 | AllocateWorkingSlotTables(); |
1653 | |
1654 | // Allocate a MethodDesc* for each method (needed later when doing interfaces), and a FieldDesc* for each field |
1655 | AllocateFieldDescs(); |
1656 | |
1657 | // Copy the parent's vtable into the current type's vtable |
1658 | CopyParentVtable(); |
1659 | |
1660 | bmtVT->pDispatchMapBuilder = new (GetStackingAllocator()) DispatchMapBuilder(GetStackingAllocator()); |
1661 | |
1662 | // Determine vtable placement for each member in this class |
1663 | PlaceVirtualMethods(); |
1664 | PlaceNonVirtualMethods(); |
1665 | |
1666 | // Allocate MethodDescs (expects methods to already have been placed) |
1667 | AllocAndInitMethodDescs(); |
1668 | |
1669 | if (IsInterface()) |
1670 | { |
1671 | // |
1672 | // We need to process/place method impls for default interface method overrides. |
1673 | // We won't build dispatch map for interfaces, though. |
1674 | // |
1675 | ProcessMethodImpls(); |
1676 | PlaceMethodImpls(); |
1677 | } |
1678 | else |
1679 | { |
1680 | // |
1681 | // If we are a class, then there may be some unplaced vtable methods (which are by definition |
1682 | // interface methods, otherwise they'd already have been placed). Place as many unplaced methods |
1683 | // as possible, in the order preferred by interfaces. However, do not allow any duplicates - once |
1684 | // a method has been placed, it cannot be placed again - if we are unable to neatly place an interface, |
1685 | // create duplicate slots for it starting at dwCurrentDuplicateVtableSlot. Fill out the interface |
1686 | // map for all interfaces as they are placed. |
1687 | // |
1688 | // If we are an interface, then all methods are already placed. Fill out the interface map for |
1689 | // interfaces as they are placed. |
1690 | // |
1691 | ComputeInterfaceMapEquivalenceSet(); |
1692 | |
1693 | PlaceInterfaceMethods(); |
1694 | |
1695 | ProcessMethodImpls(); |
1696 | ProcessInexactMethodImpls(); |
1697 | PlaceMethodImpls(); |
1698 | |
1699 | if (!bmtProp->fNoSanityChecks) |
1700 | { |
1701 | // Now that interface method implementation have been fully resolved, |
1702 | // we need to make sure that type constraints are also met. |
1703 | ValidateInterfaceMethodConstraints(); |
1704 | } |
1705 | } |
1706 | |
1707 | // Verify that we have not overflowed the number of slots. |
1708 | if (!FitsInU2((UINT64)bmtVT->pSlotTable->GetSlotCount())) |
1709 | { |
1710 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
1711 | } |
1712 | |
1713 | // ensure we didn't overflow the temporary vtable |
1714 | _ASSERTE(bmtVT->pSlotTable->GetSlotCount() <= bmtVT->dwMaxVtableSize); |
1715 | |
1716 | // Allocate and initialize the dictionary for the type. This will be filled out later |
1717 | // with the final values. |
1718 | AllocAndInitDictionary(); |
1719 | |
1720 | //////////////////////////////////////////////////////////////////////////////////////////////// |
1721 | // Fields |
1722 | // |
1723 | |
1724 | // We decide here if we need a dynamic entry for our statics. We need to know this here because |
1725 | // the offsets of our fields will depend on it. For the dynamic case (which requires |
1726 | // an extra indirection through the MethodTable) we'll allocate the slot |
1727 | // in SetupMethodTable2. |
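     | // Illustrative examples of the rule below (hypothetical types, not exhaustive): a generic instantiation |
     | // such as MyType<string> with static fields gets fDynamicStatics (and fGenericsStatics) set, as does a |
     | // type in a collectible or reflection-emit module; a plain non-generic type in a regular module whose |
     | // static storage was prepared up front keeps using the module-wide static blocks. |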
1728 | if (((pAllocator->IsCollectible() || pModule->IsReflection() || bmtGenerics->HasInstantiation() || !pModule->IsStaticStoragePrepared(cl)) && |
1729 | (bmtVT->GetClassCtorSlotIndex() != INVALID_SLOT_INDEX || bmtEnumFields->dwNumStaticFields !=0)) |
1730 | #ifdef EnC_SUPPORTED |
1731 | // Classes in modules that have been edited (would do on class level if there were a |
1732 | // way to tell if the class had been edited) also have dynamic statics as the number |
1733 | // of statics might have changed, so can't use the static module-wide storage |
1734 | || (pModule->IsEditAndContinueEnabled() && |
1735 | ((EditAndContinueModule*)pModule)->GetApplyChangesCount() > CorDB_DEFAULT_ENC_FUNCTION_VERSION) |
1736 | #endif // EnC_SUPPORTED |
1737 | ) |
1738 | { |
1739 | // We will need a dynamic id |
1740 | bmtProp->fDynamicStatics = true; |
1741 | |
1742 | if (bmtGenerics->HasInstantiation()) |
1743 | { |
1744 | bmtProp->fGenericsStatics = true; |
1745 | } |
1746 | } |
1747 | |
1748 | // If not NULL, it means there are some by-value fields, and this contains an entry for each instance or static field, |
1749 | // which is NULL if not a by value field, and points to the EEClass of the field if a by value field. Instance fields |
1750 | // come first, statics come second. |
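     | // Illustrative layout (hypothetical type): for "struct S { int i; Inner v; static Other t; }" the cache |
     | // would be { NULL, Inner's MethodTable, Other's MethodTable } - instance fields first, then statics, |
     | // with NULL entries for the non-valuetype fields. |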
1751 | MethodTable ** pByValueClassCache = NULL; |
1752 | |
1753 | // Go thru all fields and initialize their FieldDescs. |
1754 | InitializeFieldDescs(GetApproxFieldDescListRaw(), pLayoutRawFieldInfos, bmtInternal, bmtGenerics, |
1755 | bmtMetaData, bmtEnumFields, bmtError, |
1756 | &pByValueClassCache, bmtMFDescs, bmtFP, |
1757 | &totalDeclaredFieldSize); |
1758 | |
1759 | // Place regular static fields |
1760 | PlaceRegularStaticFields(); |
1761 | |
1762 | // Place thread static fields |
1763 | PlaceThreadStaticFields(); |
1764 | |
1765 | LOG((LF_CODESHARING, |
1766 | LL_INFO10000, |
1767 | "Placing %d statics (%d handles) for class %s.\n" , |
1768 | GetNumStaticFields(), GetNumHandleRegularStatics() + GetNumHandleThreadStatics(), |
1769 | pszDebugName)); |
1770 | |
1771 | if (IsBlittable() || IsManagedSequential()) |
1772 | { |
1773 | bmtFP->NumGCPointerSeries = 0; |
1774 | bmtFP->NumInstanceGCPointerFields = 0; |
1775 | |
1776 | _ASSERTE(HasLayout()); |
1777 | |
1778 | bmtFP->NumInstanceFieldBytes = IsBlittable() ? GetLayoutInfo()->m_cbNativeSize |
1779 | : GetLayoutInfo()->m_cbManagedSize; |
1780 | |
1781 | // For simple Blittable types we still need to check if they have any overlapping |
1782 | // fields and call the method SetHasOverLayedFields() when they are detected. |
1783 | // |
1784 | if (HasExplicitFieldOffsetLayout()) |
1785 | { |
1786 | _ASSERTE(!bmtGenerics->fContainsGenericVariables); // A simple Blittable type can't ever be an open generic type. |
1787 | HandleExplicitLayout(pByValueClassCache); |
1788 | } |
1789 | } |
1790 | else |
1791 | { |
1792 | _ASSERTE(!IsBlittable()); |
1793 | // HandleExplicitLayout fails for the GenericTypeDefinition when |
1794 | // it will succeed for some particular instantiations. |
1795 | // Thus we only do explicit layout for real instantiations, e.g. C<int>, not |
1796 | // the open types such as the GenericTypeDefinition C<!0> or any |
1797 | // of the "fake" types involving generic type variables which are |
1798 | // used for reflection and verification, e.g. C<List<!0>>. |
1799 | // |
1800 | if (!bmtGenerics->fContainsGenericVariables && HasExplicitFieldOffsetLayout()) |
1801 | { |
1802 | HandleExplicitLayout(pByValueClassCache); |
1803 | } |
1804 | else |
1805 | { |
1806 | // Place instance fields |
1807 | PlaceInstanceFields(pByValueClassCache); |
1808 | } |
1809 | } |
1810 | |
1811 | if (CheckIfSIMDAndUpdateSize()) |
1812 | { |
1813 | totalDeclaredFieldSize = bmtFP->NumInstanceFieldBytes; |
1814 | } |
1815 | |
1816 | // We enforce that all value classes have non-zero size |
1817 | if (IsValueClass() && bmtFP->NumInstanceFieldBytes == 0) |
1818 | { |
1819 | BuildMethodTableThrowException(IDS_CLASSLOAD_ZEROSIZE); |
1820 | } |
1821 | |
1822 | if (bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA) |
1823 | { // Verify self-referencing statics with RVA (now when the ValueType size is known) |
1824 | VerifySelfReferencingStaticValueTypeFields_WithRVA(pByValueClassCache); |
1825 | } |
1826 | |
1827 | |
1828 | // Now setup the method table |
1829 | |
1830 | #ifdef FEATURE_PREJIT |
1831 | Module *pComputedPZM = pLoaderModule; |
1832 | |
1833 | if (bmtGenerics->GetNumGenericArgs() > 0) |
1834 | { |
1835 | pComputedPZM = Module::ComputePreferredZapModule(pModule, bmtGenerics->GetInstantiation()); |
1836 | } |
1837 | |
1838 | SetupMethodTable2(pLoaderModule, pComputedPZM); |
1839 | #else // FEATURE_PREJIT |
1840 | SetupMethodTable2(pLoaderModule); |
1841 | #endif // FEATURE_PREJIT |
1842 | |
1843 | MethodTable * pMT = GetHalfBakedMethodTable(); |
1844 | |
1845 | #ifdef FEATURE_64BIT_ALIGNMENT |
1846 | if (GetHalfBakedClass()->IsAlign8Candidate()) |
1847 | pMT->SetRequiresAlign8(); |
1848 | #endif |
1849 | |
1850 | if (bmtGenerics->pVarianceInfo != NULL) |
1851 | { |
1852 | pMT->SetHasVariance(); |
1853 | } |
1854 | |
1855 | if (bmtFP->NumRegularStaticGCBoxedFields != 0) |
1856 | { |
1857 | pMT->SetHasBoxedRegularStatics(); |
1858 | } |
1859 | |
1860 | if (bmtFP->fIsByRefLikeType) |
1861 | { |
1862 | pMT->SetIsByRefLike(); |
1863 | } |
1864 | |
1865 | if (IsValueClass()) |
1866 | { |
1867 | if (bmtFP->NumInstanceFieldBytes != totalDeclaredFieldSize || HasOverLayedField()) |
1868 | GetHalfBakedClass()->SetIsNotTightlyPacked(); |
1869 | |
1870 | #ifdef FEATURE_HFA |
1871 | GetHalfBakedClass()->CheckForHFA(pByValueClassCache); |
1872 | #endif |
1873 | #ifdef UNIX_AMD64_ABI |
1874 | #ifdef FEATURE_HFA |
1875 | #error Can't have FEATURE_HFA and UNIX_AMD64_ABI defined at the same time. |
1876 | #endif // FEATURE_HFA |
1877 | SystemVAmd64CheckForPassStructInRegister(); |
1878 | #endif // UNIX_AMD64_ABI |
1879 | } |
1880 | |
1881 | #ifdef UNIX_AMD64_ABI |
1882 | #ifdef FEATURE_HFA |
1883 | #error Can't have FEATURE_HFA and UNIX_AMD64_ABI defined at the same time. |
1884 | #endif // FEATURE_HFA |
1885 | if (HasLayout()) |
1886 | { |
1887 | SystemVAmd64CheckForPassNativeStructInRegister(); |
1888 | } |
1889 | #endif // UNIX_AMD64_ABI |
1890 | #ifdef FEATURE_HFA |
1891 | if (HasLayout()) |
1892 | { |
1893 | GetHalfBakedClass()->CheckForNativeHFA(); |
1894 | } |
1895 | #endif |
1896 | |
1897 | #ifdef _DEBUG |
1898 | pMT->SetDebugClassName(GetDebugClassName()); |
1899 | #endif |
1900 | |
1901 | #ifdef FEATURE_COMINTEROP |
1902 | if (IsInterface()) |
1903 | { |
1904 | GetCoClassAttribInfo(); |
1905 | } |
1906 | #endif // FEATURE_COMINTEROP |
1907 | |
1908 | if (HasExplicitFieldOffsetLayout()) |
1909 | // Perform relevant GC calculations for explicit-layout (tdExplicitLayout) types |
1910 | HandleGCForExplicitLayout(); |
1911 | else |
1912 | // Perform relevant GC calculations for value classes |
1913 | HandleGCForValueClasses(pByValueClassCache); |
1914 | |
1915 | // GC requires the series to be sorted. |
1916 | // TODO: fix it so that we emit them in the correct order in the first place. |
1917 | if (pMT->ContainsPointers()) |
1918 | { |
1919 | CGCDesc* gcDesc = CGCDesc::GetCGCDescFromMT(pMT); |
1920 | qsort(gcDesc->GetLowestSeries(), (int)gcDesc->GetNumSeries(), sizeof(CGCDescSeries), compareCGCDescSeries); |
1921 | } |
1922 | |
1923 | SetFinalizationSemantics(); |
1924 | |
1925 | // Allocate dynamic slot if necessary |
1926 | if (bmtProp->fDynamicStatics) |
1927 | { |
1928 | if (bmtProp->fGenericsStatics) |
1929 | { |
1930 | FieldDesc* pStaticFieldDescs = NULL; |
1931 | |
1932 | if (bmtEnumFields->dwNumStaticFields != 0) |
1933 | { |
1934 | pStaticFieldDescs = pMT->GetApproxFieldDescListRaw() + bmtEnumFields->dwNumInstanceFields; |
1935 | } |
1936 | |
1937 | pMT->SetupGenericsStaticsInfo(pStaticFieldDescs); |
1938 | } |
1939 | else |
1940 | { |
1941 | // Get an id for the dynamic class. We store it in the class because |
1942 | // no class that is persisted in an ngen image should have one (an ngen'ed class uses |
1943 | // the module's precomputed static storage instead). The id is stored in an optional field, |
1944 | // so we need to ensure an optional field descriptor has been allocated for this EEClass instance. |
1945 | EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, pAllocator->GetLowFrequencyHeap()); |
1946 | SetModuleDynamicID(GetModule()->AllocateDynamicEntry(pMT)); |
1947 | } |
1948 | } |
1949 | |
1950 | // |
1951 | // if there are context statics or thread statics, set the info in the method table optional members |
1952 | // |
1953 | |
1954 | // Check for the RemotingProxy Attribute |
1955 | // structs with GC pointers MUST be pointer-size aligned because the GC assumes it |
1956 | if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % TARGET_POINTER_SIZE != 0)) |
1957 | { |
1958 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
1959 | } |
1960 | |
1961 | if (IsInterface()) |
1962 | { |
1963 | // Reset parent class |
1964 | pMT->SetParentMethodTable (g_pObjectClass); |
1965 | } |
1966 | |
1967 | #ifdef _DEBUG |
1968 | // Reset the debug method names for BoxedEntryPointStubs |
1969 | // so they reflect the very best debug information for the methods |
1970 | { |
1971 | DeclaredMethodIterator methIt(*this); |
1972 | while (methIt.Next()) |
1973 | { |
1974 | if (methIt->GetUnboxedMethodDesc() != NULL) |
1975 | { |
1976 | { |
1977 | MethodDesc *pMD = methIt->GetUnboxedMethodDesc(); |
1978 | StackSString name(SString::Utf8); |
1979 | TypeString::AppendMethodDebug(name, pMD); |
1980 | StackScratchBuffer buff; |
1981 | const char* pDebugNameUTF8 = name.GetUTF8(buff); |
1982 | S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8)) + S_SIZE_T(1); |
1983 | if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW); |
1984 | size_t len = safeLen.Value(); |
1985 | pMD->m_pszDebugMethodName = (char*) AllocateFromLowFrequencyHeap(safeLen); |
1986 | _ASSERTE(pMD->m_pszDebugMethodName); |
1987 | strcpy_s((char *) pMD->m_pszDebugMethodName, len, pDebugNameUTF8); |
1988 | } |
1989 | |
1990 | { |
1991 | MethodDesc *pMD = methIt->GetMethodDesc(); |
1992 | |
1993 | StackSString name(SString::Utf8); |
1994 | TypeString::AppendMethodDebug(name, pMD); |
1995 | StackScratchBuffer buff; |
1996 | const char* pDebugNameUTF8 = name.GetUTF8(buff); |
1997 | S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8))+S_SIZE_T(1); |
1998 | if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW); |
1999 | size_t len = safeLen.Value(); |
2000 | pMD->m_pszDebugMethodName = (char*) AllocateFromLowFrequencyHeap(safeLen); |
2001 | _ASSERTE(pMD->m_pszDebugMethodName); |
2002 | strcpy_s((char *) pMD->m_pszDebugMethodName, len, pDebugNameUTF8); |
2003 | } |
2004 | } |
2005 | } |
2006 | } |
2007 | #endif // _DEBUG |
2008 | |
2009 | |
2010 | //If this is a value type, then propagate the UnsafeValueTypeAttribute from |
2011 | //its instance members to this type. |
2012 | if (IsValueClass() && !IsUnsafeValueClass()) |
2013 | { |
2014 | ApproxFieldDescIterator fields(GetHalfBakedMethodTable(), |
2015 | ApproxFieldDescIterator::INSTANCE_FIELDS ); |
2016 | FieldDesc * current; |
2017 | while (NULL != (current = fields.Next())) |
2018 | { |
2019 | CONSISTENCY_CHECK(!current->IsStatic()); |
2020 | if (current->GetFieldType() == ELEMENT_TYPE_VALUETYPE) |
2021 | { |
2022 | TypeHandle th = current->LookupApproxFieldTypeHandle(); |
2023 | CONSISTENCY_CHECK(!th.IsNull()); |
2024 | if (th.AsMethodTable()->GetClass()->IsUnsafeValueClass()) |
2025 | { |
2026 | SetUnsafeValueClass(); |
2027 | break; |
2028 | } |
2029 | } |
2030 | } |
2031 | } |
2032 | |
2033 | #ifdef FEATURE_ICASTABLE |
2034 | if (!IsValueClass() && g_pICastableInterface != NULL && pMT->CanCastToInterface(g_pICastableInterface)) |
2035 | { |
2036 | pMT->SetICastable(); |
2037 | } |
2038 | #endif // FEATURE_ICASTABLE |
2039 | |
2040 | // Grow the typedef ridmap in advance as we can't afford to |
2041 | // fail once we set the resolve bit |
2042 | pModule->EnsureTypeDefCanBeStored(bmtInternal->pType->GetTypeDefToken()); |
2043 | |
2044 | // Grow the tables in advance so that RID map filling cannot fail |
2045 | // once we're past the commit point. |
2046 | EnsureRIDMapsCanBeFilled(); |
2047 | |
2048 | { |
2049 | // NOTE. NOTE!! the EEClass can now be accessed by other threads. |
2050 | // Do NOT place any initialization after this point. |
2051 | // You may NOT fail the call after this point. |
2052 | FAULT_FORBID(); |
2053 | CANNOTTHROWCOMPLUSEXCEPTION(); |
2054 | |
2055 | /* |
2056 | GetMemTracker()->SuppressRelease(); |
2057 | */ |
2058 | } |
2059 | |
2060 | #ifdef _DEBUG |
2061 | if (g_pConfig->ShouldDumpOnClassLoad(pszDebugName)) |
2062 | { |
2063 | LOG((LF_ALWAYS, LL_ALWAYS, "Method table summary for '%s':\n", pszDebugName)); |
2064 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of static fields: %d\n", bmtEnumFields->dwNumStaticFields)); |
2065 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of instance fields: %d\n", bmtEnumFields->dwNumInstanceFields)); |
2066 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of static obj ref fields: %d\n", bmtEnumFields->dwNumStaticObjRefFields)); |
2067 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of static boxed fields: %d\n", bmtEnumFields->dwNumStaticBoxedFields)); |
2068 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared fields: %d\n", NumDeclaredFields())); |
2069 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared methods: %d\n", NumDeclaredMethods())); |
2070 | LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared non-abstract methods: %d\n", bmtMethod->dwNumDeclaredNonAbstractMethods)); |
2071 | pMT->Debug_DumpInterfaceMap("Approximate"); |
2072 | pMT->DebugDumpVtable(pszDebugName, FALSE); |
2073 | pMT->DebugDumpFieldLayout(pszDebugName, FALSE); |
2074 | pMT->DebugDumpGCDesc(pszDebugName, FALSE); |
2075 | pMT->Debug_DumpDispatchMap(); |
2076 | } |
2077 | #endif //_DEBUG |
2078 | |
2079 | STRESS_LOG3(LF_CLASSLOADER, LL_INFO1000, "MethodTableBuilder: finished method table for module %p token %x = %pT \n", |
2080 | pModule, |
2081 | GetCl(), |
2082 | GetHalfBakedMethodTable()); |
2083 | |
2084 | #ifdef MDA_SUPPORTED |
2085 | MdaMarshaling* mda = MDA_GET_ASSISTANT(Marshaling); |
2086 | if (mda && HasLayout()) |
2087 | { |
2088 | FieldMarshaler *pFieldMarshaler = (FieldMarshaler*)GetLayoutInfo()->GetFieldMarshalers(); |
2089 | UINT numReferenceFields = GetLayoutInfo()->GetNumCTMFields(); |
2090 | |
2091 | while (numReferenceFields--) |
2092 | { |
2093 | mda->ReportFieldMarshal(pFieldMarshaler); |
2094 | |
2095 | ((BYTE*&)pFieldMarshaler) += MAXFIELDMARSHALERSIZE; |
2096 | } |
2097 | } |
2098 | #endif // MDA_SUPPORTED |
2099 | |
2100 | #ifdef FEATURE_PREJIT |
2101 | _ASSERTE(pComputedPZM == Module::GetPreferredZapModuleForMethodTable(pMT)); |
2102 | #endif // FEATURE_PREJIT |
2103 | |
2104 | END_INTERIOR_STACK_PROBE; |
2105 | |
2106 | return GetHalfBakedMethodTable(); |
2107 | } // MethodTableBuilder::BuildMethodTableThrowing |
2108 | #ifdef _PREFAST_ |
2109 | #pragma warning(pop) |
2110 | #endif |
2111 | |
2112 | |
2113 | //--------------------------------------------------------------------------------------- |
2114 | // |
2115 | // Resolve unresolved interfaces, determine an upper bound on the size of the interface map. |
2116 | // |
2117 | VOID |
2118 | MethodTableBuilder::ResolveInterfaces( |
2119 | WORD cBuildingInterfaceList, |
2120 | BuildingInterfaceInfo_t * pBuildingInterfaceList) |
2121 | { |
2122 | CONTRACTL |
2123 | { |
2124 | STANDARD_VM_CHECK; |
2125 | PRECONDITION(CheckPointer(this)); |
2126 | PRECONDITION(CheckPointer(bmtAllocator)); |
2127 | PRECONDITION(CheckPointer(bmtInterface)); |
2128 | PRECONDITION(CheckPointer(bmtVT)); |
2129 | PRECONDITION(CheckPointer(bmtParent)); |
2130 | } |
2131 | CONTRACTL_END; |
2132 | |
2133 | // resolve unresolved interfaces and determine the size of the largest interface (in # slots) |
2134 | |
2135 | |
2136 | LoadApproxInterfaceMap(); |
2137 | |
2138 | // Inherit parental slot counts |
2139 | //@TODO: This doesn't belong here. |
2140 | if (HasParent()) |
2141 | { |
2142 | MethodTable * pParentClass = GetParentMethodTable(); |
2143 | PREFIX_ASSUME(pParentClass != NULL); |
2144 | |
2145 | bmtParent->NumParentPointerSeries = pParentClass->ContainsPointers() ? |
2146 | (DWORD)CGCDesc::GetCGCDescFromMT(pParentClass)->GetNumSeries() : 0; |
2147 | |
2148 | if (pParentClass->HasFieldsWhichMustBeInited()) |
2149 | { |
2150 | SetHasFieldsWhichMustBeInited(); |
2151 | } |
2152 | #ifdef FEATURE_READYTORUN |
2153 | if (!(IsValueClass() || (pParentClass == g_pObjectClass))) |
2154 | { |
2155 | CheckLayoutDependsOnOtherModules(pParentClass); |
2156 | } |
2157 | #endif |
2158 | } |
2159 | else |
2160 | { |
2161 | bmtParent->NumParentPointerSeries = 0; |
2162 | } |
2163 | } // MethodTableBuilder::ResolveInterfaces |
2164 | |
2165 | //******************************************************************************* |
2166 | /* static */ |
2167 | int __cdecl MethodTableBuilder::bmtMetaDataInfo::MethodImplTokenPair::Compare( |
2168 | const void *elem1, |
2169 | const void *elem2) |
2170 | { |
2171 | STATIC_CONTRACT_LEAF; |
2172 | MethodImplTokenPair *e1 = (MethodImplTokenPair *)elem1; |
2173 | MethodImplTokenPair *e2 = (MethodImplTokenPair *)elem2; |
2174 | if (e1->methodBody < e2->methodBody) return -1; |
2175 | else if (e1->methodBody > e2->methodBody) return 1; |
2176 | else if (e1->methodDecl < e2->methodDecl) return -1; |
2177 | else if (e1->methodDecl > e2->methodDecl) return 1; |
2178 | else return 0; |
2179 | } |
2180 | |
2181 | //******************************************************************************* |
2182 | /* static */ |
2183 | BOOL MethodTableBuilder::bmtMetaDataInfo::MethodImplTokenPair::Equal( |
2184 | const MethodImplTokenPair *elem1, |
2185 | const MethodImplTokenPair *elem2) |
2186 | { |
2187 | STATIC_CONTRACT_LEAF; |
2188 | return ((elem1->methodBody == elem2->methodBody) && |
2189 | (elem1->methodDecl == elem2->methodDecl)); |
2190 | } |
2191 | |
2192 | //******************************************************************************* |
2193 | VOID |
2194 | MethodTableBuilder::EnumerateMethodImpls() |
2195 | { |
2196 | STANDARD_VM_CONTRACT; |
2197 | |
2198 | HRESULT hr = S_OK; |
2199 | IMDInternalImport * pMDInternalImport = GetMDImport(); |
2200 | DWORD rid, maxRidMD, maxRidMR; |
2201 | HENUMInternalMethodImplHolder hEnumMethodImpl(pMDInternalImport); |
2202 | hr = hEnumMethodImpl.EnumMethodImplInitNoThrow(GetCl()); |
2203 | |
2204 | if (FAILED(hr)) |
2205 | { |
2206 | BuildMethodTableThrowException(hr, *bmtError); |
2207 | } |
2208 | |
2209 | // This gets the count out of the metadata interface. |
2210 | bmtMethod->dwNumberMethodImpls = hEnumMethodImpl.EnumMethodImplGetCount(); |
2211 | bmtMethod->dwNumberInexactMethodImplCandidates = 0; |
2212 | |
2213 | // This is the first pass. In this we will simply enumerate the token pairs and fill in |
2214 | // the data structures. In addition, we'll sort the list and eliminate duplicates. |
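     | // For example (illustrative tokens): body/decl pairs { (0x06000010, 0x0A000003), (0x06000008, 0x06000002), |
     | // (0x06000010, 0x0A000003) } sort by body token then decl token and collapse to two unique pairs |
     | // before further processing. |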
2215 | if (bmtMethod->dwNumberMethodImpls > 0) |
2216 | { |
2217 | // |
2218 | // Allocate the structures to keep track of the token pairs |
2219 | // |
2220 | bmtMetaData->rgMethodImplTokens = new (GetStackingAllocator()) |
2221 | bmtMetaDataInfo::MethodImplTokenPair[bmtMethod->dwNumberMethodImpls]; |
2222 | |
2223 | // Iterate through each MethodImpl declared on this class |
2224 | for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls; i++) |
2225 | { |
2226 | hr = hEnumMethodImpl.EnumMethodImplNext( |
2227 | &bmtMetaData->rgMethodImplTokens[i].methodBody, |
2228 | &bmtMetaData->rgMethodImplTokens[i].methodDecl); |
2229 | bmtMetaData->rgMethodImplTokens[i].fConsiderDuringInexactMethodImplProcessing = false; |
2230 | bmtMetaData->rgMethodImplTokens[i].fThrowIfUnmatchedDuringInexactMethodImplProcessing = false; |
2231 | bmtMetaData->rgMethodImplTokens[i].interfaceEquivalenceSet = 0; |
2232 | |
2233 | if (FAILED(hr)) |
2234 | { |
2235 | BuildMethodTableThrowException(hr, *bmtError); |
2236 | } |
2237 | // Grab the next set of body/decl tokens |
2238 | if (hr == S_FALSE) |
2239 | { |
2240 | // In the odd case that the enumerator fails before we've reached the total reported |
2241 | // entries, let's reset the count and just break out. (Should we throw?) |
2242 | bmtMethod->dwNumberMethodImpls = i; |
2243 | break; |
2244 | } |
2245 | } |
2246 | |
2247 | // No need to do any sorting or duplicate elimination if there's not two or more methodImpls |
2248 | if (bmtMethod->dwNumberMethodImpls > 1) |
2249 | { |
2250 | // Now sort |
2251 | qsort(bmtMetaData->rgMethodImplTokens, |
2252 | bmtMethod->dwNumberMethodImpls, |
2253 | sizeof(bmtMetaDataInfo::MethodImplTokenPair), |
2254 | &bmtMetaDataInfo::MethodImplTokenPair::Compare); |
2255 | |
2256 | // Now eliminate duplicates |
2257 | for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls - 1; i++) |
2258 | { |
2259 | CONSISTENCY_CHECK((i + 1) < bmtMethod->dwNumberMethodImpls); |
2260 | |
2261 | bmtMetaDataInfo::MethodImplTokenPair *e1 = &bmtMetaData->rgMethodImplTokens[i]; |
2262 | bmtMetaDataInfo::MethodImplTokenPair *e2 = &bmtMetaData->rgMethodImplTokens[i + 1]; |
2263 | |
2264 | // If the pair are equal, eliminate the first one, and reduce the total count by one. |
2265 | if (bmtMetaDataInfo::MethodImplTokenPair::Equal(e1, e2)) |
2266 | { |
2267 | DWORD dwCopyNum = bmtMethod->dwNumberMethodImpls - (i + 1); |
2268 | memcpy(e1, e2, dwCopyNum * sizeof(bmtMetaDataInfo::MethodImplTokenPair)); |
2269 | bmtMethod->dwNumberMethodImpls--; |
2270 | CONSISTENCY_CHECK(bmtMethod->dwNumberMethodImpls > 0); |
2271 | } |
2272 | } |
2273 | } |
2274 | } |
2275 | |
2276 | if (bmtMethod->dwNumberMethodImpls != 0) |
2277 | { |
2278 | // |
2279 | // Allocate the structures to keep track of the impl matches |
2280 | // |
2281 | bmtMetaData->pMethodDeclSubsts = new (GetStackingAllocator()) |
2282 | Substitution[bmtMethod->dwNumberMethodImpls]; |
2283 | |
2284 | // These are used for verification |
2285 | maxRidMD = pMDInternalImport->GetCountWithTokenKind(mdtMethodDef); |
2286 | maxRidMR = pMDInternalImport->GetCountWithTokenKind(mdtMemberRef); |
2287 | |
2288 | // Iterate through each MethodImpl declared on this class |
2289 | for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls; i++) |
2290 | { |
2291 | PCCOR_SIGNATURE pSigDecl = NULL; |
2292 | PCCOR_SIGNATURE pSigBody = NULL; |
2293 | ULONG cbSigDecl; |
2294 | ULONG cbSigBody; |
2295 | mdToken tkParent; |
2296 | |
2297 | mdToken theBody, theDecl; |
2298 | Substitution theDeclSubst(GetModule(), SigPointer(), NULL); // this can get updated later below. |
2299 | |
2300 | theBody = bmtMetaData->rgMethodImplTokens[i].methodBody; |
2301 | theDecl = bmtMetaData->rgMethodImplTokens[i].methodDecl; |
2302 | |
2303 | // IMPLEMENTATION LIMITATION: currently, we require that the body of a methodImpl |
2304 | // belong to the current type. This is because we need to allocate a different |
2305 | // type of MethodDesc for bodies that are part of methodImpls. |
2306 | if (TypeFromToken(theBody) != mdtMethodDef) |
2307 | { |
2308 | hr = FindMethodDeclarationForMethodImpl( |
2309 | theBody, |
2310 | &theBody, |
2311 | TRUE); |
2312 | if (FAILED(hr)) |
2313 | { |
2314 | BuildMethodTableThrowException(hr, IDS_CLASSLOAD_MI_ILLEGAL_BODY, mdMethodDefNil); |
2315 | } |
2316 | |
2317 | // Make sure to update the stored token with the resolved token. |
2318 | bmtMetaData->rgMethodImplTokens[i].methodBody = theBody; |
2319 | } |
2320 | |
2321 | if (TypeFromToken(theBody) != mdtMethodDef) |
2322 | { |
2323 | BuildMethodTableThrowException(BFA_METHODDECL_NOT_A_METHODDEF); |
2324 | } |
2325 | CONSISTENCY_CHECK(theBody == bmtMetaData->rgMethodImplTokens[i].methodBody); |
2326 | |
2327 | // |
2328 | // Now that the tokens of Decl and Body are obtained, do the MD validation |
2329 | // |
2330 | |
2331 | rid = RidFromToken(theDecl); |
2332 | |
2333 | // Perform initial rudimentary validation of the token. Full token verification |
2334 | // will be done in TestMethodImpl when placing the methodImpls. |
2335 | if (TypeFromToken(theDecl) == mdtMethodDef) |
2336 | { |
2337 | // Decl must be valid token |
2338 | if ((rid == 0) || (rid > maxRidMD)) |
2339 | { |
2340 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL); |
2341 | } |
2342 | // Get signature and length |
2343 | if (FAILED(pMDInternalImport->GetSigOfMethodDef(theDecl, &cbSigDecl, &pSigDecl))) |
2344 | { |
2345 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2346 | } |
2347 | } |
2348 | |
2349 | // The token is not a MethodDef (likely a MemberRef) |
2350 | else |
2351 | { |
2352 | // Decl must be valid token |
2353 | if ((TypeFromToken(theDecl) != mdtMemberRef) || (rid == 0) || (rid > maxRidMR)) |
2354 | { |
2355 | bmtError->resIDWhy = IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL; |
2356 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL); |
2357 | } |
2358 | |
2359 | // Get signature and length |
2360 | LPCSTR szDeclName; |
2361 | if (FAILED(pMDInternalImport->GetNameAndSigOfMemberRef(theDecl, &pSigDecl, &cbSigDecl, &szDeclName))) |
2362 | { |
2363 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2364 | } |
2365 | |
2366 | // Get parent |
2367 | hr = pMDInternalImport->GetParentToken(theDecl,&tkParent); |
2368 | if (FAILED(hr)) |
2369 | BuildMethodTableThrowException(hr, *bmtError); |
2370 | |
2371 | theDeclSubst = Substitution(tkParent, GetModule(), NULL); |
2372 | } |
2373 | |
2374 | // Perform initial rudimentary validation of the token. Full token verification |
2375 | // will be done in TestMethodImpl when placing the methodImpls. |
2376 | { |
2377 | // Body must be valid token |
2378 | rid = RidFromToken(theBody); |
2379 | if ((rid == 0)||(rid > maxRidMD)) |
2380 | { |
2381 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_BODY); |
2382 | } |
2383 | // Body's parent must be this class |
2384 | hr = pMDInternalImport->GetParentToken(theBody,&tkParent); |
2385 | if (FAILED(hr)) |
2386 | BuildMethodTableThrowException(hr, *bmtError); |
2387 | if(tkParent != GetCl()) |
2388 | { |
2389 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_BODY); |
2390 | } |
2391 | } |
2392 | // Decl's and Body's signatures must match |
2393 | if(pSigDecl && cbSigDecl) |
2394 | { |
2395 | if (FAILED(pMDInternalImport->GetSigOfMethodDef(theBody, &cbSigBody, &pSigBody)) || |
2396 | (pSigBody == NULL) || |
2397 | (cbSigBody == 0)) |
2398 | { |
2399 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MISSING_SIG_BODY); |
2400 | } |
2401 | // Can't use memcmp because there may be two AssemblyRefs |
2402 | // in this scope, pointing to the same assembly, etc. |
2403 | if (!MetaSig::CompareMethodSigs( |
2404 | pSigDecl, |
2405 | cbSigDecl, |
2406 | GetModule(), |
2407 | &theDeclSubst, |
2408 | pSigBody, |
2409 | cbSigBody, |
2410 | GetModule(), |
2411 | NULL)) |
2412 | { |
2413 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_BODY_DECL_MISMATCH); |
2414 | } |
2415 | } |
2416 | else |
2417 | { |
2418 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MISSING_SIG_DECL); |
2419 | } |
2420 | |
2421 | bmtMetaData->pMethodDeclSubsts[i] = theDeclSubst; |
2422 | } |
2423 | } |
2424 | } // MethodTableBuilder::EnumerateMethodImpls |
2425 | |
2426 | //******************************************************************************* |
2427 | // |
2428 | // Find a method declaration that must reside in the scope passed in. This method cannot be called if |
2429 | // the reference travels to another scope. |
2430 | // |
2431 | // Protect against finding a declaration that lives within |
2432 | // us (the type being created) |
2433 | // |
2434 | HRESULT MethodTableBuilder::FindMethodDeclarationForMethodImpl( |
2435 | mdToken pToken, // Token that is being located (MemberRef or MemberDef) |
2436 | mdToken* pDeclaration, // [OUT] Method definition for Member |
2437 | BOOL fSameClass) // Does the declaration need to be in this class |
2438 | { |
2439 | STANDARD_VM_CONTRACT; |
2440 | |
2441 | HRESULT hr = S_OK; |
2442 | |
2443 | IMDInternalImport *pMDInternalImport = GetMDImport(); |
2444 | |
2445 | PCCOR_SIGNATURE pSig; // Signature of Member |
2446 | DWORD cSig; |
2447 | LPCUTF8 szMember = NULL; |
2448 | |
2449 | // The token should be a member ref or def. If it is a ref then it must ultimately |
2450 | // resolve back to us (the type being created). |
2451 | if(TypeFromToken(pToken) == mdtMemberRef) |
2452 | { |
2453 | // Get the parent |
2454 | mdToken typeref; |
2455 | if (FAILED(pMDInternalImport->GetParentOfMemberRef(pToken, &typeref))) |
2456 | { |
2457 | BAD_FORMAT_NOTHROW_ASSERT(!"Invalid MemberRef record"); |
2458 | IfFailRet(COR_E_TYPELOAD); |
2459 | } |
2460 | GOTPARENT: |
2461 | if (TypeFromToken(typeref) == mdtMethodDef) |
2462 | { // If parent is a method def then this is a varargs method |
2463 | mdTypeDef typeDef; |
2464 | hr = pMDInternalImport->GetParentToken(typeref, &typeDef); |
2465 | |
2466 | if (TypeFromToken(typeDef) != mdtTypeDef) |
2467 | { // A mdtMethodDef must be parented by a mdtTypeDef |
2468 | BAD_FORMAT_NOTHROW_ASSERT(!"MethodDef without TypeDef as Parent"); |
2469 | IfFailRet(COR_E_TYPELOAD); |
2470 | } |
2471 | |
2472 | BAD_FORMAT_NOTHROW_ASSERT(typeDef == GetCl()); |
2473 | |
2474 | // This is the real method we are overriding |
2475 | *pDeclaration = typeref; |
2476 | } |
2477 | else if (TypeFromToken(typeref) == mdtTypeSpec) |
2478 | { // Added so that method impls can refer to instantiated interfaces or classes |
2479 | if (FAILED(pMDInternalImport->GetSigFromToken(typeref, &cSig, &pSig))) |
2480 | { |
2481 | BAD_FORMAT_NOTHROW_ASSERT(!"Invalid TypeSpec record"); |
2482 | IfFailRet(COR_E_TYPELOAD); |
2483 | } |
2484 | CorElementType elemType = (CorElementType) *pSig++; |
2485 | |
2486 | if (elemType == ELEMENT_TYPE_GENERICINST) |
2487 | { // If this is a generic inst, we expect that the next elem is ELEMENT_TYPE_CLASS, |
2488 | // which is handled in the case below. |
2489 | elemType = (CorElementType) *pSig++; |
2490 | BAD_FORMAT_NOTHROW_ASSERT(elemType == ELEMENT_TYPE_CLASS); |
2491 | } |
2492 | |
2493 | if (elemType == ELEMENT_TYPE_CLASS) |
2494 | { // This covers E_T_GENERICINST and E_T_CLASS typespec formats. We don't expect |
2495 | // any other kinds to come through here. |
2496 | CorSigUncompressToken(pSig, &typeref); |
2497 | } |
2498 | else |
2499 | { // This is an unrecognized signature format. |
2500 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, |
2501 | IDS_CLASSLOAD_MI_BAD_SIG, |
2502 | mdMethodDefNil); |
2503 | } |
2504 | goto GOTPARENT; |
2505 | } |
2506 | else |
2507 | { // Verify that the ref points back to us |
2508 | mdToken tkDef = mdTokenNil; |
2509 | |
2510 | if (TypeFromToken(typeref) == mdtTypeRef) |
2511 | { // We only get here when we know the token does not reference a type in a different scope. |
2512 | LPCUTF8 pszNameSpace; |
2513 | LPCUTF8 pszClassName; |
2514 | |
2515 | if (FAILED(pMDInternalImport->GetNameOfTypeRef(typeref, &pszNameSpace, &pszClassName))) |
2516 | { |
2517 | IfFailRet(COR_E_TYPELOAD); |
2518 | } |
2519 | mdToken tkRes; |
2520 | if (FAILED(pMDInternalImport->GetResolutionScopeOfTypeRef(typeref, &tkRes))) |
2521 | { |
2522 | IfFailRet(COR_E_TYPELOAD); |
2523 | } |
2524 | hr = pMDInternalImport->FindTypeDef(pszNameSpace, |
2525 | pszClassName, |
2526 | (TypeFromToken(tkRes) == mdtTypeRef) ? tkRes : mdTokenNil, |
2527 | &tkDef); |
2528 | if (FAILED(hr)) |
2529 | { |
2530 | IfFailRet(COR_E_TYPELOAD); |
2531 | } |
2532 | } |
2533 | else if (TypeFromToken(typeref) == mdtTypeDef) |
2534 | { // We get a typedef when the parent of the token is a typespec to the type. |
2535 | tkDef = typeref; |
2536 | } |
2537 | else |
2538 | { |
2539 | CONSISTENCY_CHECK_MSGF(FALSE, ("Invalid methodimpl signature in class %s.", GetDebugClassName())); |
2540 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, |
2541 | IDS_CLASSLOAD_MI_BAD_SIG, |
2542 | mdMethodDefNil); |
2543 | } |
2544 | |
2545 | if (fSameClass && tkDef != GetCl()) |
2546 | { // If we required that the typedef be the same type as the current class, |
2547 | // and it doesn't match, we need to return a failure result. |
2548 | IfFailRet(COR_E_TYPELOAD); |
2549 | } |
2550 | |
2551 | IfFailRet(pMDInternalImport->GetNameAndSigOfMemberRef(pToken, &pSig, &cSig, &szMember)); |
2552 | |
2553 | if (isCallConv( |
2554 | MetaSig::GetCallingConvention(GetModule(), Signature(pSig, cSig)), |
2555 | IMAGE_CEE_CS_CALLCONV_FIELD)) |
2556 | { |
2557 | return VLDTR_E_MR_BADCALLINGCONV; |
2558 | } |
2559 | |
2560 | hr = pMDInternalImport->FindMethodDef( |
2561 | tkDef, szMember, pSig, cSig, pDeclaration); |
2562 | |
2563 | IfFailRet(hr); |
2564 | } |
2565 | } |
2566 | else if (TypeFromToken(pToken) == mdtMethodDef) |
2567 | { |
2568 | mdTypeDef typeDef; |
2569 | |
2570 | // Verify that we are the parent |
2571 | hr = pMDInternalImport->GetParentToken(pToken, &typeDef); |
2572 | IfFailRet(hr); |
2573 | |
2574 | if(typeDef != GetCl()) |
2575 | { |
2576 | IfFailRet(COR_E_TYPELOAD); |
2577 | } |
2578 | |
2579 | *pDeclaration = pToken; |
2580 | } |
2581 | else |
2582 | { |
2583 | IfFailRet(COR_E_TYPELOAD); |
2584 | } |
2585 | return hr; |
2586 | } |
2587 | |
2588 | #ifdef _PREFAST_ |
2589 | #pragma warning(push) |
2590 | #pragma warning(disable:21000) // Suppress PREFast warning about overly large function |
2591 | #endif // _PREFAST_ |
2592 | //--------------------------------------------------------------------------------------- |
2593 | // |
2594 | // Used by BuildMethodTable |
2595 | // |
2596 | // Enumerate this class's members |
2597 | // |
2598 | VOID |
2599 | MethodTableBuilder::EnumerateClassMethods() |
2600 | { |
2601 | CONTRACTL |
2602 | { |
2603 | STANDARD_VM_CHECK; |
2604 | PRECONDITION(CheckPointer(bmtInternal)); |
2605 | PRECONDITION(CheckPointer(bmtEnumFields)); |
2606 | PRECONDITION(CheckPointer(bmtMFDescs)); |
2607 | PRECONDITION(CheckPointer(bmtProp)); |
2608 | PRECONDITION(CheckPointer(bmtMetaData)); |
2609 | PRECONDITION(CheckPointer(bmtVT)); |
2610 | PRECONDITION(CheckPointer(bmtError)); |
2611 | } |
2612 | CONTRACTL_END; |
2613 | |
2614 | HRESULT hr = S_OK; |
2615 | DWORD i; |
2616 | IMDInternalImport *pMDInternalImport = GetMDImport(); |
2617 | mdToken tok; |
2618 | DWORD dwMemberAttrs; |
2619 | BOOL fIsClassEnum = IsEnum(); |
2620 | BOOL fIsClassInterface = IsInterface(); |
2621 | BOOL fIsClassValueType = IsValueClass(); |
2622 | BOOL fIsClassComImport = IsComImport(); |
2623 | BOOL fIsClassNotAbstract = (IsTdAbstract(GetAttrClass()) == 0); |
2624 | PCCOR_SIGNATURE pMemberSignature; |
2625 | ULONG cMemberSignature; |
2626 | |
2627 | // |
2628 | // Run through the method list and calculate the following: |
2629 | // # methods. |
2630 | // # "other" methods (i.e. static or private) |
2631 | // # non-other methods |
2632 | // |
2633 | |
2634 | HENUMInternalHolder hEnumMethod(pMDInternalImport); |
2635 | hr = hEnumMethod.EnumInitNoThrow(mdtMethodDef, GetCl()); |
2636 | if (FAILED(hr)) |
2637 | { |
2638 | BuildMethodTableThrowException(hr, *bmtError); |
2639 | } |
2640 | |
2641 | // Allocate an array to contain the method tokens as well as information about the methods. |
2642 | DWORD cMethAndGaps = hEnumMethod.EnumGetCount(); |
2643 | |
2644 | if ((DWORD)MAX_SLOT_INDEX <= cMethAndGaps) |
2645 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
2646 | |
2647 | bmtMethod->m_cMaxDeclaredMethods = (SLOT_INDEX)cMethAndGaps; |
2648 | bmtMethod->m_cDeclaredMethods = 0; |
2649 | bmtMethod->m_rgDeclaredMethods = new (GetStackingAllocator()) |
2650 | bmtMDMethod *[bmtMethod->m_cMaxDeclaredMethods]; |
2651 | |
2652 | enum { SeenCtor = 1, SeenInvoke = 2, SeenBeginInvoke = 4, SeenEndInvoke = 8}; |
2653 | unsigned delegateMethodsSeen = 0; |
2654 | |
2655 | for (i = 0; i < cMethAndGaps; i++) |
2656 | { |
2657 | ULONG dwMethodRVA; |
2658 | DWORD dwImplFlags; |
2659 | METHOD_TYPE type; |
2660 | METHOD_IMPL_TYPE implType; |
2661 | LPSTR strMethodName; |
2662 | |
2663 | #ifdef FEATURE_TYPEEQUIVALENCE |
2664 | // TypeEquivalent structs must not have methods |
2665 | if (bmtProp->fIsTypeEquivalent && fIsClassValueType) |
2666 | { |
2667 | BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTMETHODS); |
2668 | } |
2669 | #endif |
2670 | |
2671 | // |
2672 | // Go to the next method and retrieve its attributes. |
2673 | // |
2674 | |
2675 | hEnumMethod.EnumNext(&tok); |
2676 | DWORD rid = RidFromToken(tok); |
2677 | if ((rid == 0)||(rid > pMDInternalImport->GetCountWithTokenKind(mdtMethodDef))) |
2678 | { |
2679 | BuildMethodTableThrowException(BFA_METHOD_TOKEN_OUT_OF_RANGE); |
2680 | } |
2681 | |
2682 | if (FAILED(pMDInternalImport->GetMethodDefProps(tok, &dwMemberAttrs))) |
2683 | { |
2684 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2685 | } |
2686 | if (IsMdRTSpecialName(dwMemberAttrs) || IsMdVirtual(dwMemberAttrs) || IsDelegate()) |
2687 | { |
2688 | if (FAILED(pMDInternalImport->GetNameOfMethodDef(tok, (LPCSTR *)&strMethodName))) |
2689 | { |
2690 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2691 | } |
2692 | if(IsStrLongerThan(strMethodName,MAX_CLASS_NAME)) |
2693 | { |
2694 | BuildMethodTableThrowException(BFA_METHOD_NAME_TOO_LONG); |
2695 | } |
2696 | } |
2697 | else |
2698 | { |
2699 | strMethodName = NULL; |
2700 | } |
2701 | |
2702 | DWORD numGenericMethodArgs = 0; |
2703 | |
2704 | { |
2705 | HENUMInternalHolder hEnumTyPars(pMDInternalImport); |
2706 | hr = hEnumTyPars.EnumInitNoThrow(mdtGenericParam, tok); |
2707 | if (FAILED(hr)) |
2708 | { |
2709 | BuildMethodTableThrowException(hr, *bmtError); |
2710 | } |
2711 | |
2712 | numGenericMethodArgs = hEnumTyPars.EnumGetCount(); |
2713 | |
2714 | // We do not want to support context-bound objects with generic methods. |
2715 | |
2716 | if (numGenericMethodArgs != 0) |
2717 | { |
2718 | HENUMInternalHolder hEnumGenericPars(pMDInternalImport); |
2719 | |
2720 | hEnumGenericPars.EnumInit(mdtGenericParam, tok); |
2721 | |
2722 | for (unsigned methIdx = 0; methIdx < numGenericMethodArgs; methIdx++) |
2723 | { |
2724 | mdGenericParam tkTyPar; |
2725 | pMDInternalImport->EnumNext(&hEnumGenericPars, &tkTyPar); |
2726 | DWORD flags; |
2727 | if (FAILED(pMDInternalImport->GetGenericParamProps(tkTyPar, NULL, &flags, NULL, NULL, NULL))) |
2728 | { |
2729 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2730 | } |
2731 | |
2732 | if (0 != (flags & ~(gpVarianceMask | gpSpecialConstraintMask))) |
2733 | { |
2734 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2735 | } |
2736 | switch (flags & gpVarianceMask) |
2737 | { |
2738 | case gpNonVariant: |
2739 | break; |
2740 | |
2741 | case gpCovariant: // intentional fallthru |
2742 | case gpContravariant: |
2743 | BuildMethodTableThrowException(VLDTR_E_GP_ILLEGAL_VARIANT_MVAR); |
2744 | break; |
2745 | |
2746 | default: |
2747 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
2748 | } |
2749 | |
2750 | } |
2751 | } |
2752 | } |
2753 | |
2754 | // |
2755 | // We need to check if there are any gaps in the vtable. These are |
2756 | // represented by methods with the mdSpecial flag and a name of the form |
2757 | // _VtblGap_nnn (to represent nnn empty slots) or _VtblGap (to represent a |
2758 | // single empty slot). |
2759 | // |
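     | // Illustrative names following the convention parsed below: "_VtblGap3_16" reserves 16 slots |
     | // (the "3" after "Gap" is the ignored sequence number), "_VtblGap_2" reserves 2 slots, and a |
     | // bare "_VtblGap" reserves a single slot. |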
2760 | |
2761 | if (IsMdRTSpecialName(dwMemberAttrs)) |
2762 | { |
2763 | PREFIX_ASSUME(strMethodName != NULL); // if we've gotten here we've called GetNameOfMethodDef |
2764 | |
2765 | // The slot is special, but it might not be a vtable spacer. To |
2766 | // determine that we must look at the name. |
2767 | if (strncmp(strMethodName, "_VtblGap", 8) == 0) |
2768 | { |
2769 | // |
2770 | // This slot doesn't really exist, don't add it to the method |
2771 | // table. Instead it represents one or more empty slots, encoded |
2772 | // in the method name. Locate the beginning of the count in the |
2773 | // name. There are these points to consider: |
2774 | // There may be no count present at all (in which case the |
2775 | // count is taken as one). |
2776 | // There may be an additional count just after Gap but before |
2777 | // the '_'. We ignore this. |
2778 | // |
2779 | |
2780 | LPCSTR pos = strMethodName + 8; |
2781 | |
2782 | // Skip optional number. |
2783 | while (IS_DIGIT(*pos)) |
2784 | pos++; |
2785 | |
2786 | WORD n = 0; |
2787 | |
2788 | // Check for presence of count. |
2789 | if (*pos == '\0') |
2790 | n = 1; |
2791 | else |
2792 | { |
2793 | if (*pos != '_') |
2794 | { |
2795 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, |
2796 | IDS_CLASSLOAD_BADSPECIALMETHOD, |
2797 | tok); |
2798 | } |
2799 | |
2800 | // Skip '_'. |
2801 | pos++; |
2802 | |
2803 | // Read count. |
2804 | bool fReadAtLeastOneDigit = false; |
2805 | while (IS_DIGIT(*pos)) |
2806 | { |
2807 | _ASSERTE(n < 6552); |
2808 | n *= 10; |
2809 | n += DIGIT_TO_INT(*pos); |
2810 | pos++; |
2811 | fReadAtLeastOneDigit = true; |
2812 | } |
2813 | |
2814 | // Check for end of name. |
2815 | if (*pos != '\0' || !fReadAtLeastOneDigit) |
2816 | { |
2817 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, |
2818 | IDS_CLASSLOAD_BADSPECIALMETHOD, |
2819 | tok); |
2820 | } |
2821 | } |
2822 | |
2823 | #ifdef FEATURE_COMINTEROP |
2824 | // Record vtable gap in mapping list. The map is an optional field, so ensure we've allocated |
2825 | // these fields first. |
2826 | EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
2827 | if (GetHalfBakedClass()->GetSparseCOMInteropVTableMap() == NULL) |
2828 | GetHalfBakedClass()->SetSparseCOMInteropVTableMap(new SparseVTableMap()); |
2829 | |
2830 | GetHalfBakedClass()->GetSparseCOMInteropVTableMap()->RecordGap((WORD)NumDeclaredMethods(), n); |
2831 | |
2832 | bmtProp->fSparse = true; |
2833 | #endif // FEATURE_COMINTEROP |
2834 | continue; |
2835 | } |
2836 | |
2837 | } |
2838 | |
2839 | |
2840 | // |
2841 | // This is a real method so add it to the enumeration of methods. We now need to retrieve |
2842 | // information on the method and store it for later use. |
2843 | // |
2844 | if (FAILED(pMDInternalImport->GetMethodImplProps(tok, &dwMethodRVA, &dwImplFlags))) |
2845 | { |
2846 | BuildMethodTableThrowException( |
2847 | COR_E_BADIMAGEFORMAT, |
2848 | IDS_CLASSLOAD_BADSPECIALMETHOD, |
2849 | tok); |
2850 | } |
2851 | // |
2852 | // But first - minimal flags validity checks |
2853 | // |
2854 | // No methods in Enums! |
2855 | if (fIsClassEnum) |
2856 | { |
2857 | BuildMethodTableThrowException(BFA_METHOD_IN_A_ENUM); |
2858 | } |
2859 | // RVA : 0 |
2860 | if (dwMethodRVA != 0) |
2861 | { |
2862 | if(fIsClassComImport) |
2863 | { |
2864 | BuildMethodTableThrowException(BFA_METHOD_WITH_NONZERO_RVA); |
2865 | } |
2866 | if(IsMdAbstract(dwMemberAttrs)) |
2867 | { |
2868 | BuildMethodTableThrowException(BFA_ABSTRACT_METHOD_WITH_RVA); |
2869 | } |
2870 | if(IsMiRuntime(dwImplFlags)) |
2871 | { |
2872 | BuildMethodTableThrowException(BFA_RUNTIME_METHOD_WITH_RVA); |
2873 | } |
2874 | if(IsMiInternalCall(dwImplFlags)) |
2875 | { |
2876 | BuildMethodTableThrowException(BFA_INTERNAL_METHOD_WITH_RVA); |
2877 | } |
2878 | } |
2879 | |
2880 | // Abstract / not abstract |
2881 | if(IsMdAbstract(dwMemberAttrs)) |
2882 | { |
2883 | if(fIsClassNotAbstract) |
2884 | { |
2885 | BuildMethodTableThrowException(BFA_AB_METHOD_IN_AB_CLASS); |
2886 | } |
2887 | if(!IsMdVirtual(dwMemberAttrs)) |
2888 | { |
2889 | BuildMethodTableThrowException(BFA_NONVIRT_AB_METHOD); |
2890 | } |
2891 | } |
2892 | else if(fIsClassInterface) |
2893 | { |
2894 | if (IsMdRTSpecialName(dwMemberAttrs)) |
2895 | { |
2896 | CONSISTENCY_CHECK(CheckPointer(strMethodName)); |
2897 | if (strcmp(strMethodName, COR_CCTOR_METHOD_NAME)) |
2898 | { |
2899 | BuildMethodTableThrowException(BFA_NONAB_NONCCTOR_METHOD_ON_INT); |
2900 | } |
2901 | } |
2902 | } |
2903 | |
2904 | // Virtual / not virtual |
2905 | if(IsMdVirtual(dwMemberAttrs)) |
2906 | { |
2907 | if(IsMdPinvokeImpl(dwMemberAttrs)) |
2908 | { |
2909 | BuildMethodTableThrowException(BFA_VIRTUAL_PINVOKE_METHOD); |
2910 | } |
2911 | if(IsMdStatic(dwMemberAttrs)) |
2912 | { |
2913 | BuildMethodTableThrowException(BFA_VIRTUAL_STATIC_METHOD); |
2914 | } |
2915 | if(strMethodName && (0==strcmp(strMethodName, COR_CTOR_METHOD_NAME))) |
2916 | { |
2917 | BuildMethodTableThrowException(BFA_VIRTUAL_INSTANCE_CTOR); |
2918 | } |
2919 | } |
2920 | |
2921 | #ifndef FEATURE_DEFAULT_INTERFACES |
2922 | // Some interface checks. |
2923 | if (fIsClassInterface) |
2924 | { |
2925 | if (IsMdVirtual(dwMemberAttrs)) |
2926 | { |
2927 | if (!IsMdAbstract(dwMemberAttrs)) |
2928 | { |
2929 | BuildMethodTableThrowException(BFA_VIRTUAL_NONAB_INT_METHOD); |
2930 | } |
2931 | } |
2932 | else |
2933 | { |
                // Instance (non-static) method
2935 | if (!IsMdStatic(dwMemberAttrs)) |
2936 | { |
2937 | BuildMethodTableThrowException(BFA_NONVIRT_INST_INT_METHOD); |
2938 | } |
2939 | } |
2940 | } |
2941 | #endif |
2942 | |
2943 | // No synchronized methods in ValueTypes |
2944 | if(fIsClassValueType && IsMiSynchronized(dwImplFlags)) |
2945 | { |
2946 | BuildMethodTableThrowException(BFA_SYNC_METHOD_IN_VT); |
2947 | } |
2948 | |
2949 | // Global methods: |
2950 | if(IsGlobalClass()) |
2951 | { |
2952 | if(!IsMdStatic(dwMemberAttrs)) |
2953 | { |
2954 | BuildMethodTableThrowException(BFA_NONSTATIC_GLOBAL_METHOD); |
2955 | } |
2956 | if (strMethodName) //<TODO>@todo: investigate mc++ generating null name</TODO> |
2957 | { |
2958 | if(0==strcmp(strMethodName, COR_CTOR_METHOD_NAME)) |
2959 | { |
2960 | BuildMethodTableThrowException(BFA_GLOBAL_INST_CTOR); |
2961 | } |
2962 | } |
2963 | } |
        //@GENERICS:
        // Generic methods, and methods in generic classes, may not be members of a COM Import
        // class (except for WinRT), P/Invoke methods, or internal calls defined outside mscorlib.
2967 | if ((bmtGenerics->GetNumGenericArgs() != 0 || numGenericMethodArgs != 0) && |
2968 | ( |
2969 | #ifdef FEATURE_COMINTEROP |
2970 | fIsClassComImport || |
2971 | bmtProp->fComEventItfType || |
2972 | #endif // FEATURE_COMINTEROP |
2973 | IsMdPinvokeImpl(dwMemberAttrs) || |
2974 | (IsMiInternalCall(dwImplFlags) && !GetModule()->IsSystem()))) |
2975 | { |
2976 | #ifdef FEATURE_COMINTEROP |
2977 | if (!GetHalfBakedClass()->IsProjectedFromWinRT()) |
2978 | #endif // FEATURE_COMINTEROP |
2979 | { |
2980 | BuildMethodTableThrowException(BFA_BAD_PLACE_FOR_GENERIC_METHOD); |
2981 | } |
2982 | } |
2983 | |
        // Generic methods may not be marked "runtime". Note, however, that methods in generic
        // delegate classes are marked "runtime", which is why this restriction is applied only to
        // methods that are themselves generic rather than to all methods in generic classes.
2987 | if (numGenericMethodArgs != 0 && IsMiRuntime(dwImplFlags)) |
2988 | { |
2989 | BuildMethodTableThrowException(BFA_GENERIC_METHOD_RUNTIME_IMPL); |
2990 | } |
2991 | |
2992 | |
2993 | // Signature validation |
2994 | if (FAILED(pMDInternalImport->GetSigOfMethodDef(tok, &cMemberSignature, &pMemberSignature))) |
2995 | { |
2996 | BuildMethodTableThrowException(hr, BFA_BAD_SIGNATURE, mdMethodDefNil); |
2997 | } |
2998 | hr = validateTokenSig(tok,pMemberSignature,cMemberSignature,dwMemberAttrs,pMDInternalImport); |
2999 | if (FAILED(hr)) |
3000 | { |
3001 | BuildMethodTableThrowException(hr, BFA_BAD_SIGNATURE, mdMethodDefNil); |
3002 | } |
3003 | |
        // Check the use of covariant and contravariant type parameters in the method signature.
        // Note that variance is only supported on interfaces and delegates.
3006 | if (bmtGenerics->pVarianceInfo != NULL) |
3007 | { |
3008 | SigPointer sp(pMemberSignature, cMemberSignature); |
3009 | ULONG callConv; |
3010 | IfFailThrow(sp.GetCallingConvInfo(&callConv)); |
3011 | |
3012 | if (callConv & IMAGE_CEE_CS_CALLCONV_GENERIC) |
3013 | IfFailThrow(sp.GetData(NULL)); |
3014 | |
3015 | DWORD numArgs; |
3016 | IfFailThrow(sp.GetData(&numArgs)); |
3017 | |
3018 | // Return type behaves covariantly |
3019 | if (!EEClass::CheckVarianceInSig( |
3020 | bmtGenerics->GetNumGenericArgs(), |
3021 | bmtGenerics->pVarianceInfo, |
3022 | GetModule(), |
3023 | sp, |
3024 | gpCovariant)) |
3025 | { |
3026 | BuildMethodTableThrowException(IDS_CLASSLOAD_VARIANCE_IN_METHOD_RESULT, tok); |
3027 | } |
3028 | IfFailThrow(sp.SkipExactlyOne()); |
3029 | for (DWORD j = 0; j < numArgs; j++) |
3030 | { |
3031 | // Argument types behave contravariantly |
3032 | if (!EEClass::CheckVarianceInSig(bmtGenerics->GetNumGenericArgs(), |
3033 | bmtGenerics->pVarianceInfo, |
3034 | GetModule(), |
3035 | sp, |
3036 | gpContravariant)) |
3037 | { |
3038 | BuildMethodTableThrowException(IDS_CLASSLOAD_VARIANCE_IN_METHOD_ARG, tok); |
3039 | } |
3040 | IfFailThrow(sp.SkipExactlyOne()); |
3041 | } |
3042 | } |
3043 | |
3044 | // |
3045 | // Determine the method's type |
3046 | // |
3047 | |
3048 | if (IsReallyMdPinvokeImpl(dwMemberAttrs) || IsMiInternalCall(dwImplFlags)) |
3049 | { |
3050 | hr = NDirect::HasNAT_LAttribute(pMDInternalImport, tok, dwMemberAttrs); |
3051 | |
3052 | // There was a problem querying for the attribute |
3053 | if (FAILED(hr)) |
3054 | { |
3055 | BuildMethodTableThrowException(hr, IDS_CLASSLOAD_BADPINVOKE, tok); |
3056 | } |
3057 | |
3058 | // The attribute is not present |
3059 | if (hr == S_FALSE) |
3060 | { |
3061 | #ifdef FEATURE_COMINTEROP |
3062 | if (fIsClassComImport |
3063 | || GetHalfBakedClass()->IsProjectedFromWinRT() |
3064 | || bmtProp->fComEventItfType |
3065 | ) |
3066 | { |
3067 | // ComImport classes have methods which are just used |
3068 | // for implementing all interfaces the class supports |
3069 | type = METHOD_TYPE_COMINTEROP; |
3070 | |
3071 | // constructor is special |
3072 | if (IsMdRTSpecialName(dwMemberAttrs)) |
3073 | { |
3074 | // Note: Method name (.ctor) will be checked in code:ValidateMethods |
3075 | |
3076 | // WinRT ctors are interop calls via stubs |
3077 | if (!GetHalfBakedClass()->IsProjectedFromWinRT()) |
3078 | { |
3079 | // Ctor on a non-WinRT class |
3080 | type = METHOD_TYPE_FCALL; |
3081 | } |
3082 | } |
3083 | } |
3084 | else |
3085 | #endif //FEATURE_COMINTEROP |
3086 | if (dwMethodRVA == 0) |
3087 | { |
3088 | type = METHOD_TYPE_FCALL; |
3089 | } |
3090 | else |
3091 | { |
3092 | type = METHOD_TYPE_NDIRECT; |
3093 | } |
3094 | } |
3095 | // The NAT_L attribute is present, marking this method as NDirect |
3096 | else |
3097 | { |
3098 | CONSISTENCY_CHECK(hr == S_OK); |
3099 | type = METHOD_TYPE_NDIRECT; |
3100 | } |
3101 | } |
3102 | else if (IsMiRuntime(dwImplFlags)) |
3103 | { |
        // Currently the only runtime-implemented methods are delegate instance methods.
3105 | if (!IsDelegate() || IsMdStatic(dwMemberAttrs) || IsMdAbstract(dwMemberAttrs)) |
3106 | { |
3107 | BuildMethodTableThrowException(BFA_BAD_RUNTIME_IMPL); |
3108 | } |
3109 | |
3110 | unsigned newDelegateMethodSeen = 0; |
3111 | |
3112 | if (IsMdRTSpecialName(dwMemberAttrs)) // .ctor |
3113 | { |
3114 | if (strcmp(strMethodName, COR_CTOR_METHOD_NAME) != 0 || IsMdVirtual(dwMemberAttrs)) |
3115 | { |
3116 | BuildMethodTableThrowException(BFA_BAD_FLAGS_ON_DELEGATE); |
3117 | } |
3118 | newDelegateMethodSeen = SeenCtor; |
3119 | type = METHOD_TYPE_FCALL; |
3120 | } |
3121 | else |
3122 | { |
3123 | if (strcmp(strMethodName, "Invoke" ) == 0) |
3124 | newDelegateMethodSeen = SeenInvoke; |
3125 | else if (strcmp(strMethodName, "BeginInvoke" ) == 0) |
3126 | newDelegateMethodSeen = SeenBeginInvoke; |
3127 | else if (strcmp(strMethodName, "EndInvoke" ) == 0) |
3128 | newDelegateMethodSeen = SeenEndInvoke; |
3129 | else |
3130 | { |
3131 | BuildMethodTableThrowException(BFA_UNKNOWN_DELEGATE_METHOD); |
3132 | } |
3133 | type = METHOD_TYPE_EEIMPL; |
3134 | } |
3135 | |
3136 | // If we get here we have either set newDelegateMethodSeen or we have thrown a BMT exception |
3137 | _ASSERTE(newDelegateMethodSeen != 0); |
3138 | |
3139 | if ((delegateMethodsSeen & newDelegateMethodSeen) != 0) |
3140 | { |
3141 | BuildMethodTableThrowException(BFA_DUPLICATE_DELEGATE_METHOD); |
3142 | } |
3143 | |
3144 | delegateMethodsSeen |= newDelegateMethodSeen; |
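            // Illustrative example: for a C# declaration such as
            //     delegate int D(string s);
            // the compiler emits a runtime-implemented .ctor(object, native int) and Invoke
            // (plus BeginInvoke/EndInvoke for compilers that emit the async pattern), which is
            // exactly the set of special methods tracked in delegateMethodsSeen.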
3145 | } |
3146 | else if (numGenericMethodArgs != 0) |
3147 | { |
            // We use an instantiated method desc to represent a generic method.
3149 | type = METHOD_TYPE_INSTANTIATED; |
3150 | } |
3151 | else if (fIsClassInterface) |
3152 | { |
3153 | #ifdef FEATURE_COMINTEROP |
3154 | if (IsMdStatic(dwMemberAttrs)) |
3155 | { |
3156 | // Static methods in interfaces need nothing special. |
3157 | type = METHOD_TYPE_NORMAL; |
3158 | } |
3159 | else if (bmtGenerics->GetNumGenericArgs() != 0 && |
3160 | (bmtGenerics->fSharedByGenericInstantiations || (!bmtProp->fIsRedirectedInterface && !GetHalfBakedClass()->IsProjectedFromWinRT()))) |
3161 | { |
3162 | // Methods in instantiated interfaces need nothing special - they are not visible from COM etc. |
3163 | // mcComInterop is only useful for unshared instantiated WinRT interfaces. If the interface is |
3164 | // shared by multiple instantiations, the MD would be useless for interop anyway. |
3165 | type = METHOD_TYPE_NORMAL; |
3166 | } |
3167 | else if (bmtProp->fIsMngStandardItf) |
3168 | { |
3169 | // If the interface is a standard managed interface then allocate space for an FCall method desc. |
3170 | type = METHOD_TYPE_FCALL; |
3171 | } |
3172 | else if (IsMdAbstract(dwMemberAttrs)) |
3173 | { |
3174 | // If COM interop is supported then all other interface MDs may be |
3175 | // accessed via COM interop. mcComInterop MDs have an additional |
3176 | // pointer-sized field pointing to COM interop data which are |
3177 | // allocated lazily when/if the MD actually gets used for interop. |
3178 | type = METHOD_TYPE_COMINTEROP; |
3179 | } |
3180 | else |
#endif // FEATURE_COMINTEROP
3182 | { |
3183 | // This codepath is used by remoting |
3184 | type = METHOD_TYPE_NORMAL; |
3185 | } |
3186 | } |
3187 | else |
3188 | { |
3189 | type = METHOD_TYPE_NORMAL; |
3190 | } |
3191 | |
3192 | // Generic methods should always be METHOD_TYPE_INSTANTIATED |
3193 | if ((numGenericMethodArgs != 0) && (type != METHOD_TYPE_INSTANTIATED)) |
3194 | { |
3195 | BuildMethodTableThrowException(BFA_GENERIC_METHODS_INST); |
3196 | } |
3197 | |
        // Determine whether this method serves as the body of a MethodImpl declared on this type.
        // All method bodies are defined on this type, so we can simply compare tok with the body
        // tokens recorded for the MethodImpl entries.
3201 | implType = METHOD_IMPL_NOT; |
3202 | for (DWORD impls = 0; impls < bmtMethod->dwNumberMethodImpls; impls++) |
3203 | { |
3204 | if (bmtMetaData->rgMethodImplTokens[impls].methodBody == tok) |
3205 | { |
3206 | implType = METHOD_IMPL; |
3207 | break; |
3208 | } |
3209 | } |
3210 | |
3211 | // For delegates we don't allow any non-runtime implemented bodies |
3212 | // for any of the four special methods |
3213 | if (IsDelegate() && !IsMiRuntime(dwImplFlags)) |
3214 | { |
3215 | if ((strcmp(strMethodName, COR_CTOR_METHOD_NAME) == 0) || |
3216 | (strcmp(strMethodName, "Invoke" ) == 0) || |
3217 | (strcmp(strMethodName, "BeginInvoke" ) == 0) || |
3218 | (strcmp(strMethodName, "EndInvoke" ) == 0) ) |
3219 | { |
3220 | BuildMethodTableThrowException(BFA_ILLEGAL_DELEGATE_METHOD); |
3221 | } |
3222 | } |
3223 | |
3224 | // |
3225 | // Create a new bmtMDMethod representing this method and add it to the |
3226 | // declared method list. |
3227 | // |
3228 | |
3229 | bmtMDMethod * pNewMethod = new (GetStackingAllocator()) bmtMDMethod( |
3230 | bmtInternal->pType, |
3231 | tok, |
3232 | dwMemberAttrs, |
3233 | dwImplFlags, |
3234 | dwMethodRVA, |
3235 | type, |
3236 | implType); |
3237 | |
3238 | bmtMethod->AddDeclaredMethod(pNewMethod); |
3239 | |
3240 | // |
3241 | // Update the count of the various types of methods. |
3242 | // |
3243 | |
3244 | bmtVT->dwMaxVtableSize++; |
3245 | |
3246 | // Increment the number of non-abstract declared methods |
3247 | if (!IsMdAbstract(dwMemberAttrs)) |
3248 | { |
3249 | bmtMethod->dwNumDeclaredNonAbstractMethods++; |
3250 | } |
3251 | } |
3252 | |
3253 | // Check to see that we have all of the required delegate methods (ECMA 13.6 Delegates) |
3254 | if (IsDelegate()) |
3255 | { |
        // Do we have all four special delegate methods,
        // or just the two mandatory ones (.ctor and Invoke)?
3258 | if ((delegateMethodsSeen != (SeenCtor | SeenInvoke | SeenBeginInvoke | SeenEndInvoke)) && |
3259 | (delegateMethodsSeen != (SeenCtor | SeenInvoke)) ) |
3260 | { |
3261 | BuildMethodTableThrowException(BFA_MISSING_DELEGATE_METHOD); |
3262 | } |
3263 | } |
3264 | |
3265 | if (i != cMethAndGaps) |
3266 | { |
3267 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_METHOD_COUNT, mdTokenNil); |
3268 | } |
3269 | |
3270 | #ifdef FEATURE_COMINTEROP |
3271 | // |
3272 | // If the interface is sparse, we need to finalize the mapping list by |
3273 | // telling it how many real methods we found. |
3274 | // |
3275 | |
3276 | if (bmtProp->fSparse) |
3277 | { |
3278 | GetHalfBakedClass()->GetSparseCOMInteropVTableMap()->FinalizeMapping(NumDeclaredMethods()); |
3279 | } |
3280 | #endif // FEATURE_COMINTEROP |
3281 | } // MethodTableBuilder::EnumerateClassMethods |
3282 | #ifdef _PREFAST_ |
3283 | #pragma warning(pop) |
3284 | #endif |
3285 | |
3286 | //******************************************************************************* |
3287 | // |
3288 | // Run through the field list and calculate the following: |
3289 | // # static fields |
3290 | // # static fields that contain object refs. |
3291 | // # instance fields |
3292 | // |
3293 | VOID |
3294 | MethodTableBuilder::EnumerateClassFields() |
3295 | { |
3296 | STANDARD_VM_CONTRACT; |
3297 | |
3298 | HRESULT hr = S_OK; |
3299 | DWORD i; |
3300 | IMDInternalImport *pMDInternalImport = GetMDImport(); |
3301 | mdToken tok; |
3302 | DWORD dwMemberAttrs; |
3303 | |
3304 | bmtEnumFields->dwNumStaticFields = 0; |
3305 | bmtEnumFields->dwNumStaticObjRefFields = 0; |
3306 | bmtEnumFields->dwNumStaticBoxedFields = 0; |
3307 | |
3308 | bmtEnumFields->dwNumThreadStaticFields = 0; |
3309 | bmtEnumFields->dwNumThreadStaticObjRefFields = 0; |
3310 | bmtEnumFields->dwNumThreadStaticBoxedFields = 0; |
3311 | |
3312 | bmtEnumFields->dwNumInstanceFields = 0; |
3313 | |
3314 | HENUMInternalHolder hEnumField(pMDInternalImport); |
3315 | hr = hEnumField.EnumInitNoThrow(mdtFieldDef, GetCl()); |
3316 | if (FAILED(hr)) |
3317 | { |
3318 | BuildMethodTableThrowException(hr, *bmtError); |
3319 | } |
3320 | |
3321 | bmtMetaData->cFields = hEnumField.EnumGetCount(); |
3322 | |
3323 | // Retrieve the fields and store them in a temp array. |
3324 | bmtMetaData->pFields = new (GetStackingAllocator()) mdToken[bmtMetaData->cFields]; |
3325 | bmtMetaData->pFieldAttrs = new (GetStackingAllocator()) DWORD[bmtMetaData->cFields]; |
3326 | |
3327 | DWORD dwFieldLiteralInitOnly = fdLiteral | fdInitOnly; |
3328 | DWORD dwMaxFieldDefRid = pMDInternalImport->GetCountWithTokenKind(mdtFieldDef); |
3329 | |
3330 | for (i = 0; hEnumField.EnumNext(&tok); i++) |
3331 | { |
3332 | // |
3333 | // Retrieve the attributes of the field. |
3334 | // |
3335 | DWORD rid = RidFromToken(tok); |
3336 | if ((rid == 0)||(rid > dwMaxFieldDefRid)) |
3337 | { |
3338 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, BFA_BAD_FIELD_TOKEN, mdTokenNil); |
3339 | } |
3340 | |
3341 | if (FAILED(pMDInternalImport->GetFieldDefProps(tok, &dwMemberAttrs))) |
3342 | { |
3343 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, BFA_BAD_FIELD_TOKEN, tok); |
3344 | } |
3345 | |
3346 | // |
3347 | // Store the field and its attributes in the bmtMetaData structure for later use. |
3348 | // |
3349 | |
3350 | bmtMetaData->pFields[i] = tok; |
3351 | bmtMetaData->pFieldAttrs[i] = dwMemberAttrs; |
3352 | |
3353 | if((dwMemberAttrs & fdFieldAccessMask)==fdFieldAccessMask) |
3354 | { |
3355 | BuildMethodTableThrowException(BFA_INVALID_FIELD_ACC_FLAGS); |
3356 | } |
3357 | if((dwMemberAttrs & dwFieldLiteralInitOnly)==dwFieldLiteralInitOnly) |
3358 | { |
3359 | BuildMethodTableThrowException(BFA_FIELD_LITERAL_AND_INIT); |
3360 | } |
3361 | |
        // Global (module-level) fields must be static.
3363 | if(IsGlobalClass()) |
3364 | { |
3365 | if(!IsFdStatic(dwMemberAttrs)) |
3366 | { |
3367 | BuildMethodTableThrowException(BFA_NONSTATIC_GLOBAL_FIELD); |
3368 | } |
3369 | } |
3370 | |
3371 | // |
3372 | // Update the count of the various types of fields. |
3373 | // |
3374 | |
3375 | if (IsFdStatic(dwMemberAttrs)) |
3376 | { |
3377 | if (!IsFdLiteral(dwMemberAttrs)) |
3378 | { |
3379 | #ifdef FEATURE_TYPEEQUIVALENCE |
3380 | if (bmtProp->fIsTypeEquivalent) |
3381 | { |
3382 | BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTFIELDS); |
3383 | } |
3384 | #endif |
3385 | |
3386 | bmtEnumFields->dwNumStaticFields++; |
3387 | |
3388 | // If this static field is thread static, then we need |
3389 | // to increment bmtEnumFields->dwNumThreadStaticFields |
3390 | hr = pMDInternalImport->GetCustomAttributeByName(tok, |
3391 | g_ThreadStaticAttributeClassName, |
3392 | NULL, NULL); |
3393 | IfFailThrow(hr); |
3394 | if (hr == S_OK) |
3395 | { |
3396 | // It's a thread static, so increment the count |
3397 | bmtEnumFields->dwNumThreadStaticFields++; |
3398 | } |
3399 | } |
3400 | } |
3401 | else |
3402 | { |
3403 | #ifdef FEATURE_TYPEEQUIVALENCE |
3404 | if (!IsFdPublic(dwMemberAttrs) && bmtProp->fIsTypeEquivalent) |
3405 | { |
3406 | BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTFIELDS); |
3407 | } |
3408 | #endif |
3409 | |
3410 | if (!IsFdLiteral(dwMemberAttrs)) |
3411 | { |
3412 | bmtEnumFields->dwNumInstanceFields++; |
3413 | } |
3414 | if(IsInterface()) |
3415 | { |
3416 | BuildMethodTableThrowException(BFA_INSTANCE_FIELD_IN_INT); |
3417 | } |
3418 | } |
3419 | } |
3420 | |
3421 | if (i != bmtMetaData->cFields) |
3422 | { |
3423 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD_COUNT, mdTokenNil); |
3424 | } |
3425 | |
3426 | if(IsEnum() && (bmtEnumFields->dwNumInstanceFields==0)) |
3427 | { |
3428 | BuildMethodTableThrowException(BFA_INSTANCE_FIELD_IN_ENUM); |
3429 | } |
3430 | |
3431 | bmtEnumFields->dwNumDeclaredFields = bmtEnumFields->dwNumStaticFields + bmtEnumFields->dwNumInstanceFields; |
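    // Note: literal (const) fields were excluded from both counts above, so they are not
    // included in dwNumDeclaredFields.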
3432 | } |
3433 | |
3434 | //******************************************************************************* |
3435 | // |
3436 | // Used by BuildMethodTable |
3437 | // |
// Determines the maximum size of the vtable and allocates the temporary storage arrays.
// Also copies the parent's vtable into the working vtable.
3440 | // |
3441 | VOID MethodTableBuilder::AllocateWorkingSlotTables() |
3442 | { |
3443 | CONTRACTL |
3444 | { |
3445 | STANDARD_VM_CHECK; |
3446 | PRECONDITION(CheckPointer(this)); |
3447 | PRECONDITION(CheckPointer(bmtAllocator)); |
3448 | PRECONDITION(CheckPointer(bmtMFDescs)); |
3449 | PRECONDITION(CheckPointer(bmtMetaData)); |
3450 | PRECONDITION(CheckPointer(bmtVT)); |
3451 | PRECONDITION(CheckPointer(bmtEnumFields)); |
3452 | PRECONDITION(CheckPointer(bmtInterface)); |
3453 | PRECONDITION(CheckPointer(bmtFP)); |
3454 | PRECONDITION(CheckPointer(bmtParent)); |
3455 | |
3456 | } |
3457 | CONTRACTL_END; |
3458 | |
3459 | // Allocate a FieldDesc* for each field |
3460 | bmtMFDescs->ppFieldDescList = new (GetStackingAllocator()) FieldDesc*[bmtMetaData->cFields]; |
3461 | ZeroMemory(bmtMFDescs->ppFieldDescList, bmtMetaData->cFields * sizeof(FieldDesc *)); |
3462 | |
3463 | // Create a temporary function table (we don't know how large the vtable will be until the very end, |
3464 | // since we don't yet know how many declared methods are overrides vs. newslots). |
3465 | |
3466 | if (IsValueClass()) |
3467 | { // ValueClass virtuals are converted into non-virtual methods and the virtual slots |
3468 | // become unboxing stubs that forward to these new non-virtual methods. This has the |
3469 | // side effect of doubling the number of slots introduced by newslot virtuals. |
3470 | bmtVT->dwMaxVtableSize += NumDeclaredMethods(); |
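        // Illustrative example: a value type declaring three newslot virtual methods can thus
        // contribute up to six slots - three virtual slots holding unboxing stubs plus three
        // non-virtual slots holding the actual bodies.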
3471 | } |
3472 | |
3473 | _ASSERTE(!HasParent() || (bmtInterface->dwInterfaceMapSize - GetParentMethodTable()->GetNumInterfaces()) >= 0); |
3474 | |
3475 | if (HasParent()) |
3476 | { // Add parent vtable size. <TODO> This should actually be the parent's virtual method count. </TODO> |
3477 | bmtVT->dwMaxVtableSize += bmtParent->pSlotTable->GetSlotCount(); |
3478 | } |
3479 | |
3480 | S_SLOT_INDEX cMaxSlots = AsClrSafeInt(bmtVT->dwMaxVtableSize) + AsClrSafeInt(NumDeclaredMethods()); |
3481 | |
3482 | if (cMaxSlots.IsOverflow() || MAX_SLOT_INDEX < cMaxSlots.Value()) |
3483 | cMaxSlots = S_SLOT_INDEX(MAX_SLOT_INDEX); |
3484 | |
3485 | // Allocate the temporary vtable |
3486 | bmtVT->pSlotTable = new (GetStackingAllocator()) |
3487 | bmtMethodSlotTable(cMaxSlots.Value(), GetStackingAllocator()); |
3488 | |
3489 | if (HasParent()) |
3490 | { |
3491 | #if 0 |
3492 | // @<TODO>todo: Figure out the right way to override Equals for value |
3493 | // types only. |
3494 | // |
3495 | // This is broken because |
3496 | // (a) g_pObjectClass->FindMethod("Equals", &gsig_IM_Obj_RetBool); will return |
3497 | // the EqualsValue method |
3498 | // (b) When mscorlib has been preloaded (and thus the munge already done |
3499 | // ahead of time), we cannot easily find both methods |
3500 | // to compute EqualsAddr & EqualsSlot |
3501 | // |
3502 | // For now, the Equals method has a runtime check to see if it's |
3503 | // comparing value types. |
3504 | //</TODO> |
3505 | |
        // If it is a value type, override a few of the base class methods.
3507 | if (IsValueClass()) |
3508 | { |
3509 | static WORD EqualsSlot; |
3510 | |
3511 | // If we haven't been through here yet, get some stuff from the Object class definition. |
3512 | if (EqualsSlot == NULL) |
3513 | { |
3514 | // Get the slot of the Equals method. |
3515 | MethodDesc *pEqualsMD = g_pObjectClass->FindMethod("Equals" , &gsig_IM_Obj_RetBool); |
3516 | THROW_BAD_FORMAT_MAYBE(pEqualsMD != NULL, 0, this); |
3517 | EqualsSlot = pEqualsMD->GetSlot(); |
3518 | |
3519 | // Get the address of the EqualsValue method. |
3520 | MethodDesc *pEqualsValueMD = g_pObjectClass->FindMethod("EqualsValue" , &gsig_IM_Obj_RetBool); |
3521 | THROW_BAD_FORMAT_MAYBE(pEqualsValueMD != NULL, 0, this); |
3522 | |
3523 | // Patch the EqualsValue method desc in a dangerous way to |
3524 | // look like the Equals method desc. |
3525 | pEqualsValueMD->SetSlot(EqualsSlot); |
3526 | pEqualsValueMD->SetMemberDef(pEqualsMD->GetMemberDef()); |
3527 | } |
3528 | |
3529 | // Override the valuetype "Equals" with "EqualsValue". |
3530 | bmtVT->SetMethodDescForSlot(EqualsSlot, EqualsSlot); |
3531 | } |
3532 | #endif // 0 |
3533 | } |
3534 | |
3535 | S_UINT32 cEntries = S_UINT32(2) * S_UINT32(NumDeclaredMethods()); |
3536 | if (cEntries.IsOverflow()) |
3537 | { |
3538 | ThrowHR(COR_E_OVERFLOW); |
3539 | } |
3540 | } |
3541 | |
3542 | //******************************************************************************* |
3543 | // |
3544 | // Used by BuildMethodTable |
3545 | // |
// Allocate a FieldDesc for each declared field and initialize the per-size field counters.
3547 | // |
3548 | VOID MethodTableBuilder::AllocateFieldDescs() |
3549 | { |
3550 | CONTRACTL |
3551 | { |
3552 | STANDARD_VM_CHECK; |
3553 | PRECONDITION(CheckPointer(this)); |
3554 | PRECONDITION(CheckPointer(bmtAllocator)); |
3555 | PRECONDITION(CheckPointer(bmtMFDescs)); |
3556 | PRECONDITION(CheckPointer(bmtMetaData)); |
3557 | PRECONDITION(CheckPointer(bmtVT)); |
3558 | PRECONDITION(CheckPointer(bmtEnumFields)); |
3559 | PRECONDITION(CheckPointer(bmtFP)); |
3560 | PRECONDITION(CheckPointer(bmtParent)); |
3561 | |
3562 | } |
3563 | CONTRACTL_END; |
3564 | |
3565 | // We'll be counting the # fields of each size as we go along |
3566 | for (DWORD i = 0; i <= MAX_LOG2_PRIMITIVE_FIELD_SIZE; i++) |
3567 | { |
3568 | bmtFP->NumRegularStaticFieldsOfSize[i] = 0; |
3569 | bmtFP->NumThreadStaticFieldsOfSize[i] = 0; |
3570 | bmtFP->NumInstanceFieldsOfSize[i] = 0; |
3571 | } |
3572 | |
3573 | // |
3574 | // Allocate blocks of MethodDescs and FieldDescs for all declared methods and fields |
3575 | // |
3576 | // In order to avoid allocating a field pointing back to the method |
3577 | // table in every single method desc, we allocate memory in the |
3578 | // following manner: |
3579 | // o Field descs get a single contiguous block. |
3580 | // o Method descs of different sizes (normal vs NDirect) are |
3581 | // allocated in different MethodDescChunks. |
3582 | // o Each method desc chunk starts with a header, and has |
3583 | // at most MAX_ method descs (if there are more |
3584 | // method descs of a given size, multiple chunks are allocated). |
3585 | // This way method descs can use an 8-bit offset field to locate the |
3586 | // pointer to their method table. |
3587 | // |
3588 | |
3589 | ///////////////////////////////////////////////////////////////// |
3590 | // Allocate fields |
3591 | if (NumDeclaredFields() > 0) |
3592 | { |
3593 | GetHalfBakedClass()->SetFieldDescList((FieldDesc *) |
3594 | AllocateFromHighFrequencyHeap(S_SIZE_T(NumDeclaredFields()) * S_SIZE_T(sizeof(FieldDesc)))); |
3595 | INDEBUG(GetClassLoader()->m_dwDebugFieldDescs += NumDeclaredFields();) |
3596 | INDEBUG(GetClassLoader()->m_dwFieldDescData += (NumDeclaredFields() * sizeof(FieldDesc));) |
3597 | } |
3598 | } |
3599 | |
3600 | #ifdef FEATURE_DOUBLE_ALIGNMENT_HINT |
3601 | //******************************************************************************* |
3602 | // |
3603 | // Heuristic to determine if we should have instances of this class 8 byte aligned |
3604 | // |
3605 | BOOL MethodTableBuilder::ShouldAlign8(DWORD dwR8Fields, DWORD dwTotalFields) |
3606 | { |
3607 | LIMITED_METHOD_CONTRACT; |
3608 | |
    return (dwR8Fields * 2 > dwTotalFields) && (dwR8Fields >= 2);
3610 | } |
3611 | #endif |
3612 | |
3613 | //******************************************************************************* |
3614 | BOOL MethodTableBuilder::IsSelfReferencingStaticValueTypeField(mdToken dwByValueClassToken, |
3615 | bmtInternalInfo* bmtInternal, |
3616 | const bmtGenericsInfo *bmtGenerics, |
3617 | PCCOR_SIGNATURE pMemberSignature, |
3618 | DWORD cMemberSignature) |
3619 | { |
3620 | STANDARD_VM_CONTRACT; |
3621 | |
3622 | if (dwByValueClassToken != this->GetCl()) |
3623 | { |
3624 | return FALSE; |
3625 | } |
3626 | |
3627 | if (!bmtGenerics->HasInstantiation()) |
3628 | { |
3629 | return TRUE; |
3630 | } |
3631 | |
3632 | // The value class is generic. Check that the signature of the field |
3633 | // is _exactly_ equivalent to VC<!0, !1, !2, ...>. Do this by consing up a fake |
3634 | // signature. |
3635 | DWORD nGenericArgs = bmtGenerics->GetNumGenericArgs(); |
3636 | CONSISTENCY_CHECK(nGenericArgs != 0); |
3637 | |
3638 | SigBuilder sigBuilder; |
3639 | |
3640 | sigBuilder.AppendElementType(ELEMENT_TYPE_GENERICINST); |
3641 | sigBuilder.AppendElementType(ELEMENT_TYPE_VALUETYPE); |
3642 | sigBuilder.AppendToken(dwByValueClassToken); |
3643 | sigBuilder.AppendData(nGenericArgs); |
3644 | for (unsigned int typearg = 0; typearg < nGenericArgs; typearg++) |
3645 | { |
3646 | sigBuilder.AppendElementType(ELEMENT_TYPE_VAR); |
3647 | sigBuilder.AppendData(typearg); |
3648 | } |
3649 | |
3650 | DWORD cFakeSig; |
3651 | PCCOR_SIGNATURE pFakeSig = (PCCOR_SIGNATURE)sigBuilder.GetSignature(&cFakeSig); |
3652 | |
3653 | PCCOR_SIGNATURE pFieldSig = pMemberSignature + 1; // skip the CALLCONV_FIELD |
3654 | |
3655 | return MetaSig::CompareElementType(pFakeSig, pFieldSig, |
3656 | pFakeSig + cFakeSig, pMemberSignature + cMemberSignature, |
3657 | GetModule(), GetModule(), |
3658 | NULL, NULL); |
3659 | |
3660 | } |
3661 | |
3662 | //******************************************************************************* |
3663 | // |
// Sentinel value used in the pByValueClass cache to mark self-referencing fields
3665 | // |
3666 | static BOOL IsSelfRef(MethodTable * pMT) |
3667 | { |
3668 | return pMT == (MethodTable *)-1; |
3669 | } |
3670 | |
3671 | //******************************************************************************* |
3672 | // |
3673 | // Used by BuildMethodTable |
3674 | // |
3675 | // Go thru all fields and initialize their FieldDescs. |
3676 | // |
3677 | #ifdef _PREFAST_ |
3678 | #pragma warning(push) |
3679 | #pragma warning(disable:21000) // Suppress PREFast warning about overly large function |
3680 | #endif // _PREFAST_ |
3681 | |
3682 | VOID MethodTableBuilder::InitializeFieldDescs(FieldDesc *pFieldDescList, |
3683 | const LayoutRawFieldInfo* pLayoutRawFieldInfos, |
3684 | bmtInternalInfo* bmtInternal, |
3685 | const bmtGenericsInfo* bmtGenerics, |
3686 | bmtMetaDataInfo* bmtMetaData, |
3687 | bmtEnumFieldInfo* bmtEnumFields, |
3688 | bmtErrorInfo* bmtError, |
3689 | MethodTable *** pByValueClassCache, |
3690 | bmtMethAndFieldDescs* bmtMFDescs, |
3691 | bmtFieldPlacement* bmtFP, |
3692 | unsigned* totalDeclaredSize) |
3693 | { |
3694 | CONTRACTL |
3695 | { |
3696 | STANDARD_VM_CHECK; |
3697 | PRECONDITION(CheckPointer(this)); |
3698 | PRECONDITION(CheckPointer(bmtInternal)); |
3699 | PRECONDITION(CheckPointer(bmtGenerics)); |
3700 | PRECONDITION(CheckPointer(bmtMetaData)); |
3701 | PRECONDITION(CheckPointer(bmtEnumFields)); |
3702 | PRECONDITION(CheckPointer(bmtError)); |
3703 | PRECONDITION(CheckPointer(pByValueClassCache)); |
3704 | PRECONDITION(CheckPointer(bmtMFDescs)); |
3705 | PRECONDITION(CheckPointer(bmtFP)); |
3706 | PRECONDITION(CheckPointer(totalDeclaredSize)); |
3707 | } |
3708 | CONTRACTL_END; |
3709 | |
3710 | DWORD i; |
3711 | IMDInternalImport * pInternalImport = GetMDImport(); // to avoid multiple dereferencings |
3712 | |
3713 | FieldMarshaler * pNextFieldMarshaler = NULL; |
3714 | if (HasLayout()) |
3715 | { |
3716 | pNextFieldMarshaler = (FieldMarshaler*)(GetLayoutInfo()->GetFieldMarshalers()); |
3717 | } |
3718 | |
3719 | |
3720 | //======================================================================== |
3721 | // BEGIN: |
3722 | // Go thru all fields and initialize their FieldDescs. |
3723 | //======================================================================== |
3724 | |
3725 | DWORD dwCurrentDeclaredField = 0; |
3726 | DWORD dwCurrentStaticField = 0; |
3727 | DWORD dwCurrentThreadStaticField = 0; |
3728 | |
3729 | |
3730 | DWORD dwR8Fields = 0; // Number of R8's the class has |
3731 | |
3732 | #ifdef FEATURE_64BIT_ALIGNMENT |
3733 | // Track whether any field in this type requires 8-byte alignment |
3734 | BOOL fFieldRequiresAlign8 = HasParent() ? GetParentMethodTable()->RequiresAlign8() : FALSE; |
3735 | #endif |
3736 | |
3737 | for (i = 0; i < bmtMetaData->cFields; i++) |
3738 | { |
3739 | PCCOR_SIGNATURE pMemberSignature; |
3740 | DWORD cMemberSignature; |
3741 | DWORD dwMemberAttrs; |
3742 | |
3743 | dwMemberAttrs = bmtMetaData->pFieldAttrs[i]; |
3744 | |
3745 | BOOL fIsStatic = IsFdStatic(dwMemberAttrs); |
3746 | |
        // We don't store literal (compile-time constant) fields in the class layout
3748 | if (IsFdLiteral(dwMemberAttrs)) |
3749 | continue; |
3750 | |
3751 | if (!IsFdPublic(dwMemberAttrs)) |
3752 | SetHasNonPublicFields(); |
3753 | |
3754 | if (IsFdNotSerialized(dwMemberAttrs)) |
3755 | SetCannotBeBlittedByObjectCloner(); |
3756 | |
3757 | IfFailThrow(pInternalImport->GetSigOfFieldDef(bmtMetaData->pFields[i], &cMemberSignature, &pMemberSignature)); |
3758 | // Signature validation |
3759 | IfFailThrow(validateTokenSig(bmtMetaData->pFields[i],pMemberSignature,cMemberSignature,dwMemberAttrs,pInternalImport)); |
3760 | |
3761 | FieldDesc * pFD; |
3762 | DWORD dwLog2FieldSize = 0; |
3763 | BOOL bCurrentFieldIsGCPointer = FALSE; |
3764 | mdToken dwByValueClassToken = 0; |
3765 | MethodTable * pByValueClass = NULL; |
3766 | BOOL fIsByValue = FALSE; |
3767 | BOOL fIsThreadStatic = FALSE; |
3768 | BOOL fHasRVA = FALSE; |
3769 | |
3770 | MetaSig fsig(pMemberSignature, |
3771 | cMemberSignature, |
3772 | GetModule(), |
3773 | &bmtGenerics->typeContext, |
3774 | MetaSig::sigField); |
3775 | CorElementType ElementType = fsig.NextArg(); |
3776 | |
3777 | |
3778 | // Get type |
3779 | if (!isCallConv(fsig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_FIELD)) |
3780 | { |
3781 | IfFailThrow(COR_E_TYPELOAD); |
3782 | } |
3783 | |
        // Determine if a static field is special, i.e. RVA-based or local to a thread.
3786 | if (fIsStatic) |
3787 | { |
3788 | if (IsFdHasFieldRVA(dwMemberAttrs)) |
3789 | { |
3790 | fHasRVA = TRUE; |
3791 | } |
3792 | |
3793 | HRESULT hr; |
3794 | |
3795 | hr = pInternalImport->GetCustomAttributeByName(bmtMetaData->pFields[i], |
3796 | g_ThreadStaticAttributeClassName, |
3797 | NULL, NULL); |
3798 | IfFailThrow(hr); |
3799 | if (hr == S_OK) |
3800 | { |
3801 | fIsThreadStatic = TRUE; |
3802 | } |
3803 | |
3804 | |
3805 | if (ElementType == ELEMENT_TYPE_VALUETYPE) |
3806 | { |
3807 | hr = pInternalImport->GetCustomAttributeByName(bmtMetaData->pFields[i], |
3808 | g_CompilerServicesFixedAddressValueTypeAttribute, |
3809 | NULL, NULL); |
3810 | IfFailThrow(hr); |
3811 | if (hr == S_OK) |
3812 | { |
3813 | bmtFP->fHasFixedAddressValueTypes = true; |
3814 | } |
3815 | } |
3816 | |
3817 | |
            // Sanity check: a field cannot be both RVA-based and thread-relative.
3820 | if (fHasRVA && fIsThreadStatic) |
3821 | { |
3822 | IfFailThrow(COR_E_TYPELOAD); |
3823 | } |
3824 | |
3825 | if (bmtFP->fHasFixedAddressValueTypes && GetAssembly()->IsCollectible()) |
3826 | { |
3827 | BuildMethodTableThrowException(IDS_CLASSLOAD_COLLECTIBLEFIXEDVTATTR); |
3828 | } |
3829 | } |
3830 | |
3831 | |
3832 | GOT_ELEMENT_TYPE: |
        // Type to store in FieldDesc - we don't want to have extra case statements for
        // ELEMENT_TYPE_STRING, SDARRAY etc., so we convert all object types to CLASS.
        // Also, BOOLEAN and CHAR are sized like U1 and U2 respectively.
3836 | CorElementType FieldDescElementType = ElementType; |
3837 | |
3838 | switch (ElementType) |
3839 | { |
3840 | case ELEMENT_TYPE_I1: |
3841 | case ELEMENT_TYPE_U1: |
3842 | { |
3843 | dwLog2FieldSize = 0; |
3844 | break; |
3845 | } |
3846 | |
3847 | case ELEMENT_TYPE_I2: |
3848 | case ELEMENT_TYPE_U2: |
3849 | { |
3850 | dwLog2FieldSize = 1; |
3851 | break; |
3852 | } |
3853 | |
3854 | case ELEMENT_TYPE_I4: |
3855 | case ELEMENT_TYPE_U4: |
3856 | IN_TARGET_32BIT(case ELEMENT_TYPE_I:) |
3857 | IN_TARGET_32BIT(case ELEMENT_TYPE_U:) |
3858 | case ELEMENT_TYPE_R4: |
3859 | { |
3860 | dwLog2FieldSize = 2; |
3861 | break; |
3862 | } |
3863 | |
3864 | case ELEMENT_TYPE_BOOLEAN: |
3865 | { |
3866 | // FieldDescElementType = ELEMENT_TYPE_U1; |
3867 | dwLog2FieldSize = 0; |
3868 | break; |
3869 | } |
3870 | |
3871 | case ELEMENT_TYPE_CHAR: |
3872 | { |
3873 | // FieldDescElementType = ELEMENT_TYPE_U2; |
3874 | dwLog2FieldSize = 1; |
3875 | break; |
3876 | } |
3877 | |
3878 | case ELEMENT_TYPE_R8: |
3879 | { |
3880 | dwR8Fields++; |
3881 | |
3882 | // Deliberate fall through... |
3883 | } |
3884 | |
3885 | case ELEMENT_TYPE_I8: |
3886 | case ELEMENT_TYPE_U8: |
3887 | IN_TARGET_64BIT(case ELEMENT_TYPE_I:) |
3888 | IN_TARGET_64BIT(case ELEMENT_TYPE_U:) |
3889 | { |
3890 | #ifdef FEATURE_64BIT_ALIGNMENT |
3891 | // Record that this field requires alignment for Int64/UInt64. |
3892 | if(!fIsStatic) |
3893 | fFieldRequiresAlign8 = true; |
3894 | #endif |
3895 | dwLog2FieldSize = 3; |
3896 | break; |
3897 | } |
3898 | |
3899 | case ELEMENT_TYPE_FNPTR: |
3900 | case ELEMENT_TYPE_PTR: // ptrs are unmanaged scalars, for layout |
3901 | { |
3902 | dwLog2FieldSize = LOG2_PTRSIZE; |
3903 | break; |
3904 | } |
3905 | |
3906 | // Class type variable (method type variables aren't allowed in fields) |
3907 | // These only occur in open types used for verification/reflection. |
3908 | case ELEMENT_TYPE_VAR: |
3909 | case ELEMENT_TYPE_MVAR: |
3910 | // deliberate drop through - do fake field layout |
3911 | case ELEMENT_TYPE_STRING: |
3912 | case ELEMENT_TYPE_SZARRAY: // single dim, zero |
3913 | case ELEMENT_TYPE_ARRAY: // all other arrays |
3914 | case ELEMENT_TYPE_CLASS: // objectrefs |
3915 | case ELEMENT_TYPE_OBJECT: |
3916 | { |
3917 | dwLog2FieldSize = LOG2_PTRSIZE; |
3918 | bCurrentFieldIsGCPointer = TRUE; |
3919 | FieldDescElementType = ELEMENT_TYPE_CLASS; |
3920 | |
3921 | if (!fIsStatic) |
3922 | { |
3923 | SetHasFieldsWhichMustBeInited(); |
3924 | if (ElementType != ELEMENT_TYPE_STRING) |
3925 | SetCannotBeBlittedByObjectCloner(); |
3926 | } |
3927 | else |
            { // EnumerateClassFields already counted the total number of static vs. instance
              // fields; now we're further subdividing the static field count by GC and non-GC.
3930 | bmtEnumFields->dwNumStaticObjRefFields++; |
3931 | if (fIsThreadStatic) |
3932 | bmtEnumFields->dwNumThreadStaticObjRefFields++; |
3933 | } |
3934 | break; |
3935 | } |
3936 | |
3937 | case ELEMENT_TYPE_VALUETYPE: // a byvalue class field |
3938 | { |
3939 | Module * pTokenModule; |
3940 | dwByValueClassToken = fsig.GetArgProps().PeekValueTypeTokenClosed(GetModule(), &bmtGenerics->typeContext, &pTokenModule); |
3941 | fIsByValue = TRUE; |
3942 | |
3943 | // By-value class |
3944 | BAD_FORMAT_NOTHROW_ASSERT(dwByValueClassToken != 0); |
3945 | |
3946 | if (this->IsValueClass() && (pTokenModule == GetModule())) |
3947 | { |
3948 | if (TypeFromToken(dwByValueClassToken) == mdtTypeRef) |
3949 | { |
3950 | // It's a typeref - check if it's a class that has a static field of itself |
3951 | LPCUTF8 pszNameSpace; |
3952 | LPCUTF8 pszClassName; |
3953 | if (FAILED(pInternalImport->GetNameOfTypeRef(dwByValueClassToken, &pszNameSpace, &pszClassName))) |
3954 | { |
3955 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
3956 | } |
3957 | |
3958 | if (IsStrLongerThan((char *)pszClassName, MAX_CLASS_NAME) |
3959 | || IsStrLongerThan((char *)pszNameSpace, MAX_CLASS_NAME) |
3960 | || (strlen(pszClassName) + strlen(pszNameSpace) + 1 >= MAX_CLASS_NAME)) |
3961 | { |
3962 | BuildMethodTableThrowException(BFA_TYPEREG_NAME_TOO_LONG, mdMethodDefNil); |
3963 | } |
3964 | |
3965 | mdToken tkRes; |
3966 | if (FAILED(pInternalImport->GetResolutionScopeOfTypeRef(dwByValueClassToken, &tkRes))) |
3967 | { |
3968 | BuildMethodTableThrowException(BFA_BAD_TYPEREF_TOKEN, dwByValueClassToken); |
3969 | } |
3970 | |
3971 | if (TypeFromToken(tkRes) == mdtTypeRef) |
3972 | { |
3973 | if (!pInternalImport->IsValidToken(tkRes)) |
3974 | { |
3975 | BuildMethodTableThrowException(BFA_BAD_TYPEREF_TOKEN, mdMethodDefNil); |
3976 | } |
3977 | } |
3978 | else |
3979 | { |
3980 | tkRes = mdTokenNil; |
3981 | } |
3982 | |
3983 | if (FAILED(pInternalImport->FindTypeDef(pszNameSpace, |
3984 | pszClassName, |
3985 | tkRes, |
3986 | &dwByValueClassToken))) |
3987 | { |
3988 | dwByValueClassToken = mdTokenNil; |
3989 | } |
3990 | } // If field is static typeref |
3991 | |
3992 | BOOL selfref = IsSelfReferencingStaticValueTypeField(dwByValueClassToken, |
3993 | bmtInternal, |
3994 | bmtGenerics, |
3995 | pMemberSignature, |
3996 | cMemberSignature); |
3997 | |
3998 | if (selfref) |
3999 | { // immediately self-referential fields must be static. |
4000 | if (!fIsStatic) |
4001 | { |
4002 | BuildMethodTableThrowException(IDS_CLASSLOAD_VALUEINSTANCEFIELD, mdMethodDefNil); |
4003 | } |
4004 | |
4005 | if (!IsValueClass()) |
4006 | { |
4007 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_MUST_BE_BYVAL, mdTokenNil); |
4008 | } |
4009 | |
4010 | pByValueClass = (MethodTable *)-1; |
4011 | } |
4012 | } // If 'this' is a value class |
4013 | |
4014 | // It's not self-referential so try to load it |
4015 | if (pByValueClass == NULL) |
4016 | { |
4017 | // Loading a non-self-ref valuetype field. |
4018 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); |
4019 | // We load the approximate type of the field to avoid recursion problems. |
4020 | // MethodTable::DoFullyLoad() will later load it fully |
4021 | pByValueClass = fsig.GetArgProps().GetTypeHandleThrowing(GetModule(), |
4022 | &bmtGenerics->typeContext, |
4023 | ClassLoader::LoadTypes, |
4024 | CLASS_LOAD_APPROXPARENTS, |
4025 | TRUE |
4026 | ).GetMethodTable(); |
4027 | } |
4028 | |
4029 | // #FieldDescTypeMorph IF it is an enum, strip it down to its underlying type |
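                // Illustrative example: a field declared with an enum type whose underlying type
                // is int (e.g. System.DayOfWeek) is laid out below as ELEMENT_TYPE_I4.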
4030 | if (IsSelfRef(pByValueClass) ? IsEnum() : pByValueClass->IsEnum()) |
4031 | { |
4032 | if (IsSelfRef(pByValueClass)) |
                { // It is a self-referencing enum (ValueType) static field - forbidden by the ECMA spec, but supported by the CLR since v1
4034 | // Note: literal static fields are skipped early in this loop |
4035 | if (bmtMFDescs->ppFieldDescList[0] == NULL) |
4036 | { // The field is defined before (the only) instance field |
4037 | // AppCompat with 3.5 SP1 and 4.0 RTM behavior |
4038 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
4039 | } |
4040 | // We will treat the field type as if it was its underlying type (we know its size and will check correctly RVA with the size |
4041 | // later in this method) |
4042 | // Therefore we do not have to run code:VerifySelfReferencingStaticValueTypeFields_WithRVA or code:#SelfReferencingStaticValueTypeField_Checks |
4043 | } |
4044 | BAD_FORMAT_NOTHROW_ASSERT((IsSelfRef(pByValueClass) ? |
4045 | bmtEnumFields->dwNumInstanceFields : pByValueClass->GetNumInstanceFields()) |
4046 | == 1); // enums must have exactly one field |
4047 | FieldDesc * enumField = IsSelfRef(pByValueClass) ? |
4048 | bmtMFDescs->ppFieldDescList[0] : pByValueClass->GetApproxFieldDescListRaw(); |
4049 | BAD_FORMAT_NOTHROW_ASSERT(!enumField->IsStatic()); // no real static fields on enums |
4050 | ElementType = enumField->GetFieldType(); |
4051 | BAD_FORMAT_NOTHROW_ASSERT(ElementType != ELEMENT_TYPE_VALUETYPE); |
4052 | fIsByValue = FALSE; // we're going to treat it as the underlying type now |
4053 | goto GOT_ELEMENT_TYPE; |
4054 | } |
4055 | |
4056 | // Check ByRefLike fields |
4057 | if (!IsSelfRef(pByValueClass) && pByValueClass->IsByRefLike()) |
4058 | { |
4059 | if (fIsStatic) |
4060 | { |
4061 | // Byref-like types cannot be used for static fields |
4062 | BuildMethodTableThrowException(IDS_CLASSLOAD_BYREFLIKE_STATICFIELD); |
4063 | } |
4064 | if (!bmtFP->fIsByRefLikeType) |
4065 | { |
4066 | // Non-byref-like types cannot contain byref-like instance fields |
4067 | BuildMethodTableThrowException(IDS_CLASSLOAD_BYREFLIKE_INSTANCEFIELD); |
4068 | } |
4069 | } |
4070 | |
4071 | if (!IsSelfRef(pByValueClass) && pByValueClass->GetClass()->HasNonPublicFields()) |
4072 | { // If a class has a field of type ValueType with non-public fields in it, |
4073 | // the class must "inherit" this characteristic |
4074 | SetHasNonPublicFields(); |
4075 | } |
4076 | |
4077 | if (!fHasRVA) |
4078 | { |
4079 | if (!fIsStatic) |
4080 | { |
4081 | // Inherit instance attributes |
4082 | EEClass * pFieldClass = pByValueClass->GetClass(); |
4083 | |
4084 | #ifdef FEATURE_64BIT_ALIGNMENT |
4085 | // If a value type requires 8-byte alignment this requirement must be inherited by any |
4086 | // class/struct that embeds it as a field. |
4087 | if (pFieldClass->IsAlign8Candidate()) |
4088 | fFieldRequiresAlign8 = true; |
4089 | #endif |
4090 | if (pFieldClass->HasNonPublicFields()) |
4091 | SetHasNonPublicFields(); |
4092 | if (pFieldClass->HasFieldsWhichMustBeInited()) |
4093 | SetHasFieldsWhichMustBeInited(); |
4094 | |
4095 | #ifdef FEATURE_READYTORUN |
4096 | if (!(pByValueClass->IsTruePrimitive() || pByValueClass->IsEnum())) |
4097 | { |
4098 | CheckLayoutDependsOnOtherModules(pByValueClass); |
4099 | } |
4100 | #endif |
4101 | } |
4102 | else |
                { // Increment the number of static value-type fields (these are stored as boxed objects).
4104 | bmtEnumFields->dwNumStaticBoxedFields++; |
4105 | if (fIsThreadStatic) |
4106 | bmtEnumFields->dwNumThreadStaticBoxedFields++; |
4107 | } |
4108 | } |
4109 | |
4110 | if (*pByValueClassCache == NULL) |
4111 | { |
4112 | DWORD dwNumFields = bmtEnumFields->dwNumInstanceFields + bmtEnumFields->dwNumStaticFields; |
4113 | |
4114 | *pByValueClassCache = new (GetStackingAllocator()) MethodTable * [dwNumFields]; |
                memset(*pByValueClassCache, 0, dwNumFields * sizeof(MethodTable *));
4116 | } |
4117 | |
4118 | // Thread static fields come after instance fields and regular static fields in this list |
4119 | if (fIsThreadStatic) |
4120 | { |
4121 | (*pByValueClassCache)[bmtEnumFields->dwNumInstanceFields + bmtEnumFields->dwNumStaticFields - bmtEnumFields->dwNumThreadStaticFields + dwCurrentThreadStaticField] = pByValueClass; |
4122 | // make sure to record the correct size for static field |
4123 | // layout |
4124 | dwLog2FieldSize = LOG2_PTRSIZE; // handle |
4125 | } |
4126 | // Regular static fields come after instance fields in this list |
4127 | else if (fIsStatic) |
4128 | { |
4129 | (*pByValueClassCache)[bmtEnumFields->dwNumInstanceFields + dwCurrentStaticField] = pByValueClass; |
4130 | // make sure to record the correct size for static field |
4131 | // layout |
4132 | dwLog2FieldSize = LOG2_PTRSIZE; // handle |
4133 | } |
4134 | else |
4135 | { |
4136 | (*pByValueClassCache)[dwCurrentDeclaredField] = pByValueClass; |
4137 | dwLog2FieldSize = 0; // unused |
4138 | } |
4139 | |
4140 | break; |
4141 | } |
4142 | default: |
4143 | { |
4144 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
4145 | } |
4146 | } |
4147 | |
4148 | if (!fIsStatic) |
4149 | { |
4150 | pFD = &pFieldDescList[dwCurrentDeclaredField]; |
4151 | *totalDeclaredSize += (1 << dwLog2FieldSize); |
4152 | } |
        else /* (dwMemberAttrs & fdStatic) */
4154 | { |
4155 | if (fIsThreadStatic) |
4156 | { |
4157 | pFD = &pFieldDescList[bmtEnumFields->dwNumInstanceFields + bmtEnumFields->dwNumStaticFields - bmtEnumFields->dwNumThreadStaticFields + dwCurrentThreadStaticField]; |
4158 | } |
4159 | else |
4160 | { |
4161 | pFD = &pFieldDescList[bmtEnumFields->dwNumInstanceFields + dwCurrentStaticField]; |
4162 | } |
4163 | } |
4164 | |
4165 | bmtMFDescs->ppFieldDescList[i] = pFD; |
4166 | |
4167 | const LayoutRawFieldInfo *pLayoutFieldInfo = NULL; |
4168 | |
4169 | if (HasLayout()) |
4170 | { |
4171 | const LayoutRawFieldInfo *pwalk = pLayoutRawFieldInfos; |
4172 | while (pwalk->m_MD != mdFieldDefNil) |
4173 | { |
4174 | if (pwalk->m_MD == bmtMetaData->pFields[i]) |
4175 | { |
4176 | pLayoutFieldInfo = pwalk; |
4177 | |
4178 | const FieldMarshaler *pSrcFieldMarshaler = (const FieldMarshaler *) &pwalk->m_FieldMarshaler; |
4179 | |
4180 | pSrcFieldMarshaler->CopyTo(pNextFieldMarshaler, MAXFIELDMARSHALERSIZE); |
4181 | |
4182 | pNextFieldMarshaler->SetFieldDesc(pFD); |
4183 | pNextFieldMarshaler->SetExternalOffset(pwalk->m_offset); |
4184 | |
4185 | ((BYTE*&)pNextFieldMarshaler) += MAXFIELDMARSHALERSIZE; |
4186 | break; |
4187 | } |
4188 | pwalk++; |
4189 | } |
4190 | } |
4191 | |
4192 | LPCSTR pszFieldName = NULL; |
4193 | #ifdef _DEBUG |
4194 | if (FAILED(pInternalImport->GetNameOfFieldDef(bmtMetaData->pFields[i], &pszFieldName))) |
4195 | { |
4196 | pszFieldName = "Invalid FieldDef record" ; |
4197 | } |
4198 | #endif |
        // #InitCall Initialize the contents of the field descriptor.
4200 | pFD->Init( |
4201 | bmtMetaData->pFields[i], |
4202 | FieldDescElementType, |
4203 | dwMemberAttrs, |
4204 | fIsStatic, |
4205 | fHasRVA, |
4206 | fIsThreadStatic, |
4207 | pszFieldName |
4208 | ); |
4209 | |
        // We temporarily store the field's size in FieldDesc::m_pMTOfEnclosingClass: the raw byte
        // count for by-value instance fields, and the log2 of the size otherwise.
        //
4212 | if (fIsByValue) |
4213 | { |
4214 | if (!fIsStatic && |
4215 | (IsBlittable() || HasExplicitFieldOffsetLayout())) |
4216 | { |
4217 | (DWORD_PTR &)pFD->m_pMTOfEnclosingClass = |
4218 | (*pByValueClassCache)[dwCurrentDeclaredField]->GetNumInstanceFieldBytes(); |
4219 | |
4220 | if (pLayoutFieldInfo) |
4221 | IfFailThrow(pFD->SetOffset(pLayoutFieldInfo->m_offset)); |
4222 | else |
4223 | pFD->SetOffset(FIELD_OFFSET_VALUE_CLASS); |
4224 | } |
4225 | else if (!fIsStatic && IsManagedSequential()) |
4226 | { |
4227 | (DWORD_PTR &)pFD->m_pMTOfEnclosingClass = |
4228 | (*pByValueClassCache)[dwCurrentDeclaredField]->GetNumInstanceFieldBytes(); |
4229 | |
4230 | IfFailThrow(pFD->SetOffset(pLayoutFieldInfo->m_managedOffset)); |
4231 | } |
4232 | else |
4233 | { |
4234 | // static value class fields hold a handle, which is ptr sized |
4235 | // (instance field layout ignores this value) |
4236 | (DWORD_PTR&)(pFD->m_pMTOfEnclosingClass) = LOG2_PTRSIZE; |
4237 | pFD->SetOffset(FIELD_OFFSET_VALUE_CLASS); |
4238 | } |
4239 | } |
4240 | else |
4241 | { |
4242 | (DWORD_PTR &)(pFD->m_pMTOfEnclosingClass) = (size_t)dwLog2FieldSize; |
4243 | |
4244 | // -1 (FIELD_OFFSET_UNPLACED) means that this is a non-GC field that has not yet been placed |
4245 | // -2 (FIELD_OFFSET_UNPLACED_GC_PTR) means that this is a GC pointer field that has not yet been placed |
4246 | |
4247 | // If there is any kind of explicit layout information for this field, use it. If not, then |
4248 | // mark it as either GC or non-GC and as unplaced; it will get placed later on in an optimized way. |
4249 | |
4250 | if ((IsBlittable() || HasExplicitFieldOffsetLayout()) && !fIsStatic) |
4251 | IfFailThrow(pFD->SetOffset(pLayoutFieldInfo->m_offset)); |
4252 | else if (IsManagedSequential() && !fIsStatic) |
4253 | IfFailThrow(pFD->SetOffset(pLayoutFieldInfo->m_managedOffset)); |
4254 | else if (bCurrentFieldIsGCPointer) |
4255 | pFD->SetOffset(FIELD_OFFSET_UNPLACED_GC_PTR); |
4256 | else |
4257 | pFD->SetOffset(FIELD_OFFSET_UNPLACED); |
4258 | } |
4259 | |
4260 | if (!fIsStatic) |
4261 | { |
4262 | if (!fIsByValue) |
4263 | { |
4264 | if (++bmtFP->NumInstanceFieldsOfSize[dwLog2FieldSize] == 1) |
4265 | bmtFP->FirstInstanceFieldOfSize[dwLog2FieldSize] = dwCurrentDeclaredField; |
4266 | } |
4267 | |
4268 | dwCurrentDeclaredField++; |
4269 | |
4270 | if (bCurrentFieldIsGCPointer) |
4271 | { |
4272 | bmtFP->NumInstanceGCPointerFields++; |
4273 | } |
4274 | } |
4275 | else /* static fields */ |
4276 | { |
            // Static fields are stored after the vtable and interface slots. We don't know yet how
            // large the vtable will be, so the slot number will have to be fixed up by
            // <vtable + interface size> later.
4280 | |
4281 | if (fIsThreadStatic) |
4282 | { |
4283 | dwCurrentThreadStaticField++; |
4284 | } |
4285 | else |
4286 | { |
4287 | dwCurrentStaticField++; |
4288 | } |
4289 | |
4290 | if (fHasRVA) |
4291 | { |
4292 | if (FieldDescElementType == ELEMENT_TYPE_CLASS) |
4293 | { // RVA fields are not allowed to have GC pointers. |
4294 | BAD_FORMAT_NOTHROW_ASSERT(!"ObjectRef in an RVA field" ); |
4295 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
4296 | } |
4297 | if (FieldDescElementType == ELEMENT_TYPE_VALUETYPE) |
4298 | { |
4299 | if (IsSelfRef(pByValueClass)) |
4300 | { // We will verify self-referencing statics after the loop through all fields - see code:#SelfReferencingStaticValueTypeField_Checks |
4301 | bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA = TRUE; |
4302 | } |
4303 | else |
4304 | { |
4305 | if (pByValueClass->GetClass()->HasFieldsWhichMustBeInited()) |
4306 | { // RVA fields are not allowed to have GC pointers. |
4307 | BAD_FORMAT_NOTHROW_ASSERT(!"ObjectRef in an RVA field" ); |
4308 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
4309 | } |
4310 | } |
4311 | } |
4312 | |
4313 | // Set the field offset |
4314 | DWORD rva; |
4315 | IfFailThrow(pInternalImport->GetFieldRVA(pFD->GetMemberDef(), &rva)); |
4316 | |
4317 | // Ensure that the IL image is loaded. Note that this assembly may |
4318 | // have an ngen image, but this type may have failed to load during ngen. |
4319 | GetModule()->GetFile()->LoadLibrary(FALSE); |
4320 | |
4321 | DWORD fldSize; |
4322 | if (FieldDescElementType == ELEMENT_TYPE_VALUETYPE) |
4323 | { |
4324 | if (IsSelfRef(pByValueClass)) |
4325 | { |
4326 | _ASSERTE(bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA); |
4327 | |
                        // We do not know the size yet
4329 | _ASSERTE(bmtFP->NumInstanceFieldBytes == 0); |
4330 | // We will check just the RVA with size 0 now, the full size verification will happen in code:VerifySelfReferencingStaticValueTypeFields_WithRVA |
4331 | fldSize = 0; |
4332 | } |
4333 | else |
4334 | { |
4335 | fldSize = pByValueClass->GetNumInstanceFieldBytes(); |
4336 | } |
4337 | } |
4338 | else |
4339 | { |
4340 | fldSize = GetSizeForCorElementType(FieldDescElementType); |
4341 | } |
4342 | |
4343 | pFD->SetOffsetRVA(rva); |
4344 | } |
4345 | else if (fIsThreadStatic) |
4346 | { |
4347 | bmtFP->NumThreadStaticFieldsOfSize[dwLog2FieldSize]++; |
4348 | |
4349 | if (bCurrentFieldIsGCPointer) |
4350 | bmtFP->NumThreadStaticGCPointerFields++; |
4351 | |
4352 | if (fIsByValue) |
4353 | bmtFP->NumThreadStaticGCBoxedFields++; |
4354 | } |
4355 | else |
4356 | { |
4357 | bmtFP->NumRegularStaticFieldsOfSize[dwLog2FieldSize]++; |
4358 | |
4359 | if (bCurrentFieldIsGCPointer) |
4360 | bmtFP->NumRegularStaticGCPointerFields++; |
4361 | |
4362 | if (fIsByValue) |
4363 | bmtFP->NumRegularStaticGCBoxedFields++; |
4364 | } |
4365 | } |
4366 | } |
4367 | // We processed all fields |
4368 | |
4369 | //#SelfReferencingStaticValueTypeField_Checks |
4370 | if (bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA) |
4371 | { // The type has self-referencing static ValueType field with RVA, do more checks now that depend on all fields being processed |
4372 | |
4373 | // For enums we already checked its underlying type, we should not get here |
4374 | _ASSERTE(!IsEnum()); |
4375 | |
4376 | if (HasFieldsWhichMustBeInited()) |
4377 | { // RVA fields are not allowed to have GC pointers. |
4378 | BAD_FORMAT_NOTHROW_ASSERT(!"ObjectRef in an RVA self-referencing static field" ); |
4379 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
4380 | } |
4381 | } |
4382 | |
4383 | DWORD dwNumInstanceFields = dwCurrentDeclaredField + (HasParent() ? GetParentMethodTable()->GetNumInstanceFields() : 0); |
4384 | DWORD dwNumStaticFields = bmtEnumFields->dwNumStaticFields; |
4385 | DWORD dwNumThreadStaticFields = bmtEnumFields->dwNumThreadStaticFields; |
4386 | |
4387 | if (!FitsIn<WORD>(dwNumInstanceFields) || |
4388 | !FitsIn<WORD>(dwNumStaticFields)) |
    { // Due to an implementation limitation, it is an error to have more than MAX_WORD fields.
4390 | BuildMethodTableThrowException(IDS_EE_TOOMANYFIELDS); |
4391 | } |
4392 | |
4393 | GetHalfBakedClass()->SetNumInstanceFields((WORD)dwNumInstanceFields); |
4394 | GetHalfBakedClass()->SetNumStaticFields((WORD)dwNumStaticFields); |
4395 | GetHalfBakedClass()->SetNumThreadStaticFields((WORD)dwNumThreadStaticFields); |
4396 | |
4397 | if (bmtFP->fHasFixedAddressValueTypes) |
4398 | { |
4399 | // To make things simpler, if the class has any field with this requirement, we'll set |
4400 | // all the statics to have this property. This allows us to only need to persist one bit |
4401 | // for the ngen case. |
4402 | GetHalfBakedClass()->SetHasFixedAddressVTStatics(); |
4403 | } |
4404 | |
4405 | #ifdef FEATURE_64BIT_ALIGNMENT |
4406 | // For types with layout we drop any 64-bit alignment requirement if the packing size was less than 8 |
4407 | // bytes (this mimics what the native compiler does and ensures we match up calling conventions during |
4408 | // interop). |
4409 | if (HasLayout() && GetLayoutInfo()->GetPackingSize() < 8) |
4410 | { |
4411 | fFieldRequiresAlign8 = false; |
4412 | } |
4413 | |
4414 | if (fFieldRequiresAlign8) |
4415 | { |
4416 | SetAlign8Candidate(); |
4417 | } |
4418 | #endif // FEATURE_64BIT_ALIGNMENT |
4419 | |
4420 | #ifdef FEATURE_DOUBLE_ALIGNMENT_HINT |
4421 | if (ShouldAlign8(dwR8Fields, dwNumInstanceFields)) |
4422 | { |
4423 | SetAlign8Candidate(); |
4424 | } |
4425 | #endif // FEATURE_DOUBLE_ALIGNMENT_HINT |
4426 | |
4427 | |
4428 | //======================================================================== |
4429 | // END: |
4430 | // Go thru all fields and initialize their FieldDescs. |
4431 | //======================================================================== |
4432 | |
4433 | return; |
4434 | } // MethodTableBuilder::InitializeFieldDescs |
4435 | |
4436 | #ifdef _PREFAST_ |
4437 | #pragma warning(pop) |
4438 | #endif |
4439 | |
4440 | //******************************************************************************* |
4441 | // Verify self-referencing static ValueType fields with RVA (when the size of the ValueType is known). |
4442 | void |
4443 | MethodTableBuilder::VerifySelfReferencingStaticValueTypeFields_WithRVA( |
4444 | MethodTable ** pByValueClassCache) |
4445 | { |
4446 | STANDARD_VM_CONTRACT; |
4447 | |
4448 | _ASSERTE(bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA); |
4449 | // Enum's static self-referencing fields have been verified as the underlying type of the enum, we should not get here for them |
4450 | _ASSERTE(!IsEnum()); |
4451 | // The size of the ValueType should be known at this point (the caller throws if it is 0) |
4452 | _ASSERTE(bmtFP->NumInstanceFieldBytes != 0); |
4453 | |
4454 | FieldDesc * pFieldDescList = GetApproxFieldDescListRaw(); |
4455 | DWORD nFirstThreadStaticFieldIndex = bmtEnumFields->dwNumInstanceFields + bmtEnumFields->dwNumStaticFields - bmtEnumFields->dwNumThreadStaticFields; |
4456 | for (DWORD i = bmtEnumFields->dwNumInstanceFields; i < nFirstThreadStaticFieldIndex; i++) |
4457 | { |
4458 | FieldDesc * pFD = &pFieldDescList[i]; |
4459 | _ASSERTE(pFD->IsStatic()); |
4460 | |
4461 | if (pFD->IsRVA() && pFD->IsByValue()) |
4462 | { |
4463 | _ASSERTE(pByValueClassCache[i] != NULL); |
4464 | |
4465 | if (IsSelfRef(pByValueClassCache[i])) |
4466 | { |
4467 | DWORD rva; |
4468 | IfFailThrow(GetMDImport()->GetFieldRVA(pFD->GetMemberDef(), &rva)); |
4469 | } |
4470 | } |
4471 | } |
4472 | } // MethodTableBuilder::VerifySelfReferencingStaticValueTypeFields_WithRVA |
4473 | |
4474 | //******************************************************************************* |
// Returns true if hEnclosingTypeCandidate encloses, at any depth,
// hNestedTypeCandidate; returns false otherwise.
4477 | |
4478 | bool MethodTableBuilder::IsEnclosingNestedTypePair( |
4479 | bmtTypeHandle hEnclosingTypeCandidate, |
4480 | bmtTypeHandle hNestedTypeCandidate) |
4481 | { |
4482 | STANDARD_VM_CONTRACT; |
4483 | |
4484 | CONSISTENCY_CHECK(!hEnclosingTypeCandidate.IsNull()); |
4485 | CONSISTENCY_CHECK(!hNestedTypeCandidate.IsNull()); |
4486 | CONSISTENCY_CHECK(!bmtTypeHandle::Equal(hEnclosingTypeCandidate, hNestedTypeCandidate)); |
4487 | |
4488 | Module * pModule = hEnclosingTypeCandidate.GetModule(); |
4489 | |
4490 | if (pModule != hNestedTypeCandidate.GetModule()) |
    { // If the modules aren't the same, then there's no way
      // hEnclosingTypeCandidate could enclose hNestedTypeCandidate.
      // We make this check early so that the rest of the code can
      // deal with a single Module and IMDInternalImport instance
      // and can avoid extra checks.
4496 | return false; |
4497 | } |
4498 | |
4499 | IMDInternalImport * pMDImport = pModule->GetMDImport(); |
4500 | |
4501 | mdTypeDef tkEncl = hEnclosingTypeCandidate.GetTypeDefToken(); |
4502 | mdTypeDef tkNest = hNestedTypeCandidate.GetTypeDefToken(); |
4503 | |
4504 | while (tkEncl != tkNest) |
4505 | { // Do this using the metadata APIs because MethodTableBuilder does |
4506 | // not construct type representations for enclosing type chains. |
4507 | if (FAILED(pMDImport->GetNestedClassProps(tkNest, &tkNest))) |
        { // tkNest is not a nested type.
4509 | return false; |
4510 | } |
4511 | } |
4512 | |
    // We walked tkNest's chain of enclosing types up to tkEncl, so we've
    // shown that hEnclosingTypeCandidate encloses hNestedTypeCandidate.
4515 | return true; |
4516 | } |
4517 | |
4518 | //******************************************************************************* |
4519 | // Given an arbitrary nesting+subclassing pattern like this: |
4520 | // |
4521 | // class C1 { |
4522 | // private virtual void Foo() { ... } |
4523 | // class C2 : C1 { |
4524 | // ... |
4525 | // class CN : CN-1 { |
4526 | // private override void Foo() { ... } |
4527 | // } |
4528 | // ... |
4529 | // } |
4530 | // } |
4531 | // |
// this method will return true, where hChild == CN and hBase == C1
//
// Note that a type is not required to derive from its immediately
// enclosing type; it may skip levels, as in this example:
4536 | // |
4537 | // class A |
4538 | // { |
4539 | // private virtual void Foo() { } |
4540 | // public class B |
4541 | // { |
4542 | // public class C : A |
4543 | // { |
4544 | // private override void Foo() { } |
4545 | // } |
4546 | // } |
4547 | // } |
4548 | // |
4549 | // NOTE: IMPORTANT: This code assumes that hBase is indeed a base type of hChild, |
4550 | // and behaviour is undefined if this is not the case. |
4551 | |
4552 | bool MethodTableBuilder::IsBaseTypeAlsoEnclosingType( |
4553 | bmtTypeHandle hBase, |
4554 | bmtTypeHandle hChild) |
4555 | { |
4556 | STANDARD_VM_CONTRACT; |
4557 | |
4558 | CONSISTENCY_CHECK(!hBase.IsNull()); |
4559 | CONSISTENCY_CHECK(!hChild.IsNull()); |
4560 | CONSISTENCY_CHECK(!bmtTypeHandle::Equal(hBase, hChild)); |
4561 | |
4562 | // The idea of this algorithm is that if we climb the inheritance chain |
4563 | // starting at hChild then we'll eventually hit hBase. If we check that |
4564 | // for every (hParent, hChild) pair in the chain that hParent encloses |
4565 | // hChild, then we've shown that hBase encloses hChild. |
4566 | |
4567 | while (!bmtTypeHandle::Equal(hBase, hChild)) |
4568 | { |
4569 | CONSISTENCY_CHECK(!hChild.GetParentType().IsNull()); |
4570 | bmtTypeHandle hParent(hChild.GetParentType()); |
4571 | |
4572 | if (!IsEnclosingNestedTypePair(hParent, hChild)) |
4573 | { // First, the parent type must enclose the child type. |
4574 | // If this is not the case we fail immediately. |
4575 | return false; |
4576 | } |
4577 | |
4578 | // Move up one in the inheritance chain, and try again. |
4579 | hChild = hParent; |
4580 | } |
4581 | |
4582 | // If the loop worked itself from the original hChild all the way |
4583 | // up to hBase, then we know that for every (hParent, hChild) |
4584 | // pair in the chain that hParent enclosed hChild, and so we know |
4585 | // that hBase encloses the original hChild |
4586 | return true; |
4587 | } |
4588 | |
4589 | //******************************************************************************* |
4590 | BOOL MethodTableBuilder::TestOverrideForAccessibility( |
4591 | bmtMethodHandle hParentMethod, |
4592 | bmtTypeHandle hChildType) |
4593 | { |
4594 | STANDARD_VM_CONTRACT; |
4595 | |
4596 | bmtTypeHandle hParentType(hParentMethod.GetOwningType()); |
4597 | |
4598 | Module * pParentModule = hParentType.GetModule(); |
4599 | Module * pChildModule = hChildType.GetModule(); |
4600 | |
4601 | Assembly * pParentAssembly = pParentModule->GetAssembly(); |
4602 | Assembly * pChildAssembly = pChildModule->GetAssembly(); |
4603 | |
4604 | BOOL isSameAssembly = (pChildAssembly == pParentAssembly); |
4605 | |
4606 | DWORD dwParentAttrs = hParentMethod.GetDeclAttrs(); |
4607 | |
4608 | // AKA "strict bit". This means that overridability is tightly bound to accessibility. |
4609 | if (IsMdCheckAccessOnOverride(dwParentAttrs)) |
4610 | { |
4611 | // Same Assembly |
4612 | if (isSameAssembly || pParentAssembly->GrantsFriendAccessTo(pChildAssembly, hParentMethod.GetMethodDesc()) |
4613 | || pChildAssembly->IgnoresAccessChecksTo(pParentAssembly)) |
4614 | { |
4615 | // Can always override any method that has accessibility greater than mdPrivate |
4616 | if ((dwParentAttrs & mdMemberAccessMask) > mdPrivate) |
4617 | { // Fall through |
4618 | } |
            // Generally, types cannot override inherited mdPrivate methods, except:
            // a nested type can access its enclosing type's private members, so it
            // may override them if it extends its enclosing type.
4622 | else if ((dwParentAttrs & mdMemberAccessMask) == mdPrivate && |
4623 | IsBaseTypeAlsoEnclosingType(hParentType, hChildType)) |
4624 | { // Fall through |
4625 | } |
4626 | else |
4627 | { |
4628 | return FALSE; |
4629 | } |
4630 | } |
4631 | // Cross-Assembly |
4632 | else |
4633 | { |
            // If the method marks itself as check-visibility (strict), the method must be
            // public, FamORAssem, or Family to be overridden from another assembly.
4636 | if((dwParentAttrs & mdMemberAccessMask) <= mdAssem) |
4637 | { |
4638 | return FALSE; |
4639 | } |
4640 | } |
4641 | } |
4642 | return TRUE; |
4643 | } |
4644 | |
4645 | //******************************************************************************* |
4646 | VOID MethodTableBuilder::TestOverRide(bmtMethodHandle hParentMethod, |
4647 | bmtMethodHandle hChildMethod) |
4648 | { |
4649 | CONTRACTL { |
4650 | STANDARD_VM_CHECK; |
4651 | PRECONDITION(IsMdVirtual(hParentMethod.GetDeclAttrs())); |
4652 | PRECONDITION(IsMdVirtual(hChildMethod.GetDeclAttrs())); |
4653 | } CONTRACTL_END; |
4654 | |
4655 | DWORD dwAttrs = hChildMethod.GetDeclAttrs(); |
4656 | DWORD dwParentAttrs = hParentMethod.GetDeclAttrs(); |
4657 | |
4658 | Module *pModule = hChildMethod.GetOwningType().GetModule(); |
4659 | Module *pParentModule = hParentMethod.GetOwningType().GetModule(); |
4660 | |
4661 | Assembly *pAssembly = pModule->GetAssembly(); |
4662 | Assembly *pParentAssembly = pParentModule->GetAssembly(); |
4663 | |
4664 | BOOL isSameModule = (pModule == pParentModule); |
4665 | BOOL isSameAssembly = (pAssembly == pParentAssembly); |
4666 | |
4667 | if (!TestOverrideForAccessibility(hParentMethod, hChildMethod.GetOwningType())) |
4668 | { |
4669 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ACCESS_FAILURE, hChildMethod.GetMethodSignature().GetToken()); |
4670 | } |
4671 | |
4672 | // |
4673 | // Refer to Partition II, 9.3.3 for more information on what is permitted. |
4674 | // |
4675 | |
4676 | enum WIDENING_STATUS |
4677 | { |
4678 | e_NO, // NO |
4679 | e_YES, // YES |
4680 | e_SA, // YES, but only when same assembly |
4681 | e_NSA, // YES, but only when NOT same assembly |
4682 | e_SM, // YES, but only when same module |
4683 | }; |
4684 | |
4685 | static_assert_no_msg(mdPrivateScope == 0x00); |
4686 | static_assert_no_msg(mdPrivate == 0x01); |
4687 | static_assert_no_msg(mdFamANDAssem == 0x02); |
4688 | static_assert_no_msg(mdAssem == 0x03); |
4689 | static_assert_no_msg(mdFamily == 0x04); |
4690 | static_assert_no_msg(mdFamORAssem == 0x05); |
4691 | static_assert_no_msg(mdPublic == 0x06); |
4692 | |
4693 | static const DWORD dwCount = mdPublic - mdPrivateScope + 1; |
4694 | static const WIDENING_STATUS rgWideningTable[dwCount][dwCount] = |
4695 | |
4696 | // | Base type |
4697 | // Subtype | mdPrivateScope mdPrivate mdFamANDAssem mdAssem mdFamily mdFamORAssem mdPublic |
4698 | // --------------+------------------------------------------------------------------------------------------------------- |
4699 | /*mdPrivateScope | */ { { e_SM, e_NO, e_NO, e_NO, e_NO, e_NO, e_NO }, |
4700 | /*mdPrivate | */ { e_SM, e_YES, e_NO, e_NO, e_NO, e_NO, e_NO }, |
4701 | /*mdFamANDAssem | */ { e_SM, e_YES, e_SA, e_NO, e_NO, e_NO, e_NO }, |
4702 | /*mdAssem | */ { e_SM, e_YES, e_SA, e_SA, e_NO, e_NO, e_NO }, |
4703 | /*mdFamily | */ { e_SM, e_YES, e_YES, e_NO, e_YES, e_NSA, e_NO }, |
4704 | /*mdFamORAssem | */ { e_SM, e_YES, e_YES, e_SA, e_YES, e_YES, e_NO }, |
4705 | /*mdPublic | */ { e_SM, e_YES, e_YES, e_YES, e_YES, e_YES, e_YES } }; |
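
    // For example, reading the table above: re-declaring a base type's
    // mdFamily (protected) virtual with mdAssem (internal) accessibility hits
    // rgWideningTable[mdAssem][mdFamily] == e_NO and is rejected below with
    // IDS_CLASSLOAD_REDUCEACCESS, whereas widening the same method to mdPublic
    // hits rgWideningTable[mdPublic][mdFamily] == e_YES and is accepted.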
4706 | |
4707 | DWORD idxParent = (dwParentAttrs & mdMemberAccessMask) - mdPrivateScope; |
4708 | DWORD idxMember = (dwAttrs & mdMemberAccessMask) - mdPrivateScope; |
4709 | CONSISTENCY_CHECK(idxParent < dwCount); |
4710 | CONSISTENCY_CHECK(idxMember < dwCount); |
4711 | |
4712 | WIDENING_STATUS entry = rgWideningTable[idxMember][idxParent]; |
4713 | |
4714 | if (entry == e_NO || |
4715 | (entry == e_SA && !isSameAssembly && !pParentAssembly->GrantsFriendAccessTo(pAssembly, hParentMethod.GetMethodDesc()) |
4716 | && !pAssembly->IgnoresAccessChecksTo(pParentAssembly)) || |
4717 | (entry == e_NSA && isSameAssembly) || |
4718 | (entry == e_SM && !isSameModule) |
4719 | ) |
4720 | { |
4721 | BuildMethodTableThrowException(IDS_CLASSLOAD_REDUCEACCESS, hChildMethod.GetMethodSignature().GetToken()); |
4722 | } |
4723 | |
4724 | return; |
4725 | } |
4726 | |
4727 | //******************************************************************************* |
4728 | VOID MethodTableBuilder::TestMethodImpl( |
4729 | bmtMethodHandle hDeclMethod, |
4730 | bmtMethodHandle hImplMethod) |
4731 | { |
4732 | CONTRACTL |
4733 | { |
4734 | STANDARD_VM_CHECK; |
4735 | PRECONDITION(!hDeclMethod.IsNull()); |
4736 | PRECONDITION(!hImplMethod.IsNull()); |
4737 | } |
4738 | CONTRACTL_END |
4739 | |
4740 | Module * pDeclModule = hDeclMethod.GetOwningType().GetModule(); |
4741 | Module * pImplModule = hImplMethod.GetOwningType().GetModule(); |
4742 | |
4743 | mdTypeDef tokDecl = hDeclMethod.GetMethodSignature().GetToken(); |
4744 | mdTypeDef tokImpl = hImplMethod.GetMethodSignature().GetToken(); |
4745 | |
4746 | BOOL isSameModule = pDeclModule->Equals(pImplModule); |
4747 | |
4748 | IMDInternalImport *pIMDDecl = pDeclModule->GetMDImport(); |
4749 | IMDInternalImport *pIMDImpl = pImplModule->GetMDImport(); |
4750 | |
4751 | DWORD dwDeclAttrs; |
4752 | if (FAILED(pIMDDecl->GetMethodDefProps(tokDecl, &dwDeclAttrs))) |
4753 | { |
4754 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
4755 | } |
4756 | DWORD dwImplAttrs; |
4757 | if (FAILED(pIMDImpl->GetMethodDefProps(tokImpl, &dwImplAttrs))) |
4758 | { |
4759 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
4760 | } |
4761 | |
4762 | HRESULT hr = COR_E_TYPELOAD; |
4763 | |
4764 | if (!IsMdVirtual(dwDeclAttrs)) |
4765 | { |
4766 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_NONVIRTUAL_DECL); |
4767 | } |
4768 | if (!IsMdVirtual(dwImplAttrs)) |
4769 | { |
4770 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MUSTBEVIRTUAL); |
4771 | } |
4772 | // Virtual methods cannot be static |
4773 | if (IsMdStatic(dwDeclAttrs)) |
4774 | { |
4775 | BuildMethodTableThrowException(IDS_CLASSLOAD_STATICVIRTUAL); |
4776 | } |
4777 | if (IsMdStatic(dwImplAttrs)) |
4778 | { |
4779 | BuildMethodTableThrowException(IDS_CLASSLOAD_STATICVIRTUAL); |
4780 | } |
4781 | if (IsMdFinal(dwDeclAttrs)) |
4782 | { |
4783 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_FINAL_DECL); |
4784 | } |
4785 | |
    // An interface method body that has a MethodImpl should always be final
4787 | if (IsInterface() && !IsMdFinal(dwImplAttrs)) |
4788 | { |
4789 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_FINAL_IMPL); |
4790 | } |
4791 | |
4792 | // Since MethodImpl's do not affect the visibility of the Decl method, there's |
4793 | // no need to check. |
4794 | |
4795 | // If Decl's parent is other than this class, Decl must not be private |
4796 | mdTypeDef tkImplParent = mdTypeDefNil; |
4797 | mdTypeDef tkDeclParent = mdTypeDefNil; |
4798 | |
4799 | if (FAILED(hr = pIMDDecl->GetParentToken(tokDecl, &tkDeclParent))) |
4800 | { |
4801 | BuildMethodTableThrowException(hr, *bmtError); |
4802 | } |
4803 | if (FAILED(hr = pIMDImpl->GetParentToken(tokImpl, &tkImplParent))) |
4804 | { |
4805 | BuildMethodTableThrowException(hr, *bmtError); |
4806 | } |
4807 | |
4808 | // Make sure that we test for accessibility restrictions only if the decl is |
4809 | // not within our own type, as we are allowed to methodImpl a private with the |
4810 | // strict bit set if it is in our own type. |
4811 | if (!isSameModule || tkDeclParent != tkImplParent) |
4812 | { |
4813 | if (!TestOverrideForAccessibility(hDeclMethod, hImplMethod.GetOwningType())) |
4814 | { |
4815 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ACCESS_FAILURE, tokImpl); |
4816 | } |
4817 | |
4818 | // Decl's parent must not be tdSealed |
4819 | mdToken tkGrandParentDummyVar; |
4820 | DWORD dwDeclTypeAttrs; |
4821 | if (FAILED(hr = pIMDDecl->GetTypeDefProps(tkDeclParent, &dwDeclTypeAttrs, &tkGrandParentDummyVar))) |
4822 | { |
4823 | BuildMethodTableThrowException(hr, *bmtError); |
4824 | } |
4825 | if (IsTdSealed(dwDeclTypeAttrs)) |
4826 | { |
4827 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_SEALED_DECL); |
4828 | } |
4829 | } |
4830 | |
4831 | return; |
4832 | } |
4833 | |
4834 | |
4835 | //******************************************************************************* |
4836 | // |
4837 | // Used by BuildMethodTable |
4838 | // |
4839 | VOID |
4840 | MethodTableBuilder::ValidateMethods() |
4841 | { |
4842 | CONTRACTL |
4843 | { |
4844 | STANDARD_VM_CHECK; |
4845 | |
4846 | PRECONDITION(CheckPointer(this)); |
4847 | PRECONDITION(CheckPointer(bmtInternal)); |
4848 | PRECONDITION(CheckPointer(bmtMetaData)); |
4849 | PRECONDITION(CheckPointer(bmtError)); |
4850 | PRECONDITION(CheckPointer(bmtProp)); |
4851 | PRECONDITION(CheckPointer(bmtInterface)); |
4852 | PRECONDITION(CheckPointer(bmtParent)); |
4853 | PRECONDITION(CheckPointer(bmtMFDescs)); |
4854 | PRECONDITION(CheckPointer(bmtEnumFields)); |
4855 | PRECONDITION(CheckPointer(bmtMethodImpl)); |
4856 | PRECONDITION(CheckPointer(bmtVT)); |
4857 | } |
4858 | CONTRACTL_END; |
4859 | |
4860 | // Used to keep track of located default and type constructors. |
4861 | CONSISTENCY_CHECK(bmtVT->pCCtor == NULL); |
4862 | CONSISTENCY_CHECK(bmtVT->pDefaultCtor == NULL); |
4863 | |
4864 | // Fetch the hard-coded signatures for the type constructor and the |
4865 | // default constructor and create MethodSignature objects for both at |
4866 | // the method level so this does not happen for every specialname |
4867 | // method. |
4868 | |
4869 | Signature sig; |
4870 | |
4871 | sig = MscorlibBinder::GetSignature(&gsig_SM_RetVoid); |
4872 | |
4873 | MethodSignature cctorSig(MscorlibBinder::GetModule(), |
4874 | COR_CCTOR_METHOD_NAME, |
4875 | sig.GetRawSig(), sig.GetRawSigLen()); |
4876 | |
4877 | sig = MscorlibBinder::GetSignature(&gsig_IM_RetVoid); |
4878 | |
4879 | MethodSignature defaultCtorSig(MscorlibBinder::GetModule(), |
4880 | COR_CTOR_METHOD_NAME, |
4881 | sig.GetRawSig(), sig.GetRawSigLen()); |
4882 | |
4883 | Module * pModule = GetModule(); |
4884 | DeclaredMethodIterator it(*this); |
4885 | while (it.Next()) |
4886 | { |
4887 | // The RVA is only valid/testable if it has not been overwritten |
4888 | // for something like edit-and-continue |
4889 | // Complete validation of non-zero RVAs is done later inside MethodDesc::GetILHeader. |
4890 | if ((it.RVA() == 0) && (pModule->GetDynamicIL(it.Token(), FALSE) == NULL)) |
4891 | { |
            // IL code that is implemented here must have a valid code RVA.
            // This came up due to a linker bug where the ImplFlags/DescrOffset were
            // being set to null and we weren't coping with it.
4895 | if((IsMiIL(it.ImplFlags()) || IsMiOPTIL(it.ImplFlags())) && |
4896 | !IsMdAbstract(it.Attrs()) && |
4897 | !IsReallyMdPinvokeImpl(it.Attrs()) && |
4898 | !IsMiInternalCall(it.ImplFlags())) |
4899 | { |
4900 | BuildMethodTableThrowException(IDS_CLASSLOAD_MISSINGMETHODRVA, it.Token()); |
4901 | } |
4902 | } |
4903 | |
4904 | if (IsMdRTSpecialName(it.Attrs())) |
4905 | { |
4906 | if (IsMdVirtual(it.Attrs())) |
4907 | { // Virtual specialname methods are illegal |
4908 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
4909 | } |
4910 | |
4911 | // Constructors (.ctor) and class initialisers (.cctor) are special |
4912 | const MethodSignature &curSig(it->GetMethodSignature()); |
4913 | |
4914 | if (IsMdStatic(it.Attrs())) |
4915 | { // The only rtSpecialName static method allowed is the .cctor |
4916 | if (!curSig.ExactlyEqual(cctorSig)) |
4917 | { // Bad method |
4918 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
4919 | } |
4920 | |
4921 | // Remember it for later |
4922 | bmtVT->pCCtor = *it; |
4923 | } |
4924 | else |
4925 | { |
4926 | if(!MethodSignature::NamesEqual(curSig, defaultCtorSig)) |
4927 | { // The only rtSpecialName instance methods allowed are .ctors |
4928 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
4929 | } |
4930 | |
4931 | // .ctor must return void |
4932 | MetaSig methodMetaSig(curSig.GetSignature(), |
4933 | static_cast<DWORD>(curSig.GetSignatureLength()), |
4934 | curSig.GetModule(), |
4935 | NULL); |
4936 | |
4937 | if (methodMetaSig.GetReturnType() != ELEMENT_TYPE_VOID) |
4938 | { // All constructors must have a void return type |
4939 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
4940 | } |
4941 | |
4942 | // See if this is a default constructor. If so, remember it for later. |
4943 | if (curSig.ExactlyEqual(defaultCtorSig)) |
4944 | { |
4945 | bmtVT->pDefaultCtor = *it; |
4946 | } |
4947 | } |
4948 | } |
4949 | |
        // Make sure that fcalls have a 0 RVA. This is assumed by the prejit fixup logic.
4951 | if (it.MethodType() == METHOD_TYPE_FCALL && it.RVA() != 0) |
4952 | { |
4953 | BuildMethodTableThrowException(BFA_ECALLS_MUST_HAVE_ZERO_RVA, it.Token()); |
4954 | } |
4955 | |
        // Check for proper use of the Managed and Native impl flags
4957 | if (IsMiManaged(it.ImplFlags())) |
4958 | { |
4959 | if (IsMiIL(it.ImplFlags()) || IsMiRuntime(it.ImplFlags())) // IsMiOPTIL(it.ImplFlags()) no longer supported |
4960 | { |
4961 | // No need to set code address, pre stub used automatically. |
4962 | } |
4963 | else |
4964 | { |
4965 | if (IsMiNative(it.ImplFlags())) |
4966 | { |
                    // For now, simply disallow managed native code. If you turn this on, you must at least
                    // ensure that we have SkipVerificationPermission or equivalent.
4969 | BuildMethodTableThrowException(BFA_MANAGED_NATIVE_NYI, it.Token()); |
4970 | } |
4971 | else |
4972 | { |
4973 | BuildMethodTableThrowException(BFA_BAD_IMPL_FLAGS, it.Token()); |
4974 | } |
4975 | } |
4976 | } |
4977 | else |
4978 | { |
4979 | if (IsMiNative(it.ImplFlags()) && IsGlobalClass()) |
4980 | { |
4981 | // global function unmanaged entrypoint via IJW thunk was handled |
4982 | // above. |
4983 | } |
4984 | else |
4985 | { |
4986 | BuildMethodTableThrowException(IDS_CLASSLOAD_BAD_UNMANAGED_RVA, it.Token()); |
4987 | } |
4988 | if (it.MethodType() != METHOD_TYPE_NDIRECT) |
4989 | { |
4990 | BuildMethodTableThrowException(BFA_BAD_UNMANAGED_ENTRY_POINT); |
4991 | } |
4992 | } |
4993 | |
        // Vararg methods are not allowed inside generic classes,
        // nor can they be generic methods.
4996 | if (bmtGenerics->GetNumGenericArgs() > 0 || (it.MethodType() == METHOD_TYPE_INSTANTIATED) ) |
4997 | { |
4998 | DWORD cMemberSignature; |
4999 | PCCOR_SIGNATURE pMemberSignature = it.GetSig(&cMemberSignature); |
5000 | // We've been trying to avoid asking for the signature - now we need it |
5001 | if (pMemberSignature == NULL) |
5002 | { |
5003 | pMemberSignature = it.GetSig(&cMemberSignature); |
5004 | } |
5005 | |
5006 | if (MetaSig::IsVarArg(pModule, Signature(pMemberSignature, cMemberSignature))) |
5007 | { |
5008 | BuildMethodTableThrowException(BFA_GENCODE_NOT_BE_VARARG); |
5009 | } |
5010 | } |
5011 | |
5012 | if (IsMdVirtual(it.Attrs()) && IsMdPublic(it.Attrs()) && it.Name() == NULL) |
5013 | { |
5014 | BuildMethodTableThrowException(IDS_CLASSLOAD_NOMETHOD_NAME); |
5015 | } |
5016 | |
5017 | if (it.IsMethodImpl()) |
5018 | { |
5019 | if (!IsMdVirtual(it.Attrs())) |
5020 | { // Non-virtual methods cannot participate in a methodImpl pair. |
5021 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MUSTBEVIRTUAL, it.Token()); |
5022 | } |
5023 | } |
5024 | |
5025 | // Virtual static methods are not allowed. |
5026 | if (IsMdStatic(it.Attrs()) && IsMdVirtual(it.Attrs())) |
5027 | { |
5028 | BuildMethodTableThrowException(IDS_CLASSLOAD_STATICVIRTUAL, it.Token()); |
5029 | } |
5030 | } |
5031 | } |
5032 | |
5033 | //******************************************************************************* |
5034 | // Essentially, this is a helper method that combines calls to InitMethodDesc and |
5035 | // SetSecurityFlagsOnMethod. It then assigns the newly initialized MethodDesc to |
5036 | // the bmtMDMethod. |
5037 | VOID |
5038 | MethodTableBuilder::InitNewMethodDesc( |
5039 | bmtMDMethod * pMethod, |
5040 | MethodDesc * pNewMD) |
5041 | { |
5042 | STANDARD_VM_CONTRACT; |
5043 | |
5044 | // |
5045 | // First, set all flags that control layout of optional slots |
5046 | // |
5047 | pNewMD->SetClassification(GetMethodClassification(pMethod->GetMethodType())); |
5048 | |
5049 | if (pMethod->GetMethodImplType() == METHOD_IMPL) |
5050 | pNewMD->SetHasMethodImplSlot(); |
5051 | |
5052 | if (pMethod->GetSlotIndex() >= bmtVT->cVtableSlots) |
5053 | pNewMD->SetHasNonVtableSlot(); |
5054 | |
5055 | if (NeedsNativeCodeSlot(pMethod)) |
5056 | pNewMD->SetHasNativeCodeSlot(); |
5057 | |
    // Now that we know the classification, we can allocate the correct type of
    // MethodDesc and perform any classification-specific initialization.
5060 | |
5061 | LPCSTR pName = pMethod->GetMethodSignature().GetName(); |
5062 | if (pName == NULL) |
5063 | { |
5064 | if (FAILED(GetMDImport()->GetNameOfMethodDef(pMethod->GetMethodSignature().GetToken(), &pName))) |
5065 | { |
5066 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
5067 | } |
5068 | } |
5069 | |
5070 | #ifdef _DEBUG |
5071 | LPCUTF8 pszDebugMethodName; |
5072 | if (FAILED(GetMDImport()->GetNameOfMethodDef(pMethod->GetMethodSignature().GetToken(), &pszDebugMethodName))) |
5073 | { |
5074 | pszDebugMethodName = "Invalid MethodDef record" ; |
5075 | } |
5076 | S_SIZE_T safeLen = S_SIZE_T(strlen(pszDebugMethodName)) + S_SIZE_T(1); |
5077 | if(safeLen.IsOverflow()) |
5078 | COMPlusThrowHR(COR_E_OVERFLOW); |
5079 | |
5080 | size_t len = safeLen.Value(); |
5081 | LPCUTF8 pszDebugMethodNameCopy = (char*) AllocateFromLowFrequencyHeap(safeLen); |
5082 | strcpy_s((char *) pszDebugMethodNameCopy, len, pszDebugMethodName); |
5083 | #endif // _DEBUG |
5084 | |
5085 | // Do the init specific to each classification of MethodDesc & assign some common fields |
5086 | InitMethodDesc(pNewMD, |
5087 | GetMethodClassification(pMethod->GetMethodType()), |
5088 | pMethod->GetMethodSignature().GetToken(), |
5089 | pMethod->GetImplAttrs(), |
5090 | pMethod->GetDeclAttrs(), |
5091 | FALSE, |
5092 | pMethod->GetRVA(), |
5093 | GetMDImport(), |
5094 | pName |
5095 | COMMA_INDEBUG(pszDebugMethodNameCopy) |
5096 | COMMA_INDEBUG(GetDebugClassName()) |
5097 | COMMA_INDEBUG("" ) // FIX this happens on global methods, give better info |
5098 | ); |
5099 | |
5100 | pMethod->SetMethodDesc(pNewMD); |
5101 | |
5102 | bmtRTMethod * pParentMethod = NULL; |
5103 | |
5104 | if (HasParent()) |
5105 | { |
5106 | SLOT_INDEX idx = pMethod->GetSlotIndex(); |
5107 | CONSISTENCY_CHECK(idx != INVALID_SLOT_INDEX); |
5108 | |
5109 | if (idx < GetParentMethodTable()->GetNumVirtuals()) |
5110 | { |
5111 | pParentMethod = (*bmtParent->pSlotTable)[idx].Decl().AsRTMethod(); |
5112 | } |
5113 | } |
5114 | |
5115 | // Turn off inlining for any calls |
5116 | // that are marked in the metadata as not being inlineable. |
5117 | if(IsMiNoInlining(pMethod->GetImplAttrs())) |
5118 | { |
5119 | pNewMD->SetNotInline(true); |
5120 | } |
5121 | |
5122 | // Check for methods marked as [Intrinsic] |
5123 | if (GetModule()->IsSystem() || GetAssembly()->IsSIMDVectorAssembly()) |
5124 | { |
5125 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(pMethod->GetMethodSignature().GetToken(), |
5126 | g_CompilerServicesIntrinsicAttribute, |
5127 | NULL, |
5128 | NULL); |
5129 | |
5130 | if (hr == S_OK || bmtProp->fIsHardwareIntrinsic) |
5131 | { |
5132 | pNewMD->SetIsJitIntrinsic(); |
5133 | } |
5134 | |
5135 | } |
5136 | |
5137 | pNewMD->SetSlot(pMethod->GetSlotIndex()); |
5138 | } |
5139 | |
5140 | //******************************************************************************* |
// Determine vtable placement for each non-virtual method in the class, while
// also looking for the default and type constructors.
5143 | VOID |
5144 | MethodTableBuilder::PlaceNonVirtualMethods() |
5145 | { |
5146 | CONTRACTL |
5147 | { |
5148 | STANDARD_VM_CHECK; |
5149 | |
5150 | PRECONDITION(CheckPointer(this)); |
5151 | PRECONDITION(CheckPointer(bmtInternal)); |
5152 | PRECONDITION(CheckPointer(bmtMetaData)); |
5153 | PRECONDITION(CheckPointer(bmtError)); |
5154 | PRECONDITION(CheckPointer(bmtProp)); |
5155 | PRECONDITION(CheckPointer(bmtInterface)); |
5156 | PRECONDITION(CheckPointer(bmtParent)); |
5157 | PRECONDITION(CheckPointer(bmtMFDescs)); |
5158 | PRECONDITION(CheckPointer(bmtEnumFields)); |
5159 | PRECONDITION(CheckPointer(bmtMethodImpl)); |
5160 | PRECONDITION(CheckPointer(bmtVT)); |
5161 | } |
5162 | CONTRACTL_END; |
5163 | |
5164 | INDEBUG(bmtVT->SealVirtualSlotSection();) |
5165 | |
5166 | // |
5167 | // For each non-virtual method, place the method in the next available non-virtual method slot. |
5168 | // |
5169 | |
    // Place the cctor and default ctor first. code:MethodTable::GetCCtorSlot and code:MethodTable::GetDefaultCtorSlot
    // depend on this.
5172 | if (bmtVT->pCCtor != NULL) |
5173 | { |
5174 | if (!bmtVT->AddNonVirtualMethod(bmtVT->pCCtor)) |
5175 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5176 | } |
5177 | |
5178 | if (bmtVT->pDefaultCtor != NULL) |
5179 | { |
5180 | if (!bmtVT->AddNonVirtualMethod(bmtVT->pDefaultCtor)) |
5181 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5182 | } |
5183 | |
    // We use slot numbers during remoting and to map methods between generic
    // instantiations (see MethodTable::GetParallelMethodDesc). The current
    // implementation of this mechanism requires real slots.
5187 | BOOL fCanHaveNonVtableSlots = (bmtGenerics->GetNumGenericArgs() == 0) && !IsInterface(); |
5188 | |
5189 | // Flag to avoid second pass when possible |
5190 | BOOL fHasNonVtableSlots = FALSE; |
5191 | |
5192 | // |
    // Place all methods that require a real vtable slot first. This is necessary so
    // that they get consecutive slot numbers right after the virtual slots.
5195 | // |
5196 | |
5197 | DeclaredMethodIterator it(*this); |
5198 | while (it.Next()) |
5199 | { |
5200 | // Skip methods that are placed already |
5201 | if (it->GetSlotIndex() != INVALID_SLOT_INDEX) |
5202 | continue; |
5203 | |
5204 | #ifdef _DEBUG |
5205 | if(GetHalfBakedClass()->m_fDebuggingClass && g_pConfig->ShouldBreakOnMethod(it.Name())) |
5206 | CONSISTENCY_CHECK_MSGF(false, ("BreakOnMethodName: '%s' " , it.Name())); |
5207 | #endif // _DEBUG |
5208 | |
5209 | if (!fCanHaveNonVtableSlots || |
5210 | it->GetMethodType() == METHOD_TYPE_INSTANTIATED) |
5211 | { |
            // We use slot numbers during remoting and to map methods between generic
            // instantiations (see MethodTable::GetParallelMethodDesc). The current
            // implementation of this mechanism requires real slots.
5215 | } |
5216 | else |
5217 | { |
            // This method does not need a real vtable slot
5219 | fHasNonVtableSlots = TRUE; |
5220 | continue; |
5221 | } |
5222 | |
5223 | // This will update slot index in bmtMDMethod |
5224 | if (!bmtVT->AddNonVirtualMethod(*it)) |
5225 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5226 | } |
5227 | |
    // Remember the last real vtable slot
5229 | bmtVT->cVtableSlots = bmtVT->cTotalSlots; |
5230 | |
    // Are there any non-vtable slots to place?
5232 | if (!fHasNonVtableSlots) |
5233 | return; |
5234 | |
5235 | // |
    // Now, place the remaining methods. They will get non-vtable slots.
5237 | // |
5238 | |
5239 | DeclaredMethodIterator it2(*this); |
5240 | while (it2.Next()) |
5241 | { |
5242 | // Skip methods that are placed already |
5243 | if (it2->GetSlotIndex() != INVALID_SLOT_INDEX) |
5244 | continue; |
5245 | |
5246 | if (!bmtVT->AddNonVirtualMethod(*it2)) |
5247 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5248 | } |
5249 | |
5250 | } |
5251 | |
5252 | //******************************************************************************* |
5253 | // Determine vtable placement for each virtual member in this class. |
5254 | VOID |
5255 | MethodTableBuilder::PlaceVirtualMethods() |
5256 | { |
5257 | CONTRACTL |
5258 | { |
5259 | STANDARD_VM_CHECK; |
5260 | |
5261 | PRECONDITION(CheckPointer(this)); |
5262 | PRECONDITION(CheckPointer(bmtInternal)); |
5263 | PRECONDITION(CheckPointer(bmtMetaData)); |
5264 | PRECONDITION(CheckPointer(bmtError)); |
5265 | PRECONDITION(CheckPointer(bmtProp)); |
5266 | PRECONDITION(CheckPointer(bmtInterface)); |
5267 | PRECONDITION(CheckPointer(bmtParent)); |
5268 | PRECONDITION(CheckPointer(bmtMFDescs)); |
5269 | PRECONDITION(CheckPointer(bmtEnumFields)); |
5270 | PRECONDITION(CheckPointer(bmtMethodImpl)); |
5271 | PRECONDITION(CheckPointer(bmtVT)); |
5272 | } |
5273 | CONTRACTL_END; |
5274 | |
5275 | #ifdef _DEBUG |
5276 | LPCUTF8 pszDebugName, pszDebugNamespace; |
5277 | if (FAILED(GetMDImport()->GetNameOfTypeDef(GetCl(), &pszDebugName, &pszDebugNamespace))) |
5278 | { |
5279 | pszDebugName = pszDebugNamespace = "Invalid TypeDef record" ; |
5280 | } |
5281 | #endif // _DEBUG |
5282 | |
5283 | // |
5284 | // For each virtual method |
5285 | // - If the method is not declared as newslot, search all virtual methods in the parent |
5286 | // type for an override candidate. |
5287 | // - If such a candidate is found, test to see if the override is valid. If |
5288 | // the override is not valid, throw TypeLoadException |
5289 | // - If a candidate is found above, place the method in the inherited slot as both |
5290 | // the Decl and the Impl. |
5291 | // - Else, place the method in the next available empty vtable slot. |
5292 | // |
5293 | |
5294 | DeclaredMethodIterator it(*this); |
5295 | while (it.Next()) |
5296 | { |
5297 | if (!IsMdVirtual(it.Attrs())) |
5298 | { // Only processing declared virtual methods |
5299 | continue; |
5300 | } |
5301 | |
5302 | #ifdef _DEBUG |
5303 | if(GetHalfBakedClass()->m_fDebuggingClass && g_pConfig->ShouldBreakOnMethod(it.Name())) |
5304 | CONSISTENCY_CHECK_MSGF(false, ("BreakOnMethodName: '%s' " , it.Name())); |
5305 | #endif // _DEBUG |
5306 | |
5307 | // If this member is a method which overrides a parent method, it will be set to non-NULL |
5308 | bmtRTMethod * pParentMethod = NULL; |
5309 | |
5310 | // Hash that a method with this name exists in this class |
5311 | // Note that ctors and static ctors are not added to the table |
5312 | BOOL fMethodConstraintsMatch = FALSE; |
5313 | |
5314 | // If the member is marked with a new slot we do not need to find it in the parent |
5315 | if (HasParent() && !IsMdNewSlot(it.Attrs())) |
5316 | { |
5317 | // Attempt to find the method with this name and signature in the parent class. |
5318 | // This method may or may not create pParentMethodHash (if it does not already exist). |
5319 | // It also may or may not fill in pMemberSignature/cMemberSignature. |
            // An error is only returned when we cannot create the hash.
5321 | // NOTE: This operation touches metadata |
5322 | pParentMethod = LoaderFindMethodInParentClass( |
5323 | it->GetMethodSignature(), bmtProp->fNoSanityChecks ? NULL : &fMethodConstraintsMatch); |
5324 | |
5325 | if (pParentMethod != NULL) |
5326 | { // Found an override candidate |
5327 | DWORD dwParentAttrs = pParentMethod->GetDeclAttrs(); |
5328 | |
5329 | if (!IsMdVirtual(dwParentAttrs)) |
                { // Can't override a non-virtual method
5331 | BuildMethodTableThrowException(BFA_NONVIRT_NO_SEARCH, it.Token()); |
5332 | } |
5333 | |
5334 | if(IsMdFinal(dwParentAttrs)) |
                { // Can't override a final method
5336 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_FINAL_DECL, it.Token()); |
5337 | } |
5338 | |
5339 | if(!bmtProp->fNoSanityChecks) |
5340 | { |
5341 | TestOverRide(bmtMethodHandle(pParentMethod), |
5342 | bmtMethodHandle(*it)); |
5343 | |
5344 | if (!fMethodConstraintsMatch) |
5345 | { |
5346 | BuildMethodTableThrowException( |
5347 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_IMPLICIT_OVERRIDE, |
5348 | it.Token()); |
5349 | } |
5350 | } |
5351 | } |
5352 | } |
5353 | |
5354 | // vtable method |
5355 | if (IsInterface()) |
5356 | { |
5357 | CONSISTENCY_CHECK(pParentMethod == NULL); |
5358 | // Also sets new slot number on bmtRTMethod and MethodDesc |
5359 | if (!bmtVT->AddVirtualMethod(*it)) |
5360 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5361 | } |
5362 | else if (pParentMethod != NULL) |
5363 | { |
5364 | bmtVT->SetVirtualMethodOverride(pParentMethod->GetSlotIndex(), *it); |
5365 | } |
5366 | else |
5367 | { |
5368 | if (!bmtVT->AddVirtualMethod(*it)) |
5369 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
5370 | } |
5371 | } |
5372 | } |
5373 | |
5374 | // Given an interface map entry, and a name+signature, compute the method on the interface |
5375 | // that the name+signature corresponds to. Used by ProcessMethodImpls and ProcessInexactMethodImpls |
5376 | // Always returns the first match that it finds. Affects the ambiguities in code:#ProcessInexactMethodImpls_Ambiguities |
5377 | MethodTableBuilder::bmtMethodHandle |
5378 | MethodTableBuilder::FindDeclMethodOnInterfaceEntry(bmtInterfaceEntry *pItfEntry, MethodSignature &declSig) |
5379 | { |
5380 | STANDARD_VM_CONTRACT; |
5381 | |
5382 | bmtMethodHandle declMethod; |
5383 | |
5384 | bmtInterfaceEntry::InterfaceSlotIterator slotIt = |
5385 | pItfEntry->IterateInterfaceSlots(GetStackingAllocator()); |
5386 | // Check for exact match |
5387 | for (; !slotIt.AtEnd(); slotIt.Next()) |
5388 | { |
5389 | bmtRTMethod * pCurDeclMethod = slotIt->Decl().AsRTMethod(); |
5390 | |
5391 | if (declSig.ExactlyEqual(pCurDeclMethod->GetMethodSignature())) |
5392 | { |
5393 | declMethod = slotIt->Decl(); |
5394 | break; |
5395 | } |
5396 | } |
5397 | slotIt.ResetToStart(); |
5398 | |
5399 | // Check for equivalent match if exact match wasn't found |
5400 | if (declMethod.IsNull()) |
5401 | { |
5402 | for (; !slotIt.AtEnd(); slotIt.Next()) |
5403 | { |
5404 | bmtRTMethod * pCurDeclMethod = slotIt->Decl().AsRTMethod(); |
5405 | |
5406 | // Type Equivalence is forbidden in MethodImpl MemberRefs |
5407 | if (declSig.Equivalent(pCurDeclMethod->GetMethodSignature())) |
5408 | { |
5409 | declMethod = slotIt->Decl(); |
5410 | break; |
5411 | } |
5412 | } |
5413 | } |
5414 | |
5415 | return declMethod; |
5416 | } |
5417 | |
5418 | //******************************************************************************* |
5419 | // |
5420 | // Used by BuildMethodTable |
5421 | // Process the list of inexact method impls generated during ProcessMethodImpls. |
5422 | // This list is used to cause a methodImpl to an interface to override |
5423 | // methods on several equivalent interfaces in the interface map. This logic is necessary |
5424 | // so that in the presence of an embedded interface the behavior appears to mimic |
5425 | // the behavior if the interface was not embedded. |
5426 | // |
5427 | // In particular, the logic here is to handle cases such as |
5428 | // |
5429 | // Assembly A |
5430 | // [TypeIdentifier("x","y")] |
5431 | // interface I' |
5432 | // { void Method(); } |
5433 | // interface IOther : I' {} |
5434 | // |
5435 | // Assembly B |
5436 | // [TypeIdentifier("x","y")] |
5437 | // interface I |
5438 | // { void Method(); } |
5439 | // class Test : I, IOther |
5440 | // { |
5441 | // void I.Method() |
5442 | // {} |
5443 | // } |
5444 | // |
5445 | // In this case, there is one method, and one methodimpl, but there are 2 interfaces on the class that both |
5446 | // require an implementation of their method. The correct semantic for type equivalence, is that any |
5447 | // methodimpl directly targeting a method on an interface must be respected, and if it also applies to a type |
5448 | // equivalent interface method, then if that method was not methodimpl'd directly, then the methodimpl should apply |
5449 | // there as well. The ProcessInexactMethodImpls function does this secondary MethodImpl mapping. |
5450 | // |
5451 | //#ProcessInexactMethodImpls_Ambiguities |
5452 | // In the presence of ambiguities, such as there are 3 equivalent interfaces implemented on a class and 2 methodimpls, |
5453 | // we will apply the 2 method impls exactly to appropriate interface methods, and arbitrarily pick one to apply to the |
5454 | // other interface. This is clearly ambiguous, but tricky to detect in the type loader efficiently, and should hopefully |
5455 | // not cause too many problems. |
5456 | // |
5457 | VOID |
5458 | MethodTableBuilder::ProcessInexactMethodImpls() |
5459 | { |
5460 | STANDARD_VM_CONTRACT; |
5461 | |
5462 | if (bmtMethod->dwNumberInexactMethodImplCandidates == 0) |
5463 | return; |
5464 | |
5465 | DeclaredMethodIterator it(*this); |
5466 | while (it.Next()) |
5467 | { |
5468 | // Non-virtual methods cannot be classified as methodImpl - we should have thrown an |
5469 | // error before reaching this point. |
5470 | CONSISTENCY_CHECK(!(!IsMdVirtual(it.Attrs()) && it.IsMethodImpl())); |
5471 | |
5472 | if (!IsMdVirtual(it.Attrs())) |
5473 | { // Only virtual methods can participate in methodImpls |
5474 | continue; |
5475 | } |
5476 | |
5477 | if(!it.IsMethodImpl()) |
5478 | { |
5479 | // Skip methods which are not the bodies of MethodImpl specifications |
5480 | continue; |
5481 | } |
5482 | |
5483 | // If this method serves as the BODY of a MethodImpl specification, then |
5484 | // we should iterate all the MethodImpl's for this class and see just how many |
5485 | // of them this method participates in as the BODY. |
5486 | for(DWORD m = 0; m < bmtMethod->dwNumberMethodImpls; m++) |
5487 | { |
5488 | // Inexact matching logic only works on MethodImpls that have been opted into inexactness by ProcessMethodImpls. |
5489 | if (!bmtMetaData->rgMethodImplTokens[m].fConsiderDuringInexactMethodImplProcessing) |
5490 | { |
5491 | continue; |
5492 | } |
5493 | |
5494 | // If the methodimpl we are working with does not match this method, continue to next methodimpl |
5495 | if(it.Token() != bmtMetaData->rgMethodImplTokens[m].methodBody) |
5496 | { |
5497 | continue; |
5498 | } |
5499 | |
5500 | bool fMatchFound = false; |
5501 | |
5502 | LPCUTF8 szName = NULL; |
5503 | PCCOR_SIGNATURE pSig = NULL; |
5504 | ULONG cbSig; |
5505 | |
5506 | mdToken mdDecl = bmtMetaData->rgMethodImplTokens[m].methodDecl; |
5507 | |
5508 | if (TypeFromToken(mdDecl) == mdtMethodDef) |
            { // Different methods are used to access MethodDef and MemberRef
              // names and signatures.
5511 | if (FAILED(GetMDImport()->GetNameOfMethodDef(mdDecl, &szName)) || |
5512 | FAILED(GetMDImport()->GetSigOfMethodDef(mdDecl, &cbSig, &pSig))) |
5513 | { |
5514 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
5515 | } |
5516 | } |
5517 | else |
5518 | { |
5519 | if (FAILED(GetMDImport()->GetNameAndSigOfMemberRef(mdDecl, &pSig, &cbSig, &szName))) |
5520 | { |
5521 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
5522 | } |
5523 | } |
5524 | |
5525 | Substitution *pDeclSubst = &bmtMetaData->pMethodDeclSubsts[m]; |
5526 | MethodSignature declSig(GetModule(), szName, pSig, cbSig, pDeclSubst); |
5527 | bmtInterfaceEntry * pItfEntry = NULL; |
5528 | |
5529 | for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
5530 | { |
5531 | if (bmtInterface->pInterfaceMap[i].GetInterfaceEquivalenceSet() != bmtMetaData->rgMethodImplTokens[m].interfaceEquivalenceSet) |
5532 | continue; |
5533 | |
5534 | bmtMethodHandle declMethod; |
5535 | pItfEntry = &bmtInterface->pInterfaceMap[i]; |
5536 | |
5537 | // Search for declmethod on this interface |
5538 | declMethod = FindDeclMethodOnInterfaceEntry(pItfEntry, declSig); |
5539 | |
5540 | // If we didn't find a match, continue on to next interface in the equivalence set |
5541 | if (declMethod.IsNull()) |
5542 | continue; |
5543 | |
5544 | if (!IsMdVirtual(declMethod.GetDeclAttrs())) |
5545 | { // Make sure the decl is virtual |
5546 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MUSTBEVIRTUAL, it.Token()); |
5547 | } |
5548 | |
5549 | fMatchFound = true; |
5550 | |
5551 | bool fPreexistingImplFound = false; |
5552 | |
5553 | // Check to ensure there isn't already a matching declMethod in the method impl list |
5554 | for (DWORD iMethodImpl = 0; iMethodImpl < bmtMethodImpl->pIndex; iMethodImpl++) |
5555 | { |
5556 | if (bmtMethodImpl->GetDeclarationMethod(iMethodImpl) == declMethod) |
5557 | { |
5558 | fPreexistingImplFound = true; |
5559 | break; |
5560 | } |
5561 | } |
5562 | |
5563 | // Search for other matches |
5564 | if (fPreexistingImplFound) |
5565 | continue; |
5566 | |
                // Otherwise, record the newly discovered method impl match.
5568 | bmtMethodImpl->AddMethodImpl(*it, declMethod, bmtMetaData->rgMethodImplTokens[m].methodDecl, GetStackingAllocator()); |
5569 | } |
5570 | |
5571 | if (!fMatchFound && bmtMetaData->rgMethodImplTokens[m].fThrowIfUnmatchedDuringInexactMethodImplProcessing) |
5572 | { |
5573 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_DECLARATIONNOTFOUND, it.Token()); |
5574 | } |
5575 | } |
5576 | } |
5577 | } |
5578 | |
5579 | //******************************************************************************* |
5580 | // |
5581 | // Used by BuildMethodTable |
5582 | // |
5583 | VOID |
5584 | MethodTableBuilder::ProcessMethodImpls() |
5585 | { |
5586 | STANDARD_VM_CONTRACT; |
5587 | |
5588 | if (bmtMethod->dwNumberMethodImpls == 0) |
5589 | return; |
5590 | |
5591 | HRESULT hr = S_OK; |
5592 | |
5593 | DeclaredMethodIterator it(*this); |
5594 | while (it.Next()) |
5595 | { |
5596 | // Non-virtual methods cannot be classified as methodImpl - we should have thrown an |
5597 | // error before reaching this point. |
5598 | CONSISTENCY_CHECK(!(!IsMdVirtual(it.Attrs()) && it.IsMethodImpl())); |
5599 | |
5600 | if (!IsMdVirtual(it.Attrs())) |
5601 | { // Only virtual methods can participate in methodImpls |
5602 | continue; |
5603 | } |
5604 | |
5605 | // If this method serves as the BODY of a MethodImpl specification, then |
5606 | // we should iterate all the MethodImpl's for this class and see just how many |
5607 | // of them this method participates in as the BODY. |
5608 | if(it.IsMethodImpl()) |
5609 | { |
5610 | for(DWORD m = 0; m < bmtMethod->dwNumberMethodImpls; m++) |
5611 | { |
5612 | if(it.Token() == bmtMetaData->rgMethodImplTokens[m].methodBody) |
5613 | { |
5614 | mdToken mdDecl = bmtMetaData->rgMethodImplTokens[m].methodDecl; |
5615 | bmtMethodHandle declMethod; |
5616 | |
5617 | // Get the parent token for the decl method token |
5618 | mdToken tkParent = mdTypeDefNil; |
5619 | if (TypeFromToken(mdDecl) == mdtMethodDef || TypeFromToken(mdDecl) == mdtMemberRef) |
5620 | { |
5621 | if (FAILED(hr = GetMDImport()->GetParentToken(mdDecl,&tkParent))) |
5622 | { |
5623 | BuildMethodTableThrowException(hr, *bmtError); |
5624 | } |
5625 | } |
5626 | |
5627 | if (GetCl() == tkParent) |
5628 | { // The DECL has been declared within the class that we're currently building. |
5629 | hr = S_OK; |
5630 | |
5631 | if(bmtError->pThrowable != NULL) |
5632 | { |
5633 | *(bmtError->pThrowable) = NULL; |
5634 | } |
5635 | |
5636 | if(TypeFromToken(mdDecl) != mdtMethodDef) |
5637 | { |
5638 | if (FAILED(hr = FindMethodDeclarationForMethodImpl( |
5639 | mdDecl, &mdDecl, TRUE))) |
5640 | { |
5641 | BuildMethodTableThrowException(hr, *bmtError); |
5642 | } |
5643 | } |
5644 | |
5645 | CONSISTENCY_CHECK(TypeFromToken(mdDecl) == mdtMethodDef); |
5646 | declMethod = bmtMethod->FindDeclaredMethodByToken(mdDecl); |
5647 | } |
5648 | else |
5649 | { // We can't call GetDescFromMemberDefOrRef here because this |
5650 | // method depends on a fully-loaded type, including parent types, |
5651 | // which is not always guaranteed. In particular, it requires that |
5652 | // the instantiation dictionary be filled. The solution is the following: |
5653 | // 1. Load the approximate type that the method belongs to. |
5654 | // 2. Get or create the correct substitution for the type involved |
5655 | // 3. Iterate the introduced methods on that type looking for a matching |
5656 | // method. |
5657 | |
5658 | LPCUTF8 szName = NULL; |
5659 | PCCOR_SIGNATURE pSig = NULL; |
5660 | ULONG cbSig; |
5661 | if (TypeFromToken(mdDecl) == mdtMethodDef) |
                    { // Different methods are used to access MethodDef and MemberRef
                      // names and signatures.
5664 | if (FAILED(GetMDImport()->GetNameOfMethodDef(mdDecl, &szName)) || |
5665 | FAILED(GetMDImport()->GetSigOfMethodDef(mdDecl, &cbSig, &pSig))) |
5666 | { |
5667 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
5668 | } |
5669 | } |
5670 | else |
5671 | { |
5672 | if (FAILED(GetMDImport()->GetNameAndSigOfMemberRef(mdDecl, &pSig, &cbSig, &szName))) |
5673 | { |
5674 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
5675 | } |
5676 | } |
5677 | |
5678 | Substitution *pDeclSubst = &bmtMetaData->pMethodDeclSubsts[m]; |
5679 | MethodTable * pDeclMT = NULL; |
5680 | MethodSignature declSig(GetModule(), szName, pSig, cbSig, pDeclSubst); |
5681 | |
5682 | { // 1. Load the approximate type. |
5683 | // Block for the LoadsTypeViolation. |
5684 | CONTRACT_VIOLATION(LoadsTypeViolation); |
5685 | pDeclMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing( |
5686 | GetModule(), |
5687 | tkParent, |
5688 | &bmtGenerics->typeContext, |
5689 | ClassLoader::ThrowIfNotFound, |
5690 | ClassLoader::PermitUninstDefOrRef, |
5691 | ClassLoader::LoadTypes, |
5692 | CLASS_LOAD_APPROXPARENTS, |
5693 | TRUE).GetMethodTable()->GetCanonicalMethodTable(); |
5694 | } |
5695 | |
5696 | { // 2. Get or create the correct substitution |
5697 | bmtRTType * pDeclType = NULL; |
5698 | |
5699 | if (pDeclMT->IsInterface()) |
5700 | { // If the declaration method is a part of an interface, search through |
5701 | // the interface map to find the matching interface so we can provide |
5702 | // the correct substitution chain. |
5703 | pDeclType = NULL; |
5704 | |
5705 | bmtInterfaceEntry * pItfEntry = NULL; |
5706 | for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
5707 | { |
5708 | bmtRTType * pCurItf = bmtInterface->pInterfaceMap[i].GetInterfaceType(); |
                            // Type Equivalence is not respected for this comparison as you can have multiple type equivalent interfaces on a class
5710 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
5711 | if (MetaSig::CompareTypeDefsUnderSubstitutions( |
5712 | pCurItf->GetMethodTable(), pDeclMT, |
5713 | &pCurItf->GetSubstitution(), pDeclSubst, |
5714 | &newVisited)) |
5715 | { |
5716 | pItfEntry = &bmtInterface->pInterfaceMap[i]; |
5717 | pDeclType = pCurItf; |
5718 | break; |
5719 | } |
5720 | } |
5721 | |
5722 | if (IsInterface()) |
5723 | { |
5724 | if (pDeclType == NULL) |
5725 | { |
5726 | // Interface is not implemented by this type. |
5727 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_NOTIMPLEMENTED, it.Token()); |
5728 | } |
5729 | } |
5730 | else |
5731 | { |
5732 | if (pDeclType == NULL) |
5733 | { |
5734 | DWORD equivalenceSet = 0; |
5735 | |
5736 | for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
5737 | { |
5738 | bmtRTType * pCurItf = bmtInterface->pInterfaceMap[i].GetInterfaceType(); |
                                // Type Equivalence is respected for this comparison as we just need to find an
                                // equivalent interface; the particular interface is unimportant.
5741 | if (MetaSig::CompareTypeDefsUnderSubstitutions( |
5742 | pCurItf->GetMethodTable(), pDeclMT, |
5743 | &pCurItf->GetSubstitution(), pDeclSubst, |
5744 | NULL)) |
5745 | { |
5746 | equivalenceSet = bmtInterface->pInterfaceMap[i].GetInterfaceEquivalenceSet(); |
5747 | pItfEntry = &bmtInterface->pInterfaceMap[i]; |
5748 | break; |
5749 | } |
5750 | } |
5751 | |
5752 | if (equivalenceSet == 0) |
5753 | { |
5754 | // Interface is not implemented by this type. |
5755 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_NOTIMPLEMENTED, it.Token()); |
5756 | } |
5757 | |
                            // The interface is not implemented by this type exactly. We need to consider this MethodImpl
                            // for non-exact interface matches, as the only match may be one of the non-exact matches.
5760 | bmtMetaData->rgMethodImplTokens[m].fConsiderDuringInexactMethodImplProcessing = true; |
5761 | bmtMetaData->rgMethodImplTokens[m].fThrowIfUnmatchedDuringInexactMethodImplProcessing = true; |
5762 | bmtMetaData->rgMethodImplTokens[m].interfaceEquivalenceSet = equivalenceSet; |
5763 | bmtMethod->dwNumberInexactMethodImplCandidates++; |
5764 | continue; // Move on to other MethodImpls |
5765 | } |
5766 | else |
5767 | { |
5768 | // This method impl may need to match other methods during inexact processing |
5769 | if (pItfEntry->InEquivalenceSetWithMultipleEntries()) |
5770 | { |
5771 | bmtMetaData->rgMethodImplTokens[m].fConsiderDuringInexactMethodImplProcessing = true; |
5772 | bmtMetaData->rgMethodImplTokens[m].fThrowIfUnmatchedDuringInexactMethodImplProcessing = false; |
5773 | bmtMetaData->rgMethodImplTokens[m].interfaceEquivalenceSet = pItfEntry->GetInterfaceEquivalenceSet(); |
5774 | bmtMethod->dwNumberInexactMethodImplCandidates++; |
5775 | } |
5776 | } |
5777 | } |
5778 | |
5779 | // 3. Find the matching method. |
5780 | declMethod = FindDeclMethodOnInterfaceEntry(pItfEntry, declSig); |
5781 | } |
5782 | else |
5783 | { // Assume the MethodTable is a parent of the current type, |
5784 | // and create the substitution chain to match it. |
5785 | |
5786 | pDeclType = NULL; |
5787 | |
5788 | for (bmtRTType *pCur = GetParentType(); |
5789 | pCur != NULL; |
5790 | pCur = pCur->GetParentType()) |
5791 | { |
5792 | if (pCur->GetMethodTable() == pDeclMT) |
5793 | { |
5794 | pDeclType = pCur; |
5795 | break; |
5796 | } |
5797 | } |
5798 | |
5799 | if (pDeclType == NULL) |
5800 | { // Method's type is not a parent. |
5801 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_DECLARATIONNOTFOUND, it.Token()); |
5802 | } |
5803 | |
5804 | // 3. Find the matching method. |
5805 | bmtRTType *pCurDeclType = pDeclType; |
5806 | do |
5807 | { |
                    // Two-pass algorithm: search for exact matches first,
                    // followed by equivalent matches.
5810 | for (int iPass = 0; (iPass < 2) && (declMethod.IsNull()); iPass++) |
5811 | { |
5812 | MethodTable *pCurDeclMT = pCurDeclType->GetMethodTable(); |
5813 | |
5814 | MethodTable::IntroducedMethodIterator methIt(pCurDeclMT); |
5815 | for(; methIt.IsValid(); methIt.Next()) |
5816 | { |
5817 | MethodDesc * pCurMD = methIt.GetMethodDesc(); |
5818 | |
5819 | if (pCurDeclMT != pDeclMT) |
5820 | { |
5821 | // If the method isn't on the declaring type, then it must be virtual. |
5822 | if (!pCurMD->IsVirtual()) |
5823 | continue; |
5824 | } |
5825 | if (strcmp(szName, pCurMD->GetName()) == 0) |
5826 | { |
5827 | PCCOR_SIGNATURE pCurMDSig; |
5828 | DWORD cbCurMDSig; |
5829 | pCurMD->GetSig(&pCurMDSig, &cbCurMDSig); |
5830 | |
                                // The first pass of the search for declaration methods should not use type equivalence
5832 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
5833 | |
5834 | if (MetaSig::CompareMethodSigs( |
5835 | declSig.GetSignature(), |
5836 | static_cast<DWORD>(declSig.GetSignatureLength()), |
5837 | declSig.GetModule(), |
5838 | &declSig.GetSubstitution(), |
5839 | pCurMDSig, |
5840 | cbCurMDSig, |
5841 | pCurMD->GetModule(), |
5842 | &pCurDeclType->GetSubstitution(), |
5843 | iPass == 0 ? &newVisited : NULL)) |
5844 | { |
5845 | declMethod = (*bmtParent->pSlotTable)[pCurMD->GetSlot()].Decl(); |
5846 | break; |
5847 | } |
5848 | } |
5849 | } |
5850 | } |
5851 | |
5852 | pCurDeclType = pCurDeclType->GetParentType(); |
5853 | } while ((pCurDeclType != NULL) && (declMethod.IsNull())); |
5854 | } |
5855 | |
5856 | if (declMethod.IsNull()) |
                { // We would prefer to let this fall through to the BuildMethodTableThrowException
                  // below, but v2.0 and earlier threw a MissingMethodException here, primarily
                  // because this code used to be a simple call to
                  // MemberLoader::GetDescFromMemberDefOrRef (see above for the reason why),
                  // so we must continue to do the same.
5862 | MemberLoader::ThrowMissingMethodException( |
5863 | pDeclMT, |
5864 | declSig.GetName(), |
5865 | declSig.GetModule(), |
5866 | declSig.GetSignature(), |
5867 | static_cast<DWORD>(declSig.GetSignatureLength()), |
5868 | &bmtGenerics->typeContext); |
5869 | } |
5870 | } |
5871 | } |
5872 | |
5873 | if (declMethod.IsNull()) |
5874 | { // Method not found, throw. |
5875 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_DECLARATIONNOTFOUND, it.Token()); |
5876 | } |
5877 | |
5878 | if (!IsMdVirtual(declMethod.GetDeclAttrs())) |
5879 | { // Make sure the decl is virtual |
5880 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MUSTBEVIRTUAL, it.Token()); |
5881 | } |
5882 | |
5883 | bmtMethodImpl->AddMethodImpl(*it, declMethod, mdDecl, GetStackingAllocator()); |
5884 | } |
5885 | } |
5886 | } |
5887 | } /* end ... for each member */ |
5888 | } |
5889 | |
5890 | //******************************************************************************* |
5891 | // InitMethodDesc takes a pointer to space that's already allocated for the |
5892 | // particular type of MethodDesc, and initializes based on the other info. |
5893 | // This factors logic between PlaceMembers (the regular code path) & AddMethod |
5894 | // (Edit & Continue (EnC) code path) so we don't have to maintain separate copies. |
5895 | VOID |
5896 | MethodTableBuilder::InitMethodDesc( |
    MethodDesc * pNewMD, // This should actually be of the correct sub-type, based on Classification
5898 | DWORD Classification, |
5899 | mdToken tok, |
5900 | DWORD dwImplFlags, |
5901 | DWORD dwMemberAttrs, |
5902 | BOOL fEnC, |
5903 | DWORD RVA, // Only needed for NDirect case |
5904 | IMDInternalImport * pIMDII, // Needed for NDirect, EEImpl(Delegate) cases |
5905 | LPCSTR pMethodName // Only needed for mcEEImpl (Delegate) case |
5906 | COMMA_INDEBUG(LPCUTF8 pszDebugMethodName) |
5907 | COMMA_INDEBUG(LPCUTF8 pszDebugClassName) |
5908 | COMMA_INDEBUG(LPCUTF8 pszDebugMethodSignature) |
5909 | ) |
5910 | { |
5911 | CONTRACTL |
5912 | { |
5913 | THROWS; |
5914 | if (fEnC) { GC_NOTRIGGER; } else { GC_TRIGGERS; } |
5915 | MODE_ANY; |
5916 | } |
5917 | CONTRACTL_END; |
5918 | |
5919 | LOG((LF_CORDB, LL_EVERYTHING, "EEC::IMD: pNewMD:0x%x for tok:0x%x (%s::%s)\n" , |
5920 | pNewMD, tok, pszDebugClassName, pszDebugMethodName)); |
5921 | |
5922 | // Now we know the classification we can perform any classification specific initialization. |
5923 | |
5924 | // The method desc is zero inited by the caller. |
5925 | |
5926 | switch (Classification) |
5927 | { |
5928 | case mcNDirect: |
5929 | { |
5930 | // NDirect specific initialization. |
5931 | NDirectMethodDesc *pNewNMD = (NDirectMethodDesc*)pNewMD; |
5932 | |
5933 | // Allocate writeable data |
5934 | pNewNMD->ndirect.m_pWriteableData.SetValue((NDirectWriteableData*) |
5935 | AllocateFromHighFrequencyHeap(S_SIZE_T(sizeof(NDirectWriteableData)))); |
5936 | |
5937 | #ifdef HAS_NDIRECT_IMPORT_PRECODE |
5938 | pNewNMD->ndirect.m_pImportThunkGlue.SetValue(Precode::Allocate(PRECODE_NDIRECT_IMPORT, pNewMD, |
5939 | GetLoaderAllocator(), GetMemTracker())->AsNDirectImportPrecode()); |
5940 | #else // !HAS_NDIRECT_IMPORT_PRECODE |
5941 | pNewNMD->GetNDirectImportThunkGlue()->Init(pNewNMD); |
5942 | #endif // !HAS_NDIRECT_IMPORT_PRECODE |
5943 | |
5944 | #if defined(_TARGET_X86_) |
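            // Note: 0xFFFF appears to serve as a "not yet computed" sentinel here; the actual
            // x86 stack argument size for the P/Invoke call is filled in later.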
5945 | pNewNMD->ndirect.m_cbStackArgumentSize = 0xFFFF; |
5946 | #endif // defined(_TARGET_X86_) |
5947 | |
5948 | // If the RVA of a native method is set, this is an early-bound IJW call |
5949 | if (RVA != 0 && IsMiUnmanaged(dwImplFlags) && IsMiNative(dwImplFlags)) |
5950 | { |
5951 | // Note that we cannot initialize the stub directly now in the general case, |
5952 | // as LoadLibrary may not have been performed yet. |
5953 | pNewNMD->SetIsEarlyBound(); |
5954 | } |
5955 | |
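            // Start the NDirect target out pointing at the import thunk glue; the thunk resolves
            // the actual native entrypoint lazily (it cannot be bound eagerly in the general case,
            // per the note above about LoadLibrary not yet having run).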
5956 | pNewNMD->GetWriteableData()->m_pNDirectTarget = pNewNMD->GetNDirectImportThunkGlue()->GetEntrypoint(); |
5957 | } |
5958 | break; |
5959 | |
5960 | case mcFCall: |
5961 | break; |
5962 | |
5963 | case mcEEImpl: |
5964 | // For the Invoke method we will set a standard invoke method. |
5965 | BAD_FORMAT_NOTHROW_ASSERT(IsDelegate()); |
5966 | |
5967 | // For the asserts, either the pointer is NULL (since the class hasn't |
5968 | // been constructed yet), or we're in EnC mode, meaning that the class |
5969 | // does exist, but we may be re-assigning the field to point to an |
5970 | // updated MethodDesc |
5971 | |
5972 | // It is not allowed for EnC to replace one of the runtime builtin methods |
5973 | |
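        // Illustrative sketch (hypothetical delegate, not from this codebase): for a C# declaration
        //     delegate int MyDelegate(string s);
        // the compiler emits runtime-implemented Invoke/BeginInvoke/EndInvoke methods, and each of
        // them gets recorded on the DelegateEEClass fields below.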
5974 | if (strcmp(pMethodName, "Invoke" ) == 0) |
5975 | { |
5976 | BAD_FORMAT_NOTHROW_ASSERT(((DelegateEEClass*)GetHalfBakedClass())->m_pInvokeMethod.IsNull()); |
5977 | ((DelegateEEClass*)GetHalfBakedClass())->m_pInvokeMethod.SetValue(pNewMD); |
5978 | } |
5979 | else if (strcmp(pMethodName, "BeginInvoke" ) == 0) |
5980 | { |
5981 | BAD_FORMAT_NOTHROW_ASSERT(((DelegateEEClass*)GetHalfBakedClass())->m_pBeginInvokeMethod.IsNull()); |
5982 | ((DelegateEEClass*)GetHalfBakedClass())->m_pBeginInvokeMethod.SetValue(pNewMD); |
5983 | } |
5984 | else if (strcmp(pMethodName, "EndInvoke" ) == 0) |
5985 | { |
5986 | BAD_FORMAT_NOTHROW_ASSERT(((DelegateEEClass*)GetHalfBakedClass())->m_pEndInvokeMethod.IsNull()); |
5987 | ((DelegateEEClass*)GetHalfBakedClass())->m_pEndInvokeMethod.SetValue(pNewMD); |
5988 | } |
5989 | else |
5990 | { |
5991 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
5992 | } |
5993 | |
5994 |         // StoredSig specific initialization |
5995 | { |
5996 |             StoredSigMethodDesc *pNewSMD = (StoredSigMethodDesc*) pNewMD; |
5997 | DWORD cSig; |
5998 | PCCOR_SIGNATURE pSig; |
5999 | if (FAILED(pIMDII->GetSigOfMethodDef(tok, &cSig, &pSig))) |
6000 | { |
6001 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
6002 | } |
6003 | pNewSMD->SetStoredMethodSig(pSig, cSig); |
6004 | } |
6005 | break; |
6006 | |
6007 | #ifdef FEATURE_COMINTEROP |
6008 | case mcComInterop: |
6009 | #endif // FEATURE_COMINTEROP |
6010 | case mcIL: |
6011 | break; |
6012 | |
6013 | case mcInstantiated: |
6014 | #ifdef EnC_SUPPORTED |
6015 | if (fEnC) |
6016 | { |
6017 | // We reuse the instantiated methoddescs to get the slot |
6018 | InstantiatedMethodDesc* pNewIMD = (InstantiatedMethodDesc*) pNewMD; |
6019 | pNewIMD->SetupEnCAddedMethod(); |
6020 | } |
6021 | else |
6022 | #endif // EnC_SUPPORTED |
6023 | { |
6024 | // Initialize the typical instantiation. |
6025 | InstantiatedMethodDesc* pNewIMD = (InstantiatedMethodDesc*) pNewMD; |
6026 |             // Data has the same lifetime as the method table, so use our allocator |
6027 | pNewIMD->SetupGenericMethodDefinition(pIMDII, GetLoaderAllocator(), GetMemTracker(), GetModule(), |
6028 | tok); |
6029 | } |
6030 | break; |
6031 | |
6032 | default: |
6033 | BAD_FORMAT_NOTHROW_ASSERT(!"Failed to set a method desc classification" ); |
6034 | } |
6035 | |
6036 | // Check the method desc's classification. |
6037 | _ASSERTE(pNewMD->GetClassification() == Classification); |
6038 | |
6039 | pNewMD->SetMemberDef(tok); |
6040 | |
6041 | if (IsMdStatic(dwMemberAttrs)) |
6042 | pNewMD->SetStatic(); |
6043 | |
6044 | // Set suppress unmanaged code access permission attribute |
6045 | |
6046 | if (pNewMD->IsNDirect()) |
6047 | pNewMD->ComputeSuppressUnmanagedCodeAccessAttr(pIMDII); |
6048 | |
6049 | #ifdef _DEBUG |
6050 | // Mark as many methods as synchronized as possible. |
6051 | // |
6052 | // Note that this can easily cause programs to deadlock, and that |
6053 | // should not be treated as a bug in the program. |
6054 | |
6055 | static ConfigDWORD stressSynchronized; |
6056 | DWORD stressSynchronizedVal = stressSynchronized.val(CLRConfig::INTERNAL_stressSynchronized); |
6057 | |
6058 | bool isStressSynchronized = stressSynchronizedVal && |
6059 |         pNewMD->IsIL() && // Synchronized is not supported on ECalls, NDirect methods, etc. |
6060 | // IsValueClass() and IsEnum() do not work for System.ValueType and System.Enum themselves |
6061 | ((g_pValueTypeClass != NULL && g_pEnumClass != NULL && |
6062 | !IsValueClass()) || // Can not synchronize on byref "this" |
6063 | IsMdStatic(dwMemberAttrs)) && // IsStatic() blows up in _DEBUG as pNewMD is not fully inited |
6064 | g_pObjectClass != NULL; // Ignore Object:* since "this" could be a boxed object |
6065 | |
6066 | // stressSynchronized=1 turns off the stress in the system domain to reduce |
6067 | // the chances of spurious deadlocks. Deadlocks in user code can still occur. |
6068 | // stressSynchronized=2 will probably cause more deadlocks, and is not recommended |
6069 | if (stressSynchronizedVal == 1 && GetAssembly()->IsSystem()) |
6070 | isStressSynchronized = false; |
6071 | |
6072 | if (IsMiSynchronized(dwImplFlags) || isStressSynchronized) |
6073 | #else // !_DEBUG |
6074 | if (IsMiSynchronized(dwImplFlags)) |
6075 | #endif // !_DEBUG |
6076 | pNewMD->SetSynchronized(); |
6077 | |
6078 | #ifdef _DEBUG |
6079 | pNewMD->m_pszDebugMethodName = (LPUTF8)pszDebugMethodName; |
6080 | pNewMD->m_pszDebugClassName = (LPUTF8)pszDebugClassName; |
6081 | pNewMD->m_pDebugMethodTable.SetValue(GetHalfBakedMethodTable()); |
6082 | |
6083 | if (pszDebugMethodSignature == NULL) |
6084 | pNewMD->m_pszDebugMethodSignature = FormatSig(pNewMD,pNewMD->GetLoaderAllocator()->GetLowFrequencyHeap(),GetMemTracker()); |
6085 | else |
6086 | pNewMD->m_pszDebugMethodSignature = pszDebugMethodSignature; |
6087 | #endif // _DEBUG |
6088 | } // MethodTableBuilder::InitMethodDesc |
6089 | |
6090 | //******************************************************************************* |
6091 | // |
6092 | // Used by BuildMethodTable |
6093 | // |
6094 | VOID |
6095 | MethodTableBuilder::AddMethodImplDispatchMapping( |
6096 | DispatchMapTypeID typeID, |
6097 | SLOT_INDEX slotNumber, |
6098 | bmtMDMethod * pImplMethod) |
6099 | { |
6100 | STANDARD_VM_CONTRACT; |
6101 | |
6102 | MethodDesc * pMDImpl = pImplMethod->GetMethodDesc(); |
6103 | |
6104 | // Look for an existing entry in the map. |
6105 | DispatchMapBuilder::Iterator it(bmtVT->pDispatchMapBuilder); |
6106 | if (bmtVT->pDispatchMapBuilder->Find(typeID, slotNumber, it)) |
6107 | { |
6108 | // Throw if this entry has already previously been MethodImpl'd. |
6109 | if (it.IsMethodImpl()) |
6110 | { |
6111 | // NOTE: This is where we check for duplicate overrides. This is the easiest place to check |
6112 | // because duplicate overrides could in fact have separate MemberRefs to the same |
6113 | // member and so just comparing tokens at the very start would not be enough. |
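            //       (For example, two MethodImpl entries whose decls are distinct MemberRefs resolving
            //       to the same method, but whose bodies differ, must be rejected here.)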
6114 | if (it.GetTargetMD() != pMDImpl) |
6115 | { |
6116 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MULTIPLEOVERRIDES, pMDImpl->GetMemberDef()); |
6117 | } |
6118 | } |
6119 | // This is the first MethodImpl. That's ok. |
6120 | else |
6121 | { |
6122 | it.SetTarget(pMDImpl); |
6123 | it.SetIsMethodImpl(); |
6124 | } |
6125 | } |
6126 | // A mapping for this interface method does not exist, so insert it. |
6127 | else |
6128 | { |
6129 | bmtVT->pDispatchMapBuilder->InsertMDMapping( |
6130 | typeID, |
6131 | slotNumber, |
6132 | pMDImpl, |
6133 | TRUE); |
6134 | } |
6135 | |
6136 | // Save the entry into the vtable as well, if it isn't an interface methodImpl |
6137 | if (typeID == DispatchMapTypeID::ThisClassID()) |
6138 | { |
6139 | bmtVT->SetVirtualMethodImpl(slotNumber, pImplMethod); |
6140 | } |
6141 | } // MethodTableBuilder::AddMethodImplDispatchMapping |
6142 | |
6143 | //******************************************************************************* |
6144 | VOID |
6145 | MethodTableBuilder::MethodImplCompareSignatures( |
6146 | bmtMethodHandle hDecl, |
6147 | bmtMethodHandle hImpl, |
6148 | DWORD dwConstraintErrorCode) |
6149 | { |
6150 | CONTRACTL { |
6151 | STANDARD_VM_CHECK; |
6152 | PRECONDITION(!hDecl.IsNull()); |
6153 | PRECONDITION(!hImpl.IsNull()); |
6154 | PRECONDITION(TypeFromToken(hDecl.GetMethodSignature().GetToken()) == mdtMethodDef); |
6155 | PRECONDITION(TypeFromToken(hImpl.GetMethodSignature().GetToken()) == mdtMethodDef); |
6156 | } CONTRACTL_END; |
6157 | |
6158 | const MethodSignature &declSig(hDecl.GetMethodSignature()); |
6159 | const MethodSignature &implSig(hImpl.GetMethodSignature()); |
6160 | |
6161 | if (!MethodSignature::SignaturesEquivalent(declSig, implSig)) |
6162 | { |
6163 | LOG((LF_CLASSLOADER, LL_INFO1000, "BADSIG placing MethodImpl: %x\n" , declSig.GetToken())); |
6164 | BuildMethodTableThrowException(COR_E_TYPELOAD, IDS_CLASSLOAD_MI_BADSIGNATURE, declSig.GetToken()); |
6165 | } |
6166 | |
6167 | //now compare the method constraints |
6168 | if (!MetaSig::CompareMethodConstraints(&implSig.GetSubstitution(), implSig.GetModule(), implSig.GetToken(), |
6169 | &declSig.GetSubstitution(), declSig.GetModule(), declSig.GetToken())) |
6170 | { |
6171 | BuildMethodTableThrowException(dwConstraintErrorCode, implSig.GetToken()); |
6172 | } |
6173 | } |
6174 | |
6175 | //******************************************************************************* |
6176 | // We should have collected all the method impls. Cycle through them creating the method impl |
6177 | // structure that holds the information about which slots are overridden. |
6178 | VOID |
6179 | MethodTableBuilder::PlaceMethodImpls() |
6180 | { |
6181 | STANDARD_VM_CONTRACT; |
6182 | |
6183 | if(bmtMethodImpl->pIndex == 0) |
6184 | { |
6185 | return; |
6186 | } |
6187 | |
6188 |     // Allocate some temporary storage. The number of overrides for a single method impl |
6189 |     // cannot be greater than the number of vtable slots for classes. But for interfaces |
6190 |     // it might contain overrides for other interface methods. |
6191 | DWORD dwMaxSlotSize = IsInterface() ? bmtMethod->dwNumberMethodImpls : bmtVT->cVirtualSlots; |
6192 | |
6193 | DWORD * slots = new (&GetThread()->m_MarshalAlloc) DWORD[dwMaxSlotSize]; |
6194 | mdToken * tokens = new (&GetThread()->m_MarshalAlloc) mdToken[dwMaxSlotSize]; |
6195 | RelativePointer<MethodDesc *> * replaced = new (&GetThread()->m_MarshalAlloc) RelativePointer<MethodDesc*>[dwMaxSlotSize]; |
6196 | |
6197 | DWORD iEntry = 0; |
6198 | bmtMDMethod * pCurImplMethod = bmtMethodImpl->GetImplementationMethod(iEntry); |
6199 | |
6200 | DWORD slotIndex = 0; |
6201 | |
6202 | // The impls are sorted according to the method descs for the body of the method impl. |
6203 |     // Loop through the impls until the next body is found. Once a single body |
6204 |     // has been processed, move the slots implemented and method descs replaced into the |
6205 |     // storage found on the body method desc. |
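    // Concretely: entries for the same body are contiguous, so we accumulate slots, tokens and
    // replaced method descs until GetImplementationMethod() returns a different body, and then
    // flush the accumulated data via WriteMethodImplData.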
6206 | while (true) |
6207 | { // collect information until we reach the next body |
6208 | |
6209 | tokens[slotIndex] = bmtMethodImpl->GetDeclarationToken(iEntry); |
6210 | |
6211 | // Get the declaration part of the method impl. It will either be a token |
6212 | // (declaration is on this type) or a method desc. |
6213 | bmtMethodHandle hDeclMethod = bmtMethodImpl->GetDeclarationMethod(iEntry); |
6214 | if(hDeclMethod.IsMDMethod()) |
6215 | { |
6216 | // The declaration is on the type being built |
6217 | bmtMDMethod * pCurDeclMethod = hDeclMethod.AsMDMethod(); |
6218 | |
6219 | mdToken mdef = pCurDeclMethod->GetMethodSignature().GetToken(); |
6220 | if (bmtMethodImpl->IsBody(mdef)) |
6221 | { // A method declared on this class cannot be both a decl and an impl |
6222 | BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MULTIPLEOVERRIDES, mdef); |
6223 | } |
6224 | |
6225 | if (IsInterface()) |
6226 | { |
6227 | // Throws |
6228 | PlaceInterfaceDeclarationOnInterface( |
6229 | hDeclMethod, |
6230 | pCurImplMethod, |
6231 | slots, // Adds override to the slot and replaced arrays. |
6232 | replaced, |
6233 | &slotIndex, |
6234 | dwMaxSlotSize); // Increments count |
6235 | } |
6236 | else |
6237 | { |
6238 | // Throws |
6239 | PlaceLocalDeclarationOnClass( |
6240 | pCurDeclMethod, |
6241 | pCurImplMethod, |
6242 | slots, // Adds override to the slot and replaced arrays. |
6243 | replaced, |
6244 | &slotIndex, |
6245 | dwMaxSlotSize); // Increments count |
6246 | } |
6247 | } |
6248 | else |
6249 | { |
6250 | bmtRTMethod * pCurDeclMethod = hDeclMethod.AsRTMethod(); |
6251 | |
6252 | if (IsInterface()) |
6253 | { |
6254 | // Throws |
6255 | PlaceInterfaceDeclarationOnInterface( |
6256 | hDeclMethod, |
6257 | pCurImplMethod, |
6258 | slots, // Adds override to the slot and replaced arrays. |
6259 | replaced, |
6260 | &slotIndex, |
6261 | dwMaxSlotSize); // Increments count |
6262 | } |
6263 | else |
6264 | { |
6265 | // Do not use pDecl->IsInterface here as that asks the method table and the MT may not yet be set up. |
6266 | if (pCurDeclMethod->GetOwningType()->IsInterface()) |
6267 | { |
6268 | // Throws |
6269 | PlaceInterfaceDeclarationOnClass( |
6270 | pCurDeclMethod, |
6271 | pCurImplMethod); |
6272 | } |
6273 | else |
6274 | { |
6275 | // Throws |
6276 | PlaceParentDeclarationOnClass( |
6277 | pCurDeclMethod, |
6278 | pCurImplMethod, |
6279 | slots, |
6280 | replaced, |
6281 | &slotIndex, |
6282 | dwMaxSlotSize); // Increments count |
6283 | } |
6284 | } |
6285 | } |
6286 | |
6287 | iEntry++; |
6288 | |
6289 | if(iEntry == bmtMethodImpl->pIndex) |
6290 | { |
6291 | // We hit the end of the list so dump the current data and leave |
6292 | WriteMethodImplData(pCurImplMethod, slotIndex, slots, tokens, replaced); |
6293 | break; |
6294 | } |
6295 | else |
6296 | { |
6297 | bmtMDMethod * pNextImplMethod = bmtMethodImpl->GetImplementationMethod(iEntry); |
6298 | |
6299 | if (pNextImplMethod != pCurImplMethod) |
6300 | { |
6301 | // If we're moving on to a new body, dump the current data and reset the counter |
6302 | WriteMethodImplData(pCurImplMethod, slotIndex, slots, tokens, replaced); |
6303 | slotIndex = 0; |
6304 | } |
6305 | |
6306 | pCurImplMethod = pNextImplMethod; |
6307 | } |
6308 |     } // while (true) |
6309 | } // MethodTableBuilder::PlaceMethodImpls |
6310 | |
6311 | //******************************************************************************* |
6312 | VOID |
6313 | MethodTableBuilder::WriteMethodImplData( |
6314 | bmtMDMethod * pImplMethod, |
6315 | DWORD cSlots, |
6316 | DWORD * rgSlots, |
6317 | mdToken * rgTokens, |
6318 | RelativePointer<MethodDesc *> * rgDeclMD) |
6319 | { |
6320 | STANDARD_VM_CONTRACT; |
6321 | |
6322 |     // Use the number of overrides to |
6323 |     // push information onto the method desc. We store the slots that |
6324 |     // are overridden and the method descs that are replaced. That way, |
6325 |     // when derived classes need to determine if a method is to be |
6326 |     // overridden, they can check the name against the replaced |
6327 |     // method desc rather than the body's name. |
6328 | if (cSlots == 0) |
6329 | { |
6330 | //@TODO:NEWVTWORK: Determine methodImpl status so that we don't need this workaround. |
6331 | //@TODO:NEWVTWORK: This occurs when only interface decls are involved, since |
6332 | //@TODO:NEWVTWORK: these are stored in the dispatch map and not on the methoddesc. |
6333 | } |
6334 | else |
6335 | { |
6336 | MethodImpl * pImpl = pImplMethod->GetMethodDesc()->GetMethodImpl(); |
6337 | |
6338 | // Set the size of the info the MethodImpl needs to keep track of. |
6339 | pImpl->SetSize(GetLoaderAllocator()->GetHighFrequencyHeap(), GetMemTracker(), cSlots); |
6340 | |
6341 | if (!IsInterface()) |
6342 | { |
6343 |             // If we are currently building an interface, the slots here have no meaning and we can skip the sort. |
6344 |             // Sort the arrays in slot index order. |
6345 |             // This is required by MethodImpl::FindSlotIndex and MethodImpl::Iterator, as we'll be using |
6346 |             // binary search later. |
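            // The selection sort below keeps rgSlots, rgTokens and rgDeclMD consistent by swapping
            // the entries of all three arrays in lockstep.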
6347 | for (DWORD i = 0; i < cSlots; i++) |
6348 | { |
6349 | int min = i; |
6350 | for (DWORD j = i + 1; j < cSlots; j++) |
6351 | { |
6352 | if (rgSlots[j] < rgSlots[min]) |
6353 | { |
6354 | min = j; |
6355 | } |
6356 | } |
6357 | |
6358 | if (min != i) |
6359 | { |
6360 | MethodDesc * mTmp = rgDeclMD[i].GetValue(); |
6361 | rgDeclMD[i].SetValue(rgDeclMD[min].GetValue()); |
6362 | rgDeclMD[min].SetValue(mTmp); |
6363 | |
6364 | DWORD sTmp = rgSlots[i]; |
6365 | rgSlots[i] = rgSlots[min]; |
6366 | rgSlots[min] = sTmp; |
6367 | |
6368 | mdToken tTmp = rgTokens[i]; |
6369 | rgTokens[i] = rgTokens[min]; |
6370 | rgTokens[min] = tTmp; |
6371 | } |
6372 | } |
6373 | } |
6374 | |
6375 | // Go and set the method impl |
6376 | pImpl->SetData(rgSlots, rgTokens, rgDeclMD); |
6377 | |
6378 | GetHalfBakedClass()->SetContainsMethodImpls(); |
6379 | } |
6380 | } // MethodTableBuilder::WriteMethodImplData |
6381 | |
6382 | //******************************************************************************* |
6383 | VOID |
6384 | MethodTableBuilder::PlaceLocalDeclarationOnClass( |
6385 | bmtMDMethod * pDecl, |
6386 | bmtMDMethod * pImpl, |
6387 | DWORD * slots, |
6388 | RelativePointer<MethodDesc *> * replaced, |
6389 | DWORD * pSlotIndex, |
6390 | DWORD dwMaxSlotSize) |
6391 | { |
6392 | CONTRACTL |
6393 | { |
6394 | STANDARD_VM_CHECK; |
6395 | PRECONDITION(CheckPointer(bmtVT->pDispatchMapBuilder)); |
6396 | PRECONDITION(CheckPointer(pDecl)); |
6397 | PRECONDITION(CheckPointer(pImpl)); |
6398 | } |
6399 | CONTRACTL_END |
6400 | |
6401 | if (!bmtProp->fNoSanityChecks) |
6402 | { |
6403 | /////////////////////////////// |
6404 | // Verify the signatures match |
6405 | |
6406 | MethodImplCompareSignatures( |
6407 | pDecl, |
6408 | pImpl, |
6409 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_LOCAL_METHOD_IMPL); |
6410 | |
6411 | /////////////////////////////// |
6412 | // Validate the method impl. |
6413 | |
6414 | TestMethodImpl( |
6415 | bmtMethodHandle(pDecl), |
6416 | bmtMethodHandle(pImpl)); |
6417 | } |
6418 | |
6419 | // Don't allow overrides for any of the four special runtime implemented delegate methods |
6420 | if (IsDelegate()) |
6421 | { |
6422 | LPCUTF8 strMethodName = pDecl->GetMethodSignature().GetName(); |
6423 | if ((strcmp(strMethodName, COR_CTOR_METHOD_NAME) == 0) || |
6424 | (strcmp(strMethodName, "Invoke" ) == 0) || |
6425 | (strcmp(strMethodName, "BeginInvoke" ) == 0) || |
6426 | (strcmp(strMethodName, "EndInvoke" ) == 0)) |
6427 | { |
6428 | BuildMethodTableThrowException( |
6429 | IDS_CLASSLOAD_MI_CANNOT_OVERRIDE, |
6430 | pDecl->GetMethodSignature().GetToken()); |
6431 | } |
6432 | } |
6433 | |
6434 | /////////////////// |
6435 | // Add the mapping |
6436 | |
6437 | // Call helper to add it. Will throw if decl is already MethodImpl'd |
6438 | CONSISTENCY_CHECK(pDecl->GetSlotIndex() == static_cast<SLOT_INDEX>(pDecl->GetMethodDesc()->GetSlot())); |
6439 | AddMethodImplDispatchMapping( |
6440 | DispatchMapTypeID::ThisClassID(), |
6441 | pDecl->GetSlotIndex(), |
6442 | pImpl); |
6443 | |
6444 | // We implement this slot, record it |
6445 | ASSERT(*pSlotIndex < dwMaxSlotSize); |
6446 | slots[*pSlotIndex] = pDecl->GetSlotIndex(); |
6447 | replaced[*pSlotIndex].SetValue(pDecl->GetMethodDesc()); |
6448 | |
6449 | // increment the counter |
6450 | (*pSlotIndex)++; |
6451 | } // MethodTableBuilder::PlaceLocalDeclarationOnClass |
6452 | |
6453 | //******************************************************************************* |
6454 | VOID MethodTableBuilder::PlaceInterfaceDeclarationOnClass( |
6455 | bmtRTMethod * pDecl, |
6456 | bmtMDMethod * pImpl) |
6457 | { |
6458 | CONTRACTL { |
6459 | STANDARD_VM_CHECK; |
6460 | PRECONDITION(CheckPointer(pDecl)); |
6461 | PRECONDITION(CheckPointer(pImpl)); |
6462 | PRECONDITION(pDecl->GetMethodDesc()->IsInterface()); |
6463 | PRECONDITION(CheckPointer(bmtVT->pDispatchMapBuilder)); |
6464 | } CONTRACTL_END; |
6465 | |
6466 | MethodDesc * pDeclMD = pDecl->GetMethodDesc(); |
6467 | MethodTable * pDeclMT = pDeclMD->GetMethodTable(); |
6468 | |
6469 | // Note that the fact that pDecl is non-NULL means that we found the |
6470 | // declaration token to be owned by a declared interface for this type. |
6471 | |
6472 | if (!bmtProp->fNoSanityChecks) |
6473 | { |
6474 | /////////////////////////////// |
6475 | // Verify the signatures match |
6476 | |
6477 | MethodImplCompareSignatures( |
6478 | pDecl, |
6479 | pImpl, |
6480 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_INTERFACE_METHOD_IMPL); |
6481 | |
6482 | /////////////////////////////// |
6483 | // Validate the method impl. |
6484 | |
6485 | TestMethodImpl( |
6486 | bmtMethodHandle(pDecl), |
6487 | bmtMethodHandle(pImpl)); |
6488 | } |
6489 | |
6490 | /////////////////// |
6491 | // Add the mapping |
6492 | |
6493 | // Note that we need only one DispatchMapTypeID for this interface (though there might be more if there |
6494 | // are duplicates). The first one is easy to get, but we could (in theory) use the last one or a random |
6495 | // one. |
6496 | // Q: Why don't we have to place this method for all duplicate interfaces? Because VSD knows about |
6497 | // duplicates and finds the right (latest) implementation for us - see |
6498 | // code:MethodTable::MethodDataInterfaceImpl::PopulateNextLevel#ProcessAllDuplicates. |
6499 | UINT32 cInterfaceDuplicates; |
6500 | DispatchMapTypeID firstDispatchMapTypeID; |
6501 | ComputeDispatchMapTypeIDs( |
6502 | pDeclMT, |
6503 | &pDecl->GetMethodSignature().GetSubstitution(), |
6504 | &firstDispatchMapTypeID, |
6505 | 1, |
6506 | &cInterfaceDuplicates); |
6507 | CONSISTENCY_CHECK(cInterfaceDuplicates >= 1); |
6508 | CONSISTENCY_CHECK(firstDispatchMapTypeID.IsImplementedInterface()); |
6509 | |
6510 | // Call helper to add it. Will throw if decl is already MethodImpl'd |
6511 | CONSISTENCY_CHECK(pDecl->GetSlotIndex() == static_cast<SLOT_INDEX>(pDecl->GetMethodDesc()->GetSlot())); |
6512 | AddMethodImplDispatchMapping( |
6513 | firstDispatchMapTypeID, |
6514 | pDecl->GetSlotIndex(), |
6515 | pImpl); |
6516 | |
6517 | #ifdef FEATURE_PREJIT |
6518 | if (IsCompilationProcess()) |
6519 | { |
6520 | // |
6521 | // Mark this interface as overridable. It is used to skip generation of |
6522 |         // CCW stubs during NGen (see code:MethodNeedsReverseComStub) |
6523 | // |
6524 | if (!IsMdFinal(pImpl->GetDeclAttrs())) |
6525 | { |
6526 | pDeclMT->GetWriteableDataForWrite()->SetIsOverridingInterface(); |
6527 | } |
6528 | } |
6529 | #endif |
6530 | |
6531 | #ifdef _DEBUG |
6532 | if (bmtInterface->dbg_fShouldInjectInterfaceDuplicates) |
6533 | { // We injected interface duplicates |
6534 | |
6535 | // We have to MethodImpl all interface duplicates as all duplicates are 'declared on type' (see |
6536 | // code:#InjectInterfaceDuplicates_ApproxInterfaces) |
6537 | DispatchMapTypeID * rgDispatchMapTypeIDs = (DispatchMapTypeID *)_alloca(sizeof(DispatchMapTypeID) * cInterfaceDuplicates); |
6538 | ComputeDispatchMapTypeIDs( |
6539 | pDeclMT, |
6540 | &pDecl->GetMethodSignature().GetSubstitution(), |
6541 | rgDispatchMapTypeIDs, |
6542 | cInterfaceDuplicates, |
6543 | &cInterfaceDuplicates); |
6544 | for (UINT32 nInterfaceDuplicate = 1; nInterfaceDuplicate < cInterfaceDuplicates; nInterfaceDuplicate++) |
6545 | { |
6546 | // Add MethodImpl record for each injected interface duplicate |
6547 | AddMethodImplDispatchMapping( |
6548 | rgDispatchMapTypeIDs[nInterfaceDuplicate], |
6549 | pDecl->GetSlotIndex(), |
6550 | pImpl); |
6551 | } |
6552 | } |
6553 | #endif //_DEBUG |
6554 | } // MethodTableBuilder::PlaceInterfaceDeclarationOnClass |
6555 | |
6556 | //******************************************************************************* |
6557 | VOID MethodTableBuilder::PlaceInterfaceDeclarationOnInterface( |
6558 | bmtMethodHandle hDecl, |
6559 | bmtMDMethod *pImpl, |
6560 | DWORD * slots, |
6561 | RelativePointer<MethodDesc *> * replaced, |
6562 | DWORD * pSlotIndex, |
6563 | DWORD dwMaxSlotSize) |
6564 | { |
6565 | CONTRACTL { |
6566 | STANDARD_VM_CHECK; |
6567 | PRECONDITION(CheckPointer(pImpl)); |
6568 | PRECONDITION(IsInterface()); |
6569 | PRECONDITION(hDecl.GetMethodDesc()->IsInterface()); |
6570 | } CONTRACTL_END; |
6571 | |
6572 | MethodDesc * pDeclMD = hDecl.GetMethodDesc(); |
6573 | |
6574 | if (!bmtProp->fNoSanityChecks) |
6575 | { |
6576 | /////////////////////////////// |
6577 | // Verify the signatures match |
6578 | |
6579 | MethodImplCompareSignatures( |
6580 | hDecl, |
6581 | bmtMethodHandle(pImpl), |
6582 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_INTERFACE_METHOD_IMPL); |
6583 | |
6584 | /////////////////////////////// |
6585 | // Validate the method impl. |
6586 | |
6587 | TestMethodImpl(hDecl, bmtMethodHandle(pImpl)); |
6588 | } |
6589 | |
6590 | // We implement this slot, record it |
6591 | ASSERT(*pSlotIndex < dwMaxSlotSize); |
6592 | slots[*pSlotIndex] = hDecl.GetSlotIndex(); |
6593 | replaced[*pSlotIndex].SetValue(pDeclMD); |
6594 | |
6595 | // increment the counter |
6596 | (*pSlotIndex)++; |
6597 | } // MethodTableBuilder::PlaceInterfaceDeclarationOnInterface |
6598 | |
6599 | //******************************************************************************* |
6600 | VOID |
6601 | MethodTableBuilder::PlaceParentDeclarationOnClass( |
6602 | bmtRTMethod * pDecl, |
6603 | bmtMDMethod * pImpl, |
6604 | DWORD * slots, |
6605 | RelativePointer<MethodDesc *> * replaced, |
6606 | DWORD * pSlotIndex, |
6607 | DWORD dwMaxSlotSize) |
6608 | { |
6609 | CONTRACTL { |
6610 | STANDARD_VM_CHECK; |
6611 | PRECONDITION(CheckPointer(pDecl)); |
6612 | PRECONDITION(CheckPointer(pImpl)); |
6613 | PRECONDITION(CheckPointer(bmtVT->pDispatchMapBuilder)); |
6614 | PRECONDITION(CheckPointer(GetParentMethodTable())); |
6615 | } CONTRACTL_END; |
6616 | |
6617 | MethodDesc * pDeclMD = pDecl->GetMethodDesc(); |
6618 | |
6619 | // Note that the fact that pDecl is non-NULL means that we found the |
6620 | // declaration token to be owned by a parent type. |
6621 | |
6622 | if (!bmtProp->fNoSanityChecks) |
6623 | { |
6624 | ///////////////////////////////////////// |
6625 | // Verify that the signatures match |
6626 | |
6627 | MethodImplCompareSignatures( |
6628 | pDecl, |
6629 | pImpl, |
6630 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_PARENT_METHOD_IMPL); |
6631 | |
6632 | //////////////////////////////// |
6633 | // Verify rules of method impls |
6634 | |
6635 | TestMethodImpl( |
6636 | bmtMethodHandle(pDecl), |
6637 | bmtMethodHandle(pImpl)); |
6638 | } |
6639 | |
6640 | /////////////////// |
6641 | // Add the mapping |
6642 | |
6643 | // Call helper to add it. Will throw if DECL is already MethodImpl'd |
6644 | AddMethodImplDispatchMapping( |
6645 | DispatchMapTypeID::ThisClassID(), |
6646 | pDeclMD->GetSlot(), |
6647 | pImpl); |
6648 | |
6649 | // We implement this slot, record it |
6650 | ASSERT(*pSlotIndex < dwMaxSlotSize); |
6651 | slots[*pSlotIndex] = pDeclMD->GetSlot(); |
6652 | replaced[*pSlotIndex].SetValue(pDeclMD); |
6653 | |
6654 | // increment the counter |
6655 | (*pSlotIndex)++; |
6656 | } // MethodTableBuilder::PlaceParentDeclarationOnClass |
6657 | |
6658 | //******************************************************************************* |
6659 | // This will validate that all interface methods that were matched during |
6660 | // layout also validate against type constraints. |
6661 | |
6662 | VOID MethodTableBuilder::ValidateInterfaceMethodConstraints() |
6663 | { |
6664 | STANDARD_VM_CONTRACT; |
6665 | |
6666 | DispatchMapBuilder::Iterator it(bmtVT->pDispatchMapBuilder); |
6667 | for (; it.IsValid(); it.Next()) |
6668 | { |
6669 | if (it.GetTypeID() != DispatchMapTypeID::ThisClassID()) |
6670 | { |
6671 | bmtRTType * pItf = bmtInterface->pInterfaceMap[it.GetTypeID().GetInterfaceNum()].GetInterfaceType(); |
6672 | |
6673 | // Grab the method token |
6674 | MethodTable * pMTItf = pItf->GetMethodTable(); |
6675 | CONSISTENCY_CHECK(CheckPointer(pMTItf->GetMethodDescForSlot(it.GetSlotNumber()))); |
6676 | mdMethodDef mdTok = pItf->GetMethodTable()->GetMethodDescForSlot(it.GetSlotNumber())->GetMemberDef(); |
6677 | |
6678 | // Default to the current module. The code immediately below determines if this |
6679 | // assumption is incorrect. |
6680 | Module * pTargetModule = GetModule(); |
6681 | |
6682 | // Get the module of the target method. Get it through the chunk to |
6683 | // avoid triggering the assert that MethodTable is non-NULL. It may |
6684 | // be null since it may belong to the type we're building right now. |
6685 | MethodDesc * pTargetMD = it.GetTargetMD(); |
6686 | |
6687 |             // If the target MethodDesc's MethodTable pointer is null, the target belongs |
6688 |             // to the current type. Otherwise, the MethodDesc MUST be owned by a parent |
6689 |             // of the type we're building. |
6690 | BOOL fTargetIsOwnedByParent = !pTargetMD->GetMethodTablePtr()->IsNull(); |
6691 | |
6692 | // If the method is owned by a parent, we need to use the parent's module, |
6693 | // and we must construct the substitution chain all the way up to the parent. |
6694 | const Substitution *pSubstTgt = NULL; |
6695 | if (fTargetIsOwnedByParent) |
6696 | { |
6697 | CONSISTENCY_CHECK(CheckPointer(GetParentType())); |
6698 | bmtRTType *pTargetType = bmtRTType::FindType(GetParentType(), pTargetMD->GetMethodTable()); |
6699 | pSubstTgt = &pTargetType->GetSubstitution(); |
6700 | pTargetModule = pTargetType->GetModule(); |
6701 | } |
6702 | |
6703 | // Now compare the method constraints. |
6704 | if (!MetaSig::CompareMethodConstraints(pSubstTgt, |
6705 | pTargetModule, |
6706 | pTargetMD->GetMemberDef(), |
6707 | &pItf->GetSubstitution(), |
6708 | pMTItf->GetModule(), |
6709 | mdTok)) |
6710 | { |
6711 | LOG((LF_CLASSLOADER, LL_INFO1000, |
6712 | "BADCONSTRAINTS on interface method implementation: %x\n" , pTargetMD)); |
6713 | // This exception will be due to an implicit implementation, since explicit errors |
6714 | // will be detected in MethodImplCompareSignatures (for now, anyway). |
6715 | CONSISTENCY_CHECK(!it.IsMethodImpl()); |
6716 | DWORD idsError = it.IsMethodImpl() ? |
6717 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_INTERFACE_METHOD_IMPL : |
6718 | IDS_CLASSLOAD_CONSTRAINT_MISMATCH_ON_IMPLICIT_IMPLEMENTATION; |
6719 | if (fTargetIsOwnedByParent) |
6720 | { |
6721 | DefineFullyQualifiedNameForClass(); |
6722 | LPCUTF8 szClassName = GetFullyQualifiedNameForClassNestedAware(pTargetMD->GetMethodTable()); |
6723 | LPCUTF8 szMethodName = pTargetMD->GetName(); |
6724 | |
6725 | CQuickBytes qb; |
6726 | // allocate enough room for "<class>.<method>\0" |
6727 | size_t cchFullName = strlen(szClassName) + 1 + strlen(szMethodName) + 1; |
6728 | LPUTF8 szFullName = (LPUTF8) qb.AllocThrows(cchFullName); |
6729 | strcpy_s(szFullName, cchFullName, szClassName); |
6730 | strcat_s(szFullName, cchFullName, "." ); |
6731 | strcat_s(szFullName, cchFullName, szMethodName); |
6732 | |
6733 | BuildMethodTableThrowException(idsError, szFullName); |
6734 | } |
6735 | else |
6736 | { |
6737 | BuildMethodTableThrowException(idsError, pTargetMD->GetMemberDef()); |
6738 | } |
6739 | } |
6740 | } |
6741 | } |
6742 | } // MethodTableBuilder::ValidateInterfaceMethodConstraints |
6743 | |
6744 | //******************************************************************************* |
6745 | // Used to allocate and initialize MethodDescs (both the boxed and unboxed entrypoints) |
6746 | VOID MethodTableBuilder::AllocAndInitMethodDescs() |
6747 | { |
6748 | STANDARD_VM_CONTRACT; |
6749 | |
6750 | // |
6751 | // Go over all MethodDescs and create smallest number of MethodDescChunks possible. |
6752 | // |
6753 | // Iterate over all methods and start a new chunk only if: |
6754 | // - Token range (upper 24 bits of the method token) has changed. |
6755 | // - The maximum size of the chunk has been reached. |
6756 | // |
6757 | |
6758 | int currentTokenRange = -1; // current token range |
6759 | SIZE_T sizeOfMethodDescs = 0; // current running size of methodDesc chunk |
6760 | int startIndex = 0; // start of the current chunk (index into bmtMethod array) |
6761 | |
6762 | DeclaredMethodIterator it(*this); |
6763 | while (it.Next()) |
6764 | { |
6765 | int tokenRange = GetTokenRange(it.Token()); |
6766 | |
6767 | // This code assumes that iterator returns tokens in ascending order. If this assumption does not hold, |
6768 | // the code will still work with small performance penalty (method desc chunk layout will be less efficient). |
6769 | _ASSERTE(tokenRange >= currentTokenRange); |
6770 | |
6771 | SIZE_T size = MethodDesc::GetBaseSize(GetMethodClassification(it->GetMethodType())); |
6772 | |
6773 | // Add size of optional slots |
6774 | |
6775 | if (it->GetMethodImplType() == METHOD_IMPL) |
6776 | size += sizeof(MethodImpl); |
6777 | |
6778 | if (it->GetSlotIndex() >= bmtVT->cVtableSlots) |
6779 | size += sizeof(MethodDesc::NonVtableSlot); // slot |
6780 | |
6781 | if (NeedsNativeCodeSlot(*it)) |
6782 | size += sizeof(MethodDesc::NativeCodeSlot); |
6783 | |
6784 | // See comment in AllocAndInitMethodDescChunk |
6785 | if (NeedsTightlyBoundUnboxingStub(*it)) |
6786 | { |
6787 | size *= 2; |
6788 | |
6789 | if (bmtGenerics->GetNumGenericArgs() == 0) { |
6790 | size += sizeof(MethodDesc::NonVtableSlot); |
6791 | } |
6792 | else { |
6793 | bmtVT->cVtableSlots++; |
6794 | } |
6795 | } |
6796 | |
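        // Note: under CROSSGEN_COMPILE the condition below is compiled out, so this block runs on
        // every iteration and, in effect, each MethodDesc gets its own chunk.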
6797 | #ifndef CROSSGEN_COMPILE |
6798 | if (tokenRange != currentTokenRange || |
6799 | sizeOfMethodDescs + size > MethodDescChunk::MaxSizeOfMethodDescs) |
6800 | #endif // CROSSGEN_COMPILE |
6801 | { |
6802 | if (sizeOfMethodDescs != 0) |
6803 | { |
6804 | AllocAndInitMethodDescChunk(startIndex, it.CurrentIndex() - startIndex, sizeOfMethodDescs); |
6805 | startIndex = it.CurrentIndex(); |
6806 | } |
6807 | |
6808 | currentTokenRange = tokenRange; |
6809 | sizeOfMethodDescs = 0; |
6810 | } |
6811 | |
6812 | sizeOfMethodDescs += size; |
6813 | } |
6814 | |
6815 | if (sizeOfMethodDescs != 0) |
6816 | { |
6817 | AllocAndInitMethodDescChunk(startIndex, NumDeclaredMethods() - startIndex, sizeOfMethodDescs); |
6818 | } |
6819 | } |
6820 | |
6821 | //******************************************************************************* |
6822 | // Allocates and initializes one method desc chunk. |
6823 | // |
6824 | // Arguments: |
6825 | // startIndex - index of first method in bmtMethod array. |
6826 | // count - number of methods in this chunk (contiguous region from startIndex) |
6827 | // sizeOfMethodDescs - total expected size of MethodDescs in this chunk |
6828 | // |
6829 | // Used by AllocAndInitMethodDescs. |
6830 | // |
6831 | VOID MethodTableBuilder::AllocAndInitMethodDescChunk(COUNT_T startIndex, COUNT_T count, SIZE_T sizeOfMethodDescs) |
6832 | { |
6833 | CONTRACTL { |
6834 | STANDARD_VM_CHECK; |
6835 | PRECONDITION(sizeOfMethodDescs <= MethodDescChunk::MaxSizeOfMethodDescs); |
6836 | } CONTRACTL_END; |
6837 | |
6838 | void * pMem = GetMemTracker()->Track( |
6839 | GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(TADDR) + sizeof(MethodDescChunk) + sizeOfMethodDescs))); |
6840 | |
6841 | // Skip pointer to temporary entrypoints |
6842 | MethodDescChunk * pChunk = (MethodDescChunk *)((BYTE*)pMem + sizeof(TADDR)); |
6843 | |
6844 | COUNT_T methodDescCount = 0; |
6845 | |
6846 | SIZE_T offset = sizeof(MethodDescChunk); |
6847 | |
6848 | #ifdef _PREFAST_ |
6849 | #pragma warning(push) |
6850 | #pragma warning(disable:22019) // Suppress PREFast warning about integer underflow |
6851 | #endif // _PREFAST_ |
6852 | for (COUNT_T i = 0; i < count; i++) |
6853 | #ifdef _PREFAST_ |
6854 | #pragma warning(pop) |
6855 | #endif // _PREFAST_ |
6856 | |
6857 | { |
6858 | bmtMDMethod * pMDMethod = (*bmtMethod)[static_cast<SLOT_INDEX>(startIndex + i)]; |
6859 | |
6860 | MethodDesc * pMD = (MethodDesc *)((BYTE *)pChunk + offset); |
6861 | |
6862 | pMD->SetChunkIndex(pChunk); |
6863 | |
6864 | InitNewMethodDesc(pMDMethod, pMD); |
6865 | |
6866 | #ifdef _PREFAST_ |
6867 | #pragma warning(push) |
6868 | #pragma warning(disable:22018) // Suppress PREFast warning about integer underflow |
6869 | #endif // _PREFAST_ |
6870 | offset += pMD->SizeOf(); |
6871 | #ifdef _PREFAST_ |
6872 | #pragma warning(pop) |
6873 | #endif // _PREFAST_ |
6874 | |
6875 | methodDescCount++; |
6876 | |
6877 | // If we're a value class, we want to create duplicate slots |
6878 | // and MethodDescs for all methods in the vtable |
6879 | // section (i.e. not non-virtual instance methods or statics). |
6880 | // In the name of uniformity it would be much nicer |
6881 | // if we created _all_ value class BoxedEntryPointStubs at this point. |
6882 | // However, non-virtual instance methods only require unboxing |
6883 | // stubs in the rare case that we create a delegate to such a |
6884 | // method, and thus it would be inefficient to create them on |
6885 | // loading: after all typical structs will have many non-virtual |
6886 |         // loading: after all, typical structs will have many non-virtual |
6887 | // |
6888 | // Unboxing stubs for non-virtual instance methods are created |
6889 | // in code:MethodDesc::FindOrCreateAssociatedMethodDesc. |
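        // Illustrative example: for a value type that overrides a virtual method (say ToString),
        // the vtable slot ends up holding the unboxing stub created below, while the duplicate
        // MethodDesc placed right after it is the entrypoint that takes an unboxed 'this'.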
6890 | |
6891 | if (NeedsTightlyBoundUnboxingStub(pMDMethod)) |
6892 | { |
6893 | MethodDesc * pUnboxedMD = (MethodDesc *)((BYTE *)pChunk + offset); |
6894 | |
6895 | ////////////////////////////////// |
6896 | // Initialize the new MethodDesc |
6897 | |
6898 | // <NICE> memcpy operations on data structures like MethodDescs are extremely fragile |
6899 | // and should not be used. We should go to the effort of having proper constructors |
6900 | // in the MethodDesc class. </NICE> |
6901 | |
6902 | memcpy(pUnboxedMD, pMD, pMD->SizeOf()); |
6903 | |
6904 | // Reset the chunk index |
6905 | pUnboxedMD->SetChunkIndex(pChunk); |
6906 | |
6907 | if (bmtGenerics->GetNumGenericArgs() == 0) { |
6908 | pUnboxedMD->SetHasNonVtableSlot(); |
6909 | } |
6910 | |
6911 | ////////////////////////////////////////////////////////// |
6912 | // Modify the original MethodDesc to be an unboxing stub |
6913 | |
6914 | pMD->SetIsUnboxingStub(); |
6915 | |
6916 | //////////////////////////////////////////////////////////////////// |
6917 | // Add the new MethodDesc to the non-virtual portion of the vtable |
6918 | |
6919 | if (!bmtVT->AddUnboxedMethod(pMDMethod)) |
6920 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS); |
6921 | |
6922 | pUnboxedMD->SetSlot(pMDMethod->GetUnboxedSlotIndex()); |
6923 | pMDMethod->SetUnboxedMethodDesc(pUnboxedMD); |
6924 | |
6925 | offset += pUnboxedMD->SizeOf(); |
6926 | methodDescCount++; |
6927 | } |
6928 | } |
6929 | _ASSERTE(offset == sizeof(MethodDescChunk) + sizeOfMethodDescs); |
6930 | |
6931 | pChunk->SetSizeAndCount((ULONG)sizeOfMethodDescs, methodDescCount); |
6932 | |
6933 | GetHalfBakedClass()->AddChunk(pChunk); |
6934 | } |
6935 | |
6936 | //******************************************************************************* |
6937 | BOOL |
6938 | MethodTableBuilder::NeedsTightlyBoundUnboxingStub(bmtMDMethod * pMDMethod) |
6939 | { |
6940 | STANDARD_VM_CONTRACT; |
6941 | |
6942 | return IsValueClass() && |
6943 | !IsMdStatic(pMDMethod->GetDeclAttrs()) && |
6944 | IsMdVirtual(pMDMethod->GetDeclAttrs()) && |
6945 | (pMDMethod->GetMethodType() != METHOD_TYPE_INSTANTIATED) && |
6946 | !IsMdRTSpecialName(pMDMethod->GetDeclAttrs()); |
6947 | } |
6948 | |
6949 | //******************************************************************************* |
6950 | BOOL |
6951 | MethodTableBuilder::NeedsNativeCodeSlot(bmtMDMethod * pMDMethod) |
6952 | { |
6953 | LIMITED_METHOD_CONTRACT; |
6954 | |
6955 | |
6956 | #ifdef FEATURE_TIERED_COMPILATION |
6957 | // Keep in-sync with MethodDesc::IsEligibleForTieredCompilation() |
6958 | if (g_pConfig->TieredCompilation() && |
6959 | (pMDMethod->GetMethodType() == METHOD_TYPE_NORMAL || pMDMethod->GetMethodType() == METHOD_TYPE_INSTANTIATED)) |
6960 | { |
6961 | return TRUE; |
6962 | } |
6963 | #endif |
6964 | |
6965 | #if defined(FEATURE_JIT_PITCHING) |
6966 | if ((CLRConfig::GetConfigValue(CLRConfig::INTERNAL_JitPitchEnabled) != 0) && |
6967 | (CLRConfig::GetConfigValue(CLRConfig::INTERNAL_JitPitchMemThreshold) != 0)) |
6968 | return TRUE; |
6969 | #endif |
6970 | |
6971 | return GetModule()->IsEditAndContinueEnabled(); |
6972 | } |
6973 | |
6974 | //******************************************************************************* |
6975 | VOID |
6976 | MethodTableBuilder::AllocAndInitDictionary() |
6977 | { |
6978 | STANDARD_VM_CONTRACT; |
6979 | |
6980 | // Allocate dictionary layout used by all compatible instantiations |
6981 | |
6982 | if (bmtGenerics->fSharedByGenericInstantiations && !bmtGenerics->fContainsGenericVariables) |
6983 | { |
6984 | // We use the number of methods as a heuristic for the number of slots in the dictionary |
6985 | // attached to shared class method tables. |
6986 | // If there are no declared methods then we have no slots, and we will never do any token lookups |
6987 | // |
6988 | // Heuristics |
6989 | // - Classes with a small number of methods (2-3) tend to be more likely to use new slots, |
6990 | // i.e. further methods tend to reuse slots from previous methods. |
6991 | // = treat all classes with only 2-3 methods as if they have an extra method. |
6992 | // - Classes with more generic parameters tend to use more slots. |
6993 | // = multiply by 1.5 for 2 params or more |
6994 | |
6995 | DWORD numMethodsAdjusted = |
6996 | (bmtMethod->dwNumDeclaredNonAbstractMethods == 0) |
6997 | ? 0 |
6998 | : (bmtMethod->dwNumDeclaredNonAbstractMethods < 3) |
6999 | ? 3 |
7000 | : bmtMethod->dwNumDeclaredNonAbstractMethods; |
7001 | |
7002 | _ASSERTE(bmtGenerics->GetNumGenericArgs() != 0); |
7003 | DWORD nTypeFactorBy2 = (bmtGenerics->GetNumGenericArgs() == 1) |
7004 | ? 2 |
7005 | : 3; |
7006 | |
7007 | DWORD estNumTypeSlots = (numMethodsAdjusted * nTypeFactorBy2 + 2) / 3; |
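        // Illustrative worked example of the heuristic above: 5 declared non-abstract methods and
        // two or more generic parameters give numMethodsAdjusted = 5 and nTypeFactorBy2 = 3,
        // so estNumTypeSlots = (5 * 3 + 2) / 3 = 5.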
7008 | // estNumTypeSlots should fit in a WORD as long as we maintain the current |
7009 | // limit on the number of methods in a type (approx 2^16). |
7010 | _ASSERTE(FitsIn<WORD>(estNumTypeSlots)); |
7011 | WORD numTypeSlots = static_cast<WORD>(estNumTypeSlots); |
7012 | |
7013 | if (numTypeSlots > 0) |
7014 | { |
7015 | // Dictionary layout is an optional field on EEClass, so ensure the optional field descriptor has |
7016 | // been allocated. |
7017 | EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
7018 | GetHalfBakedClass()->SetDictionaryLayout(DictionaryLayout::Allocate(numTypeSlots, bmtAllocator, m_pAllocMemTracker)); |
7019 | } |
7020 | } |
7021 | |
7022 | } |
7023 | |
7024 | //******************************************************************************* |
7025 | // |
7026 | // Used by BuildMethodTable |
7027 | // |
7028 | // Compute the set of interfaces which are equivalent. Duplicates in the interface map |
7029 | // will be placed into different equivalence sets unless they participate in type equivalence. |
7030 | // This is a bit odd, but it turns out we only need to know about equivalence classes if |
7031 | // there is type equivalence involved in the interface; whether or not we detect equivalence |
7032 | // in the other cases does not result in differing behavior. |
7033 | // |
7034 | // By restricting the reasons for having equivalence matches, we reduce the algorithm from one which |
7035 | // is O(n*n) best case to an algorithm which will typically execute something more like O(m*n) best case time |
7036 | // where m is the number of generic interfaces (although still n*n in the worst case). The assumption is that equivalent |
7037 | // and generic interfaces are relatively rare. |
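// For example, if the same generic interface appears in the map twice under substitutions that
// yield identical instantiations, both entries end up sharing one equivalence set (and are marked
// as having multiple entries); every other interface simply gets a fresh set number.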
7038 | VOID |
7039 | MethodTableBuilder::ComputeInterfaceMapEquivalenceSet() |
7040 | { |
7041 | STANDARD_VM_CONTRACT; |
7042 | |
7043 | UINT32 nextEquivalenceSet = 1; |
7044 | |
7045 | for (DWORD dwCurInterface = 0; |
7046 | dwCurInterface < bmtInterface->dwInterfaceMapSize; |
7047 | dwCurInterface++) |
7048 | { |
7049 | // Keep track of the current interface we are trying to calculate the equivalence set of |
7050 | bmtInterfaceEntry * pCurItfEntry = &bmtInterface->pInterfaceMap[dwCurInterface]; |
7051 | bmtRTType * pCurItf = pCurItfEntry->GetInterfaceType(); |
7052 | MethodTable * pCurItfMT = pCurItf->GetMethodTable(); |
7053 | const Substitution * pCurItfSubst = &pCurItf->GetSubstitution(); |
7054 | |
7055 | UINT32 currentEquivalenceSet = 0; |
7056 | |
7057 | // Only interfaces with type equivalence, or that are generic need to be compared for equivalence |
7058 | if (pCurItfMT->HasTypeEquivalence() || pCurItfMT->HasInstantiation()) |
7059 | { |
7060 | for (DWORD dwCurInterfaceCompare = 0; |
7061 | dwCurInterfaceCompare < dwCurInterface; |
7062 | dwCurInterfaceCompare++) |
7063 | { |
7064 | // Keep track of the current interface we are trying to calculate the equivalence set of |
7065 | bmtInterfaceEntry * pCompareItfEntry = &bmtInterface->pInterfaceMap[dwCurInterfaceCompare]; |
7066 | bmtRTType * pCompareItf = pCompareItfEntry->GetInterfaceType(); |
7067 | MethodTable * pCompareItfMT = pCompareItf->GetMethodTable(); |
7068 | const Substitution * pCompareItfSubst = &pCompareItf->GetSubstitution(); |
7069 | |
7070 | // Only interfaces with type equivalence, or that are generic need to be compared for equivalence |
7071 | if (pCompareItfMT->HasTypeEquivalence() || pCompareItfMT->HasInstantiation()) |
7072 | { |
7073 | if (MetaSig::CompareTypeDefsUnderSubstitutions(pCurItfMT, |
7074 | pCompareItfMT, |
7075 | pCurItfSubst, |
7076 | pCompareItfSubst, |
7077 | NULL)) |
7078 | { |
7079 | currentEquivalenceSet = pCompareItfEntry->GetInterfaceEquivalenceSet(); |
7080 | // Use the equivalence set of the interface map entry we just found |
7081 | pCurItfEntry->SetInterfaceEquivalenceSet(currentEquivalenceSet, true); |
7082 | // Update the interface map entry we just found to indicate that it is part of an equivalence |
7083 | // set with multiple entries. |
7084 | pCompareItfEntry->SetInterfaceEquivalenceSet(currentEquivalenceSet, true); |
7085 | break; |
7086 | } |
7087 | } |
7088 | } |
7089 | } |
7090 | |
7091 | // If we did not find an equivalent interface above, use the next available equivalence set indicator |
7092 | if (currentEquivalenceSet == 0) |
7093 | { |
7094 | pCurItfEntry->SetInterfaceEquivalenceSet(nextEquivalenceSet, false); |
7095 | nextEquivalenceSet++; |
7096 | } |
7097 | } |
7098 | } |
7099 | |
7100 | //******************************************************************************* |
7101 | // |
7102 | // Used by PlaceInterfaceMethods |
7103 | // |
7104 | // Given an interface in our interface map, and a particular method on that interface, place |
7105 | // a method from the parent types implementation of an equivalent interface into that method |
7106 | // slot. Used by PlaceInterfaceMethods to make equivalent interface implementations have the |
7107 | // same behavior as if the parent interface was implemented on this type instead of an equivalent interface. |
7108 | // |
7109 | // This logic is used in situations such as below. I and I' are equivalent interfaces |
7110 | // |
7111 | //# |
7112 | // class Base : I |
7113 | // {void I.Method() { } } |
7114 | // interface IOther : I' {} |
7115 | // class Derived : IOther |
7116 | // { virtual void Method() {}} |
7117 | // |
7118 | // We should map I'.Method to Base.Method, not Derived.Method |
7119 | // |
7120 | // Another example |
7121 | // class Base : I |
7122 | // { virtual void Method() } |
7123 | // interface IOther : I' {} |
7124 | // class Derived : IOther |
7125 | // { virtual void Method() {}} |
7126 | // |
7127 | // We should map I'.Method to Base.Method, not Derived.Method |
7128 | // |
7129 | // class Base : I |
7130 | // {void I.Method() { } } |
7131 | // class Derived : I' |
7132 | // {} |
7133 | // |
7134 | // We should map I'.Method to Base.Method, and not throw a TypeLoadException |
7135 | // |
7136 | #ifdef FEATURE_COMINTEROP |
7137 | VOID |
7138 | MethodTableBuilder::PlaceMethodFromParentEquivalentInterfaceIntoInterfaceSlot( |
7139 | bmtInterfaceEntry::InterfaceSlotIterator & itfSlotIt, |
7140 | bmtInterfaceEntry * pCurItfEntry, |
7141 | DispatchMapTypeID ** prgInterfaceDispatchMapTypeIDs, |
7142 | DWORD dwCurInterface) |
7143 | { |
7144 | STANDARD_VM_CONTRACT; |
7145 | |
7146 | bmtRTMethod * pCurItfMethod = itfSlotIt->Decl().AsRTMethod(); |
7147 | |
7148 | if (itfSlotIt->Impl() != INVALID_SLOT_INDEX) |
7149 | { |
7150 | return; |
7151 | } |
7152 | |
7153 |     // For every equivalent interface entry that was actually implemented by the parent, look at the |
7154 |     // equivalent method slot on that entry; if it matches and has a slot implementation, record it and continue. |
7155 | for (DWORD dwEquivalentInterface = 0; |
7156 | (dwEquivalentInterface < bmtInterface->dwInterfaceMapSize) && (itfSlotIt->Impl() == INVALID_SLOT_INDEX); |
7157 | dwEquivalentInterface++) |
7158 | { |
7159 | bmtInterfaceEntry * pEquivItfEntry = &bmtInterface->pInterfaceMap[dwEquivalentInterface]; |
7160 | bmtRTType * pEquivItf = pEquivItfEntry->GetInterfaceType(); |
7161 | MethodTable * pEquivItfMT = pEquivItf->GetMethodTable(); |
7162 | const Substitution * pEquivItfSubst = &pEquivItf->GetSubstitution(); |
7163 | if (pEquivItfEntry->GetInterfaceEquivalenceSet() != pCurItfEntry->GetInterfaceEquivalenceSet()) |
7164 | { |
7165 | // Not equivalent |
7166 | continue; |
7167 | } |
7168 | if (!pEquivItfEntry->IsImplementedByParent()) |
7169 | { |
7170 | // Not implemented by parent |
7171 | continue; |
7172 | } |
7173 | |
7174 | WORD slot = static_cast<WORD>(itfSlotIt.CurrentIndex()); |
7175 | BOOL fFound = FALSE; |
7176 | |
7177 | // Determine which slot on the equivalent interface would map to the slot we are attempting to fill |
7178 | // in with an implementation. |
7179 | WORD otherMTSlot = GetEquivalentMethodSlot(pCurItfEntry->GetInterfaceType()->GetMethodTable(), |
7180 | pEquivItfEntry->GetInterfaceType()->GetMethodTable(), |
7181 | slot, |
7182 | &fFound); |
7183 | |
7184 | if (fFound) |
7185 | { |
7186 | UINT32 cInterfaceDuplicates; |
7187 | if (*prgInterfaceDispatchMapTypeIDs == NULL) |
7188 | { |
7189 | *prgInterfaceDispatchMapTypeIDs = |
7190 | new (GetStackingAllocator()) DispatchMapTypeID[bmtInterface->dwInterfaceMapSize]; |
7191 | } |
7192 | |
7193 | // Compute all TypeIDs for this interface (all duplicates in the interface map) |
7194 | ComputeDispatchMapTypeIDs( |
7195 | pEquivItfMT, |
7196 | pEquivItfSubst, |
7197 | *prgInterfaceDispatchMapTypeIDs, |
7198 | bmtInterface->dwInterfaceMapSize, |
7199 | &cInterfaceDuplicates); |
7200 | // There cannot be more duplicates than number of interfaces |
7201 | _ASSERTE(cInterfaceDuplicates <= bmtInterface->dwInterfaceMapSize); |
7202 | _ASSERTE(cInterfaceDuplicates > 0); |
7203 | |
7204 | // NOTE: This override does not cache the resulting MethodData object |
7205 | MethodTable::MethodDataWrapper hParentData; |
7206 | hParentData = MethodTable::GetMethodData( |
7207 | *prgInterfaceDispatchMapTypeIDs, |
7208 | cInterfaceDuplicates, |
7209 | pEquivItfMT, |
7210 | GetParentMethodTable()); |
7211 | |
7212 | SLOT_INDEX slotIndex = static_cast<SLOT_INDEX> |
7213 | (hParentData->GetImplSlotNumber(static_cast<UINT32>(otherMTSlot))); |
7214 | |
7215 | // Interface is implemented on parent abstract type and this particular slot was not implemented |
7216 | if (slotIndex == INVALID_SLOT_INDEX) |
7217 | { |
7218 | continue; |
7219 | } |
7220 | |
7221 | bmtMethodSlot & parentSlotImplementation = (*bmtParent->pSlotTable)[slotIndex]; |
7222 | bmtMethodHandle & parentImplementation = parentSlotImplementation.Impl(); |
7223 | |
7224 | // Check to verify that the equivalent slot on the equivalent interface actually matches the method |
7225 | // on the current interface. If not, then the slot is not a match, and we should search other interfaces |
7226 | // for an implementation of the method. |
7227 | if (!MethodSignature::SignaturesEquivalent(pCurItfMethod->GetMethodSignature(), parentImplementation.GetMethodSignature())) |
7228 | { |
7229 | continue; |
7230 | } |
7231 | |
7232 | itfSlotIt->Impl() = slotIndex; |
7233 | |
7234 | MethodDesc * pMD = hParentData->GetImplMethodDesc(static_cast<UINT32>(otherMTSlot)); |
7235 | |
7236 | DispatchMapTypeID dispatchMapTypeID = |
7237 | DispatchMapTypeID::InterfaceClassID(dwCurInterface); |
7238 | bmtVT->pDispatchMapBuilder->InsertMDMapping( |
7239 | dispatchMapTypeID, |
7240 | static_cast<UINT32>(itfSlotIt.CurrentIndex()), |
7241 | pMD, |
7242 | FALSE); |
7243 | } |
7244 | } |
7245 | } // MethodTableBuilder::PlaceMethodFromParentEquivalentInterfaceIntoInterfaceSlot |
7246 | #endif // FEATURE_COMINTEROP |
7247 | |
7248 | //******************************************************************************* |
7249 | // |
7250 | // Used by BuildMethodTable |
7251 | // |
7252 | // |
7253 | // If we are a class, then there may be some unplaced vtable methods (which are by definition |
7254 | // interface methods, otherwise they'd already have been placed). Place as many unplaced methods |
7255 | // as possible, in the order preferred by interfaces. However, do not allow any duplicates - once |
7256 | // a method has been placed, it cannot be placed again - if we are unable to neatly place an interface, |
7257 | // create duplicate slots for it starting at dwCurrentDuplicateVtableSlot. Fill out the interface |
7258 | // map for all interfaces as they are placed. |
7259 | // |
7260 | // If we are an interface, then all methods are already placed. Fill out the interface map for |
7261 | // interfaces as they are placed. |
7262 | // |
7263 | // BEHAVIOUR (based on Partition II: 11.2, not including MethodImpls) |
7264 | // C is current class, P is a parent class, I is the interface being implemented |
7265 | // |
7266 | // FOREACH interface I implemented by this class C |
7267 | // FOREACH method I::M |
7268 | // IF I is EXPLICITLY implemented by C |
7269 | // IF some method C::M matches I::M |
7270 | // USE C::M as implementation for I::M |
7271 | // ELIF we inherit a method P::M that matches I::M |
7272 | // USE P::M as implementation for I::M |
7273 | // ENDIF |
7274 | // ELSE |
7275 | // IF I::M lacks implementation |
7276 | // IF some method C::M matches I::M |
7277 | // USE C::M as implementation for I::M |
7278 | // ELIF we inherit a method P::M that matches I::M |
7279 | // USE P::M as implementation for I::M |
7280 | // ELIF I::M was implemented by the parent type with method Parent::M |
7281 | // USE Parent::M for the implementation of I::M // VSD does this by default if we really |
7282 | // // implemented I on the parent type, but |
7283 | // // equivalent interfaces need to make this |
7284 | // // explicit |
7285 | // ENDIF |
7286 | // ENDIF |
7287 | // ENDIF |
7288 | // ENDFOR |
7289 | // ENDFOR |
7290 | // |
7291 | |
7292 | VOID |
7293 | MethodTableBuilder::PlaceInterfaceMethods() |
7294 | { |
7295 | STANDARD_VM_CONTRACT; |
7296 | |
7297 | BOOL fParentInterface; |
7298 | DispatchMapTypeID * rgInterfaceDispatchMapTypeIDs = NULL; |
7299 | |
7300 | for (DWORD dwCurInterface = 0; |
7301 | dwCurInterface < bmtInterface->dwInterfaceMapSize; |
7302 | dwCurInterface++) |
7303 | { |
7304 | // Default to being implemented by the current class |
7305 | fParentInterface = FALSE; |
7306 | |
7307 | // Keep track of the current interface we are trying to place |
7308 | bmtInterfaceEntry * pCurItfEntry = &bmtInterface->pInterfaceMap[dwCurInterface]; |
7309 | bmtRTType * pCurItf = pCurItfEntry->GetInterfaceType(); |
7310 | MethodTable * pCurItfMT = pCurItf->GetMethodTable(); |
7311 | const Substitution * pCurItfSubst = &pCurItf->GetSubstitution(); |
7312 | |
7313 | // |
7314 | // There are three reasons why an interface could be in the implementation list |
7315 | // 1. Inherited from parent |
7316 | // 2. Explicitly declared in the implements list |
7317 | // 3. Implicitly declared through the implements list of an explicitly declared interface |
7318 | // |
7319 | // The reason these cases need to be distinguished is that an inherited interface that is |
7320 | // also explicitly redeclared in the implements list must be fully reimplemented using the |
7321 | // virtual methods of this type (thereby using matching methods in this type that may have |
// a different slot than an inherited method, but hide it by name & sig); however, all
7323 | // implicitly redeclared interfaces should not be fully reimplemented if they were also |
7324 | // inherited from the parent. |
7325 | // |
7326 | // Example: |
7327 | // interface I1 : I2 |
7328 | // class A : I1 |
7329 | // class B : A, I1 |
7330 | // |
7331 | // In this example I1 must be fully reimplemented on B, but B can inherit the implementation |
7332 | // of I2. |
7333 | // |
7334 | |
7335 | if (pCurItfEntry->IsImplementedByParent()) |
7336 | { |
7337 | if (!pCurItfEntry->IsDeclaredOnType()) |
7338 | { |
7339 | fParentInterface = TRUE; |
7340 | } |
7341 | } |
7342 | |
7343 | bool fEquivalentInterfaceImplementedByParent = pCurItfEntry->IsImplementedByParent(); |
7344 | bool fEquivalentInterfaceDeclaredOnType = pCurItfEntry->IsDeclaredOnType(); |
7345 | |
7346 | if (pCurItfEntry->InEquivalenceSetWithMultipleEntries()) |
7347 | { |
7348 | for (DWORD dwEquivalentInterface = 0; |
7349 | dwEquivalentInterface < bmtInterface->dwInterfaceMapSize; |
7350 | dwEquivalentInterface++) |
7351 | { |
7352 | bmtInterfaceEntry * pEquivItfEntry = &bmtInterface->pInterfaceMap[dwEquivalentInterface]; |
7353 | if (pEquivItfEntry->GetInterfaceEquivalenceSet() != pCurItfEntry->GetInterfaceEquivalenceSet()) |
7354 | { |
7355 | // Not equivalent |
7356 | continue; |
7357 | } |
7358 | if (pEquivItfEntry->IsImplementedByParent()) |
7359 | { |
7360 | fEquivalentInterfaceImplementedByParent = true; |
7361 | } |
7362 | if (pEquivItfEntry->IsDeclaredOnType()) |
7363 | { |
7364 | fEquivalentInterfaceDeclaredOnType = true; |
7365 | } |
7366 | |
7367 | if (fEquivalentInterfaceDeclaredOnType && fEquivalentInterfaceImplementedByParent) |
7368 | break; |
7369 | } |
7370 | } |
7371 | |
7372 | bool fParentInterfaceEquivalent = fEquivalentInterfaceImplementedByParent && !fEquivalentInterfaceDeclaredOnType; |
7373 | |
7374 | CONSISTENCY_CHECK(!fParentInterfaceEquivalent || HasParent()); |
7375 | |
7376 | if (fParentInterfaceEquivalent) |
7377 | { |
// In the case where fParentInterface is TRUE, virtual overrides are enough and the interface
7379 | // does not have to be explicitly (re)implemented. The only exception is if the parent is |
7380 | // abstract, in which case an inherited interface may not be fully implemented yet. |
7381 | // This is an optimization that allows us to skip the more expensive slot filling in below. |
7382 | // Note that the check here is for fParentInterface and not for fParentInterfaceEquivalent. |
// This is necessary because if the interface is not actually implemented on the parent type we will
7384 | // need to fill in the slot table below. |
7385 | if (fParentInterface && !GetParentMethodTable()->IsAbstract()) |
7386 | { |
7387 | continue; |
7388 | } |
7389 | |
7390 | { |
7391 | // We will reach here in two cases. |
// 1. The parent is abstract and the interface has been declared on the parent,
7393 | // and possibly partially implemented, so we need to populate the |
7394 | // bmtInterfaceSlotImpl table for this interface with the implementation slot |
7395 | // information. |
// 2. The interface has not been declared on the parent,
7397 | // but an equivalent interface has been. So we need to populate the |
7398 | // bmtInterfaceSlotImpl table for this interface with the implementation slot |
7399 | // information from one of the parent equivalent interfaces. We may or may not |
7400 | // find implementations for all of the methods on the interface on the parent type. |
7401 | // The parent type may or may not be abstract. |
7402 | |
7403 | MethodTable::MethodDataWrapper hParentData; |
7404 | CONSISTENCY_CHECK(CheckPointer(GetParentMethodTable())); |
7405 | |
7406 | if (rgInterfaceDispatchMapTypeIDs == NULL) |
7407 | { |
7408 | rgInterfaceDispatchMapTypeIDs = |
7409 | new (GetStackingAllocator()) DispatchMapTypeID[bmtInterface->dwInterfaceMapSize]; |
7410 | } |
7411 | |
7412 | if (pCurItfEntry->IsImplementedByParent()) |
7413 | { |
7414 | UINT32 cInterfaceDuplicates; |
7415 | // Compute all TypeIDs for this interface (all duplicates in the interface map) |
7416 | ComputeDispatchMapTypeIDs( |
7417 | pCurItfMT, |
7418 | pCurItfSubst, |
7419 | rgInterfaceDispatchMapTypeIDs, |
7420 | bmtInterface->dwInterfaceMapSize, |
7421 | &cInterfaceDuplicates); |
// There cannot be more duplicates than the number of interfaces
7423 | _ASSERTE(cInterfaceDuplicates <= bmtInterface->dwInterfaceMapSize); |
7424 | _ASSERTE(cInterfaceDuplicates > 0); |
7425 | |
7426 | //#InterfaceMap_UseParentInterfaceImplementations |
7427 | // We rely on the fact that interface map of parent type is subset of this type (incl. |
7428 | // duplicates), see code:#InterfaceMap_SupersetOfParent |
7429 | // NOTE: This override does not cache the resulting MethodData object |
7430 | hParentData = MethodTable::GetMethodData( |
7431 | rgInterfaceDispatchMapTypeIDs, |
7432 | cInterfaceDuplicates, |
7433 | pCurItfMT, |
7434 | GetParentMethodTable()); |
7435 | |
7436 | bmtInterfaceEntry::InterfaceSlotIterator itfSlotIt = |
7437 | pCurItfEntry->IterateInterfaceSlots(GetStackingAllocator()); |
7438 | for (; !itfSlotIt.AtEnd(); itfSlotIt.Next()) |
7439 | { |
7440 | itfSlotIt->Impl() = static_cast<SLOT_INDEX> |
7441 | (hParentData->GetImplSlotNumber(static_cast<UINT32>(itfSlotIt.CurrentIndex()))); |
7442 | } |
7443 | } |
7444 | #ifdef FEATURE_COMINTEROP |
7445 | else |
7446 | { |
7447 | // Iterate through the methods on the interface, and if they have a slot which was filled in |
7448 | // on an equivalent interface inherited from the parent fill in the appropriate slot. |
7449 | // This code path is only used when there is an implicit implementation of an interface |
7450 | // that was not implemented on a parent type, but there was an equivalent interface implemented |
7451 | // on a parent type. |
7452 | bmtInterfaceEntry::InterfaceSlotIterator itfSlotIt = |
7453 | pCurItfEntry->IterateInterfaceSlots(GetStackingAllocator()); |
7454 | for (; !itfSlotIt.AtEnd(); itfSlotIt.Next()) |
7455 | { |
7456 | PlaceMethodFromParentEquivalentInterfaceIntoInterfaceSlot(itfSlotIt, pCurItfEntry, &rgInterfaceDispatchMapTypeIDs, dwCurInterface); |
7457 | } |
7458 | } |
7459 | #endif // FEATURE_COMINTEROP |
7460 | } |
7461 | } |
7462 | |
7463 | #ifdef FEATURE_COMINTEROP |
7464 | // WinRT types always use methodimpls to line up methods with interface implementations, so we do not want to allow implicit |
7465 | // interface implementations to kick in. This can especially cause problems with redirected interfaces, where the underlying |
7466 | // runtimeclass doesn't actually implement the interfaces we claim it does. For example, a WinRT class which implements both |
7467 | // IVector<int> and ICalculator will be projected as implementing IList<int> and ICalculator. In this case, we do not want the |
7468 | // ICalculator Add(int) method to get lined up with the ICollection<int> Add method, since that will cause us to dispatch to the |
7469 | // wrong underlying COM interface. |
7470 | // |
// There are special WinRT types in mscorlib (notably DisposableRuntimeClass) which do implement interfaces in the normal way
7472 | // so we skip this check for them. (Note that we can't use a methodimpl directly in mscorlib, since ComImport classes are |
7473 | // forbidden from having implementation code by the C# compiler). |
7474 | if (GetHalfBakedClass()->IsProjectedFromWinRT() && !GetModule()->IsSystem()) |
7475 | { |
7476 | continue; |
7477 | } |
7478 | #endif // FEATURE_COMINTEROP |
7479 | |
7480 | // For each method declared in this interface |
7481 | bmtInterfaceEntry::InterfaceSlotIterator itfSlotIt = |
7482 | pCurItfEntry->IterateInterfaceSlots(GetStackingAllocator()); |
7483 | for (; !itfSlotIt.AtEnd(); ++itfSlotIt) |
7484 | { |
7485 | if (fParentInterfaceEquivalent) |
7486 | { |
7487 | if (itfSlotIt->Impl() != INVALID_SLOT_INDEX) |
7488 | { // If this interface is not explicitly declared on this class, and the interface slot has already been |
7489 | // given an implementation, then the only way to provide a new implementation is through an override |
7490 | // or through a MethodImpl. This is necessary in addition to the continue statement before this for |
7491 | // loop because an abstract interface can still have a partial implementation and it is necessary to |
7492 | // skip those interface slots that have already been satisfied. |
7493 | continue; |
7494 | } |
7495 | } |
7496 | |
7497 | BOOL fFoundMatchInBuildingClass = FALSE; |
7498 | bmtInterfaceSlotImpl & curItfSlot = *itfSlotIt; |
7499 | bmtRTMethod * pCurItfMethod = curItfSlot.Decl().AsRTMethod(); |
7500 | const MethodSignature & curItfMethodSig = pCurItfMethod->GetMethodSignature(); |
7501 | |
7502 | // |
7503 | // First, try to find the method explicitly declared in our class |
7504 | // |
7505 | |
7506 | DeclaredMethodIterator methIt(*this); |
7507 | while (methIt.Next()) |
7508 | { |
7509 | // Note that non-publics can legally be exposed via an interface, but only |
7510 | // through methodImpls. |
7511 | if (IsMdVirtual(methIt.Attrs()) && IsMdPublic(methIt.Attrs())) |
7512 | { |
7513 | #ifdef _DEBUG |
7514 | if(GetHalfBakedClass()->m_fDebuggingClass && g_pConfig->ShouldBreakOnMethod(methIt.Name())) |
7515 | CONSISTENCY_CHECK_MSGF(false, ("BreakOnMethodName: '%s' " , methIt.Name())); |
7516 | #endif // _DEBUG |
7517 | |
7518 | if (pCurItfMethod->GetMethodSignature().Equivalent(methIt->GetMethodSignature())) |
7519 | { |
7520 | fFoundMatchInBuildingClass = TRUE; |
7521 | curItfSlot.Impl() = methIt->GetSlotIndex(); |
7522 | |
7523 | DispatchMapTypeID dispatchMapTypeID = |
7524 | DispatchMapTypeID::InterfaceClassID(dwCurInterface); |
7525 | bmtVT->pDispatchMapBuilder->InsertMDMapping( |
7526 | dispatchMapTypeID, |
7527 | static_cast<UINT32>(itfSlotIt.CurrentIndex()), |
7528 | methIt->GetMethodDesc(), |
7529 | FALSE); |
7530 | |
7531 | break; |
7532 | } |
7533 | } |
7534 | } // end ... try to find method |
7535 | |
7536 | // |
7537 | // The ECMA CLR spec states that a type will inherit interface implementations |
7538 | // and that explicit re-declaration of an inherited interface will try to match |
7539 | // only newslot methods with methods in the re-declared interface (note that |
7540 | // this also takes care of matching against unsatisfied interface methods in |
7541 | // the abstract parent type scenario). |
7542 | // |
7543 | // So, if the interface was not declared on a parent and we haven't found a |
7544 | // newslot method declared on this type as a match, search all remaining |
7545 | // public virtual methods (including overrides declared on this type) for a |
7546 | // match. |
7547 | // |
7548 | // Please see bug VSW577403 and VSW593884 for details of this breaking change. |
7549 | // |
7550 | if (!fFoundMatchInBuildingClass && |
7551 | !fEquivalentInterfaceImplementedByParent) |
7552 | { |
7553 | if (HasParent()) |
7554 | { |
7555 | // Iterate backward through the parent's method table. This is important to |
7556 | // find the most derived method. |
7557 | bmtParentInfo::Iterator parentMethodIt = bmtParent->IterateSlots(); |
7558 | parentMethodIt.ResetToEnd(); |
7559 | while (parentMethodIt.Prev()) |
7560 | { |
7561 | bmtRTMethod * pCurParentMethod = parentMethodIt->Decl().AsRTMethod(); |
7562 | DWORD dwAttrs = pCurParentMethod->GetDeclAttrs(); |
7563 | if (!IsMdVirtual(dwAttrs) || !IsMdPublic(dwAttrs)) |
7564 | { // Only match mdPublic mdVirtual methods for interface implementation |
7565 | continue; |
7566 | } |
7567 | |
7568 | if (curItfMethodSig.Equivalent(pCurParentMethod->GetMethodSignature())) |
7569 | { |
7570 | fFoundMatchInBuildingClass = TRUE; |
7571 | curItfSlot.Impl() = pCurParentMethod->GetSlotIndex(); |
7572 | |
7573 | DispatchMapTypeID dispatchMapTypeID = |
7574 | DispatchMapTypeID::InterfaceClassID(dwCurInterface); |
7575 | bmtVT->pDispatchMapBuilder->InsertMDMapping( |
7576 | dispatchMapTypeID, |
7577 | static_cast<UINT32>(itfSlotIt.CurrentIndex()), |
7578 | pCurParentMethod->GetMethodDesc(), |
7579 | FALSE); |
7580 | |
7581 | break; |
7582 | } |
7583 | } // end ... try to find parent method |
7584 | } |
7585 | } |
7586 | |
7587 | // For type equivalent interfaces that had an equivalent interface implemented by their parent |
// and where the previous logic to fill in the method based on the virtual mappings on the type has
7589 | // failed, we should attempt to get the mappings from the equivalent interfaces declared on parent types |
7590 | // of the type we are currently building. |
7591 | #ifdef FEATURE_COMINTEROP |
7592 | if (!fFoundMatchInBuildingClass && fEquivalentInterfaceImplementedByParent && !pCurItfEntry->IsImplementedByParent()) |
7593 | { |
7594 | PlaceMethodFromParentEquivalentInterfaceIntoInterfaceSlot(itfSlotIt, pCurItfEntry, &rgInterfaceDispatchMapTypeIDs, dwCurInterface); |
7595 | } |
7596 | #endif |
7597 | } |
7598 | } |
7599 | } // MethodTableBuilder::PlaceInterfaceMethods |
7600 | |
7601 | |
7602 | //******************************************************************************* |
7603 | // |
7604 | // Used by BuildMethodTable |
7605 | // |
7606 | // Place static fields |
7607 | // |
7608 | VOID MethodTableBuilder::PlaceRegularStaticFields() |
7609 | { |
7610 | STANDARD_VM_CONTRACT; |
7611 | |
7612 | DWORD i; |
7613 | |
7614 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Placing statics for %s\n" , this->GetDebugClassName())); |
7615 | |
7616 | // |
7617 | // Place gc refs and value types first, as they need to have handles created for them. |
7618 | // (Placing them together allows us to easily create the handles when Restoring the class, |
7619 | // and when initializing new DLS for the class.) |
7620 | // |
7621 | |
7622 | DWORD dwCumulativeStaticFieldPos = 0 ; |
7623 | DWORD dwCumulativeStaticGCFieldPos = 0; |
7624 | DWORD dwCumulativeStaticBoxFieldPos = 0; |
7625 | |
7626 | // We don't need to do any calculations for the gc refs or valuetypes, as they're |
7627 | // guaranteed to be aligned in ModuleStaticsInfo |
7628 | bmtFP->NumRegularStaticFieldsOfSize[LOG2_PTRSIZE] -= |
7629 | bmtFP->NumRegularStaticGCBoxedFields + bmtFP->NumRegularStaticGCPointerFields; |
7630 | |
7631 | // Place fields, largest first, padding so that each group is aligned to its natural size |
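// For illustration only (hypothetical counts, no GC statics): one 8-byte, two 4-byte,
// one 2-byte and three 1-byte statics give group starts of 0, 8, 16 and 18 and a total
// of 21 non-GC static bytes; walking from the largest size down keeps each group
// naturally aligned without explicit padding.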
7632 | for (i = MAX_LOG2_PRIMITIVE_FIELD_SIZE; (signed int) i >= 0; i--) |
7633 | { |
7634 | // Fields of this size start at the next available location |
7635 | bmtFP->RegularStaticFieldStart[i] = dwCumulativeStaticFieldPos; |
7636 | dwCumulativeStaticFieldPos += (bmtFP->NumRegularStaticFieldsOfSize[i] << i); |
7637 | |
7638 | // Reset counters for the loop after this one |
7639 | bmtFP->NumRegularStaticFieldsOfSize[i] = 0; |
7640 | } |
7641 | |
7642 | |
7643 | if (dwCumulativeStaticFieldPos > FIELD_OFFSET_LAST_REAL_OFFSET) |
7644 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
7645 | |
7646 | DWORD dwNumHandleStatics = bmtFP->NumRegularStaticGCBoxedFields + bmtFP->NumRegularStaticGCPointerFields; |
7647 | if (!FitsIn<WORD>(dwNumHandleStatics)) |
7648 | { // Overflow. |
7649 | BuildMethodTableThrowException(IDS_EE_TOOMANYFIELDS); |
7650 | } |
7651 | SetNumHandleRegularStatics(static_cast<WORD>(dwNumHandleStatics)); |
7652 | |
7653 | if (!FitsIn<WORD>(bmtFP->NumRegularStaticGCBoxedFields)) |
7654 | { // Overflow. |
7655 | BuildMethodTableThrowException(IDS_EE_TOOMANYFIELDS); |
7656 | } |
7657 | SetNumBoxedRegularStatics(static_cast<WORD>(bmtFP->NumRegularStaticGCBoxedFields)); |
7658 | |
7659 | // Tell the module to give us the offsets we'll be using and commit space for us |
7660 | // if necessary |
7661 | DWORD dwNonGCOffset, dwGCOffset; |
7662 | GetModule()->GetOffsetsForRegularStaticData(bmtInternal->pType->GetTypeDefToken(), |
7663 | bmtProp->fDynamicStatics, |
7664 | GetNumHandleRegularStatics(), dwCumulativeStaticFieldPos, |
7665 | &dwGCOffset, &dwNonGCOffset); |
7666 | |
7667 | // Allocate boxed statics first ("x << LOG2_PTRSIZE" is equivalent to "x * sizeof(void *)") |
7668 | dwCumulativeStaticGCFieldPos = bmtFP->NumRegularStaticGCBoxedFields<<LOG2_PTRSIZE; |
7669 | |
7670 | FieldDesc *pFieldDescList = GetApproxFieldDescListRaw(); |
7671 | // Place static fields |
7672 | for (i = 0; i < bmtEnumFields->dwNumStaticFields - bmtEnumFields->dwNumThreadStaticFields; i++) |
7673 | { |
7674 | FieldDesc * pCurField = &pFieldDescList[bmtEnumFields->dwNumInstanceFields+i]; |
7675 | DWORD dwLog2FieldSize = (DWORD)(DWORD_PTR&)pCurField->m_pMTOfEnclosingClass; // log2(field size) |
7676 | DWORD dwOffset = (DWORD) pCurField->m_dwOffset; // offset or type of field |
7677 | |
7678 | switch (dwOffset) |
7679 | { |
7680 | case FIELD_OFFSET_UNPLACED_GC_PTR: |
7681 | // Place GC reference static field |
7682 | pCurField->SetOffset(dwCumulativeStaticGCFieldPos + dwGCOffset); |
7683 | dwCumulativeStaticGCFieldPos += 1<<LOG2_PTRSIZE; |
7684 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Field placed at GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7685 | |
7686 | break; |
7687 | |
7688 | case FIELD_OFFSET_VALUE_CLASS: |
7689 | // Place boxed GC reference static field |
7690 | pCurField->SetOffset(dwCumulativeStaticBoxFieldPos + dwGCOffset); |
7691 | dwCumulativeStaticBoxFieldPos += 1<<LOG2_PTRSIZE; |
7692 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Field placed at GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7693 | |
7694 | break; |
7695 | |
7696 | case FIELD_OFFSET_UNPLACED: |
7697 | // Place non-GC static field |
7698 | pCurField->SetOffset(bmtFP->RegularStaticFieldStart[dwLog2FieldSize] + |
7699 | (bmtFP->NumRegularStaticFieldsOfSize[dwLog2FieldSize] << dwLog2FieldSize) + |
7700 | dwNonGCOffset); |
7701 | bmtFP->NumRegularStaticFieldsOfSize[dwLog2FieldSize]++; |
7702 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Field placed at non GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7703 | break; |
7704 | |
7705 | default: |
7706 | // RVA field |
7707 | break; |
7708 | } |
7709 | |
7710 | LOG((LF_CLASSLOADER, LL_INFO1000000, "Offset of %s: %i\n" , pCurField->m_debugName, pCurField->GetOffset_NoLogging())); |
7711 | } |
7712 | |
7713 | if (bmtProp->fDynamicStatics) |
7714 | { |
7715 | _ASSERTE(dwNonGCOffset == 0 || // no statics at all |
7716 | dwNonGCOffset == OFFSETOF__DomainLocalModule__NormalDynamicEntry__m_pDataBlob); // We need space to point to the GC statics |
7717 | bmtProp->dwNonGCRegularStaticFieldBytes = dwCumulativeStaticFieldPos; |
7718 | } |
7719 | else |
7720 | { |
bmtProp->dwNonGCRegularStaticFieldBytes = 0; // Non-dynamics shouldn't be using this
7722 | } |
7723 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Static field bytes needed (0 is normal for non dynamic case)%i\n" , bmtProp->dwNonGCRegularStaticFieldBytes)); |
7724 | } |
7725 | |
7726 | |
7727 | VOID MethodTableBuilder::PlaceThreadStaticFields() |
7728 | { |
7729 | STANDARD_VM_CONTRACT; |
7730 | |
7731 | DWORD i; |
7732 | |
7733 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: Placing ThreadStatics for %s\n" , this->GetDebugClassName())); |
7734 | |
7735 | // |
7736 | // Place gc refs and value types first, as they need to have handles created for them. |
7737 | // (Placing them together allows us to easily create the handles when Restoring the class, |
7738 | // and when initializing new DLS for the class.) |
7739 | // |
7740 | |
7741 | DWORD dwCumulativeStaticFieldPos = 0 ; |
7742 | DWORD dwCumulativeStaticGCFieldPos = 0; |
7743 | DWORD dwCumulativeStaticBoxFieldPos = 0; |
7744 | |
7745 | // We don't need to do any calculations for the gc refs or valuetypes, as they're |
7746 | // guaranteed to be aligned in ModuleStaticsInfo |
7747 | bmtFP->NumThreadStaticFieldsOfSize[LOG2_PTRSIZE] -= |
7748 | bmtFP->NumThreadStaticGCBoxedFields + bmtFP->NumThreadStaticGCPointerFields; |
7749 | |
7750 | // Place fields, largest first, padding so that each group is aligned to its natural size |
7751 | for (i = MAX_LOG2_PRIMITIVE_FIELD_SIZE; (signed int) i >= 0; i--) |
7752 | { |
7753 | // Fields of this size start at the next available location |
7754 | bmtFP->ThreadStaticFieldStart[i] = dwCumulativeStaticFieldPos; |
7755 | dwCumulativeStaticFieldPos += (bmtFP->NumThreadStaticFieldsOfSize[i] << i); |
7756 | |
7757 | // Reset counters for the loop after this one |
7758 | bmtFP->NumThreadStaticFieldsOfSize[i] = 0; |
7759 | } |
7760 | |
7761 | |
7762 | if (dwCumulativeStaticFieldPos > FIELD_OFFSET_LAST_REAL_OFFSET) |
7763 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
7764 | |
7765 | DWORD dwNumHandleStatics = bmtFP->NumThreadStaticGCBoxedFields + bmtFP->NumThreadStaticGCPointerFields; |
7766 | if (!FitsIn<WORD>(dwNumHandleStatics)) |
7767 | { // Overflow. |
7768 | BuildMethodTableThrowException(IDS_EE_TOOMANYFIELDS); |
7769 | } |
7770 | |
7771 | SetNumHandleThreadStatics(static_cast<WORD>(dwNumHandleStatics)); |
7772 | |
7773 | if (!FitsIn<WORD>(bmtFP->NumThreadStaticGCBoxedFields)) |
7774 | { // Overflow. |
7775 | BuildMethodTableThrowException(IDS_EE_TOOMANYFIELDS); |
7776 | } |
7777 | |
7778 | SetNumBoxedThreadStatics(static_cast<WORD>(bmtFP->NumThreadStaticGCBoxedFields)); |
7779 | |
7780 | // Tell the module to give us the offsets we'll be using and commit space for us |
7781 | // if necessary |
7782 | DWORD dwNonGCOffset, dwGCOffset; |
7783 | |
7784 | GetModule()->GetOffsetsForThreadStaticData(bmtInternal->pType->GetTypeDefToken(), |
7785 | bmtProp->fDynamicStatics, |
7786 | GetNumHandleThreadStatics(), dwCumulativeStaticFieldPos, |
7787 | &dwGCOffset, &dwNonGCOffset); |
7788 | |
7789 | // Allocate boxed statics first ("x << LOG2_PTRSIZE" is equivalent to "x * sizeof(void *)") |
7790 | dwCumulativeStaticGCFieldPos = bmtFP->NumThreadStaticGCBoxedFields<<LOG2_PTRSIZE; |
7791 | |
7792 | FieldDesc *pFieldDescList = GetHalfBakedClass()->GetFieldDescList(); |
7793 | // Place static fields |
7794 | for (i = 0; i < bmtEnumFields->dwNumThreadStaticFields; i++) |
7795 | { |
7796 | FieldDesc * pCurField = &pFieldDescList[bmtEnumFields->dwNumInstanceFields + bmtEnumFields->dwNumStaticFields - bmtEnumFields->dwNumThreadStaticFields + i]; |
7797 | DWORD dwLog2FieldSize = (DWORD)(DWORD_PTR&)pCurField->m_pMTOfEnclosingClass; // log2(field size) |
7798 | DWORD dwOffset = (DWORD) pCurField->m_dwOffset; // offset or type of field |
7799 | |
7800 | switch (dwOffset) |
7801 | { |
7802 | case FIELD_OFFSET_UNPLACED_GC_PTR: |
7803 | // Place GC reference static field |
7804 | pCurField->SetOffset(dwCumulativeStaticGCFieldPos + dwGCOffset); |
7805 | dwCumulativeStaticGCFieldPos += 1<<LOG2_PTRSIZE; |
7806 | LOG((LF_CLASSLOADER, LL_INFO10000, "THREAD STATICS: Field placed at GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7807 | |
7808 | break; |
7809 | |
7810 | case FIELD_OFFSET_VALUE_CLASS: |
7811 | // Place boxed GC reference static field |
7812 | pCurField->SetOffset(dwCumulativeStaticBoxFieldPos + dwGCOffset); |
7813 | dwCumulativeStaticBoxFieldPos += 1<<LOG2_PTRSIZE; |
7814 | LOG((LF_CLASSLOADER, LL_INFO10000, "THREAD STATICS: Field placed at GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7815 | |
7816 | break; |
7817 | |
7818 | case FIELD_OFFSET_UNPLACED: |
7819 | // Place non-GC static field |
7820 | pCurField->SetOffset(bmtFP->ThreadStaticFieldStart[dwLog2FieldSize] + |
7821 | (bmtFP->NumThreadStaticFieldsOfSize[dwLog2FieldSize] << dwLog2FieldSize) + |
7822 | dwNonGCOffset); |
7823 | bmtFP->NumThreadStaticFieldsOfSize[dwLog2FieldSize]++; |
7824 | LOG((LF_CLASSLOADER, LL_INFO10000, "THREAD STATICS: Field placed at non GC offset 0x%x\n" , pCurField->GetOffset_NoLogging())); |
7825 | break; |
7826 | |
7827 | default: |
7828 | // RVA field |
7829 | break; |
7830 | } |
7831 | |
7832 | LOG((LF_CLASSLOADER, LL_INFO1000000, "Offset of %s: %i\n" , pCurField->m_debugName, pCurField->GetOffset_NoLogging())); |
7833 | } |
7834 | |
7835 | if (bmtProp->fDynamicStatics) |
7836 | { |
7837 | _ASSERTE(dwNonGCOffset == 0 || // no thread statics at all |
7838 | dwNonGCOffset == OFFSETOF__ThreadLocalModule__DynamicEntry__m_pDataBlob); // We need space to point to the GC statics |
7839 | bmtProp->dwNonGCThreadStaticFieldBytes = dwCumulativeStaticFieldPos; |
7840 | } |
7841 | else |
7842 | { |
bmtProp->dwNonGCThreadStaticFieldBytes = 0; // Non-dynamics shouldn't be using this
7844 | } |
7845 | LOG((LF_CLASSLOADER, LL_INFO10000, "STATICS: ThreadStatic field bytes needed (0 is normal for non dynamic case)%i\n" , bmtProp->dwNonGCThreadStaticFieldBytes)); |
7846 | } |
7847 | |
7848 | //******************************************************************************* |
7849 | // |
7850 | // Used by BuildMethodTable |
7851 | // |
7852 | // Place instance fields |
7853 | // |
7854 | VOID MethodTableBuilder::PlaceInstanceFields(MethodTable ** pByValueClassCache) |
7855 | { |
7856 | STANDARD_VM_CONTRACT; |
7857 | |
7858 | |
7859 | DWORD i; |
7860 | |
7861 | //=============================================================== |
7862 | // BEGIN: Place instance fields |
7863 | //=============================================================== |
7864 | |
7865 | FieldDesc *pFieldDescList = GetHalfBakedClass()->GetFieldDescList(); |
7866 | DWORD dwCumulativeInstanceFieldPos; |
7867 | |
7868 | // Instance fields start right after the parent |
7869 | dwCumulativeInstanceFieldPos = HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0; |
7870 | |
7871 | DWORD dwOffsetBias = 0; |
7872 | #ifdef FEATURE_64BIT_ALIGNMENT |
7873 | // On platforms where the alignment of 64-bit primitives is a requirement (but we're not guaranteed |
7874 | // this implicitly by the GC) field offset 0 is actually not 8-byte aligned in reference classes. |
7875 | // That's because all such platforms are currently 32-bit and the 4-byte MethodTable pointer pushes us |
7876 | // out of alignment. Ideally we'd solve this by arranging to have the object header allocated at a |
7877 | // 4-byte offset from an 8-byte boundary, but this is difficult to achieve for objects allocated on |
7878 | // the large object heap (which actually requires headers to be 8-byte aligned). |
7879 | // |
7880 | // So we adjust dwCumulativeInstanceFieldPos to account for the MethodTable* and our alignment |
7881 | // calculations will automatically adjust and add padding as necessary. We need to remove this |
7882 | // adjustment when setting the field offset in the field desc, however, since the rest of the system |
7883 | // expects that value to not include the MethodTable*. |
7884 | // |
7885 | // This happens only for reference classes: value type field 0 really does lie at offset 0 for unboxed |
7886 | // value types. We deal with boxed value types by allocating their headers mis-aligned (luckily for us |
7887 | // value types can never get large enough to allocate on the LOH). |
7888 | if (!IsValueClass()) |
7889 | { |
7890 | dwOffsetBias = TARGET_POINTER_SIZE; |
7891 | dwCumulativeInstanceFieldPos += dwOffsetBias; |
7892 | } |
7893 | #endif // FEATURE_64BIT_ALIGNMENT |
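
// For illustration (hypothetical 32-bit reference class): if the accumulated position
// is 0 and the next field is an 8-byte long, the bias first bumps the position to 4
// (accounting for the MethodTable*), the alignment code rounds it up to 8, and the
// FieldDesc records 8 - 4 = 4 -- an offset that lands on an 8-byte boundary at run time
// once the 4-byte MethodTable* preceding the fields is counted back in.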
7894 | |
7895 | #ifdef FEATURE_READYTORUN |
7896 | if (NeedsAlignedBaseOffset()) |
7897 | { |
7898 | // READYTORUN: FUTURE: Use the minimum possible alignment, reduce padding when inheriting within same bubble |
7899 | DWORD dwAlignment = DATA_ALIGNMENT; |
7900 | #ifdef FEATURE_64BIT_ALIGNMENT |
7901 | if (GetHalfBakedClass()->IsAlign8Candidate()) |
7902 | dwAlignment = 8; |
7903 | #endif |
7904 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, dwAlignment); |
7905 | } |
7906 | #endif // FEATURE_READYTORUN |
7907 | |
// place small fields first if the parent has a number of field bytes that is not aligned
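// For illustration (hypothetical, 64-bit, ignoring the alignment bias): if the parent
// ends at 6 bytes and this type declares one 2-byte and one 8-byte field, the 2-byte
// field is back-filled at offset 6, bringing the position to 8, so the 8-byte field
// then needs no additional padding.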
7909 | if (!IS_ALIGNED(dwCumulativeInstanceFieldPos, DATA_ALIGNMENT)) |
7910 | { |
7911 | for (i = 0; i < MAX_LOG2_PRIMITIVE_FIELD_SIZE; i++) { |
7912 | DWORD j; |
7913 | |
7914 | if (IS_ALIGNED(dwCumulativeInstanceFieldPos, size_t{ 1 } << (i + 1))) |
7915 | continue; |
7916 | |
7917 | // check whether there are any bigger fields |
7918 | for (j = i + 1; j <= MAX_LOG2_PRIMITIVE_FIELD_SIZE; j++) { |
7919 | if (bmtFP->NumInstanceFieldsOfSize[j] != 0) |
7920 | break; |
7921 | } |
7922 | // nothing to gain if there are no bigger fields |
7923 | // (the subsequent loop will place fields from large to small fields) |
7924 | if (j > MAX_LOG2_PRIMITIVE_FIELD_SIZE) |
7925 | break; |
7926 | |
7927 | // check whether there are any small enough fields |
7928 | for (j = i; (signed int) j >= 0; j--) { |
7929 | if (bmtFP->NumInstanceFieldsOfSize[j] != 0) |
7930 | break; |
7931 | // TODO: since we will refuse to place GC references we should filter them out here. |
7932 | // otherwise the "back-filling" process stops completely. |
7933 | // (PlaceInstanceFields) |
7934 | // the following code would fix the issue (a replacement for the code above this comment): |
7935 | // if (bmtFP->NumInstanceFieldsOfSize[j] != 0 && |
7936 | // (j != LOG2SLOT || bmtFP->NumInstanceFieldsOfSize[j] > bmtFP->NumInstanceGCPointerFields)) |
7937 | // { |
7938 | // break; |
7939 | // } |
7940 | |
7941 | } |
7942 | // nothing to play with if there are no smaller fields |
7943 | if ((signed int) j < 0) |
7944 | break; |
7945 | // eventually go back and use the smaller field as filling |
7946 | i = j; |
7947 | |
7948 | CONSISTENCY_CHECK(bmtFP->NumInstanceFieldsOfSize[i] != 0); |
7949 | |
7950 | j = bmtFP->FirstInstanceFieldOfSize[i]; |
7951 | |
7952 | // Avoid reordering of gcfields |
7953 | if (i == LOG2SLOT) { |
7954 | for ( ; j < bmtEnumFields->dwNumInstanceFields; j++) { |
7955 | if ((pFieldDescList[j].GetOffset_NoLogging() == FIELD_OFFSET_UNPLACED) && |
7956 | ((DWORD_PTR&)pFieldDescList[j].m_pMTOfEnclosingClass == (size_t)i)) |
7957 | break; |
7958 | } |
7959 | |
7960 | // out of luck - can't reorder gc fields |
7961 | if (j >= bmtEnumFields->dwNumInstanceFields) |
7962 | break; |
7963 | } |
7964 | |
7965 | // Place the field |
7966 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, size_t{ 1 } << i); |
7967 | |
7968 | pFieldDescList[j].SetOffset(dwCumulativeInstanceFieldPos - dwOffsetBias); |
7969 | dwCumulativeInstanceFieldPos += (1 << i); |
7970 | |
7971 | // We've placed this field now, so there is now one less of this size field to place |
7972 | if (--bmtFP->NumInstanceFieldsOfSize[i] == 0) |
7973 | continue; |
7974 | |
7975 | // We are done in this round if we haven't picked the first field |
7976 | if (bmtFP->FirstInstanceFieldOfSize[i] != j) |
7977 | continue; |
7978 | |
7979 | // Update FirstInstanceFieldOfSize[i] to point to the next such field |
7980 | for (j = j+1; j < bmtEnumFields->dwNumInstanceFields; j++) |
7981 | { |
7982 | // The log of the field size is stored in the method table |
7983 | if ((DWORD_PTR&)pFieldDescList[j].m_pMTOfEnclosingClass == (size_t)i) |
7984 | { |
7985 | bmtFP->FirstInstanceFieldOfSize[i] = j; |
7986 | break; |
7987 | } |
7988 | } |
7989 | _ASSERTE(j < bmtEnumFields->dwNumInstanceFields); |
7990 | } |
7991 | } |
7992 | |
7993 | // Place fields, largest first |
7994 | for (i = MAX_LOG2_PRIMITIVE_FIELD_SIZE; (signed int) i >= 0; i--) |
7995 | { |
7996 | if (bmtFP->NumInstanceFieldsOfSize[i] == 0) |
7997 | continue; |
7998 | |
7999 | // Align instance fields if we aren't already |
8000 | #ifdef FEATURE_64BIT_ALIGNMENT |
8001 | DWORD dwDataAlignment = 1 << i; |
8002 | #else |
8003 | DWORD dwDataAlignment = min(1 << i, DATA_ALIGNMENT); |
8004 | #endif |
8005 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, dwDataAlignment); |
8006 | |
8007 | // Fields of this size start at the next available location |
8008 | bmtFP->InstanceFieldStart[i] = dwCumulativeInstanceFieldPos; |
8009 | dwCumulativeInstanceFieldPos += (bmtFP->NumInstanceFieldsOfSize[i] << i); |
8010 | |
8011 | // Reset counters for the loop after this one |
8012 | bmtFP->NumInstanceFieldsOfSize[i] = 0; |
8013 | } |
8014 | |
8015 | |
8016 | // Make corrections to reserve space for GC Pointer Fields |
8017 | // |
8018 | // The GC Pointers simply take up the top part of the region associated |
8019 | // with fields of that size (GC pointers can be 64 bit on certain systems) |
8020 | if (bmtFP->NumInstanceGCPointerFields) |
8021 | { |
8022 | bmtFP->GCPointerFieldStart = bmtFP->InstanceFieldStart[LOG2SLOT] - dwOffsetBias; |
8023 | bmtFP->InstanceFieldStart[LOG2SLOT] = bmtFP->InstanceFieldStart[LOG2SLOT] + (bmtFP->NumInstanceGCPointerFields << LOG2SLOT); |
8024 | bmtFP->NumInstanceGCPointerFields = 0; // reset to zero here, counts up as pointer slots are assigned below |
8025 | } |
8026 | |
8027 | // Place instance fields - be careful not to place any already-placed fields |
8028 | for (i = 0; i < bmtEnumFields->dwNumInstanceFields; i++) |
8029 | { |
8030 | DWORD dwFieldSize = (DWORD)(DWORD_PTR&)pFieldDescList[i].m_pMTOfEnclosingClass; |
8031 | DWORD dwOffset; |
8032 | |
8033 | dwOffset = pFieldDescList[i].GetOffset_NoLogging(); |
8034 | |
8035 | // Don't place already-placed fields |
8036 | if ((dwOffset == FIELD_OFFSET_UNPLACED || dwOffset == FIELD_OFFSET_UNPLACED_GC_PTR || dwOffset == FIELD_OFFSET_VALUE_CLASS)) |
8037 | { |
8038 | if (dwOffset == FIELD_OFFSET_UNPLACED_GC_PTR) |
8039 | { |
8040 | pFieldDescList[i].SetOffset(bmtFP->GCPointerFieldStart + (bmtFP->NumInstanceGCPointerFields << LOG2SLOT)); |
8041 | bmtFP->NumInstanceGCPointerFields++; |
8042 | } |
8043 | else if (pFieldDescList[i].IsByValue() == FALSE) // it's a regular field |
8044 | { |
8045 | pFieldDescList[i].SetOffset(bmtFP->InstanceFieldStart[dwFieldSize] + (bmtFP->NumInstanceFieldsOfSize[dwFieldSize] << dwFieldSize) - dwOffsetBias); |
8046 | bmtFP->NumInstanceFieldsOfSize[dwFieldSize]++; |
8047 | } |
8048 | } |
8049 | } |
8050 | |
8051 | DWORD dwNumGCPointerSeries; |
8052 | // Save Number of pointer series |
8053 | if (bmtFP->NumInstanceGCPointerFields) |
8054 | dwNumGCPointerSeries = bmtParent->NumParentPointerSeries + 1; |
8055 | else |
8056 | dwNumGCPointerSeries = bmtParent->NumParentPointerSeries; |
8057 | |
8058 | // Place by value class fields last |
8059 | // Update the number of GC pointer series |
8060 | for (i = 0; i < bmtEnumFields->dwNumInstanceFields; i++) |
8061 | { |
8062 | if (pFieldDescList[i].IsByValue()) |
8063 | { |
8064 | MethodTable * pByValueMT = pByValueClassCache[i]; |
8065 | |
8066 | // value classes could have GC pointers in them, which need to be pointer-size aligned |
8067 | // so do this if it has not been done already |
8068 | |
8069 | #if !defined(_TARGET_64BIT_) && (DATA_ALIGNMENT > 4) |
8070 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, |
8071 | (pByValueMT->GetNumInstanceFieldBytes() >= DATA_ALIGNMENT) ? DATA_ALIGNMENT : TARGET_POINTER_SIZE); |
8072 | #else // !(!defined(_TARGET_64BIT_) && (DATA_ALIGNMENT > 4)) |
8073 | #ifdef FEATURE_64BIT_ALIGNMENT |
8074 | if (pByValueMT->RequiresAlign8()) |
8075 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, 8); |
8076 | else |
8077 | #endif // FEATURE_64BIT_ALIGNMENT |
8078 | dwCumulativeInstanceFieldPos = (DWORD)ALIGN_UP(dwCumulativeInstanceFieldPos, TARGET_POINTER_SIZE); |
8079 | #endif // !(!defined(_TARGET_64BIT_) && (DATA_ALIGNMENT > 4)) |
8080 | |
8081 | pFieldDescList[i].SetOffset(dwCumulativeInstanceFieldPos - dwOffsetBias); |
8082 | dwCumulativeInstanceFieldPos += pByValueMT->GetAlignedNumInstanceFieldBytes(); |
8083 | |
8084 | // Add pointer series for by-value classes |
8085 | dwNumGCPointerSeries += pByValueMT->ContainsPointers() ? |
8086 | (DWORD)CGCDesc::GetCGCDescFromMT(pByValueMT)->GetNumSeries() : 0; |
8087 | } |
8088 | } |
8089 | |
8090 | // Can be unaligned |
8091 | DWORD dwNumInstanceFieldBytes = dwCumulativeInstanceFieldPos - dwOffsetBias; |
8092 | |
8093 | if (IsValueClass()) |
8094 | { |
8095 | // Like C++ we enforce that there can be no 0 length structures. |
8096 | // Thus for a value class with no fields, we 'pad' the length to be 1 |
8097 | if (dwNumInstanceFieldBytes == 0) |
8098 | dwNumInstanceFieldBytes = 1; |
8099 | |
8100 | // The JITs like to copy full machine words, |
8101 | // so if the size is bigger than a void* round it up to minAlign |
// and if the size is smaller than a void* round it up to the next power of two
8103 | unsigned minAlign; |
8104 | |
8105 | #ifdef FEATURE_64BIT_ALIGNMENT |
8106 | if (GetHalfBakedClass()->IsAlign8Candidate()) { |
8107 | minAlign = 8; |
8108 | } |
8109 | else |
8110 | #endif // FEATURE_64BIT_ALIGNMENT |
8111 | if (dwNumInstanceFieldBytes > TARGET_POINTER_SIZE) { |
8112 | minAlign = TARGET_POINTER_SIZE; |
8113 | } |
8114 | else { |
8115 | minAlign = 1; |
8116 | while (minAlign < dwNumInstanceFieldBytes) |
8117 | minAlign *= 2; |
8118 | } |
8119 | |
8120 | dwNumInstanceFieldBytes = (dwNumInstanceFieldBytes + minAlign-1) & ~(minAlign-1); |
8121 | } |
8122 | |
8123 | if (dwNumInstanceFieldBytes > FIELD_OFFSET_LAST_REAL_OFFSET) { |
8124 | BuildMethodTableThrowException(IDS_CLASSLOAD_FIELDTOOLARGE); |
8125 | } |
8126 | |
8127 | bmtFP->NumInstanceFieldBytes = dwNumInstanceFieldBytes; |
8128 | |
8129 | bmtFP->NumGCPointerSeries = dwNumGCPointerSeries; |
8130 | |
8131 | //=============================================================== |
8132 | // END: Place instance fields |
8133 | //=============================================================== |
8134 | } |
8135 | |
8136 | //******************************************************************************* |
// This accesses the field size which is temporarily stored in m_pMTOfEnclosingClass
// during class loading. Don't use it at any other time.
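// (For example, a 4-byte primitive field has log2(size) == 2 stored here, so this
// returns 1 << 2 == 4; a by-value field has its size stored directly.)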
8139 | DWORD MethodTableBuilder::GetFieldSize(FieldDesc *pFD) |
8140 | { |
8141 | STATIC_CONTRACT_NOTHROW; |
8142 | STATIC_CONTRACT_GC_NOTRIGGER; |
8143 | STATIC_CONTRACT_FORBID_FAULT; |
8144 | |
8145 | // We should only be calling this while this class is being built. |
8146 | _ASSERTE(GetHalfBakedMethodTable() == 0); |
8147 | BAD_FORMAT_NOTHROW_ASSERT(! pFD->IsByValue() || HasExplicitFieldOffsetLayout()); |
8148 | |
8149 | if (pFD->IsByValue()) |
8150 | return (DWORD)(DWORD_PTR&)(pFD->m_pMTOfEnclosingClass); |
8151 | return (1 << (DWORD)(DWORD_PTR&)(pFD->m_pMTOfEnclosingClass)); |
8152 | } |
8153 | |
8154 | #ifdef UNIX_AMD64_ABI |
8155 | // checks whether the struct is enregisterable. |
8156 | void MethodTableBuilder::SystemVAmd64CheckForPassStructInRegister() |
8157 | { |
8158 | STANDARD_VM_CONTRACT; |
8159 | |
8160 | // This method should be called for valuetypes only |
8161 | _ASSERTE(IsValueClass()); |
8162 | |
8163 | TypeHandle th(GetHalfBakedMethodTable()); |
8164 | |
8165 | if (th.IsTypeDesc()) |
8166 | { |
8167 | // Not an enregisterable managed structure. |
8168 | return; |
8169 | } |
8170 | |
8171 | DWORD totalStructSize = bmtFP->NumInstanceFieldBytes; |
8172 | |
8173 | // If num of bytes for the fields is bigger than CLR_SYSTEMV_MAX_STRUCT_BYTES_TO_PASS_IN_REGISTERS |
8174 | // pass through stack |
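// (Per the SysV AMD64 ABI the register-passing limit is two eightbytes, i.e. 16 bytes;
// e.g. a struct of two doubles can still be passed in registers, while a 24-byte struct
// is passed on the stack.)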
8175 | if (totalStructSize > CLR_SYSTEMV_MAX_STRUCT_BYTES_TO_PASS_IN_REGISTERS) |
8176 | { |
8177 | LOG((LF_JIT, LL_EVERYTHING, "**** SystemVAmd64CheckForPassStructInRegister: struct %s is too big to pass in registers (%d bytes)\n" , |
8178 | this->GetDebugClassName(), totalStructSize)); |
8179 | return; |
8180 | } |
8181 | |
8182 | const bool useNativeLayout = false; |
8183 | // Iterate through the fields and make sure they meet requirements to pass in registers |
8184 | SystemVStructRegisterPassingHelper helper((unsigned int)totalStructSize); |
8185 | if (GetHalfBakedMethodTable()->ClassifyEightBytes(&helper, 0, 0, useNativeLayout)) |
8186 | { |
8187 | // All the above tests passed. It's registers passed struct! |
8188 | GetHalfBakedMethodTable()->SetRegPassedStruct(); |
8189 | |
8190 | StoreEightByteClassification(&helper); |
8191 | } |
8192 | } |
8193 | |
8194 | // checks whether the struct is enregisterable. |
8195 | void MethodTableBuilder::SystemVAmd64CheckForPassNativeStructInRegister() |
8196 | { |
8197 | STANDARD_VM_CONTRACT; |
8198 | DWORD totalStructSize = 0; |
8199 | |
8200 | // If not a native value type, return. |
8201 | if (!IsValueClass()) |
8202 | { |
8203 | return; |
8204 | } |
8205 | |
8206 | totalStructSize = GetLayoutInfo()->GetNativeSize(); |
8207 | |
8208 | // If num of bytes for the fields is bigger than CLR_SYSTEMV_MAX_STRUCT_BYTES_TO_PASS_IN_REGISTERS |
8209 | // pass through stack |
8210 | if (totalStructSize > CLR_SYSTEMV_MAX_STRUCT_BYTES_TO_PASS_IN_REGISTERS) |
8211 | { |
8212 | LOG((LF_JIT, LL_EVERYTHING, "**** SystemVAmd64CheckForPassNativeStructInRegister: struct %s is too big to pass in registers (%d bytes)\n" , |
8213 | this->GetDebugClassName(), totalStructSize)); |
8214 | return; |
8215 | } |
8216 | |
8217 | _ASSERTE(HasLayout()); |
8218 | |
8219 | // Classify the native layout for this struct. |
8220 | const bool useNativeLayout = true; |
8221 | // Iterate through the fields and make sure they meet requirements to pass in registers |
8222 | SystemVStructRegisterPassingHelper helper((unsigned int)totalStructSize); |
8223 | if (GetHalfBakedMethodTable()->ClassifyEightBytes(&helper, 0, 0, useNativeLayout)) |
8224 | { |
8225 | GetLayoutInfo()->SetNativeStructPassedInRegisters(); |
8226 | } |
8227 | } |
8228 | |
8229 | // Store the eightbyte classification into the EEClass |
8230 | void MethodTableBuilder::StoreEightByteClassification(SystemVStructRegisterPassingHelper* helper) |
8231 | { |
8232 | EEClass* eeClass = GetHalfBakedMethodTable()->GetClass(); |
8233 | LoaderAllocator* pAllocator = MethodTableBuilder::GetLoaderAllocator(); |
8234 | AllocMemTracker* pamTracker = MethodTableBuilder::GetMemTracker(); |
8235 | EnsureOptionalFieldsAreAllocated(eeClass, pamTracker, pAllocator->GetLowFrequencyHeap()); |
8236 | eeClass->SetEightByteClassification(helper->eightByteCount, helper->eightByteClassifications, helper->eightByteSizes); |
8237 | } |
8238 | |
8239 | #endif // UNIX_AMD64_ABI |
8240 | |
8241 | //--------------------------------------------------------------------------------------- |
8242 | // |
8243 | // make sure that no object fields are overlapped incorrectly and define the |
8244 | // GC pointer series for the class. We are assuming that this class will always be laid out within |
8245 | // its enclosing class by the compiler in such a way that offset 0 will be the correct alignment |
8246 | // for object ref fields so we don't need to try to align it |
8247 | // |
8248 | VOID |
8249 | MethodTableBuilder::HandleExplicitLayout( |
8250 | MethodTable ** pByValueClassCache) |
8251 | { |
8252 | STANDARD_VM_CONTRACT; |
8253 | |
8254 | |
// Instance slice size is the total size of an instance, and is calculated as
// the largest sum of any field's offset plus its size.
8257 | UINT instanceSliceSize = 0; |
8258 | DWORD firstObjectOverlapOffset = ((DWORD)(-1)); |
8259 | |
8260 | |
8261 | UINT i; |
8262 | for (i = 0; i < bmtMetaData->cFields; i++) |
8263 | { |
8264 | FieldDesc *pFD = bmtMFDescs->ppFieldDescList[i]; |
8265 | if (pFD == NULL || pFD->IsStatic()) |
8266 | { |
8267 | continue; |
8268 | } |
8269 | |
8270 | UINT fieldExtent = 0; |
8271 | if (!ClrSafeInt<UINT>::addition(pFD->GetOffset_NoLogging(), GetFieldSize(pFD), fieldExtent)) |
8272 | { |
8273 | BuildMethodTableThrowException(COR_E_OVERFLOW); |
8274 | } |
8275 | |
8276 | if (fieldExtent > instanceSliceSize) |
8277 | { |
8278 | instanceSliceSize = fieldExtent; |
8279 | } |
8280 | } |
8281 | |
8282 | CQuickBytes qb; |
8283 | PREFIX_ASSUME(sizeof(BYTE) == 1); |
8284 | BYTE *pFieldLayout = (BYTE*) qb.AllocThrows(instanceSliceSize * sizeof(BYTE)); |
8285 | for (i=0; i < instanceSliceSize; i++) |
8286 | { |
8287 | pFieldLayout[i] = empty; |
8288 | } |
8289 | |
8290 | // go through each field and look for invalid layout |
8291 | // (note that we are more permissive than what Ecma allows. We only disallow the minimum set necessary to |
8292 | // close security holes.) |
8293 | // |
// This is what we implement:
8295 | // |
8296 | // 1. Verify that every OREF is on a valid alignment |
8297 | // 2. Verify that OREFs only overlap with other OREFs. |
8298 | // 3. If an OREF does overlap with another OREF, the class is marked unverifiable. |
8299 | // 4. If an overlap of any kind occurs, the class will be marked NotTightlyPacked (affects ValueType.Equals()). |
8300 | // |
8301 | char emptyObject[TARGET_POINTER_SIZE]; |
8302 | char isObject[TARGET_POINTER_SIZE]; |
8303 | for (i = 0; i < TARGET_POINTER_SIZE; i++) |
8304 | { |
8305 | emptyObject[i] = empty; |
8306 | isObject[i] = oref; |
8307 | } |
8308 | |
8309 | |
8310 | ExplicitClassTrust explicitClassTrust; |
8311 | |
8312 | UINT valueClassCacheIndex = ((UINT)(-1)); |
8313 | UINT badOffset = 0; |
8314 | FieldDesc * pFD = NULL; |
8315 | for (i = 0; i < bmtMetaData->cFields; i++) |
8316 | { |
8317 | // Note about this loop body: |
8318 | // |
8319 | // This loop is coded to make it as hard as possible to allow a field to be trusted when it shouldn't. |
8320 | // |
8321 | // Every path in this loop body must lead to an explicit decision as to whether the field nonoverlaps, |
8322 | // overlaps in a verifiable fashion, overlaps in a nonverifiable fashion or overlaps in a completely illegal fashion. |
8323 | // |
8324 | // It must call fieldTrust.SetTrust() with the appropriate result. If you don't call it, fieldTrust's destructor |
8325 | // will intentionally default to kNone and mark the entire class illegal. |
8326 | // |
8327 | // If your result is anything but kNone (class is illegal), you must also explicitly "continue" the loop. |
8328 | // There is a "break" at end of this loop body that will abort the loop if you don't do this. And |
8329 | // if you don't finish iterating through all the fields, this function will automatically mark the entire |
8330 | // class illegal. This rule is a vestige of an earlier version of this function. |
8331 | |
8332 | // This object's dtor will aggregate the trust decision for this field into the trust level for the class as a whole. |
8333 | ExplicitFieldTrustHolder fieldTrust(&explicitClassTrust); |
8334 | |
8335 | pFD = bmtMFDescs->ppFieldDescList[i]; |
8336 | if (pFD == NULL || pFD->IsStatic()) |
8337 | { |
8338 | fieldTrust.SetTrust(ExplicitFieldTrust::kNonOverLayed); |
8339 | continue; |
8340 | } |
8341 | |
8342 | // "i" indexes all fields, valueClassCacheIndex indexes non-static fields only. Don't get them confused! |
8343 | valueClassCacheIndex++; |
8344 | |
8345 | if (CorTypeInfo::IsObjRef(pFD->GetFieldType())) |
8346 | { |
8347 | // Check that the ref offset is pointer aligned |
8348 | if ((pFD->GetOffset_NoLogging() & ((ULONG)TARGET_POINTER_SIZE - 1)) != 0) |
8349 | { |
8350 | badOffset = pFD->GetOffset_NoLogging(); |
8351 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8352 | |
8353 | // If we got here, OREF field was not pointer aligned. THROW. |
8354 | break; |
8355 | } |
8356 | // check if overlaps another object |
8357 | if (memcmp((void *)&pFieldLayout[pFD->GetOffset_NoLogging()], (void *)isObject, sizeof(isObject)) == 0) |
8358 | { |
8359 | // If we got here, an OREF overlapped another OREF. We permit this but mark the class unverifiable. |
8360 | fieldTrust.SetTrust(ExplicitFieldTrust::kLegal); |
8361 | |
8362 | if (firstObjectOverlapOffset == ((DWORD)(-1))) |
8363 | { |
8364 | firstObjectOverlapOffset = pFD->GetOffset_NoLogging(); |
8365 | } |
8366 | |
8367 | continue; |
8368 | } |
8369 | // check if is empty at this point |
8370 | if (memcmp((void *)&pFieldLayout[pFD->GetOffset_NoLogging()], (void *)emptyObject, sizeof(emptyObject)) == 0) |
8371 | { |
8372 | // If we got here, this OREF is overlapping no other fields (yet). Record that these bytes now contain an OREF. |
8373 | memset((void *)&pFieldLayout[pFD->GetOffset_NoLogging()], oref, sizeof(isObject)); |
8374 | fieldTrust.SetTrust(ExplicitFieldTrust::kNonOverLayed); |
8375 | continue; |
8376 | } |
8377 | |
8378 | // If we got here, the OREF overlaps a non-OREF. THROW. |
8379 | badOffset = pFD->GetOffset_NoLogging(); |
8380 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8381 | break; |
8382 | } |
8383 | else |
8384 | { |
8385 | UINT fieldSize; |
8386 | if (pFD->IsByValue()) |
8387 | { |
8388 | MethodTable *pByValueMT = pByValueClassCache[valueClassCacheIndex]; |
8389 | if (pByValueMT->ContainsPointers()) |
8390 | { |
8391 | if ((pFD->GetOffset_NoLogging() & ((ULONG)TARGET_POINTER_SIZE - 1)) == 0) |
8392 | { |
8393 | ExplicitFieldTrust::TrustLevel trust; |
8394 | DWORD firstObjectOverlapOffsetInsideValueClass = ((DWORD)(-1)); |
8395 | trust = CheckValueClassLayout(pByValueMT, &pFieldLayout[pFD->GetOffset_NoLogging()], &firstObjectOverlapOffsetInsideValueClass); |
8396 | fieldTrust.SetTrust(trust); |
8397 | if (firstObjectOverlapOffsetInsideValueClass != ((DWORD)(-1))) |
8398 | { |
8399 | if (firstObjectOverlapOffset == ((DWORD)(-1))) |
8400 | { |
8401 | firstObjectOverlapOffset = pFD->GetOffset_NoLogging() + firstObjectOverlapOffsetInsideValueClass; |
8402 | } |
8403 | } |
8404 | |
8405 | if (trust != ExplicitFieldTrust::kNone) |
8406 | { |
8407 | continue; |
8408 | } |
8409 | else |
8410 | { |
8411 | // If we got here, then an OREF inside the valuetype illegally overlapped a non-OREF field. THROW. |
8412 | badOffset = pFD->GetOffset_NoLogging(); |
8413 | break; |
8414 | } |
8415 | } |
8416 | // If we got here, then a valuetype containing an OREF was misaligned. |
8417 | badOffset = pFD->GetOffset_NoLogging(); |
8418 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8419 | break; |
8420 | } |
8421 | // no pointers so fall through to do standard checking |
8422 | fieldSize = pByValueMT->GetNumInstanceFieldBytes(); |
8423 | } |
8424 | else |
8425 | { |
// field size temporarily stored in m_pMTOfEnclosingClass (see GetFieldSize)
8427 | fieldSize = GetFieldSize(pFD); |
8428 | } |
8429 | |
8430 | // If we got here, we are trying to place a non-OREF (or a valuetype composed of non-OREFs.) |
8431 | // Look for any orefs under this field |
8432 | BYTE *loc; |
8433 | if ((loc = (BYTE*)memchr((void*)&pFieldLayout[pFD->GetOffset_NoLogging()], oref, fieldSize)) == NULL) |
8434 | { |
8435 | // If we have a nonoref in the range then we are doing an overlay |
8436 | if(memchr((void*)&pFieldLayout[pFD->GetOffset_NoLogging()], nonoref, fieldSize)) |
8437 | { |
8438 | fieldTrust.SetTrust(ExplicitFieldTrust::kVerifiable); |
8439 | } |
8440 | else |
8441 | { |
8442 | fieldTrust.SetTrust(ExplicitFieldTrust::kNonOverLayed); |
8443 | } |
8444 | memset((void*)&pFieldLayout[pFD->GetOffset_NoLogging()], nonoref, fieldSize); |
8445 | continue; |
8446 | } |
8447 | |
8448 | // If we got here, we tried to place a non-OREF (or a valuetype composed of non-OREFs) |
8449 | // on top of an OREF. THROW. |
8450 | badOffset = (UINT)(loc - pFieldLayout); |
8451 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8452 | break; |
8453 | // anything else is an error |
8454 | } |
8455 | |
8456 | // We have to comment out this assert because otherwise, the compiler refuses to build because the _ASSERT is unreachable |
8457 | // (Thanks for nothing, compiler, that's what the assert is trying to enforce!) But the intent of the assert is correct. |
8458 | //_ASSERTE(!"You aren't supposed to be here. Some path inside the loop body did not execute an explicit break or continue."); |
8459 | |
8460 | |
8461 | // If we got here, some code above failed to execute an explicit "break" or "continue." This is a bug! To be safe, |
8462 | // we will put a catchall "break" here which will cause the typeload to abort (albeit with a probably misleading |
8463 | // error message.) |
8464 | break; |
8465 | } // for(;;) |
8466 | |
8467 | // We only break out of the loop above if we detected an error. |
8468 | if (i < bmtMetaData->cFields || !explicitClassTrust.IsLegal()) |
8469 | { |
8470 | ThrowFieldLayoutError(GetCl(), |
8471 | GetModule(), |
8472 | badOffset, |
8473 | IDS_CLASSLOAD_EXPLICIT_LAYOUT); |
8474 | } |
8475 | |
8476 | if (!explicitClassTrust.IsNonOverLayed()) |
8477 | { |
8478 | SetHasOverLayedFields(); |
8479 | } |
8480 | |
8481 | if (IsBlittable() || IsManagedSequential()) |
8482 | { |
8483 | // Bug 849333: We shouldn't update "bmtFP->NumInstanceFieldBytes" |
// for Blittable/ManagedSequential types, as this would break backward compatibility
8485 | // for the size of types that return true for HasExplicitFieldOffsetLayout() |
8486 | // |
8487 | return; |
8488 | } |
8489 | |
8490 | FindPointerSeriesExplicit(instanceSliceSize, pFieldLayout); |
8491 | |
8492 | // Fixup the offset to include parent as current offsets are relative to instance slice |
8493 | // Could do this earlier, but it's just easier to assume instance relative for most |
8494 | // of the earlier calculations |
8495 | |
8496 | // Instance fields start right after the parent |
8497 | S_UINT32 dwInstanceSliceOffset = S_UINT32(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0); |
8498 | if (bmtGCSeries->numSeries != 0) |
8499 | { |
8500 | dwInstanceSliceOffset.AlignUp(TARGET_POINTER_SIZE); |
8501 | } |
8502 | if (dwInstanceSliceOffset.IsOverflow()) |
8503 | { |
8504 | // addition overflow or cast truncation |
8505 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
8506 | } |
8507 | |
8508 | S_UINT32 numInstanceFieldBytes = dwInstanceSliceOffset + S_UINT32(instanceSliceSize); |
8509 | |
8510 | if (IsValueClass()) |
8511 | { |
8512 | ULONG clstotalsize; |
8513 | if (FAILED(GetMDImport()->GetClassTotalSize(GetCl(), &clstotalsize))) |
8514 | { |
8515 | clstotalsize = 0; |
8516 | } |
8517 | |
8518 | if (clstotalsize != 0) |
8519 | { |
// size must be large enough to accommodate the layout. If not, we use the layout size instead.
8521 | if (!numInstanceFieldBytes.IsOverflow() && clstotalsize >= numInstanceFieldBytes.Value()) |
8522 | { |
8523 | numInstanceFieldBytes = S_UINT32(clstotalsize); |
8524 | } |
8525 | } |
8526 | } |
8527 | |
8528 | // The GC requires that all valuetypes containing orefs be sized to a multiple of TARGET_POINTER_SIZE. |
8529 | if (bmtGCSeries->numSeries != 0) |
8530 | { |
8531 | numInstanceFieldBytes.AlignUp(TARGET_POINTER_SIZE); |
8532 | } |
8533 | if (numInstanceFieldBytes.IsOverflow()) |
8534 | { |
8535 | // addition overflow or cast truncation |
8536 | BuildMethodTableThrowException(IDS_CLASSLOAD_GENERAL); |
8537 | } |
8538 | |
8539 | // Set the total size |
8540 | bmtFP->NumInstanceFieldBytes = numInstanceFieldBytes.Value(); |
8541 | |
8542 | for (i = 0; i < bmtMetaData->cFields; i++) |
8543 | { |
8544 | FieldDesc * pTempFD = bmtMFDescs->ppFieldDescList[i]; |
8545 | if ((pTempFD == NULL) || pTempFD->IsStatic()) |
8546 | { |
8547 | continue; |
8548 | } |
8549 | HRESULT hr = pTempFD->SetOffset(pTempFD->GetOffset_NoLogging() + dwInstanceSliceOffset.Value()); |
8550 | if (FAILED(hr)) |
8551 | { |
8552 | BuildMethodTableThrowException(hr, *bmtError); |
8553 | } |
8554 | } |
8555 | } // MethodTableBuilder::HandleExplicitLayout |
8556 | |
8557 | //******************************************************************************* |
// make sure that no object fields are overlapped incorrectly; returns the aggregated
// trust level for the overlay (kNonOverLayed if there is no overlap, kNone if the overlap is illegal)
8560 | /*static*/ ExplicitFieldTrust::TrustLevel MethodTableBuilder::CheckValueClassLayout(MethodTable * pMT, BYTE *pFieldLayout, DWORD *pFirstObjectOverlapOffset) |
8561 | { |
8562 | STANDARD_VM_CONTRACT; |
8563 | |
8564 | |
8565 | *pFirstObjectOverlapOffset = (DWORD)(-1); |
8566 | |
// Build a layout of the value class. We don't easily know the sizes of all the fields, but
// we do know a) the vc is already consistent so we don't need to check its overlaps and
// b) the size and location of all objectrefs. So build it by setting everything to non-oref
// and then filling in the orefs afterwards.
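// For illustration (hypothetical, 64-bit): a value class { object o; int i; } yields a
// 16-byte vcLayout of 8 oref bytes followed by 8 nonoref bytes. Merging it where the
// enclosing layout already holds an OREF in the first 8 bytes (and is empty elsewhere)
// aggregates to kLegal (oref over oref); merging it where those bytes are nonoref
// aggregates to kNone and the layout is rejected.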
8571 | UINT fieldSize = pMT->GetNumInstanceFieldBytes(); |
8572 | CQuickBytes qb; |
8573 | BYTE *vcLayout = (BYTE*) qb.AllocThrows(fieldSize * sizeof(BYTE)); |
8574 | |
8575 | memset((void*)vcLayout, nonoref, fieldSize); |
8576 | |
8577 | // use pointer series to locate the orefs |
8578 | |
8579 | CGCDesc* map = CGCDesc::GetCGCDescFromMT(pMT); |
8580 | CGCDescSeries *pSeries = map->GetLowestSeries(); |
8581 | |
8582 | for (SIZE_T j = 0; j < map->GetNumSeries(); j++) |
8583 | { |
8584 | CONSISTENCY_CHECK(pSeries <= map->GetHighestSeries()); |
8585 | |
8586 | memset((void*)&vcLayout[pSeries->GetSeriesOffset() - OBJECT_SIZE], oref, pSeries->GetSeriesSize() + pMT->GetBaseSize()); |
8587 | pSeries++; |
8588 | } |
8589 | |
8590 | |
8591 | ExplicitClassTrust explicitClassTrust; |
8592 | |
8593 | for (UINT i=0; i < fieldSize; i++) { |
8594 | |
8595 | ExplicitFieldTrustHolder fieldTrust(&explicitClassTrust); |
8596 | |
8597 | if (vcLayout[i] == oref) { |
8598 | switch (pFieldLayout[i]) { |
8599 | // oref <--> empty |
8600 | case empty: |
8601 | pFieldLayout[i] = oref; |
8602 | fieldTrust.SetTrust(ExplicitFieldTrust::kNonOverLayed); |
8603 | break; |
8604 | |
8605 | // oref <--> nonoref |
8606 | case nonoref: |
8607 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8608 | break; |
8609 | |
8610 | // oref <--> oref |
8611 | case oref: |
8612 | fieldTrust.SetTrust(ExplicitFieldTrust::kLegal); |
8613 | if ((*pFirstObjectOverlapOffset) == ((DWORD)(-1))) |
8614 | { |
8615 | *pFirstObjectOverlapOffset = (DWORD)i; |
8616 | } |
8617 | break; |
8618 | |
8619 | default: |
8620 | _ASSERTE(!"Can't get here." ); |
8621 | } |
8622 | } else if (vcLayout[i] == nonoref) { |
8623 | switch (pFieldLayout[i]) { |
8624 | // nonoref <--> empty |
8625 | case empty: |
8626 | pFieldLayout[i] = nonoref; |
8627 | fieldTrust.SetTrust(ExplicitFieldTrust::kNonOverLayed); |
8628 | break; |
8629 | |
8630 | // nonoref <--> nonoref |
8631 | case nonoref: |
8632 | fieldTrust.SetTrust(ExplicitFieldTrust::kVerifiable); |
8633 | break; |
8634 | |
8635 | // nonoref <--> oref |
8636 | case oref: |
8637 | fieldTrust.SetTrust(ExplicitFieldTrust::kNone); |
8638 | break; |
8639 | |
8640 | default: |
8641 | _ASSERTE(!"Can't get here." ); |
8642 | } |
8643 | } else { |
8644 | _ASSERTE(!"Can't get here." ); |
8645 | } |
8646 | } |
8647 | |
8648 | return explicitClassTrust.GetTrustLevel(); |
8649 | } |
8650 | |
8651 | |
8652 | |
8653 | |
8654 | |
8655 | |
8656 | |
8657 | //******************************************************************************* |
8658 | void MethodTableBuilder::FindPointerSeriesExplicit(UINT instanceSliceSize, |
8659 | BYTE *pFieldLayout) |
8660 | { |
8661 | STANDARD_VM_CONTRACT; |
8662 | |
8663 | |
// Allocate a structure to track the series. The worst case is an alternating
// ref / non-ref pattern, and since only ref series are recorded and non-ref series
// are skipped, the maximum number of series is total instance size / 2 / sizeof(ref).
// But watch out for the case where we have e.g. an instanceSliceSize of 4.
8668 | DWORD sz = (instanceSliceSize + (2 * TARGET_POINTER_SIZE) - 1); |
8669 | bmtGCSeries->pSeries = new bmtGCSeriesInfo::Series[sz/2/ TARGET_POINTER_SIZE]; |
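// Worked example (assuming TARGET_POINTER_SIZE == 8): for an instanceSliceSize of 4,
// sz is 4 + 16 - 1 == 19 and 19 / 2 / 8 == 1, so one series slot is still allocated,
// whereas the naive instanceSliceSize / 2 / TARGET_POINTER_SIZE would have yielded 0.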
8670 | |
8671 | BYTE *loc = pFieldLayout; |
8672 | BYTE *layoutEnd = pFieldLayout + instanceSliceSize; |
8673 | while (loc < layoutEnd) |
8674 | { |
8675 | // Find the next OREF entry. |
8676 | loc = (BYTE*)memchr((void*)loc, oref, layoutEnd-loc); |
8677 | if (loc == NULL) |
8678 | { |
8679 | break; |
8680 | } |
8681 | |
8682 | // Find the next non-OREF entry |
8683 | BYTE *cur = loc; |
8684 | while(cur < layoutEnd && *cur == oref) |
8685 | { |
8686 | cur++; |
8687 | } |
8688 | |
8689 | // so we have a GC series at loc for cur-loc bytes |
8690 | bmtGCSeries->pSeries[bmtGCSeries->numSeries].offset = (DWORD)(loc - pFieldLayout); |
8691 | bmtGCSeries->pSeries[bmtGCSeries->numSeries].len = (DWORD)(cur - loc); |
8692 | |
8693 | CONSISTENCY_CHECK(IS_ALIGNED(cur - loc, TARGET_POINTER_SIZE)); |
8694 | |
8695 | bmtGCSeries->numSeries++; |
8696 | loc = cur; |
8697 | } |
8698 | |
8699 | // Calculate the total series count including the parent, if a parent exists. |
8700 | |
8701 | bmtFP->NumGCPointerSeries = bmtParent->NumParentPointerSeries + bmtGCSeries->numSeries; |
8702 | |
8703 | } |
8704 | |
8705 | //******************************************************************************* |
8706 | VOID |
8707 | MethodTableBuilder::HandleGCForExplicitLayout() |
8708 | { |
8709 | STANDARD_VM_CONTRACT; |
8710 | |
8711 | MethodTable *pMT = GetHalfBakedMethodTable(); |
8712 | |
8713 | #ifdef FEATURE_COLLECTIBLE_TYPES |
8714 | if (bmtFP->NumGCPointerSeries == 0 && pMT->Collectible()) |
8715 | { |
8716 | // For collectible types, insert empty gc series |
8717 | CGCDescSeries *pSeries; |
8718 | |
8719 | CGCDesc::Init( (PVOID) pMT, 1); |
8720 | pSeries = ((CGCDesc*)pMT)->GetLowestSeries(); |
8721 | pSeries->SetSeriesSize( (size_t) (0) - (size_t) pMT->GetBaseSize()); |
8722 | pSeries->SetSeriesOffset(OBJECT_SIZE); |
8723 | } |
8724 | else |
8725 | #endif // FEATURE_COLLECTIBLE_TYPES |
8726 | if (bmtFP->NumGCPointerSeries != 0) |
8727 | { |
8728 | pMT->SetContainsPointers(); |
8729 | |
8730 | // Copy the pointer series map from the parent |
8731 | CGCDesc::Init( (PVOID) pMT, bmtFP->NumGCPointerSeries ); |
8732 | if (bmtParent->NumParentPointerSeries != 0) |
8733 | { |
8734 | size_t ParentGCSize = CGCDesc::ComputeSize(bmtParent->NumParentPointerSeries); |
8735 | memcpy( (PVOID) (((BYTE*) pMT) - ParentGCSize), (PVOID) (((BYTE*) GetParentMethodTable()) - ParentGCSize), ParentGCSize - sizeof(UINT) ); |
8736 | |
8737 | } |
8738 | |
8739 | UINT32 dwInstanceSliceOffset = AlignUp(HasParent() ? GetParentMethodTable()->GetNumInstanceFieldBytes() : 0, TARGET_POINTER_SIZE); |
8740 | |
// Build the pointer series map for the pointers in this instance
8742 | CGCDescSeries *pSeries = ((CGCDesc*)pMT)->GetLowestSeries(); |
8743 | for (UINT i=0; i < bmtGCSeries->numSeries; i++) { |
8744 | // See gcdesc.h for an explanation of why we adjust by subtracting BaseSize |
8745 | BAD_FORMAT_NOTHROW_ASSERT(pSeries <= CGCDesc::GetCGCDescFromMT(pMT)->GetHighestSeries()); |
8746 | |
8747 | pSeries->SetSeriesSize( (size_t) bmtGCSeries->pSeries[i].len - (size_t) pMT->GetBaseSize() ); |
8748 | pSeries->SetSeriesOffset(bmtGCSeries->pSeries[i].offset + OBJECT_SIZE + dwInstanceSliceOffset); |
8749 | pSeries++; |
8750 | } |
8751 | } |
8752 | |
8753 | delete [] bmtGCSeries->pSeries; |
8754 | bmtGCSeries->pSeries = NULL; |
8755 | } // MethodTableBuilder::HandleGCForExplicitLayout |
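
//*******************************************************************************
// Helper for LoadExactInterfaceMap: appends pNew to pArray unless an identical entry is
// already present (duplicates are not re-inserted). Returns TRUE when the caller must fall
// back to the metadata-based layout algorithm, i.e. when a duplicate instantiated interface
// was found or nArraySizeMax was exceeded; returns FALSE otherwise.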
8756 | |
8757 | static |
8758 | BOOL |
8759 | InsertMethodTable( |
8760 | MethodTable *pNew, |
8761 | MethodTable **pArray, |
8762 | DWORD nArraySizeMax, |
8763 | DWORD *pNumAssigned) |
8764 | { |
8765 | LIMITED_METHOD_CONTRACT; |
8766 | |
8767 | for (DWORD j = 0; j < (*pNumAssigned); j++) |
8768 | { |
8769 | if (pNew == pArray[j]) |
8770 | { |
8771 | #ifdef _DEBUG |
8772 | LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Found duplicate interface %s (%p) at position %d out of %d\n" , pNew->GetDebugClassName(), pNew, j, *pNumAssigned)); |
8773 | #endif |
8774 | return pNew->HasInstantiation(); // bail out - we found a duplicate instantiated interface |
8775 | } |
8776 | else |
8777 | { |
8778 | #ifdef _DEBUG |
8779 | LOG((LF_CLASSLOADER, LL_INFO1000, " GENERICS: InsertMethodTable ignored interface %s (%p) at position %d out of %d\n" , pArray[j]->GetDebugClassName(), pArray[j], j, *pNumAssigned)); |
8780 | #endif |
8781 | } |
8782 | } |
8783 | if (*pNumAssigned >= nArraySizeMax) |
8784 | { |
8785 | LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Found interface %s (%p) exceeding size %d of interface array\n" , pNew->GetDebugClassName(), pNew, nArraySizeMax)); |
8786 | return TRUE; |
8787 | } |
8788 | LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Inserting interface %s (%p) at position %d\n" , pNew->GetDebugClassName(), pNew, *pNumAssigned)); |
8789 | pArray[(*pNumAssigned)++] = pNew; |
8790 | return FALSE; |
8791 | } // InsertMethodTable |
8792 | |
8793 | |
8794 | //******************************************************************************* |
8795 | // -------------------------------------------------------------------------------------------- |
8796 | // Copy virtual slots inherited from parent: |
8797 | // |
// In types created at runtime, inherited virtual slots are initialized using the approximate
// parent during method table building. This method updates them based on the exact parent.
// In types loaded from an NGen image, inherited virtual slots from cross-module parents are
// not initialized. This method initializes them based on the actually loaded exact parent
// if necessary.
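//
// Illustrative example (assumed types): while building D : B<string>, layout may have used the
// shared approximate parent B<__Canon>, so D's inherited virtual slots initially point at
// B<__Canon>'s implementations. This method either patches those slots to the exact parent
// B<string>'s slot contents or re-shares whole vtable chunks from the exact parent.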
8803 | /* static */ |
8804 | void MethodTableBuilder::CopyExactParentSlots(MethodTable *pMT, MethodTable *pApproxParentMT) |
8805 | { |
8806 | CONTRACTL |
8807 | { |
8808 | STANDARD_VM_CHECK; |
8809 | PRECONDITION(CheckPointer(pMT)); |
8810 | } |
8811 | CONTRACTL_END; |
8812 | |
8813 | if (pMT->IsZapped()) |
8814 | return; |
8815 | |
8816 | DWORD nParentVirtuals = pMT->GetNumParentVirtuals(); |
8817 | if (nParentVirtuals == 0) |
8818 | return; |
8819 | |
8820 | _ASSERTE(nParentVirtuals == pApproxParentMT->GetNumVirtuals()); |
8821 | |
8822 | // |
8823 | // Update all inherited virtual slots to match exact parent |
8824 | // |
8825 | |
8826 | if (!pMT->IsCanonicalMethodTable()) |
8827 | { |
8828 | // |
8829 | // Copy all slots for non-canonical methodtables to avoid touching methoddescs. |
8830 | // |
8831 | MethodTable * pCanonMT = pMT->GetCanonicalMethodTable(); |
8832 | |
// Do not write into vtable chunks shared with the parent. It would introduce a race
// with code:MethodDesc::SetStableEntryPointInterlocked.
8835 | // |
8836 | // Non-canonical method tables either share everything or nothing so it is sufficient to check |
8837 | // just the first indirection to detect sharing. |
8838 | if (pMT->GetVtableIndirections()[0].GetValueMaybeNull() != pCanonMT->GetVtableIndirections()[0].GetValueMaybeNull()) |
8839 | { |
8840 | for (DWORD i = 0; i < nParentVirtuals; i++) |
8841 | { |
8842 | PCODE target = pCanonMT->GetRestoredSlot(i); |
8843 | pMT->SetSlot(i, target); |
8844 | } |
8845 | } |
8846 | } |
8847 | else |
8848 | { |
8849 | MethodTable::MethodDataWrapper hMTData(MethodTable::GetMethodData(pMT, FALSE)); |
8850 | |
8851 | MethodTable * pParentMT = pMT->GetParentMethodTable(); |
8852 | |
8853 | for (DWORD i = 0; i < nParentVirtuals; i++) |
8854 | { |
8855 | // fix up wrongly-inherited method descriptors |
8856 | MethodDesc* pMD = hMTData->GetImplMethodDesc(i); |
8857 | CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot(i)); |
8858 | |
8859 | if (pMD->GetMethodTable() == pMT) |
8860 | continue; |
8861 | |
8862 | // We need to re-inherit this slot from the exact parent. |
8863 | |
8864 | DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(i); |
8865 | if (pMT->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() == pApproxParentMT->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull()) |
8866 | { |
// The slot lives in a chunk shared from the approximate parent MT,
// so we need to change it to share the chunk from the exact parent MT instead.
8869 | |
8870 | #ifdef FEATURE_PREJIT |
8871 | _ASSERTE(MethodTable::CanShareVtableChunksFrom(pParentMT, pMT->GetLoaderModule(), Module::GetPreferredZapModuleForMethodTable(pMT))); |
8872 | #else |
8873 | _ASSERTE(MethodTable::CanShareVtableChunksFrom(pParentMT, pMT->GetLoaderModule())); |
8874 | #endif |
8875 | |
8876 | pMT->GetVtableIndirections()[indirectionIndex].SetValueMaybeNull(pParentMT->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull()); |
8877 | |
8878 | i = MethodTable::GetEndSlotForVtableIndirection(indirectionIndex, nParentVirtuals) - 1; |
8879 | continue; |
8880 | } |
8881 | |
8882 | // The slot lives in an unshared chunk. We need to update the slot contents |
8883 | PCODE target = pParentMT->GetRestoredSlot(i); |
8884 | pMT->SetSlot(i, target); |
8885 | } |
8886 | } |
8887 | } // MethodTableBuilder::CopyExactParentSlots |
8888 | |
8889 | //******************************************************************************* |
8890 | /* static */ |
8891 | void |
8892 | MethodTableBuilder::LoadExactInterfaceMap(MethodTable *pMT) |
8893 | { |
8894 | CONTRACTL |
8895 | { |
8896 | STANDARD_VM_CHECK; |
8897 | PRECONDITION(CheckPointer(pMT)); |
8898 | } |
8899 | CONTRACTL_END; |
8900 | |
8901 | BOOL hasInstantiatedInterfaces = FALSE; |
8902 | MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMap(); |
8903 | while (it.Next()) |
8904 | { |
8905 | if (it.GetInterface()->HasInstantiation()) |
8906 | { |
8907 | hasInstantiatedInterfaces = TRUE; |
8908 | break; |
8909 | } |
8910 | } |
8911 | |
8912 | // If we have some instantiated interfaces, then we have lots more work to do... |
8913 | |
8914 | // In the worst case we have to use the metadata to |
8915 | // (a) load the exact interfaces and determine the order in which they |
8916 | // go. We do those by re-running the interface layout algorithm |
8917 | // and using metadata-comparisons to place interfaces in the list. |
8918 | // (b) do a check to see if any ambiguity in the interface dispatch map is introduced |
8919 | // by the instantiation |
8920 | // See code:#LoadExactInterfaceMap_Algorithm2 |
8921 | // |
8922 | // However, we can do something simpler: we just use |
8923 | // the loaded interface method tables to determine ordering. This can be done |
8924 | // if there are no duplicate instantiated interfaces in the interface |
8925 | // set. |
8926 | // See code:#LoadExactInterfaceMap_Algorithm1. |
8927 | |
8928 | if (!hasInstantiatedInterfaces) |
8929 | { |
8930 | return; |
8931 | } |
8932 | |
8933 | HRESULT hr; |
8934 | TypeHandle thisTH(pMT); |
8935 | SigTypeContext typeContext(thisTH); |
8936 | MethodTable *pParentMT = pMT->GetParentMethodTable(); |
8937 | |
8938 | //#LoadExactInterfaceMap_Algorithm1 |
8939 | // Exact interface instantiation loading TECHNIQUE 1. |
8940 | // (a) For interfaces inherited from an instantiated parent class, just copy down from exact parent |
8941 | // (b) Grab newly declared interfaces by loading and then copying down all their inherited parents |
8942 | // (c) But check for any exact duplicates along the way |
8943 | // (d) If no duplicates then we can use the computed interface map we've created |
8944 | // (e) If duplicates found then use the slow metadata-based technique code:#LoadExactInterfaceMap_Algorithm2 |
8945 | DWORD nInterfacesCount = pMT->GetNumInterfaces(); |
8946 | MethodTable **pExactMTs = (MethodTable**) _alloca(sizeof(MethodTable *) * nInterfacesCount); |
8947 | DWORD nAssigned = 0; |
8948 | BOOL duplicates = false; |
8949 | if (pParentMT != NULL) |
8950 | { |
8951 | MethodTable::InterfaceMapIterator parentIt = pParentMT->IterateInterfaceMap(); |
8952 | while (parentIt.Next()) |
8953 | { |
8954 | duplicates |= InsertMethodTable(parentIt.GetInterface(), pExactMTs, nInterfacesCount, &nAssigned); |
8955 | } |
8956 | } |
8957 | InterfaceImplEnum ie(pMT->GetModule(), pMT->GetCl(), NULL); |
8958 | while ((hr = ie.Next()) == S_OK) |
8959 | { |
8960 | MethodTable *pNewIntfMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing(pMT->GetModule(), |
8961 | ie.CurrentToken(), |
8962 | &typeContext, |
8963 | ClassLoader::ThrowIfNotFound, |
8964 | ClassLoader::FailIfUninstDefOrRef, |
8965 | ClassLoader::LoadTypes, |
8966 | CLASS_LOAD_EXACTPARENTS, |
8967 | TRUE).GetMethodTable(); |
8968 | |
8969 | duplicates |= InsertMethodTable(pNewIntfMT, pExactMTs, nInterfacesCount, &nAssigned); |
8970 | MethodTable::InterfaceMapIterator intIt = pNewIntfMT->IterateInterfaceMap(); |
8971 | while (intIt.Next()) |
8972 | { |
8973 | duplicates |= InsertMethodTable(intIt.GetInterface(), pExactMTs, nInterfacesCount, &nAssigned); |
8974 | } |
8975 | } |
8976 | if (FAILED(hr)) |
8977 | { |
8978 | pMT->GetAssembly()->ThrowTypeLoadException(pMT->GetMDImport(), pMT->GetCl(), IDS_CLASSLOAD_BADFORMAT); |
8979 | } |
8980 | #ifdef _DEBUG |
8981 | duplicates |= EEConfig::GetConfigDWORD_DontUse_(CLRConfig::INTERNAL_AlwaysUseMetadataInterfaceMapLayout, FALSE); |
8982 | |
8983 | //#InjectInterfaceDuplicates_LoadExactInterfaceMap |
8984 | // If we are injecting duplicates also for non-generic interfaces in check builds, we have to use |
8985 | // algorithm code:#LoadExactInterfaceMap_Algorithm2. |
8986 | // Has to be in sync with code:#InjectInterfaceDuplicates_Main. |
8987 | duplicates |= pMT->Debug_HasInjectedInterfaceDuplicates(); |
8988 | #endif |
8989 | CONSISTENCY_CHECK(duplicates || (nAssigned == pMT->GetNumInterfaces())); |
8990 | if (duplicates) |
8991 | { |
8992 | //#LoadExactInterfaceMap_Algorithm2 |
8993 | // Exact interface instantiation loading TECHNIQUE 2 - The exact instantiation has caused some duplicates to |
8994 | // appear in the interface map! This may not be an error: if the duplicates |
// were ones that arose because of inheritance from
8996 | // a parent type then we accept that. For example |
8997 | // class C<T> : I<T> |
8998 | // class D<T> : C<T>, I<string> |
8999 | // is acceptable even when loading D<string>. Note that in such a case |
9000 | // there will be two entries for I<string> in the final interface map for D<string>. |
9001 | // For dispatch the mappings in D take precedence. |
9002 | // |
9003 | // However we consider it an error if there is real ambiguity within |
9004 | // the interface definitions within the one class, e.g. |
9005 | // class E<T> : I<T>, I<string> |
9006 | // In this situation it is not defined how to dispatch calls to I<string>: would |
9007 | // we use the bindings for I<T> or I<string>? |
9008 | // |
// Because we may have had duplicates, the interface map we created above may not
9010 | // be the correct one: for example for D<string> above we would have computed |
9011 | // a map with only one entry. This is incorrect: an exact instantiation's interface |
9012 | // map must have entries that match the ordering of the interface map in the generic case |
9013 | // (this is because code:#InterfaceMap_SupersetOfParent). |
9014 | // |
// So, in order to determine how to place the interfaces we need to go back to
9016 | // the metadata. We also do this to check if the presence of duplicates |
9017 | // has caused any potential ambiguity, i.e. the E<string> case above. |
9018 | |
9019 | // First we do a GetCheckpoint for the thread-based allocator. ExpandExactInheritedInterfaces allocates substitution chains |
9020 | // on the thread allocator rather than on the stack. |
9021 | Thread * pThread = GetThread(); |
9022 | CheckPointHolder cph(pThread->m_MarshalAlloc.GetCheckpoint()); //hold checkpoint for autorelease |
9023 | |
9024 | // *********************************************************** |
9025 | // ****** This must be consistent with code:ExpandApproxInterface etc. ******* |
9026 | // |
// The correlation to ExpandApproxInterfaces etc. follows directly from how we
// traverse interfaces.
9029 | // *********************************************************** |
9030 | |
9031 | bmtExactInterfaceInfo bmtExactInterface; |
9032 | bmtExactInterface.pInterfaceSubstitution = new (&pThread->m_MarshalAlloc) Substitution[pMT->GetNumInterfaces()]; |
9033 | bmtExactInterface.pExactMTs = pExactMTs; |
9034 | bmtExactInterface.nAssigned = 0; |
9035 | bmtExactInterface.typeContext = typeContext; |
9036 | |
9037 | // Do the interfaces inherited from a parent class |
9038 | if ((pParentMT != NULL) && (pParentMT->GetNumInterfaces() > 0)) |
9039 | { |
9040 | Substitution * pParentSubstForTypeLoad = new (&pThread->m_MarshalAlloc) Substitution( |
9041 | pMT->GetSubstitutionForParent(NULL)); |
9042 | Substitution * pParentSubstForComparing = new (&pThread->m_MarshalAlloc) Substitution( |
9043 | pMT->GetSubstitutionForParent(NULL)); |
9044 | ExpandExactInheritedInterfaces( |
9045 | &bmtExactInterface, |
9046 | pParentMT, |
9047 | pParentSubstForTypeLoad, |
9048 | pParentSubstForComparing); |
9049 | } |
9050 | #ifdef _DEBUG |
9051 | //#ExactInterfaceMap_SupersetOfParent |
// Check that the parent's interface map is a subset of this interface map
9053 | // See code:#InterfaceMap_SupersetOfParent |
9054 | { |
9055 | _ASSERTE(pParentMT->GetNumInterfaces() == bmtExactInterface.nAssigned); |
9056 | |
9057 | MethodTable::InterfaceMapIterator parentInterfacesIterator = pParentMT->IterateInterfaceMap(); |
9058 | UINT32 nInterfaceIndex = 0; |
9059 | while (parentInterfacesIterator.Next()) |
9060 | { |
9061 | if (pMT->IsSharedByGenericInstantiations()) |
9062 | { // The type is a canonical instantiation (contains _Canon) |
9063 | // The interface instantiations of parent can be different (see |
9064 | // code:#InterfaceMap_CanonicalSupersetOfParent), therefore we cannot compare |
9065 | // MethodTables |
9066 | _ASSERTE(parentInterfacesIterator.GetInterfaceInfo()->GetApproxMethodTable(pParentMT->GetLoaderModule())->HasSameTypeDefAs( |
9067 | bmtExactInterface.pExactMTs[nInterfaceIndex])); |
9068 | } |
9069 | else |
9070 | { // It is not canonical instantiation, we can compare MethodTables |
9071 | _ASSERTE(parentInterfacesIterator.GetInterface() == bmtExactInterface.pExactMTs[nInterfaceIndex]); |
9072 | } |
9073 | nInterfaceIndex++; |
9074 | } |
9075 | _ASSERTE(nInterfaceIndex == bmtExactInterface.nAssigned); |
9076 | } |
9077 | #endif //_DEBUG |
9078 | |
9079 | // If there are any __Canon instances in the type argument list, then we defer the |
9080 | // ambiguity checking until an exact instantiation. |
9081 | if (!pMT->IsSharedByGenericInstantiations()) |
9082 | { |
9083 | // There are no __Canon types in the instantiation, so do ambiguity check. |
9084 | bmtInterfaceAmbiguityCheckInfo bmtCheckInfo; |
9085 | bmtCheckInfo.pMT = pMT; |
9086 | bmtCheckInfo.ppInterfaceSubstitutionChains = new (&pThread->m_MarshalAlloc) Substitution *[pMT->GetNumInterfaces()]; |
9087 | bmtCheckInfo.ppExactDeclaredInterfaces = new (&pThread->m_MarshalAlloc) MethodTable *[pMT->GetNumInterfaces()]; |
9088 | bmtCheckInfo.nAssigned = 0; |
9089 | bmtCheckInfo.typeContext = typeContext; |
9090 | MethodTableBuilder::InterfacesAmbiguityCheck(&bmtCheckInfo, pMT->GetModule(), pMT->GetCl(), NULL); |
9091 | } |
9092 | |
9093 | // OK, there is no ambiguity amongst the instantiated interfaces declared on this class. |
9094 | MethodTableBuilder::ExpandExactDeclaredInterfaces( |
9095 | &bmtExactInterface, |
9096 | pMT->GetModule(), |
9097 | pMT->GetCl(), |
9098 | NULL, |
9099 | NULL |
9100 | COMMA_INDEBUG(pMT)); |
9101 | CONSISTENCY_CHECK(bmtExactInterface.nAssigned == pMT->GetNumInterfaces()); |
9102 | |
9103 | // We cannot process interface duplicates on types with __Canon. The duplicates are processed on |
9104 | // exact types only |
9105 | if (!pMT->IsSharedByGenericInstantiations()) |
9106 | { |
9107 | // Process all pairs of duplicates in the interface map: |
9108 | // i.e. If there are 3 duplicates of the same interface at indexes: i1, i2 and i3, then |
9109 | // process pairs of indexes [i1,i2], [i1,i3] and [i2,i3]. |
9110 | // - Update 'declared on type' flag for those interfaces which duplicate is 'declared on type' |
9111 | // - Check interface method implementation ambiguity code:#DuplicateInterface_MethodAmbiguity |
9112 | for (DWORD nOriginalIndex = 0; nOriginalIndex < nInterfacesCount; nOriginalIndex++) |
9113 | { |
9114 | // Search for duplicates further in the interface map |
9115 | for (DWORD nDuplicateIndex = nOriginalIndex + 1; nDuplicateIndex < nInterfacesCount; nDuplicateIndex++) |
9116 | { |
9117 | if (pExactMTs[nOriginalIndex] != pExactMTs[nDuplicateIndex]) |
9118 | { // It's not a duplicate of original interface, skip it |
9119 | continue; |
9120 | } |
9121 | // We found a duplicate |
9122 | |
9123 | // Set 'declared on type' flag if either original or duplicate interface is |
9124 | // 'declared on type' |
9125 | if (pMT->IsInterfaceDeclaredOnClass(nOriginalIndex) || |
9126 | pMT->IsInterfaceDeclaredOnClass(nDuplicateIndex)) |
9127 | { |
9128 | // |
9129 | // Note that both checks are needed: |
9130 | // A<T> : I<T> |
9131 | // B<T,U> : A<T>, I<U> |
9132 | // C<T,U> : B<T,U>, I<T> // Reimplements interface from A<T> |
9133 | // After code:BuildMethodTableThrowing algorithm, this will happen: |
9134 | // B<int,int> will have interface map similar to B<T,U>: |
9135 | // I<int> ... not 'declared on type' |
9136 | // I<int> ... 'declared on type' |
9137 | // C<int,int> will have interface map similar to C<T,U>: |
9138 | // I<int> ... 'declared on type' |
9139 | // I<int> ... not 'declared on type' |
9140 | // |
9141 | |
9142 | pMT->SetInterfaceDeclaredOnClass(nOriginalIndex); |
9143 | pMT->SetInterfaceDeclaredOnClass(nDuplicateIndex); |
9144 | } |
9145 | |
9146 | //#DuplicateInterface_MethodAmbiguity |
9147 | // |
9148 | // In the ideal world we would now check for interface method implementation |
9149 | // ambiguity in the instantiation, but that would be a technical breaking change |
9150 | // (against 2.0 RTM/SP1). |
9151 | // Therefore we ALLOW when interface method is implemented twice through this |
9152 | // original and duplicate interface. |
9153 | // |
9154 | // This ambiguity pattern is therefore ALLOWED (can be expressed only in IL, not in C#): |
9155 | // I<T> |
9156 | // void Print(T t); |
9157 | // A<T> : I<T> // abstract class |
9158 | // B<T,U> : A<T>, I<U> |
9159 | // void Print(T t) { ... } |
9160 | // void Print(U u) { ... } |
9161 | // Now B<int,int> has 2 implementations of I<int>.Print(int), while B<int,char> is |
9162 | // fine. Therefore an instantiation can introduce ambiguity. |
9163 | |
9164 | #if 0 // Removing this code for now as it is a technical breaking change (against CLR 2.0 RTM/SP1). |
9165 | // We might decide later that we want to take this breaking change. |
9166 | // |
9167 | // Note that dispatch map entries are sorted by interface index and then interface |
9168 | // method slot index. |
9169 | // |
9170 | DispatchMapTypeID originalTypeID = DispatchMapTypeID::InterfaceClassID(nOriginalIndex); |
9171 | DispatchMap::EncodedMapIterator originalIt(pMT); |
9172 | // Find first entry for original interface |
9173 | while (originalIt.IsValid()) |
9174 | { |
9175 | DispatchMapEntry *pEntry = originalIt.Entry(); |
9176 | if (pEntry->GetTypeID().ToUINT32() >= originalTypeID.ToUINT32()) |
9177 | { // Found the place where original interface entries should be (dispatch map is |
9178 | // sorted) |
9179 | break; |
9180 | } |
9181 | originalIt.Next(); |
9182 | } |
9183 | |
9184 | DispatchMapTypeID duplicateTypeID = DispatchMapTypeID::InterfaceClassID(nDuplicateIndex); |
9185 | DispatchMap::EncodedMapIterator duplicateIt(pMT); |
9186 | // Find first entry for duplicate interface |
9187 | while (duplicateIt.IsValid()) |
9188 | { |
9189 | DispatchMapEntry *pEntry = duplicateIt.Entry(); |
9190 | if (pEntry->GetTypeID().ToUINT32() >= duplicateTypeID.ToUINT32()) |
{ // Found the place where duplicate interface entries should be (dispatch map is
9192 | // sorted) |
9193 | break; |
9194 | } |
9195 | duplicateIt.Next(); |
9196 | } |
9197 | |
9198 | // Compare original and duplicate interface entries in the dispatch map if they contain |
9199 | // different implementation for the same interface method |
9200 | for (;;) |
9201 | { |
9202 | if (!originalIt.IsValid() || !duplicateIt.IsValid()) |
9203 | { // We reached end of one dispatch map iterator |
9204 | break; |
9205 | } |
9206 | DispatchMapEntry *pOriginalEntry = originalIt.Entry(); |
9207 | if (pOriginalEntry->GetTypeID().ToUINT32() != originalTypeID.ToUINT32()) |
{ // We have moved past the original interface entries
9209 | break; |
9210 | } |
9211 | DispatchMapEntry *pDuplicateEntry = duplicateIt.Entry(); |
9212 | if (pDuplicateEntry->GetTypeID().ToUINT32() != duplicateTypeID.ToUINT32()) |
{ // We have moved past the duplicate interface entries
9214 | break; |
9215 | } |
9216 | |
9217 | if (pOriginalEntry->GetSlotNumber() == pDuplicateEntry->GetSlotNumber()) |
9218 | { // Found duplicate implementation of interface method |
9219 | if (pOriginalEntry->GetTargetSlotNumber() != pDuplicateEntry->GetTargetSlotNumber()) |
9220 | { // Implementation of the slots is different |
9221 | bmtErrorInfo bmtError; |
9222 | |
9223 | bmtError.pModule = pMT->GetModule(); |
9224 | bmtError.cl = pMT->GetCl(); |
9225 | bmtError.resIDWhy = IDS_CLASSLOAD_MI_MULTIPLEOVERRIDES; |
9226 | bmtError.szMethodNameForError = NULL; |
9227 | bmtError.pThrowable = NULL; |
9228 | |
9229 | MethodDesc *pMD = pMT->GetMethodDescForSlot(pDuplicateEntry->GetTargetSlotNumber()); |
9230 | bmtError.dMethodDefInError = pMD->GetMemberDef(); |
9231 | |
9232 | BuildMethodTableThrowException(COR_E_TYPELOAD, bmtError); |
9233 | } |
9234 | // The method is implemented by the same slot on both interfaces (original and |
9235 | // duplicate) |
9236 | |
9237 | // Process next dispatch map entry |
9238 | originalIt.Next(); |
9239 | duplicateIt.Next(); |
9240 | continue; |
9241 | } |
9242 | // Move iterator representing smaller interface method slot index (the dispatch map |
9243 | // is sorted by slot indexes) |
9244 | if (pOriginalEntry->GetSlotNumber() < pDuplicateEntry->GetSlotNumber()) |
9245 | { |
9246 | originalIt.Next(); |
9247 | continue; |
9248 | } |
9249 | _ASSERTE(pOriginalEntry->GetSlotNumber() > pDuplicateEntry->GetSlotNumber()); |
9250 | duplicateIt.Next(); |
9251 | } |
9252 | #endif //0 |
9253 | } |
9254 | // All duplicates of this original interface were processed |
9255 | } |
9256 | // All pairs of duplicates in the interface map are processed |
9257 | } |
9258 | } |
9259 | // Duplicates in the interface map are resolved |
9260 | |
9261 | // OK, if we've got this far then pExactMTs should now hold the array of exact instantiated interfaces. |
9262 | MethodTable::InterfaceMapIterator thisIt = pMT->IterateInterfaceMap(); |
9263 | DWORD i = 0; |
9264 | while (thisIt.Next()) |
9265 | { |
9266 | #ifdef _DEBUG |
9267 | MethodTable*pOldMT = thisIt.GetInterface(); |
9268 | MethodTable *pNewMT = pExactMTs[i]; |
9269 | CONSISTENCY_CHECK(pOldMT->HasSameTypeDefAs(pNewMT)); |
9270 | #endif // _DEBUG |
9271 | thisIt.SetInterface(pExactMTs[i]); |
9272 | i++; |
9273 | } |
9274 | |
9275 | } // MethodTableBuilder::LoadExactInterfaceMap |
9276 | |
9277 | //******************************************************************************* |
9278 | void |
9279 | MethodTableBuilder::ExpandExactInheritedInterfaces( |
9280 | bmtExactInterfaceInfo * bmtInfo, |
9281 | MethodTable * pMT, |
9282 | const Substitution * pSubstForTypeLoad, |
9283 | Substitution * pSubstForComparing) |
9284 | { |
9285 | STANDARD_VM_CONTRACT; |
9286 | |
9287 | MethodTable *pParentMT = pMT->GetParentMethodTable(); |
9288 | |
9289 | // Backup type's substitution chain for comparing interfaces |
9290 | Substitution substForComparingBackup = *pSubstForComparing; |
9291 | // Make type an open type for comparing interfaces |
9292 | *pSubstForComparing = Substitution(); |
9293 | |
9294 | if (pParentMT) |
9295 | { |
9296 | // Chain parent's substitution for exact type load |
9297 | Substitution * pParentSubstForTypeLoad = new (&GetThread()->m_MarshalAlloc) Substitution( |
9298 | pMT->GetSubstitutionForParent(pSubstForTypeLoad)); |
9299 | |
9300 | // Chain parent's substitution for comparing interfaces (note that this type is temporarily |
9301 | // considered as open type) |
9302 | Substitution * pParentSubstForComparing = new (&GetThread()->m_MarshalAlloc) Substitution( |
9303 | pMT->GetSubstitutionForParent(pSubstForComparing)); |
9304 | |
9305 | ExpandExactInheritedInterfaces( |
9306 | bmtInfo, |
9307 | pParentMT, |
9308 | pParentSubstForTypeLoad, |
9309 | pParentSubstForComparing); |
9310 | } |
9311 | ExpandExactDeclaredInterfaces( |
9312 | bmtInfo, |
9313 | pMT->GetModule(), |
9314 | pMT->GetCl(), |
9315 | pSubstForTypeLoad, |
9316 | pSubstForComparing |
9317 | COMMA_INDEBUG(pMT)); |
9318 | |
// Restore type's substitution chain for comparing interfaces
9320 | *pSubstForComparing = substForComparingBackup; |
9321 | } // MethodTableBuilder::ExpandExactInheritedInterfaces |
9322 | |
9323 | //******************************************************************************* |
9324 | /* static */ |
9325 | void |
9326 | MethodTableBuilder::ExpandExactDeclaredInterfaces( |
9327 | bmtExactInterfaceInfo * bmtInfo, |
9328 | Module * pModule, |
9329 | mdToken typeDef, |
9330 | const Substitution * pSubstForTypeLoad, |
9331 | Substitution * pSubstForComparing |
9332 | COMMA_INDEBUG(MethodTable * dbg_pClassMT)) |
9333 | { |
9334 | STANDARD_VM_CONTRACT; |
9335 | |
9336 | HRESULT hr; |
9337 | InterfaceImplEnum ie(pModule, typeDef, NULL); |
9338 | while ((hr = ie.Next()) == S_OK) |
9339 | { |
9340 | MethodTable * pInterface = ClassLoader::LoadTypeDefOrRefOrSpecThrowing( |
9341 | pModule, |
9342 | ie.CurrentToken(), |
9343 | &bmtInfo->typeContext, |
9344 | ClassLoader::ThrowIfNotFound, |
9345 | ClassLoader::FailIfUninstDefOrRef, |
9346 | ClassLoader::LoadTypes, |
9347 | CLASS_LOAD_EXACTPARENTS, |
9348 | TRUE, |
9349 | pSubstForTypeLoad).GetMethodTable(); |
9350 | |
9351 | Substitution ifaceSubstForTypeLoad(ie.CurrentToken(), pModule, pSubstForTypeLoad); |
9352 | Substitution ifaceSubstForComparing(ie.CurrentToken(), pModule, pSubstForComparing); |
9353 | ExpandExactInterface( |
9354 | bmtInfo, |
9355 | pInterface, |
9356 | &ifaceSubstForTypeLoad, |
9357 | &ifaceSubstForComparing |
9358 | COMMA_INDEBUG(dbg_pClassMT)); |
9359 | } |
9360 | if (FAILED(hr)) |
9361 | { |
9362 | pModule->GetAssembly()->ThrowTypeLoadException(pModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT); |
9363 | } |
9364 | } // MethodTableBuilder::ExpandExactDeclaredInterfaces |
9365 | |
9366 | //******************************************************************************* |
9367 | void |
9368 | MethodTableBuilder::ExpandExactInterface( |
9369 | bmtExactInterfaceInfo * bmtInfo, |
9370 | MethodTable * pIntf, |
9371 | const Substitution * pSubstForTypeLoad_OnStack, // Allocated on stack! |
9372 | const Substitution * pSubstForComparing_OnStack // Allocated on stack! |
9373 | COMMA_INDEBUG(MethodTable * dbg_pClassMT)) |
9374 | { |
9375 | STANDARD_VM_CONTRACT; |
9376 | |
9377 | // ****** This must be consistent with code:MethodTableBuilder::ExpandApproxInterface ****** |
9378 | |
// Check whether the interface is already present according to the "generic" layout of the
// interfaces. Note that we use exactly the same algorithm as when we
9381 | // determined the layout of the interface map for the "generic" version of the class. |
9382 | for (DWORD i = 0; i < bmtInfo->nAssigned; i++) |
9383 | { |
// Type Equivalence is not respected for this comparison as you can have multiple type equivalent interfaces on a class
9385 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
9386 | if (MetaSig::CompareTypeDefsUnderSubstitutions(bmtInfo->pExactMTs[i], |
9387 | pIntf, |
9388 | &bmtInfo->pInterfaceSubstitution[i], |
9389 | pSubstForComparing_OnStack, |
9390 | &newVisited)) |
9391 | { |
9392 | #ifdef _DEBUG |
9393 | //#InjectInterfaceDuplicates_ExactInterfaces |
9394 | // We will inject duplicate interfaces in check builds. |
9395 | // Has to be in sync with code:#InjectInterfaceDuplicates_Main. |
9396 | if (dbg_pClassMT->Debug_HasInjectedInterfaceDuplicates()) |
9397 | { // Just pretend we didn't find this match |
9398 | break; |
9399 | } |
9400 | #endif //_DEBUG |
9401 | return; // found it, don't add it again |
9402 | } |
9403 | } |
9404 | |
9405 | // Add the interface and its sub-interfaces |
9406 | DWORD n = bmtInfo->nAssigned; |
9407 | bmtInfo->pExactMTs[n] = pIntf; |
9408 | bmtInfo->pInterfaceSubstitution[n] = *pSubstForComparing_OnStack; |
9409 | bmtInfo->nAssigned++; |
9410 | |
9411 | Substitution * pSubstForTypeLoad = new (&GetThread()->m_MarshalAlloc) Substitution(*pSubstForTypeLoad_OnStack); |
9412 | |
9413 | ExpandExactDeclaredInterfaces( |
9414 | bmtInfo, |
9415 | pIntf->GetModule(), |
9416 | pIntf->GetCl(), |
9417 | pSubstForTypeLoad, |
9418 | &bmtInfo->pInterfaceSubstitution[n] |
9419 | COMMA_INDEBUG(dbg_pClassMT)); |
9420 | } // MethodTableBuilder::ExpandExactInterface |
9421 | |
9422 | //******************************************************************************* |
9423 | /* static */ |
9424 | void MethodTableBuilder::InterfacesAmbiguityCheck(bmtInterfaceAmbiguityCheckInfo *bmtCheckInfo, |
9425 | Module *pModule, |
9426 | mdToken typeDef, |
9427 | const Substitution *pSubstChain) |
9428 | { |
9429 | STANDARD_VM_CONTRACT; |
9430 | |
9431 | HRESULT hr; |
9432 | InterfaceImplEnum ie(pModule, typeDef, pSubstChain); |
9433 | while ((hr = ie.Next()) == S_OK) |
9434 | { |
9435 | MethodTable *pInterface = |
9436 | ClassLoader::LoadTypeDefOrRefOrSpecThrowing(pModule, ie.CurrentToken(), |
9437 | &bmtCheckInfo->typeContext, |
9438 | ClassLoader::ThrowIfNotFound, |
9439 | ClassLoader::FailIfUninstDefOrRef, |
9440 | ClassLoader::LoadTypes, |
9441 | CLASS_LOAD_EXACTPARENTS, |
9442 | TRUE, |
9443 | pSubstChain).GetMethodTable(); |
9444 | InterfaceAmbiguityCheck(bmtCheckInfo, ie.CurrentSubst(), pInterface); |
9445 | } |
9446 | if (FAILED(hr)) |
9447 | { |
9448 | pModule->GetAssembly()->ThrowTypeLoadException(pModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT); |
9449 | } |
9450 | } |
9451 | |
9452 | //******************************************************************************* |
9453 | void MethodTableBuilder::InterfaceAmbiguityCheck(bmtInterfaceAmbiguityCheckInfo *bmtCheckInfo, |
9454 | const Substitution *pItfSubstChain, |
9455 | MethodTable *pIntf) |
9456 | { |
9457 | STANDARD_VM_CONTRACT; |
9458 | |
// Check whether the interface is already present in the generic version of the freshly
// declared interfaces. We do this based on metadata, i.e. via the substitution chains.
9461 | // Note we use exactly the same algorithm as when we |
9462 | // determined the layout of the interface map for the "generic" version of the class. |
9463 | for (DWORD i = 0; i < bmtCheckInfo->nAssigned; i++) |
9464 | { |
// Type Equivalence is not respected for this comparison as you can have multiple type equivalent interfaces on a class
9466 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
9467 | if (MetaSig::CompareTypeDefsUnderSubstitutions(bmtCheckInfo->ppExactDeclaredInterfaces[i], |
9468 | pIntf, |
9469 | bmtCheckInfo->ppInterfaceSubstitutionChains[i], |
9470 | pItfSubstChain, |
9471 | &newVisited)) |
9472 | return; // found it, don't add it again |
9473 | } |
9474 | |
9475 | // OK, so it isn't a duplicate based on the generic IL, now check if the instantiation |
9476 | // makes it a duplicate. |
9477 | for (DWORD i = 0; i < bmtCheckInfo->nAssigned; i++) |
9478 | { |
9479 | if (bmtCheckInfo->ppExactDeclaredInterfaces[i] == pIntf) |
9480 | { |
9481 | bmtCheckInfo->pMT->GetModule()->GetAssembly()->ThrowTypeLoadException(bmtCheckInfo->pMT->GetMDImport(), |
9482 | bmtCheckInfo->pMT->GetCl(), |
9483 | IDS_CLASSLOAD_OVERLAPPING_INTERFACES); |
9484 | } |
9485 | } |
9486 | |
9487 | DWORD n = bmtCheckInfo->nAssigned; |
9488 | bmtCheckInfo->ppExactDeclaredInterfaces[n] = pIntf; |
9489 | bmtCheckInfo->ppInterfaceSubstitutionChains[n] = new (&GetThread()->m_MarshalAlloc) Substitution[pItfSubstChain->GetLength()]; |
9490 | pItfSubstChain->CopyToArray(bmtCheckInfo->ppInterfaceSubstitutionChains[n]); |
9491 | |
9492 | bmtCheckInfo->nAssigned++; |
9493 | InterfacesAmbiguityCheck(bmtCheckInfo,pIntf->GetModule(),pIntf->GetCl(),pItfSubstChain); |
9494 | } |
9495 | |
9496 | |
9497 | //******************************************************************************* |
9498 | void MethodTableBuilder::CheckForSystemTypes() |
9499 | { |
9500 | STANDARD_VM_CONTRACT; |
9501 | |
9502 | LPCUTF8 name, nameSpace; |
9503 | |
9504 | MethodTable * pMT = GetHalfBakedMethodTable(); |
9505 | EEClass * pClass = GetHalfBakedClass(); |
9506 | |
9507 | // We can exit early for generic types - there are just a few cases to check for. |
9508 | if (bmtGenerics->HasInstantiation()) |
9509 | { |
9510 | if (pMT->IsIntrinsicType() && pClass->HasLayout()) |
9511 | { |
9512 | if (FAILED(GetMDImport()->GetNameOfTypeDef(GetCl(), &name, &nameSpace))) |
9513 | { |
9514 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
9515 | } |
9516 | |
9517 | if (strcmp(nameSpace, g_IntrinsicsNS) == 0) |
9518 | { |
9519 | EEClassLayoutInfo * pLayout = pClass->GetLayoutInfo(); |
9520 | |
9521 | // The SIMD Hardware Intrinsic types correspond to fundamental data types in the underlying ABIs: |
9522 | // * Vector64<T>: __m64 |
9523 | // * Vector128<T>: __m128 |
9524 | // * Vector256<T>: __m256 |
9525 | |
9526 | // These __m128 and __m256 types, among other requirements, are special in that they must always |
9527 | // be aligned properly. |
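// Summary of the alignments applied in the branches below: Vector64<T> uses 8 bytes;
// Vector128<T> uses 16 bytes (8 on ARM); Vector256<T> uses 32 bytes (8 on ARM, 16 on ARM64).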
9528 | |
9529 | if (IsCompilationProcess()) |
9530 | { |
9531 | // Disable AOT compiling for the SIMD hardware intrinsic types. These types require special |
9532 | // ABI handling as they represent fundamental data types (__m64, __m128, and __m256) and not |
9533 | // aggregate or union types. See https://github.com/dotnet/coreclr/issues/15943 |
9534 | // |
9535 | // Once they are properly handled according to the ABI requirements, we can remove this check |
9536 | // and allow them to be used in crossgen/AOT scenarios. |
9537 | COMPlusThrow(kTypeLoadException, IDS_EE_HWINTRINSIC_NGEN_DISALLOWED); |
9538 | } |
9539 | |
9540 | if (strcmp(name, g_Vector64Name) == 0) |
9541 | { |
9542 | // The System V ABI for i386 defaults to 8-byte alignment for __m64, except for parameter passing, |
9543 | // where it has an alignment of 4. |
9544 | |
9545 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 8; // sizeof(__m64) |
9546 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 8; // sizeof(__m64) |
9547 | } |
9548 | else if (strcmp(name, g_Vector128Name) == 0) |
9549 | { |
9550 | #ifdef _TARGET_ARM_ |
9551 | // The Procedure Call Standard for ARM defaults to 8-byte alignment for __m128 |
9552 | |
9553 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 8; |
9554 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 8; |
9555 | #else |
9556 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 16; // sizeof(__m128) |
9557 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 16; // sizeof(__m128) |
9558 | #endif // _TARGET_ARM_ |
9559 | } |
9560 | else if (strcmp(name, g_Vector256Name) == 0) |
9561 | { |
9562 | #ifdef _TARGET_ARM_ |
9563 | // No such type exists for the Procedure Call Standard for ARM. We will default |
9564 | // to the same alignment as __m128, which is supported by the ABI. |
9565 | |
9566 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 8; |
9567 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 8; |
9568 | #elif defined(_TARGET_ARM64_) |
9569 | // The Procedure Call Standard for ARM 64-bit (with SVE support) defaults to |
9570 | // 16-byte alignment for __m256. |
9571 | |
9572 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 16; |
9573 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 16; |
9574 | #else |
9575 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 32; // sizeof(__m256) |
9576 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 32; // sizeof(__m256) |
9577 | #endif // _TARGET_ARM_ elif _TARGET_ARM64_ |
9578 | } |
9579 | else |
9580 | { |
// These types should be handled or explicitly skipped above to ensure that we don't
9582 | // miss adding required ABI support for future types. |
9583 | |
9584 | _ASSERTE_MSG(FALSE, "Unhandled Hardware Intrinsic Type." ); |
9585 | } |
9586 | |
9587 | return; |
9588 | } |
9589 | } |
9590 | |
9591 | if (g_pNullableClass != NULL) |
9592 | { |
9593 | _ASSERTE(g_pByReferenceClass != NULL); |
9594 | _ASSERTE(g_pByReferenceClass->IsByRefLike()); |
9595 | |
9596 | #ifdef _TARGET_X86_ |
9597 | if (GetCl() == g_pByReferenceClass->GetCl()) |
9598 | { |
9599 | // x86 by default treats the type of ByReference<T> as the actual type of its IntPtr field, see calls to |
9600 | // ComputeInternalCorElementTypeForValueType in this file. This is a special case where the struct needs to be |
9601 | // treated as a value type so that its field can be considered as a byref pointer. |
9602 | _ASSERTE(pMT->GetFlag(MethodTable::enum_flag_Category_Mask) == MethodTable::enum_flag_Category_PrimitiveValueType); |
9603 | pMT->ClearFlag(MethodTable::enum_flag_Category_Mask); |
9604 | pMT->SetInternalCorElementType(ELEMENT_TYPE_VALUETYPE); |
9605 | return; |
9606 | } |
9607 | #endif |
9608 | |
9609 | _ASSERTE(g_pNullableClass->IsNullable()); |
9610 | |
// Pre-compute whether the class is a Nullable<T> so that code:Nullable::IsNullableType is efficient.
// This helps the performance of boxing/unboxing a Nullable.
9613 | if (GetCl() == g_pNullableClass->GetCl()) |
9614 | pMT->SetIsNullable(); |
9615 | |
9616 | return; |
9617 | } |
9618 | } |
9619 | |
9620 | if (IsNested() || IsEnum()) |
9621 | return; |
9622 | |
9623 | if (FAILED(GetMDImport()->GetNameOfTypeDef(GetCl(), &name, &nameSpace))) |
9624 | { |
9625 | BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT); |
9626 | } |
9627 | |
9628 | if (IsValueClass()) |
9629 | { |
9630 | // |
9631 | // Value types |
9632 | // |
9633 | |
9634 | // All special value types are in the system namespace |
9635 | if (strcmp(nameSpace, g_SystemNS) != 0) |
9636 | return; |
9637 | |
9638 | // Check if it is a primitive type |
9639 | CorElementType type = CorTypeInfo::FindPrimitiveType(name); |
9640 | if (type != ELEMENT_TYPE_END) |
9641 | { |
9642 | pMT->SetInternalCorElementType(type); |
9643 | pMT->SetIsTruePrimitive(); |
9644 | |
9645 | #if defined(_TARGET_X86_) && defined(UNIX_X86_ABI) |
9646 | switch (type) |
9647 | { |
9648 | // The System V ABI for i386 defines different packing for these types. |
9649 | |
9650 | case ELEMENT_TYPE_I8: |
9651 | case ELEMENT_TYPE_U8: |
9652 | case ELEMENT_TYPE_R8: |
9653 | { |
9654 | EEClassLayoutInfo * pLayout = pClass->GetLayoutInfo(); |
9655 | pLayout->m_LargestAlignmentRequirementOfAllMembers = 4; |
9656 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = 4; |
9657 | break; |
9658 | } |
9659 | |
9660 | default: |
9661 | break; |
9662 | } |
9663 | #endif // _TARGET_X86_ && UNIX_X86_ABI |
9664 | |
9665 | #ifdef _DEBUG |
9666 | if (FAILED(GetMDImport()->GetNameOfTypeDef(GetCl(), &name, &nameSpace))) |
9667 | { |
9668 | name = nameSpace = "Invalid TypeDef record" ; |
9669 | } |
9670 | LOG((LF_CLASSLOADER, LL_INFO10000, "%s::%s marked as primitive type %i\n" , nameSpace, name, type)); |
9671 | #endif // _DEBUG |
9672 | } |
9673 | else if (strcmp(name, g_NullableName) == 0) |
9674 | { |
9675 | pMT->SetIsNullable(); |
9676 | } |
9677 | #ifdef _TARGET_X86_ |
9678 | else if (strcmp(name, g_ByReferenceName) == 0) |
9679 | { |
9680 | // x86 by default treats the type of ByReference<T> as the actual type of its IntPtr field, see calls to |
9681 | // ComputeInternalCorElementTypeForValueType in this file. This is a special case where the struct needs to be |
9682 | // treated as a value type so that its field can be considered as a byref pointer. |
9683 | _ASSERTE(pMT->GetFlag(MethodTable::enum_flag_Category_Mask) == MethodTable::enum_flag_Category_PrimitiveValueType); |
9684 | pMT->ClearFlag(MethodTable::enum_flag_Category_Mask); |
9685 | pMT->SetInternalCorElementType(ELEMENT_TYPE_VALUETYPE); |
9686 | } |
9687 | #endif |
9688 | #ifndef _TARGET_X86_ |
9689 | else if (strcmp(name, g_RuntimeArgumentHandleName) == 0) |
9690 | { |
9691 | pMT->SetInternalCorElementType (ELEMENT_TYPE_I); |
9692 | } |
9693 | else if (strcmp(name, g_RuntimeMethodHandleInternalName) == 0) |
9694 | { |
9695 | pMT->SetInternalCorElementType (ELEMENT_TYPE_I); |
9696 | } |
9697 | #endif |
9698 | #if defined(ALIGN_ACCESS) || defined(FEATURE_64BIT_ALIGNMENT) |
9699 | else if (strcmp(name, g_DecimalName) == 0) |
9700 | { |
// This is required because the native layout of System.Decimal causes it to be aligned
// differently from the layout of the native DECIMAL structure, which would cause
// data misalignment exceptions if Decimal is embedded in another type.
9704 | |
9705 | EEClassLayoutInfo* pLayout = pClass->GetLayoutInfo(); |
9706 | pLayout->m_LargestAlignmentRequirementOfAllMembers = sizeof(ULONGLONG); |
9707 | pLayout->m_ManagedLargestAlignmentRequirementOfAllMembers = sizeof(ULONGLONG); |
9708 | |
9709 | #ifdef FEATURE_64BIT_ALIGNMENT |
9710 | // Also need to mark the type so it will be allocated on a 64-bit boundary for |
9711 | // platforms that won't do this naturally. |
9712 | SetAlign8Candidate(); |
9713 | #endif |
9714 | } |
9715 | #endif // ALIGN_ACCESS || FEATURE_64BIT_ALIGNMENT |
9716 | } |
9717 | else |
9718 | { |
9719 | // |
9720 | // Reference types |
9721 | // |
9722 | if (strcmp(name, g_StringName) == 0 && strcmp(nameSpace, g_SystemNS) == 0) |
9723 | { |
9724 | // Strings are not "normal" objects, so we need to mess with their method table a bit |
9725 | // so that the GC can figure out how big each string is... |
9726 | DWORD baseSize = StringObject::GetBaseSize(); |
9727 | pMT->SetBaseSize(baseSize); |
9728 | |
9729 | GetHalfBakedClass()->SetBaseSizePadding(baseSize - bmtFP->NumInstanceFieldBytes); |
9730 | |
9731 | pMT->SetComponentSize(2); |
9732 | } |
9733 | else if (strcmp(name, g_CriticalFinalizerObjectName) == 0 && strcmp(nameSpace, g_ConstrainedExecutionNS) == 0) |
9734 | { |
9735 | // To introduce a class with a critical finalizer, |
9736 | // we'll set the bit here. |
9737 | pMT->SetHasCriticalFinalizer(); |
9738 | } |
9739 | #ifdef FEATURE_COMINTEROP |
9740 | else |
9741 | { |
9742 | bool bIsComObject = false; |
9743 | bool bIsRuntimeClass = false; |
9744 | |
9745 | if (strcmp(name, g_ComObjectName) == 0 && strcmp(nameSpace, g_SystemNS) == 0) |
9746 | bIsComObject = true; |
9747 | |
9748 | if (strcmp(name, g_RuntimeClassName) == 0 && strcmp(nameSpace, g_WinRTNS) == 0) |
9749 | bIsRuntimeClass = true; |
9750 | |
9751 | if (bIsComObject || bIsRuntimeClass) |
9752 | { |
// Make System.__ComObject/System.Runtime.InteropServices.WindowsRuntime.RuntimeClass a ComImport type.
// We can't do it using an attribute, as C# won't allow putting code in ComImport types.
9755 | pMT->SetComObjectType(); |
9756 | |
9757 | // COM objects need an optional field on the EEClass, so ensure this class instance has allocated |
9758 | // the optional field descriptor. |
9759 | EnsureOptionalFieldsAreAllocated(pClass, m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
9760 | } |
9761 | |
9762 | if (bIsRuntimeClass) |
9763 | { |
// Note that we set it here to avoid the type loader considering RuntimeClass as a normal WindowsImportType,
// as functions in RuntimeClass don't go through COM interop.
9766 | GetHalfBakedClass()->SetProjectedFromWinRT(); |
9767 | } |
9768 | } |
9769 | #endif // FEATURE_COMINTEROP |
9770 | } |
9771 | } |
9772 | |
9773 | //========================================================================================== |
9774 | // Helper to create a new method table. This is the only |
9775 | // way to allocate a new MT. Don't try calling new / ctor. |
9776 | // Called from SetupMethodTable |
9777 | // This needs to be kept consistent with MethodTable::GetSavedExtent() |
9778 | MethodTable * MethodTableBuilder::AllocateNewMT(Module *pLoaderModule, |
9779 | DWORD dwVtableSlots, |
9780 | DWORD dwVirtuals, |
9781 | DWORD dwGCSize, |
9782 | DWORD dwNumInterfaces, |
9783 | DWORD dwNumDicts, |
9784 | DWORD cbInstAndDict, |
9785 | MethodTable *pMTParent, |
9786 | ClassLoader *pClassLoader, |
9787 | LoaderAllocator *pAllocator, |
9788 | BOOL isInterface, |
9789 | BOOL fDynamicStatics, |
9790 | BOOL fHasGenericsStaticsInfo, |
9791 | BOOL fNeedsRCWPerTypeData |
9792 | #ifdef FEATURE_COMINTEROP |
9793 | , BOOL fHasDynamicInterfaceMap |
9794 | #endif |
9795 | #ifdef FEATURE_PREJIT |
9796 | , Module *pComputedPZM |
9797 | #endif // FEATURE_PREJIT |
9798 | , AllocMemTracker *pamTracker |
9799 | ) |
9800 | { |
9801 | CONTRACT (MethodTable*) |
9802 | { |
9803 | THROWS; |
9804 | GC_TRIGGERS; |
9805 | MODE_ANY; |
9806 | POSTCONDITION(CheckPointer(RETVAL)); |
9807 | } |
9808 | CONTRACT_END; |
9809 | |
9810 | DWORD dwNonVirtualSlots = dwVtableSlots - dwVirtuals; |
9811 | |
9812 | // GCSize must be aligned |
9813 | _ASSERTE(IS_ALIGNED(dwGCSize, sizeof(void*))); |
9814 | |
9815 | // size without the interface map |
9816 | S_SIZE_T cbTotalSize = S_SIZE_T(dwGCSize) + S_SIZE_T(sizeof(MethodTable)); |
9817 | |
9818 | // vtable |
9819 | cbTotalSize += MethodTable::GetNumVtableIndirections(dwVirtuals) * sizeof(MethodTable::VTableIndir_t); |
9820 | |
9821 | |
9822 | DWORD dwMultipurposeSlotsMask = 0; |
9823 | if (dwNumInterfaces != 0) |
9824 | dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasInterfaceMap; |
9825 | if (dwNumDicts != 0) |
9826 | dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasPerInstInfo; |
9827 | if (bmtVT->pDispatchMapBuilder->Count() > 0) |
9828 | dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasDispatchMapSlot; |
9829 | if (dwNonVirtualSlots != 0) |
9830 | dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasNonVirtualSlots; |
9831 | if (pLoaderModule != GetModule()) |
9832 | dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasModuleOverride; |
9833 | |
9834 | // Add space for optional members here. Same as GetOptionalMembersSize() |
9835 | cbTotalSize += MethodTable::GetOptionalMembersAllocationSize(dwMultipurposeSlotsMask, |
9836 | fHasGenericsStaticsInfo, |
9837 | FALSE, // no GuidInfo needed for canonical instantiations |
9838 | FALSE, // no CCW template needed for canonical instantiations |
9839 | fNeedsRCWPerTypeData, |
9840 | RidFromToken(GetCl()) >= METHODTABLE_TOKEN_OVERFLOW); |
9841 | |
9842 | // Interface map starts here |
9843 | S_SIZE_T offsetOfInterfaceMap = cbTotalSize; |
9844 | |
9845 | cbTotalSize += S_SIZE_T(dwNumInterfaces) * S_SIZE_T(sizeof(InterfaceInfo_t)); |
9846 | |
9847 | #ifdef FEATURE_COMINTEROP |
// Dynamic interface maps have an extra DWORD_PTR added to the end of the normal interface
// map. This is used to store the count of dynamically added interfaces
// (the ones that are not in the metadata but are QI'ed for at runtime).
9851 | cbTotalSize += S_SIZE_T(fHasDynamicInterfaceMap ? sizeof(DWORD_PTR) : 0); |
9852 | #endif |
9853 | |
9854 | // Dictionary pointers start here |
9855 | S_SIZE_T offsetOfInstAndDict = cbTotalSize; |
9856 | |
9857 | if (dwNumDicts != 0) |
9858 | { |
9859 | cbTotalSize += sizeof(GenericsDictInfo); |
9860 | cbTotalSize += S_SIZE_T(dwNumDicts) * S_SIZE_T(sizeof(MethodTable::PerInstInfoElem_t)); |
9861 | cbTotalSize += cbInstAndDict; |
9862 | } |
9863 | |
9864 | S_SIZE_T offsetOfUnsharedVtableChunks = cbTotalSize; |
9865 | |
9866 | BOOL canShareVtableChunks = pMTParent && MethodTable::CanShareVtableChunksFrom(pMTParent, pLoaderModule |
9867 | #ifdef FEATURE_PREJIT |
9868 | , pComputedPZM |
9869 | #endif //FEATURE_PREJIT |
9870 | ); |
9871 | |
9872 | // If pMTParent has a generic instantiation, we cannot share its vtable chunks |
9873 | // This is because pMTParent is only approximate at this point, and MethodTableBuilder::CopyExactParentSlots |
9874 | // may swap in an exact parent that does not satisfy CanShareVtableChunksFrom |
9875 | if (pMTParent && pMTParent->HasInstantiation()) |
9876 | { |
9877 | canShareVtableChunks = FALSE; |
9878 | } |
9879 | |
// We will share any parent vtable chunk that does not contain a method we overrode (or introduced).
// For the rest, we need to allocate space.
9882 | for (DWORD i = 0; i < dwVirtuals; i++) |
9883 | { |
9884 | if (!canShareVtableChunks || ChangesImplementationOfVirtualSlot(static_cast<SLOT_INDEX>(i))) |
9885 | { |
9886 | DWORD chunkStart = MethodTable::GetStartSlotForVtableIndirection(MethodTable::GetIndexOfVtableIndirection(i), dwVirtuals); |
9887 | DWORD chunkEnd = MethodTable::GetEndSlotForVtableIndirection(MethodTable::GetIndexOfVtableIndirection(i), dwVirtuals); |
9888 | |
9889 | cbTotalSize += S_SIZE_T(chunkEnd - chunkStart) * S_SIZE_T(sizeof(PCODE)); |
9890 | |
9891 | i = chunkEnd - 1; |
9892 | } |
9893 | } |
9894 | |
9895 | // Add space for the non-virtual slots array (pointed to by an optional member) if required |
9896 | // If there is only one non-virtual slot, we store it directly in the optional member and need no array |
9897 | S_SIZE_T offsetOfNonVirtualSlots = cbTotalSize; |
9898 | if (dwNonVirtualSlots > 1) |
9899 | { |
9900 | cbTotalSize += S_SIZE_T(dwNonVirtualSlots) * S_SIZE_T(sizeof(PCODE)); |
9901 | } |
9902 | |
9903 | BYTE *pData = (BYTE *)pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem(cbTotalSize)); |
9904 | |
9905 | _ASSERTE(IS_ALIGNED(pData, TARGET_POINTER_SIZE)); |
9906 | |
// There should be no overflows if we have allocated the memory successfully
9908 | _ASSERTE(!cbTotalSize.IsOverflow()); |
9909 | |
9910 | MethodTable* pMT = (MethodTable*)(pData + dwGCSize); |
9911 | |
9912 | pMT->SetMultipurposeSlotsMask(dwMultipurposeSlotsMask); |
9913 | |
9914 | MethodTableWriteableData * pMTWriteableData = (MethodTableWriteableData *) (BYTE *) |
9915 | pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(MethodTableWriteableData)))); |
9916 | // Note: Memory allocated on loader heap is zero filled |
9917 | pMT->SetWriteableData(pMTWriteableData); |
9918 | |
// This also disables IBC logging until the type is sufficiently initialized, so
// it needs to be done early
9921 | pMTWriteableData->SetIsNotFullyLoadedForBuildMethodTable(); |
9922 | |
9923 | #ifdef _DEBUG |
9924 | pClassLoader->m_dwGCSize += dwGCSize; |
9925 | pClassLoader->m_dwInterfaceMapSize += (dwNumInterfaces * sizeof(InterfaceInfo_t)); |
9926 | pClassLoader->m_dwMethodTableSize += (DWORD)cbTotalSize.Value(); |
9927 | pClassLoader->m_dwVtableData += (dwVtableSlots * sizeof(PCODE)); |
9928 | #endif // _DEBUG |
9929 | |
9930 | // There should be no overflows if we have allocated the memory successfully |
9931 | _ASSERTE(!offsetOfUnsharedVtableChunks.IsOverflow()); |
9932 | _ASSERTE(!offsetOfNonVirtualSlots.IsOverflow()); |
9933 | _ASSERTE(!offsetOfInterfaceMap.IsOverflow()); |
9934 | _ASSERTE(!offsetOfInstAndDict.IsOverflow()); |
9935 | |
9936 | // initialize the number of virtual slots |
9937 | pMT->SetNumVirtuals(static_cast<WORD>(dwVirtuals)); |
9938 | |
9939 | pMT->SetParentMethodTable(pMTParent); |
9940 | |
9941 | // Fill out the vtable indirection slots |
9942 | SIZE_T dwCurrentUnsharedSlotOffset = offsetOfUnsharedVtableChunks.Value(); |
9943 | MethodTable::VtableIndirectionSlotIterator it = pMT->IterateVtableIndirectionSlots(); |
9944 | while (it.Next()) |
9945 | { |
9946 | BOOL shared = canShareVtableChunks; |
9947 | |
9948 | // Recalculate whether we will share this chunk |
9949 | if (canShareVtableChunks) |
9950 | { |
9951 | for (DWORD i = it.GetStartSlot(); i < it.GetEndSlot(); i++) |
9952 | { |
9953 | if (ChangesImplementationOfVirtualSlot(static_cast<SLOT_INDEX>(i))) |
9954 | { |
9955 | shared = FALSE; |
9956 | break; |
9957 | } |
9958 | } |
9959 | } |
9960 | |
9961 | if (shared) |
9962 | { |
9963 | // Share the parent chunk |
9964 | _ASSERTE(it.GetEndSlot() <= pMTParent->GetNumVirtuals()); |
9965 | it.SetIndirectionSlot(pMTParent->GetVtableIndirections()[it.GetIndex()].GetValueMaybeNull()); |
9966 | } |
9967 | else |
9968 | { |
9969 | // Use the locally allocated chunk |
9970 | it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pData+dwCurrentUnsharedSlotOffset)); |
9971 | dwCurrentUnsharedSlotOffset += it.GetSize(); |
9972 | } |
9973 | } |
9974 | |
9975 | #ifdef FEATURE_COMINTEROP |
9976 | // Extensible RCWs are prefixed with the count of dynamic interfaces. |
9977 | if (fHasDynamicInterfaceMap) |
9978 | { |
9979 | _ASSERTE (dwNumInterfaces > 0); |
9980 | pMT->SetInterfaceMap ((WORD) (dwNumInterfaces), (InterfaceInfo_t*)(pData+offsetOfInterfaceMap.Value()+sizeof(DWORD_PTR))); |
9981 | |
9982 | *(((DWORD_PTR *)pMT->GetInterfaceMap()) - 1) = 0; |
9983 | } |
9984 | else |
9985 | #endif // FEATURE_COMINTEROP |
9986 | { |
9987 | // interface map is at the end of the vtable |
9988 | pMT->SetInterfaceMap ((WORD) dwNumInterfaces, (InterfaceInfo_t *)(pData+offsetOfInterfaceMap.Value())); |
9989 | } |
9990 | |
9991 | _ASSERTE(((WORD) dwNumInterfaces) == dwNumInterfaces); |
9992 | |
9993 | if (fDynamicStatics) |
9994 | { |
9995 | pMT->SetDynamicStatics(fHasGenericsStaticsInfo); |
9996 | } |
9997 | |
9998 | if (dwNonVirtualSlots > 0) |
9999 | { |
10000 | if (dwNonVirtualSlots > 1) |
10001 | { |
10002 | pMT->SetNonVirtualSlotsArray((PTR_PCODE)(pData+offsetOfNonVirtualSlots.Value())); |
10003 | } |
10004 | else |
10005 | { |
10006 | pMT->SetHasSingleNonVirtualSlot(); |
10007 | } |
10008 | } |
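 | // Note (descriptive, derived from the branches above): with exactly one non-virtual slot the |
 | // slot is stored directly in the optional member (SetHasSingleNonVirtualSlot), so no separate |
 | // array is allocated; only with two or more slots does the MethodTable point into the block |
 | // reserved at offsetOfNonVirtualSlots. |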
10009 | |
10010 | // the dictionary pointers follow the interface map |
10011 | if (dwNumDicts) |
10012 | { |
10013 | MethodTable::PerInstInfoElem_t *pPerInstInfo = (MethodTable::PerInstInfoElem_t *)(pData + offsetOfInstAndDict.Value() + sizeof(GenericsDictInfo)); |
10014 | |
10015 | pMT->SetPerInstInfo ( pPerInstInfo); |
10016 | |
10017 | // Fill in the dictionary for this type, if it's instantiated |
10018 | if (cbInstAndDict) |
10019 | { |
10020 | MethodTable::PerInstInfoElem_t *pPInstInfo = (MethodTable::PerInstInfoElem_t *)(pPerInstInfo + (dwNumDicts-1)); |
10021 | pPInstInfo->SetValueMaybeNull((Dictionary*) (pPerInstInfo + dwNumDicts)); |
10022 | } |
10023 | } |
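 | // Approximate layout of the single allocation made above (a sketch derived from the offsets |
 | // computed in this function; the exact contents between the MethodTable header and the |
 | // interface map depend on the multipurpose-slot mask and optional members): |
 | // |
 | //   pData                                  GC descriptor (dwGCSize bytes, growing towards |
 | //                                          lower addresses from the MethodTable) |
 | //   pData + dwGCSize                       MethodTable header, multipurpose slots and vtable |
 | //                                          indirection pointers |
 | //   pData + offsetOfInterfaceMap           interface map (dwNumInterfaces entries, preceded |
 | //                                          by a DWORD_PTR count for dynamic interface maps) |
 | //   pData + offsetOfInstAndDict            GenericsDictInfo, dwNumDicts per-inst pointers, |
 | //                                          then the instantiation + dictionary (cbInstAndDict) |
 | //   pData + offsetOfUnsharedVtableChunks   locally allocated vtable chunks |
 | //   pData + offsetOfNonVirtualSlots        non-virtual slots array (only if more than one) |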
10024 | |
10025 | #ifdef _DEBUG |
10026 | pMT->m_pWriteableData.GetValue()->m_dwLastVerifedGCCnt = (DWORD)-1; |
10027 | #endif // _DEBUG |
10028 | |
10029 | RETURN(pMT); |
10030 | } |
10031 | |
10032 | |
10033 | //******************************************************************************* |
10034 | // |
10035 | // Used by BuildMethodTable |
10036 | // |
10037 | // Setup the method table |
10038 | // |
10039 | #ifdef _PREFAST_ |
10040 | #pragma warning(push) |
10041 | #pragma warning(disable:21000) // Suppress PREFast warning about overly large function |
10042 | #endif // _PREFAST_ |
10043 | |
10044 | VOID |
10045 | MethodTableBuilder::SetupMethodTable2( |
10046 | Module * pLoaderModule |
10047 | #ifdef FEATURE_PREJIT |
10048 | , Module * pComputedPZM |
10049 | #endif // FEATURE_PREJIT |
10050 | ) |
10051 | { |
10052 | CONTRACTL |
10053 | { |
10054 | STANDARD_VM_CHECK; |
10055 | PRECONDITION(CheckPointer(this)); |
10056 | PRECONDITION(CheckPointer(bmtVT)); |
10057 | PRECONDITION(CheckPointer(bmtInterface)); |
10058 | PRECONDITION(CheckPointer(bmtInternal)); |
10059 | PRECONDITION(CheckPointer(bmtProp)); |
10060 | PRECONDITION(CheckPointer(bmtMFDescs)); |
10061 | PRECONDITION(CheckPointer(bmtEnumFields)); |
10062 | PRECONDITION(CheckPointer(bmtError)); |
10063 | PRECONDITION(CheckPointer(bmtMetaData)); |
10064 | PRECONDITION(CheckPointer(bmtParent)); |
10065 | PRECONDITION(CheckPointer(bmtGenerics)); |
10066 | } |
10067 | CONTRACTL_END; |
10068 | |
10069 | DWORD i; |
10070 | |
10071 | #ifdef FEATURE_COMINTEROP |
10072 | BOOL fHasDynamicInterfaceMap = bmtInterface->dwInterfaceMapSize > 0 && |
10073 | bmtProp->fIsComObjectType && |
10074 | (GetParentMethodTable() != g_pObjectClass); |
10075 | BOOL fNeedsRCWPerTypeData = bmtProp->fNeedsRCWPerTypeData; |
10076 | #else // FEATURE_COMINTEROP |
10077 | BOOL fNeedsRCWPerTypeData = FALSE; |
10078 | #endif // FEATURE_COMINTEROP |
10079 | |
10080 | EEClass *pClass = GetHalfBakedClass(); |
10081 | |
10082 | DWORD cbDict = bmtGenerics->HasInstantiation() |
10083 | ? DictionaryLayout::GetFirstDictionaryBucketSize( |
10084 | bmtGenerics->GetNumGenericArgs(), pClass->GetDictionaryLayout()) |
10085 | : 0; |
10086 | |
10087 | #ifdef FEATURE_COLLECTIBLE_TYPES |
10088 | BOOL fCollectible = pLoaderModule->IsCollectible(); |
10089 | #endif // FEATURE_COLLECTIBLE_TYPES |
10090 | |
10091 | DWORD dwGCSize; |
10092 | |
10093 | if (bmtFP->NumGCPointerSeries > 0) |
10094 | { |
10095 | dwGCSize = (DWORD)CGCDesc::ComputeSize(bmtFP->NumGCPointerSeries); |
10096 | } |
10097 | else |
10098 | { |
10099 | #ifdef FEATURE_COLLECTIBLE_TYPES |
10100 | if (fCollectible) |
10101 | dwGCSize = (DWORD)CGCDesc::ComputeSize(1); |
10102 | else |
10103 | #endif // FEATURE_COLLECTIBLE_TYPES |
10104 | dwGCSize = 0; |
10105 | } |
10106 | |
10107 | pClass->SetNumMethods(bmtVT->cTotalSlots); |
10108 | pClass->SetNumNonVirtualSlots(bmtVT->cVtableSlots - bmtVT->cVirtualSlots); |
10109 | |
10110 | // Now setup the method table |
10111 | // interface map is allocated along with the method table |
10112 | MethodTable *pMT = AllocateNewMT(pLoaderModule, |
10113 | bmtVT->cVtableSlots, |
10114 | bmtVT->cVirtualSlots, |
10115 | dwGCSize, |
10116 | bmtInterface->dwInterfaceMapSize, |
10117 | bmtGenerics->numDicts, |
10118 | cbDict, |
10119 | GetParentMethodTable(), |
10120 | GetClassLoader(), |
10121 | bmtAllocator, |
10122 | IsInterface(), |
10123 | bmtProp->fDynamicStatics, |
10124 | bmtProp->fGenericsStatics, |
10125 | fNeedsRCWPerTypeData, |
10126 | #ifdef FEATURE_COMINTEROP |
10127 | fHasDynamicInterfaceMap, |
10128 | #endif |
10129 | #ifdef FEATURE_PREJIT |
10130 | pComputedPZM, |
10131 | #endif //FEATURE_PREJIT |
10132 | GetMemTracker()); |
10133 | |
10134 | pMT->SetClass(pClass); |
10135 | pClass->m_pMethodTable.SetValue(pMT); |
10136 | m_pHalfBakedMT = pMT; |
10137 | |
10138 | #ifdef _DEBUG |
10139 | pMT->SetDebugClassName(GetDebugClassName()); |
10140 | #endif |
10141 | |
10142 | #ifdef FEATURE_COMINTEROP |
10143 | if (fNeedsRCWPerTypeData) |
10144 | pMT->SetHasRCWPerTypeData(); |
10145 | #endif // FEATURE_COMINTEROP |
10146 | |
10147 | |
10148 | if (IsInterface()) |
10149 | pMT->SetIsInterface(); |
10150 | |
10151 | if (GetParentMethodTable() != NULL) |
10152 | { |
10153 | if (GetParentMethodTable()->HasModuleDependencies()) |
10154 | { |
10155 | pMT->SetHasModuleDependencies(); |
10156 | } |
10157 | else |
10158 | { |
10159 | Module * pModule = GetModule(); |
10160 | Module * pParentModule = GetParentMethodTable()->GetModule(); |
10161 | if (pModule != pParentModule) |
10162 | { |
10163 | pMT->SetHasModuleDependencies(); |
10164 | } |
10165 | } |
10166 | |
10167 | if (GetParentMethodTable()->HasPreciseInitCctors() || !pClass->IsBeforeFieldInit()) |
10168 | { |
10169 | pMT->SetHasPreciseInitCctors(); |
10170 | } |
10171 | } |
10172 | |
10173 | // Must be done early because various methods test HasInstantiation() and ContainsGenericVariables() |
10174 | if (bmtGenerics->GetNumGenericArgs() != 0) |
10175 | { |
10176 | pMT->SetHasInstantiation(bmtGenerics->fTypicalInstantiation, bmtGenerics->fSharedByGenericInstantiations); |
10177 | |
10178 | if (bmtGenerics->fContainsGenericVariables) |
10179 | pMT->SetContainsGenericVariables(); |
10180 | } |
10181 | |
10182 | if (bmtGenerics->numDicts != 0) |
10183 | { |
10184 | if (!FitsIn<WORD>(bmtGenerics->GetNumGenericArgs())) |
10185 | { |
10186 | BuildMethodTableThrowException(IDS_CLASSLOAD_TOOMANYGENERICARGS); |
10187 | } |
10188 | |
10189 | pMT->SetDictInfo(bmtGenerics->numDicts, |
10190 | static_cast<WORD>(bmtGenerics->GetNumGenericArgs())); |
10191 | } |
10192 | |
10193 | CONSISTENCY_CHECK(pMT->GetNumGenericArgs() == bmtGenerics->GetNumGenericArgs()); |
10194 | CONSISTENCY_CHECK(pMT->GetNumDicts() == bmtGenerics->numDicts); |
10195 | CONSISTENCY_CHECK(pMT->HasInstantiation() == bmtGenerics->HasInstantiation()); |
10196 | CONSISTENCY_CHECK(pMT->HasInstantiation() == !pMT->GetInstantiation().IsEmpty()); |
10197 | |
10198 | pMT->SetLoaderModule(pLoaderModule); |
10199 | pMT->SetLoaderAllocator(bmtAllocator); |
10200 | |
10201 | pMT->SetModule(GetModule()); |
10202 | |
10203 | pMT->SetInternalCorElementType (ELEMENT_TYPE_CLASS); |
10204 | |
10205 | SetNonGCRegularStaticFieldBytes (bmtProp->dwNonGCRegularStaticFieldBytes); |
10206 | SetNonGCThreadStaticFieldBytes (bmtProp->dwNonGCThreadStaticFieldBytes); |
10207 | |
10208 | #ifdef FEATURE_TYPEEQUIVALENCE |
10209 | if (bmtProp->fHasTypeEquivalence) |
10210 | { |
10211 | pMT->SetHasTypeEquivalence(); |
10212 | } |
10213 | #endif //FEATURE_TYPEEQUIVALENCE |
10214 | |
10215 | #ifdef FEATURE_COMINTEROP |
10216 | if (bmtProp->fSparse) |
10217 | pClass->SetSparseForCOMInterop(); |
10218 | |
10219 | if (IsInterface() && IsComImport()) |
10220 | { |
10221 | // Determine if we are creating an interface methodtable that may be used to dispatch through VSD |
10222 | // on an object that has the methodtable of __ComObject. |
10223 | |
10224 | // This is done to allow COM tearoff interfaces, but as a side-effect of this feature, |
10225 | // we end up using a domain-shared type (__ComObject) with a domain-specific dispatch token. |
10226 | // This is a problem because the same domain-specific dispatch token value can appear in |
10227 | // multiple unshared domains (VSD takes advantage of the fact that in general a shared type |
10228 | // cannot implement an unshared interface). This means that the same <token, __ComObject> pair |
10229 | // value can mean different things in different domains (since the token could represent |
10230 | // IFoo in one domain and IBar in another). This is a problem because the |
10231 | // VSD polymorphic lookup mechanism relies on a process-wide cache table, and as a result |
10232 | // these duplicate values would collide if we didn't use fat dispatch tokens to ensure uniqueness |
10233 | // and the interface methodtable is not in the shared domain. |
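 | // Rough illustration (not a precise description of the token encoding): a regular dispatch |
 | // token packs an interface type id and a slot number into a single word, while a fat token |
 | // keeps that pair out-of-line so the id portion can be kept unique process-wide, which is |
 | // what removes the <token, __ComObject> ambiguity described above. |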
10234 | |
10235 | pMT->SetRequiresFatDispatchTokens(); |
10236 | } |
10237 | #endif // FEATURE_COMINTEROP |
10238 | |
10239 | if (bmtVT->pCCtor != NULL) |
10240 | { |
10241 | pMT->SetHasClassConstructor(); |
10242 | CONSISTENCY_CHECK(pMT->GetClassConstructorSlot() == bmtVT->pCCtor->GetSlotIndex()); |
10243 | } |
10244 | if (bmtVT->pDefaultCtor != NULL) |
10245 | { |
10246 | pMT->SetHasDefaultConstructor(); |
10247 | CONSISTENCY_CHECK(pMT->GetDefaultConstructorSlot() == bmtVT->pDefaultCtor->GetSlotIndex()); |
10248 | } |
10249 | |
10250 | for (MethodDescChunk *pChunk = GetHalfBakedClass()->GetChunks(); pChunk != NULL; pChunk = pChunk->GetNextChunk()) |
10251 | { |
10252 | pChunk->SetMethodTable(pMT); |
10253 | } |
10254 | |
10255 | #ifdef _DEBUG |
10256 | { |
10257 | // disable IBC logging because we can assert in ComputePreferredZapModule for partially constructed |
10258 | // generic types |
10259 | IBCLoggingDisabler disableLogging; |
10260 | |
10261 | DeclaredMethodIterator it(*this); |
10262 | while (it.Next()) |
10263 | { |
10264 | MethodDesc *pMD = it->GetMethodDesc(); |
10265 | if (pMD != NULL) |
10266 | { |
10267 | pMD->m_pDebugMethodTable.SetValue(pMT); |
10268 | pMD->m_pszDebugMethodSignature = FormatSig(pMD, GetLoaderAllocator()->GetLowFrequencyHeap(), GetMemTracker()); |
10269 | } |
10270 | MethodDesc *pUnboxedMD = it->GetUnboxedMethodDesc(); |
10271 | if (pUnboxedMD != NULL) |
10272 | { |
10273 | pUnboxedMD->m_pDebugMethodTable.SetValue(pMT); |
10274 | pUnboxedMD->m_pszDebugMethodSignature = FormatSig(pUnboxedMD, GetLoaderAllocator()->GetLowFrequencyHeap(), GetMemTracker()); |
10275 | } |
10276 | } |
10277 | } |
10278 | #endif // _DEBUG |
10279 | |
10280 | // Note that for value classes, the following calculation is only appropriate |
10281 | // when the instance is in its "boxed" state. |
10282 | if (!IsInterface()) |
10283 | { |
10284 | DWORD baseSize = Max<DWORD>(bmtFP->NumInstanceFieldBytes + OBJECT_BASESIZE, MIN_OBJECT_SIZE); |
10285 | baseSize = (baseSize + ALLOC_ALIGN_CONSTANT) & ~ALLOC_ALIGN_CONSTANT; // m_BaseSize must be aligned |
10286 | pMT->SetBaseSize(baseSize); |
10287 | |
10288 | GetHalfBakedClass()->SetBaseSizePadding(baseSize - bmtFP->NumInstanceFieldBytes); |
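 | // Worked example (illustrative, assuming a 64-bit target where OBJECT_BASESIZE is 16 and |
 | // ALLOC_ALIGN_CONSTANT is 7): 20 bytes of instance fields give 20 + 16 = 36, which is rounded |
 | // up to 40, so the base size padding recorded above would be 40 - 20 = 20 bytes. |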
10289 | |
10290 | if (bmtProp->fIsComObjectType) |
10291 | { // Propagate the com specific info |
10292 | pMT->SetComObjectType(); |
10293 | #ifdef FEATURE_COMINTEROP |
10294 | // COM objects need an optional field on the EEClass, so ensure this class instance has allocated |
10295 | // the optional field descriptor. |
10296 | EnsureOptionalFieldsAreAllocated(pClass, m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
10297 | #endif // FEATURE_COMINTEROP |
10298 | } |
10299 | |
10300 | #ifdef FEATURE_COMINTEROP |
10301 | if (pMT->GetAssembly()->IsManagedWinMD()) |
10302 | { |
10303 | // We need to mark classes that are implementations of managed WinRT runtime classes with |
10304 | // the "exported to WinRT" flag. It's not quite possible to tell which ones these are by |
10305 | // reading metadata so we ask the adapter. |
10306 | |
10307 | IWinMDImport *pWinMDImport = pMT->GetAssembly()->GetManifestWinMDImport(); |
10308 | _ASSERTE(pWinMDImport != NULL); |
10309 | |
10310 | BOOL bResult; |
10311 | IfFailThrow(pWinMDImport->IsRuntimeClassImplementation(GetCl(), &bResult)); |
10312 | |
10313 | if (bResult) |
10314 | { |
10315 | pClass->SetExportedToWinRT(); |
10316 | |
10317 | // We need optional fields for activation from WinRT. |
10318 | EnsureOptionalFieldsAreAllocated(pClass, m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
10319 | } |
10320 | } |
10321 | |
10322 | if (pClass->IsProjectedFromWinRT() || pClass->IsExportedToWinRT()) |
10323 | { |
10324 | const BYTE * pVal; |
10325 | ULONG cbVal; |
10326 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(GetCl(), g_WindowsFoundationMarshalingBehaviorAttributeClassName, (const void **) &pVal, &cbVal); |
10327 | if (hr == S_OK) |
10328 | { |
10329 | CustomAttributeParser cap(pVal, cbVal); |
10330 | IfFailThrow(cap.SkipProlog()); |
10331 | UINT32 u = 0; |
10332 | IfFailThrow(cap.GetU4(&u)); |
10333 | if(u > 0) |
10334 | pClass->SetMarshalingType(u); |
10335 | } |
10336 | } |
10337 | #endif // FEATURE_COMINTEROP |
10338 | } |
10339 | else |
10340 | { |
10341 | #ifdef FEATURE_COMINTEROP |
10342 | // If this is an interface then we need to set the ComInterfaceType to |
10343 | // -1 to indicate we have not yet determined the interface type. |
10344 | pClass->SetComInterfaceType((CorIfaceAttr)-1); |
10345 | |
10346 | // If this is a special COM event interface, then mark the MT as such. |
10347 | if (bmtProp->fComEventItfType) |
10348 | { |
10349 | pClass->SetComEventItfType(); |
10350 | } |
10351 | #endif // FEATURE_COMINTEROP |
10352 | } |
10353 | _ASSERTE((pMT->IsInterface() == 0) == (IsInterface() == 0)); |
10354 | |
10355 | if (HasLayout()) |
10356 | { |
10357 | pClass->SetNativeSize(GetLayoutInfo()->GetNativeSize()); |
10358 | } |
10359 | |
10360 | FieldDesc *pFieldDescList = pClass->GetFieldDescList(); |
10361 | // Set all field slots to point to the newly created MethodTable |
10362 | for (i = 0; i < (bmtEnumFields->dwNumStaticFields + bmtEnumFields->dwNumInstanceFields); i++) |
10363 | { |
10364 | pFieldDescList[i].m_pMTOfEnclosingClass.SetValue(pMT); |
10365 | } |
10366 | |
10367 | // Fill in type parameters before looking up exact parent or fetching the types of any field descriptors! |
10368 | // This must come before the use of GetFieldType in the value class representation optimization below. |
10369 | if (bmtGenerics->GetNumGenericArgs() != 0) |
10370 | { |
10371 | // Space has already been allocated for the instantiation but the parameters haven't been filled in |
10372 | Instantiation destInst = pMT->GetInstantiation(); |
10373 | Instantiation inst = bmtGenerics->GetInstantiation(); |
10374 | |
10375 | // So fill them in... |
10376 | TypeHandle * pInstDest = (TypeHandle *)destInst.GetRawArgs(); |
10377 | for (DWORD j = 0; j < bmtGenerics->GetNumGenericArgs(); j++) |
10378 | { |
10379 | pInstDest[j] = inst[j]; |
10380 | } |
10381 | } |
10382 | |
10383 | CorElementType normalizedType = ELEMENT_TYPE_CLASS; |
10384 | if (IsValueClass()) |
10385 | { |
10386 | if (IsEnum()) |
10387 | { |
10388 | if (GetNumInstanceFields() != 1 || |
10389 | !CorTypeInfo::IsPrimitiveType(pFieldDescList[0].GetFieldType())) |
10390 | { |
10391 | BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD, mdTokenNil); |
10392 | } |
10393 | CONSISTENCY_CHECK(!pFieldDescList[0].IsStatic()); |
10394 | normalizedType = pFieldDescList->GetFieldType(); |
10395 | } |
10396 | else |
10397 | { |
10398 | #ifdef _TARGET_X86_ |
10399 | // JIT64 is not aware of normalized value types and this |
10400 | // optimization (return small value types by value in registers) |
10401 | // is already done in JIT64. |
10402 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOADED); |
10403 | normalizedType = EEClass::ComputeInternalCorElementTypeForValueType(pMT); |
10404 | #else |
10405 | normalizedType = ELEMENT_TYPE_VALUETYPE; |
10406 | #endif |
10407 | } |
10408 | } |
10409 | pMT->SetInternalCorElementType(normalizedType); |
10410 | |
10411 | if (bmtProp->fIsIntrinsicType) |
10412 | { |
10413 | pMT->SetIsIntrinsicType(); |
10414 | } |
10415 | |
10416 | if (GetModule()->IsSystem()) |
10417 | { |
10418 | // we are in mscorlib |
10419 | CheckForSystemTypes(); |
10420 | } |
10421 | |
10422 | // Now fill in the real interface map with the approximate interfaces |
10423 | if (bmtInterface->dwInterfaceMapSize > 0) |
10424 | { |
10425 | // First ensure we have enough space to record extra flag information for each interface (we don't |
10426 | // record this directly into each interface map entry since these flags don't pack well due to |
10427 | // alignment). |
10428 | PVOID pExtraInterfaceInfo = NULL; |
10429 | SIZE_T cbExtraInterfaceInfo = MethodTable::GetExtraInterfaceInfoSize(bmtInterface->dwInterfaceMapSize); |
10430 | if (cbExtraInterfaceInfo) |
10431 | pExtraInterfaceInfo = GetMemTracker()->Track(GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(cbExtraInterfaceInfo))); |
10432 | |
10433 | // Call this even in the case where pExtraInterfaceInfo == NULL (certain cases are optimized and don't |
10434 | // require extra buffer space). |
10435 | pMT->InitializeExtraInterfaceInfo(pExtraInterfaceInfo); |
10436 | |
10437 | InterfaceInfo_t *pInterfaces = pMT->GetInterfaceMap(); |
10438 | |
10439 | CONSISTENCY_CHECK(CheckPointer(pInterfaces)); |
10440 | |
10441 | // Copy the interface map member by member so there is no junk in the padding. |
10442 | for (i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
10443 | { |
10444 | bmtInterfaceEntry * pEntry = &bmtInterface->pInterfaceMap[i]; |
10445 | |
10446 | if (pEntry->IsDeclaredOnType()) |
10447 | pMT->SetInterfaceDeclaredOnClass(i); |
10448 | _ASSERTE(!!pEntry->IsDeclaredOnType() == !!pMT->IsInterfaceDeclaredOnClass(i)); |
10449 | |
10450 | pInterfaces[i].SetMethodTable(pEntry->GetInterfaceType()->GetMethodTable()); |
10451 | } |
10452 | } |
10453 | |
10454 | pMT->SetCl(GetCl()); |
10455 | |
10456 | // The type is sufficiently initialized for most general purpose accessor methods to work. |
10457 | // Mark the type as restored to avoid asserts. Note that this also enables IBC logging. |
10458 | pMT->GetWriteableDataForWrite_NoLogging()->SetIsRestoredForBuildMethodTable(); |
10459 | |
10460 | #ifdef _DEBUG |
10461 | // Store status if we tried to inject duplicate interfaces |
10462 | if (bmtInterface->dbg_fShouldInjectInterfaceDuplicates) |
10463 | pMT->Debug_SetHasInjectedInterfaceDuplicates(); |
10464 | #endif //_DEBUG |
10465 | |
10466 | // Keep bmtInterface data around since we no longer write the flags (IsDeclaredOnType and |
10467 | // IsImplementedByParent) into the interface map (these flags are only required during type loading). |
10468 | |
10469 | { |
10470 | for (MethodDescChunk *pChunk = GetHalfBakedClass()->GetChunks(); pChunk != NULL; pChunk = pChunk->GetNextChunk()) |
10471 | { |
10472 | // Make sure that temporary entrypoints are created for methods. NGEN uses temporary |
10473 | // entrypoints as surrogate keys for precodes. |
10474 | pChunk->EnsureTemporaryEntryPointsCreated(GetLoaderAllocator(), GetMemTracker()); |
10475 | } |
10476 | } |
10477 | |
10478 | { // copy onto the real vtable (methods only) |
10479 | //@GENERICS: Because we sometimes load an inexact parent (see ClassLoader::GetParent) the inherited slots might |
10480 | // come from the wrong place and need fixing up once we know the exact parent |
10481 | |
10482 | for (bmtVtable::Iterator slotIt = bmtVT->IterateSlots(); !slotIt.AtEnd(); ++slotIt) |
10483 | { |
10484 | SLOT_INDEX iCurSlot = static_cast<SLOT_INDEX>(slotIt.CurrentIndex()); |
10485 | |
10486 | // We want the unboxed MethodDesc if we're out of the virtual method range |
10487 | // and the method we're dealing with has an unboxing method. If so, then |
10488 | // the unboxing method was placed in the virtual section of the vtable and |
10489 | // we now need to place the unboxed version. |
10490 | MethodDesc * pMD = NULL; |
10491 | if (iCurSlot < bmtVT->cVirtualSlots || !slotIt->Impl().AsMDMethod()->IsUnboxing()) |
10492 | { |
10493 | pMD = slotIt->Impl().GetMethodDesc(); |
10494 | CONSISTENCY_CHECK(slotIt->Decl().GetSlotIndex() == iCurSlot); |
10495 | } |
10496 | else |
10497 | { |
10498 | pMD = slotIt->Impl().AsMDMethod()->GetUnboxedMethodDesc(); |
10499 | CONSISTENCY_CHECK(pMD->GetSlot() == iCurSlot); |
10500 | } |
10501 | |
10502 | CONSISTENCY_CHECK(CheckPointer(pMD)); |
10503 | |
10504 | if (pMD->GetMethodTable() != pMT) |
10505 | { |
10506 | // |
10507 | // Inherited slots |
10508 | // |
10509 | // Do not write into vtable chunks shared with parent. It would introduce race |
10510 | // with code:MethodDesc::SetStableEntryPointInterlocked. |
10511 | // |
10512 | DWORD indirectionIndex = MethodTable::GetIndexOfVtableIndirection(iCurSlot); |
10513 | if (GetParentMethodTable()->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull() != pMT->GetVtableIndirections()[indirectionIndex].GetValueMaybeNull()) |
10514 | pMT->SetSlot(iCurSlot, pMD->GetMethodEntryPoint()); |
10515 | } |
10516 | else |
10517 | { |
10518 | // |
10519 | // Owned slots |
10520 | // |
10521 | _ASSERTE(iCurSlot >= bmtVT->cVirtualSlots || ChangesImplementationOfVirtualSlot(iCurSlot)); |
10522 | |
10523 | PCODE addr = pMD->GetTemporaryEntryPoint(); |
10524 | _ASSERTE(addr != NULL); |
10525 | |
10526 | if (pMD->HasNonVtableSlot()) |
10527 | { |
10528 | *((PCODE *)pMD->GetAddrOfSlot()) = addr; |
10529 | } |
10530 | else |
10531 | { |
10532 | pMT->SetSlot(iCurSlot, addr); |
10533 | } |
10534 | |
10535 | if (pMD->GetSlot() == iCurSlot && pMD->RequiresStableEntryPoint()) |
10536 | { |
10537 | // The rest of the system assumes that certain methods always have stable entrypoints. |
10538 | // Create them now. |
10539 | pMD->GetOrCreatePrecode(); |
10540 | } |
10541 | } |
10542 | } |
10543 | } |
10544 | |
10545 | // If we have any entries, then finalize them and allocate the object in class loader heap |
10546 | DispatchMap *pDispatchMap = NULL; |
10547 | DispatchMapBuilder *pDispatchMapBuilder = bmtVT->pDispatchMapBuilder; |
10548 | CONSISTENCY_CHECK(CheckPointer(pDispatchMapBuilder)); |
10549 | |
10550 | if (pDispatchMapBuilder->Count() > 0) |
10551 | { |
10552 | // Create a map in stacking memory. |
10553 | BYTE * pbMap; |
10554 | UINT32 cbMap; |
10555 | DispatchMap::CreateEncodedMapping( |
10556 | pMT, |
10557 | pDispatchMapBuilder, |
10558 | pDispatchMapBuilder->GetAllocator(), |
10559 | &pbMap, |
10560 | &cbMap); |
10561 | |
10562 | // Now finalize the impltable and allocate the block in the low frequency loader heap |
10563 | size_t objSize = (size_t) DispatchMap::GetObjectSize(cbMap); |
10564 | void * pv = AllocateFromLowFrequencyHeap(S_SIZE_T(objSize)); |
10565 | _ASSERTE(pv != NULL); |
10566 | |
10567 | // Use placement new |
10568 | pDispatchMap = new (pv) DispatchMap(pbMap, cbMap); |
10569 | pMT->SetDispatchMap(pDispatchMap); |
10570 | |
10571 | #ifdef LOGGING |
10572 | g_sdStats.m_cDispatchMap++; |
10573 | g_sdStats.m_cbDispatchMap += (UINT32) objSize; |
10574 | LOG((LF_LOADER, LL_INFO1000, "SD: Dispatch map for %s: %d bytes for map, %d bytes total for object.\n" , |
10575 | pMT->GetDebugClassName(), cbMap, objSize)); |
10576 | #endif // LOGGING |
10577 | |
10578 | } |
10579 | |
10580 | // GetMethodData by default will cache its result. However, in the case that we're |
10581 | // building a MethodTable, we aren't guaranteed that this type is going to successfully |
10582 | // load and so caching it would result in errors down the road since the memory and |
10583 | // type occupying the same memory location would very likely be incorrect. The second |
10584 | // argument specifies that GetMethodData should not cache the returned object. |
10585 | MethodTable::MethodDataWrapper hMTData(MethodTable::GetMethodData(pMT, FALSE)); |
10586 | |
10587 | if (!IsInterface()) |
10588 | { |
10589 | // Propagate inheritance. |
10590 | |
10591 | // NOTE: In the world of unfolded interfaces this was used to propagate overrides into |
10592 | // the unfolded interface vtables to make sure that overrides of virtual methods |
10593 | // also overrode the interface methods that they contributed to. This had the |
10594 | // unfortunate side-effect of also overwriting regular vtable slots that had been |
10595 | // methodimpl'd and as a result changed the meaning of methodimpl from "substitute |
10596 | // the body of method A with the body of method B" to "unify the slots of methods |
10597 | // A and B". But now compilers have come to rely on this side-effect and it |
10598 | // cannot be brought back to its originally intended behaviour. |
10599 | |
10600 | // For every slot whose body comes from another slot (determined by getting the MethodDesc |
10601 | // for a slot and seeing if MethodDesc::GetSlot returns a different value than the slot |
10602 | // from which the MethodDesc was recovered), copy the value of the slot stated by the |
10603 | // MethodDesc over top of the current slot. |
10604 | |
10605 | // Because of the way slot unification works, we need to iterate the entire vtable until |
10606 | // no slots need to be updated. To understand this, imagine the following: |
10607 | // C1::M1 is overridden by C2::M2 |
10608 | // C1::M2 is methodImpled by C1::M3 |
10609 | // C1::M3 is overridden by C2::M3 |
10610 | // This should mean that C1::M1 is implemented by C2::M3, but if we didn't run the below |
10611 | // for loop a second time, this would not be propagated properly - it would only be placed |
10612 | // into the slot for C1::M2 and never make its way up to C1::M1. |
10613 | |
10614 | BOOL fChangeMade; |
10615 | do |
10616 | { |
10617 | fChangeMade = FALSE; |
10618 | for (i = 0; i < pMT->GetNumVirtuals(); i++) |
10619 | { |
10620 | MethodDesc* pMD = hMTData->GetImplMethodDesc(i); |
10621 | |
10622 | CONSISTENCY_CHECK(CheckPointer(pMD)); |
10623 | CONSISTENCY_CHECK(pMD == pMT->GetMethodDescForSlot(i)); |
10624 | |
10625 | // This indicates that the method body in this slot was copied here through a methodImpl. |
10626 | // Thus, copy the value of the slot from which the body originally came, in case it was |
10627 | // overridden, to make sure the two slots stay in sync. |
10628 | INDEBUG(MethodDesc * pMDOld; pMDOld = pMD;) |
10629 | if(pMD->GetSlot() != i && |
10630 | pMT->GetSlot(i) != pMT->GetSlot(pMD->GetSlot())) |
10631 | { |
10632 | // Copy the slot value in the method's original slot. |
10633 | pMT->SetSlot(i,pMT->GetSlot(pMD->GetSlot())); |
10634 | hMTData->InvalidateCachedVirtualSlot(i); |
10635 | |
10636 | // Update the pMD to the new method desc we just copied over ourselves with. This will |
10637 | // be used in the check for missing method block below. |
10638 | pMD = pMT->GetMethodDescForSlot(pMD->GetSlot()); |
10639 | |
10640 | // This method is now duplicate |
10641 | pMD->SetDuplicate(); |
10642 | INDEBUG(g_dupMethods++;) |
10643 | fChangeMade = TRUE; |
10644 | } |
10645 | } |
10646 | } |
10647 | while (fChangeMade); |
10648 | } |
10649 | |
10650 | if (!bmtProp->fNoSanityChecks) |
10651 | VerifyVirtualMethodsImplemented(hMTData); |
10652 | |
10653 | #ifdef _DEBUG |
10654 | { |
10655 | for (bmtVtable::Iterator i = bmtVT->IterateSlots(); |
10656 | !i.AtEnd(); ++i) |
10657 | { |
10658 | _ASSERTE(i->Impl().GetMethodDesc() != NULL); |
10659 | } |
10660 | } |
10661 | #endif // _DEBUG |
10662 | |
10663 | |
10664 | #ifdef FEATURE_COMINTEROP |
10665 | // For ComObject types, i.e. if the class extends from a COM Imported class, |
10666 | // make sure any interface implemented by the COM Imported class |
10667 | // is either overridden fully or not overridden at all. |
10669 | // We relax this for WinRT where we want to be able to override individual methods. |
10670 | if (bmtProp->fIsComObjectType && !pMT->IsWinRTObjectType()) |
10671 | { |
10672 | MethodTable::InterfaceMapIterator intIt = pMT->IterateInterfaceMap(); |
10673 | while (intIt.Next()) |
10674 | { |
10675 | MethodTable* pIntfMT = intIt.GetInterface(); |
10676 | if (pIntfMT->GetNumVirtuals() != 0) |
10677 | { |
10678 | BOOL hasComImportMethod = FALSE; |
10679 | BOOL hasManagedMethod = FALSE; |
10680 | |
10681 | // NOTE: Avoid caching the MethodData object for the type being built. |
10682 | MethodTable::MethodDataWrapper hItfImplData(MethodTable::GetMethodData(pIntfMT, pMT, FALSE)); |
10683 | MethodTable::MethodIterator it(hItfImplData); |
10684 | for (;it.IsValid(); it.Next()) |
10685 | { |
10686 | MethodDesc *pClsMD = NULL; |
10687 | // If we fail to find an _IMPLEMENTATION_ for the interface MD, then |
10688 | // we are a ComImportMethod; otherwise we may still be a ComImportMethod or |
10689 | // we may be a ManagedMethod. |
10690 | DispatchSlot impl(it.GetTarget()); |
10691 | if (!impl.IsNull()) |
10692 | { |
10693 | pClsMD = it.GetMethodDesc(); |
10694 | |
10695 | CONSISTENCY_CHECK(!pClsMD->IsInterface()); |
10696 | if (pClsMD->GetClass()->IsComImport()) |
10697 | { |
10698 | hasComImportMethod = TRUE; |
10699 | } |
10700 | else |
10701 | { |
10702 | hasManagedMethod = TRUE; |
10703 | } |
10704 | } |
10705 | else |
10706 | { |
10707 | // Need to set the pClsMD for the error reporting below. |
10708 | pClsMD = it.GetDeclMethodDesc(); |
10709 | CONSISTENCY_CHECK(CheckPointer(pClsMD)); |
10710 | hasComImportMethod = TRUE; |
10711 | } |
10712 | |
10713 | // One and only one of the two must be set. |
10714 | if ((hasComImportMethod && hasManagedMethod) || |
10715 | (!hasComImportMethod && !hasManagedMethod)) |
10716 | { |
10717 | BuildMethodTableThrowException(IDS_EE_BAD_COMEXTENDS_CLASS, pClsMD->GetNameOnNonArrayClass()); |
10718 | } |
10719 | } |
10720 | } |
10721 | } |
10722 | } |
10723 | |
10724 | // For COM event interfaces, we need to make sure that all the methods are |
10725 | // methods to add or remove events. This means that they all need to take |
10726 | // a delegate derived class and have a void return type. |
10727 | if (bmtProp->fComEventItfType) |
10728 | { |
10729 | // COM event interfaces had better be interfaces. |
10730 | CONSISTENCY_CHECK(IsInterface()); |
10731 | |
10732 | // Go through all the methods and check the validity of the signature. |
10733 | // NOTE: Uses hMTData to avoid caching a MethodData object for the type being built. |
10734 | MethodTable::MethodIterator it(hMTData); |
10735 | for (;it.IsValid(); it.Next()) |
10736 | { |
10737 | MethodDesc* pMD = it.GetMethodDesc(); |
10738 | _ASSERTE(pMD); |
10739 | |
10740 | MetaSig Sig(pMD); |
10741 | |
10742 | { |
10743 | CONTRACT_VIOLATION(LoadsTypeViolation); |
10744 | if (Sig.GetReturnType() != ELEMENT_TYPE_VOID || |
10745 | Sig.NumFixedArgs() != 1 || |
10746 | Sig.NextArg() != ELEMENT_TYPE_CLASS || |
10747 | !Sig.GetLastTypeHandleThrowing().CanCastTo(TypeHandle(g_pDelegateClass))) |
10748 | { |
10749 | BuildMethodTableThrowException(IDS_EE_BAD_COMEVENTITF_CLASS, pMD->GetNameOnNonArrayClass()); |
10750 | } |
10751 | } |
10752 | } |
10753 | } |
10754 | #endif // FEATURE_COMINTEROP |
10755 | |
10756 | // If this class uses any VTS (Version Tolerant Serialization) features |
10757 | // (event callbacks or OptionalField attributes) we've previously cached the |
10758 | // additional information in the bmtMFDescs structure. Now it's time to add |
10759 | // this information as an optional extension to the MethodTable. |
10760 | } |
10761 | #ifdef _PREFAST_ |
10762 | #pragma warning(pop) |
10763 | #endif |
10764 | |
10765 | // Returns true if there is at least one default implementation for this interface method. |
10766 | // We don't care about conflicts at this stage in order to avoid impacting type load performance. |
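 | // |
 | // Illustrative example (C#-style pseudocode, not taken from this codebase): given |
 | //     interface IFoo        { void M(); } |
 | //     interface IBar : IFoo { void IFoo.M() { /* default body */ } } |
 | //     class C : IBar        { } |
 | // when building C, the decl IFoo::M is abstract, so the scan below walks C's interface map, |
 | // finds the MethodImpl on IBar whose decl occupies IFoo::M's slot, resolves the decl's parent |
 | // token back to IFoo, and returns TRUE. |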
10767 | BOOL MethodTableBuilder::HasDefaultInterfaceImplementation(bmtRTType *pDeclType, MethodDesc *pDeclMD) |
10768 | { |
10769 | STANDARD_VM_CONTRACT; |
10770 | |
10771 | #ifdef FEATURE_DEFAULT_INTERFACES |
10772 | // If the interface method is already non-abstract, we are done |
10773 | if (!pDeclMD->IsAbstract()) |
10774 | return TRUE; |
10775 | |
10776 | int targetSlot = pDeclMD->GetSlot(); |
10777 | |
10778 | // Iterate over all the interfaces this type implements |
10779 | bmtInterfaceEntry * pItfEntry = NULL; |
10780 | for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++) |
10781 | { |
10782 | bmtRTType * pCurItf = bmtInterface->pInterfaceMap[i].GetInterfaceType(); |
10783 | |
10784 | // Go over the methods on the interface |
10785 | MethodTable::IntroducedMethodIterator methIt(pCurItf->GetMethodTable()); |
10786 | for (; methIt.IsValid(); methIt.Next()) |
10787 | { |
10788 | MethodDesc * pPotentialImpl = methIt.GetMethodDesc(); |
10789 | |
10790 | // If this interface method is not a MethodImpl, it can't possibly implement |
10791 | // the interface method we are looking for |
10792 | if (!pPotentialImpl->IsMethodImpl()) |
10793 | continue; |
10794 | |
10795 | // Go over all the decls this MethodImpl is implementing |
10796 | MethodImpl::Iterator it(pPotentialImpl); |
10797 | for (; it.IsValid(); it.Next()) |
10798 | { |
10799 | MethodDesc *pPotentialDecl = it.GetMethodDesc(); |
10800 | |
10801 | // Check this is a decl with the right slot |
10802 | if (pPotentialDecl->GetSlot() != targetSlot) |
10803 | continue; |
10804 | |
10805 | // Find out what interface this default implementation is implementing |
10806 | mdToken tkParent; |
10807 | IfFailThrow(GetModule()->GetMDImport()->GetParentToken(it.GetToken(), &tkParent)); |
10808 | |
10809 | // We can only load the approximate interface at this point |
10810 | MethodTable * pPotentialInterfaceMT = ClassLoader::LoadTypeDefOrRefOrSpecThrowing( |
10811 | GetModule(), |
10812 | tkParent, |
10813 | &bmtGenerics->typeContext, |
10814 | ClassLoader::ThrowIfNotFound, |
10815 | ClassLoader::PermitUninstDefOrRef, |
10816 | ClassLoader::LoadTypes, |
10817 | CLASS_LOAD_APPROXPARENTS, |
10818 | TRUE).GetMethodTable()->GetCanonicalMethodTable(); |
10819 | |
10820 | // Is this a default implementation for the interface we are looking for? |
10821 | if (pDeclType->GetMethodTable()->HasSameTypeDefAs(pPotentialInterfaceMT)) |
10822 | { |
10823 | // If the type is not generic, matching defs are all we need |
10824 | if (!pDeclType->GetMethodTable()->HasInstantiation()) |
10825 | return TRUE; |
10826 | |
10827 | // If this is generic, we need to compare under substitutions |
10828 | Substitution curItfSubs(tkParent, GetModule(), &pCurItf->GetSubstitution()); |
10829 | |
10830 | // Type Equivalence is not respected for this comparison as you can have multiple type equivalent interfaces on a class |
10831 | TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL); |
10832 | if (MetaSig::CompareTypeDefsUnderSubstitutions( |
10833 | pPotentialInterfaceMT, pDeclType->GetMethodTable(), |
10834 | &curItfSubs, &pDeclType->GetSubstitution(), |
10835 | &newVisited)) |
10836 | { |
10837 | return TRUE; |
10838 | } |
10839 | } |
10840 | } |
10841 | } |
10842 | } |
10843 | #endif // FEATURE_DEFAULT_INTERFACES |
10844 | |
10845 | return FALSE; |
10846 | } |
10847 | |
10848 | void MethodTableBuilder::VerifyVirtualMethodsImplemented(MethodTable::MethodData * hMTData) |
10849 | { |
10850 | STANDARD_VM_CONTRACT; |
10851 | |
10852 | // |
10853 | // This verification is not applicable or required in many cases |
10854 | // |
10855 | |
10856 | if (IsAbstract() || IsInterface()) |
10857 | return; |
10858 | |
10859 | #ifdef FEATURE_COMINTEROP |
10860 | // Note that this is important for WinRT where redirected .NET interfaces appear on the interface |
10861 | // impl list but their methods are not implemented (the adapter only hides the WinRT methods, it |
10862 | // does not make up the .NET ones). |
10863 | if (bmtProp->fIsComObjectType) |
10864 | return; |
10865 | #endif // FEATURE_COMINTEROP |
10866 | |
10867 | // Since interfaces aren't laid out in the vtable for stub dispatch, what we need to do |
10868 | // is try to find an implementation for every interface contract by iterating through |
10869 | // the interfaces not declared on a parent. |
10870 | BOOL fParentIsAbstract = FALSE; |
10871 | if (HasParent()) |
10872 | { |
10873 | fParentIsAbstract = GetParentMethodTable()->IsAbstract(); |
10874 | } |
10875 | |
10876 | // If the parent is abstract, we need to check that each virtual method is implemented |
10877 | if (fParentIsAbstract) |
10878 | { |
10879 | // NOTE: Uses hMTData to avoid caching a MethodData object for the type being built. |
10880 | MethodTable::MethodIterator it(hMTData); |
10881 | for (; it.IsValid() && it.IsVirtual(); it.Next()) |
10882 | { |
10883 | MethodDesc *pMD = it.GetMethodDesc(); |
10884 | if (pMD->IsAbstract()) |
10885 | { |
10886 | MethodDesc *pDeclMD = it.GetDeclMethodDesc(); |
10887 | BuildMethodTableThrowException(IDS_CLASSLOAD_NOTIMPLEMENTED, pDeclMD->GetNameOnNonArrayClass()); |
10888 | } |
10889 | } |
10890 | } |
10891 | |
10892 | DispatchMapTypeID * rgInterfaceDispatchMapTypeIDs = |
10893 | new (GetStackingAllocator()) DispatchMapTypeID[bmtInterface->dwInterfaceMapSize]; |
10894 | |
10895 | bmtInterfaceInfo::MapIterator intIt = bmtInterface->IterateInterfaceMap(); |
10896 | for (; !intIt.AtEnd(); intIt.Next()) |
10897 | { |
10898 | if (fParentIsAbstract || !intIt->IsImplementedByParent()) |
10899 | { |
10900 | // Compute all TypeIDs for this interface (all duplicates in the interface map) |
10901 | UINT32 cInterfaceDuplicates; |
10902 | ComputeDispatchMapTypeIDs( |
10903 | intIt->GetInterfaceType()->GetMethodTable(), |
10904 | &intIt->GetInterfaceType()->GetSubstitution(), |
10905 | rgInterfaceDispatchMapTypeIDs, |
10906 | bmtInterface->dwInterfaceMapSize, |
10907 | &cInterfaceDuplicates); |
10908 | _ASSERTE(cInterfaceDuplicates <= bmtInterface->dwInterfaceMapSize); |
10909 | _ASSERTE(cInterfaceDuplicates > 0); |
10910 | |
10911 | // NOTE: This override does not cache the resulting MethodData object. |
10912 | MethodTable::MethodDataWrapper hData(MethodTable::GetMethodData( |
10913 | rgInterfaceDispatchMapTypeIDs, |
10914 | cInterfaceDuplicates, |
10915 | intIt->GetInterfaceType()->GetMethodTable(), |
10916 | GetHalfBakedMethodTable())); |
10917 | MethodTable::MethodIterator it(hData); |
10918 | for (; it.IsValid() && it.IsVirtual(); it.Next()) |
10919 | { |
10920 | if (it.GetTarget().IsNull()) |
10921 | { |
10922 | MethodDesc *pMD = it.GetDeclMethodDesc(); |
10923 | |
10924 | if (!HasDefaultInterfaceImplementation(intIt->GetInterfaceType(), pMD)) |
10925 | BuildMethodTableThrowException(IDS_CLASSLOAD_NOTIMPLEMENTED, pMD->GetNameOnNonArrayClass()); |
10926 | } |
10927 | } |
10928 | } |
10929 | } |
10930 | } |
10931 | |
10932 | INT32 __stdcall IsDefined(Module *pModule, mdToken token, TypeHandle attributeClass) |
10933 | { |
10934 | CONTRACTL |
10935 | { |
10936 | THROWS; |
10937 | GC_TRIGGERS; |
10938 | } |
10939 | CONTRACTL_END; |
10940 | |
10941 | BOOL isDefined = FALSE; |
10942 | |
10943 | IMDInternalImport *pInternalImport = pModule->GetMDImport(); |
10944 | BOOL isSealed = FALSE; |
10945 | |
10946 | HENUMInternalHolder hEnum(pInternalImport); |
10947 | TypeHandle caTH; |
10948 | |
10949 | // Get the enum first but don't get any values |
10950 | hEnum.EnumInit(mdtCustomAttribute, token); |
10951 | |
10952 | ULONG cMax = pInternalImport->EnumGetCount(&hEnum); |
10953 | if (cMax) |
10954 | { |
10955 | // we have something to look at |
10956 | |
10957 | |
10958 | if (!attributeClass.IsNull()) |
10959 | isSealed = attributeClass.GetMethodTable()->IsSealed(); |
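 | // If the requested attribute type is sealed, no subtype of it can exist, so the exact |
 | // TypeHandle comparison performed later is sufficient; otherwise each candidate attribute |
 | // type has to be checked with the more expensive CanCastTo. |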
10960 | |
10961 | // Loop through the Attributes and look for the requested one |
10962 | mdCustomAttribute cv; |
10963 | while (pInternalImport->EnumNext(&hEnum, &cv)) |
10964 | { |
10965 | // |
10966 | // fetch the ctor |
10967 | mdToken tkCtor; |
10968 | IfFailThrow(pInternalImport->GetCustomAttributeProps(cv, &tkCtor)); |
10969 | |
10970 | mdToken tkType = TypeFromToken(tkCtor); |
10971 | if(tkType != mdtMemberRef && tkType != mdtMethodDef) |
10972 | continue; // we only deal with the ctor case |
10973 | |
10974 | // |
10975 | // get the info to load the type, so we can check whether the current |
10976 | // attribute is a subtype of the requested attribute |
10977 | IfFailThrow(pInternalImport->GetParentToken(tkCtor, &tkType)); |
10978 | |
10979 | _ASSERTE(TypeFromToken(tkType) == mdtTypeRef || TypeFromToken(tkType) == mdtTypeDef); |
10980 | // load the type |
10981 | if (isSealed) |
10982 | { |
10983 | caTH=ClassLoader::LoadTypeDefOrRefThrowing(pModule, tkType, |
10984 | ClassLoader::ReturnNullIfNotFound, |
10985 | ClassLoader::FailIfUninstDefOrRef, |
10986 | TypeFromToken(tkType) == mdtTypeDef ? tdAllTypes : tdNoTypes); |
10987 | } |
10988 | else |
10989 | { |
10990 | caTH = ClassLoader::LoadTypeDefOrRefThrowing(pModule, tkType, |
10991 | ClassLoader::ReturnNullIfNotFound, |
10992 | ClassLoader::FailIfUninstDefOrRef); |
10993 | } |
10994 | if (caTH.IsNull()) |
10995 | continue; |
10996 | |
10997 | // a null class implies all custom attributes |
10998 | if (!attributeClass.IsNull()) |
10999 | { |
11000 | if (isSealed) |
11001 | { |
11002 | if (attributeClass != caTH) |
11003 | continue; |
11004 | } |
11005 | else |
11006 | { |
11007 | if (!caTH.CanCastTo(attributeClass)) |
11008 | continue; |
11009 | } |
11010 | } |
11011 | |
11012 | // |
11013 | // if we are here we got one |
11014 | isDefined = TRUE; |
11015 | break; |
11016 | } |
11017 | } |
11018 | |
11019 | return isDefined; |
11020 | } |
11021 | |
11022 | //******************************************************************************* |
11023 | VOID MethodTableBuilder::CheckForRemotingProxyAttrib() |
11024 | { |
11025 | STANDARD_VM_CONTRACT; |
11026 | |
11027 | } |
11028 | |
11029 | |
11030 | //******************************************************************************* |
11031 | // Checks for a bunch of special interface names and, if one matches, sets |
11032 | // bmtProp->fIsMngStandardItf to TRUE. Additionally, it checks to see if the |
11033 | // type is an interface and, if it has the ComEventInterfaceAttribute custom attribute |
11034 | // set, then it sets bmtProp->fComEventItfType to true. |
11035 | // |
11036 | // NOTE: This only does anything when COM interop is enabled. |
11037 | |
11038 | VOID MethodTableBuilder::CheckForSpecialTypes() |
11039 | { |
11040 | #ifdef FEATURE_COMINTEROP |
11041 | STANDARD_VM_CONTRACT; |
11042 | |
11043 | |
11044 | Module *pModule = GetModule(); |
11045 | IMDInternalImport *pMDImport = pModule->GetMDImport(); |
11046 | |
11047 | // Check to see if this type is a managed standard interface. All the managed |
11048 | // standard interfaces live in mscorlib.dll so checking for that first |
11049 | // makes the strcmp that comes afterwards acceptable. |
11050 | if (pModule->IsSystem()) |
11051 | { |
11052 | if (IsInterface()) |
11053 | { |
11054 | LPCUTF8 pszClassName; |
11055 | LPCUTF8 pszClassNamespace; |
11056 | if (FAILED(pMDImport->GetNameOfTypeDef(GetCl(), &pszClassName, &pszClassNamespace))) |
11057 | { |
11058 | pszClassName = pszClassNamespace = NULL; |
11059 | } |
11060 | if ((pszClassName != NULL) && (pszClassNamespace != NULL)) |
11061 | { |
11062 | LPUTF8 pszFullyQualifiedName = NULL; |
11063 | MAKE_FULLY_QUALIFIED_NAME(pszFullyQualifiedName, pszClassNamespace, pszClassName); |
11064 | |
11065 | // This is just to give us a scope to break out of. |
11066 | do |
11067 | { |
11068 | |
11069 | #define MNGSTDITF_BEGIN_INTERFACE(FriendlyName, strMngItfName, strUCOMMngItfName, strCustomMarshalerName, strCustomMarshalerCookie, strManagedViewName, NativeItfIID, bCanCastOnNativeItfQI) \ |
11070 | if (strcmp(strMngItfName, pszFullyQualifiedName) == 0) \ |
11071 | { \ |
11072 | bmtProp->fIsMngStandardItf = true; \ |
11073 | break; \ |
11074 | } |
11075 | |
11076 | #define MNGSTDITF_DEFINE_METH_IMPL(FriendlyName, ECallMethName, MethName, MethSig, FcallDecl) |
11077 | |
11078 | #define MNGSTDITF_END_INTERFACE(FriendlyName) |
11079 | |
11080 | #include "mngstditflist.h" |
11081 | |
11082 | #undef MNGSTDITF_BEGIN_INTERFACE |
11083 | #undef MNGSTDITF_DEFINE_METH_IMPL |
11084 | #undef MNGSTDITF_END_INTERFACE |
11085 | |
11086 | } while (FALSE); |
11087 | |
11088 | if (strcmp(pszFullyQualifiedName, g_CollectionsGenericCollectionItfName) == 0 || |
11089 | strcmp(pszFullyQualifiedName, g_CollectionsGenericReadOnlyCollectionItfName) == 0 || |
11090 | strcmp(pszFullyQualifiedName, g_CollectionsCollectionItfName) == 0) |
11091 | { |
11092 | // ICollection`1, ICollection and IReadOnlyCollection`1 are special cases the adapter is unaware of |
11093 | bmtProp->fIsRedirectedInterface = true; |
11094 | } |
11095 | else |
11096 | { |
11097 | if (strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_Generic_IEnumerable)) == 0 || |
11098 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_Generic_IList)) == 0 || |
11099 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_Generic_IDictionary)) == 0 || |
11100 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_Generic_IReadOnlyList)) == 0 || |
11101 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_Generic_IReadOnlyDictionary)) == 0 || |
11102 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_IEnumerable)) == 0 || |
11103 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_Collections_IList)) == 0 || |
11104 | strcmp(pszFullyQualifiedName, WinMDAdapter::GetRedirectedTypeFullCLRName(WinMDAdapter::RedirectedTypeIndex_System_IDisposable)) == 0) |
11105 | { |
11106 | bmtProp->fIsRedirectedInterface = true; |
11107 | } |
11108 | } |
11109 | |
11110 | // We want to allocate the per-type RCW data optional MethodTable field for |
11111 | // 1. Redirected interfaces |
11112 | // 2. Mscorlib-declared [WindowsRuntimeImport] interfaces |
11113 | bmtProp->fNeedsRCWPerTypeData = (bmtProp->fIsRedirectedInterface || GetHalfBakedClass()->IsProjectedFromWinRT()); |
11114 | |
11115 | if (!bmtProp->fNeedsRCWPerTypeData) |
11116 | { |
11117 | // 3. Non-generic IEnumerable |
11118 | if (strcmp(pszFullyQualifiedName, g_CollectionsEnumerableItfName) == 0) |
11119 | { |
11120 | bmtProp->fNeedsRCWPerTypeData = true; |
11121 | } |
11122 | } |
11123 | } |
11124 | } |
11125 | else if (IsDelegate() && bmtGenerics->HasInstantiation()) |
11126 | { |
11127 | // 4. Redirected delegates |
11128 | if (GetHalfBakedClass()->GetWinRTRedirectedTypeIndex() |
11129 | != WinMDAdapter::RedirectedTypeIndex_Invalid) |
11130 | { |
11131 | bmtProp->fNeedsRCWPerTypeData = true; |
11132 | } |
11133 | } |
11134 | } |
11135 | else if (bmtGenerics->HasInstantiation() && pModule->GetAssembly()->IsWinMD()) |
11136 | { |
11137 | // 5. WinRT types with variance |
11138 | if (bmtGenerics->pVarianceInfo != NULL) |
11139 | { |
11140 | bmtProp->fNeedsRCWPerTypeData = true; |
11141 | } |
11142 | else if (IsInterface()) |
11143 | { |
11144 | // 6. Windows.Foundation.Collections.IIterator`1 |
11145 | LPCUTF8 pszClassName; |
11146 | LPCUTF8 pszClassNamespace; |
11147 | if (SUCCEEDED(pMDImport->GetNameOfTypeDef(GetCl(), &pszClassName, &pszClassNamespace))) |
11148 | { |
11149 | LPUTF8 pszFullyQualifiedName = NULL; |
11150 | MAKE_FULLY_QUALIFIED_NAME(pszFullyQualifiedName, pszClassNamespace, pszClassName); |
11151 | |
11152 | if (strcmp(pszFullyQualifiedName, g_WinRTIIteratorClassName) == 0) |
11153 | { |
11154 | bmtProp->fNeedsRCWPerTypeData = true; |
11155 | } |
11156 | } |
11157 | } |
11158 | } |
11159 | else if ((IsInterface() || IsDelegate()) && |
11160 | IsTdPublic(GetHalfBakedClass()->GetAttrClass()) && |
11161 | GetHalfBakedClass()->GetWinRTRedirectedTypeIndex() != WinMDAdapter::RedirectedTypeIndex_Invalid) |
11162 | { |
11163 | // 7. System.Collections.Specialized.INotifyCollectionChanged |
11164 | // 8. System.Collections.Specialized.NotifyCollectionChangedEventHandler |
11165 | // 9. System.ComponentModel.INotifyPropertyChanged |
11166 | // 10. System.ComponentModel.PropertyChangedEventHandler |
11167 | // 11. System.Windows.Input.ICommand |
11168 | LPCUTF8 pszClassName; |
11169 | LPCUTF8 pszClassNamespace; |
11170 | if (SUCCEEDED(pMDImport->GetNameOfTypeDef(GetCl(), &pszClassName, &pszClassNamespace))) |
11171 | { |
11172 | LPUTF8 pszFullyQualifiedName = NULL; |
11173 | MAKE_FULLY_QUALIFIED_NAME(pszFullyQualifiedName, pszClassNamespace, pszClassName); |
11174 | |
11175 | if (strcmp(pszFullyQualifiedName, g_INotifyCollectionChangedName) == 0 || |
11176 | strcmp(pszFullyQualifiedName, g_NotifyCollectionChangedEventHandlerName) == 0 || |
11177 | strcmp(pszFullyQualifiedName, g_INotifyPropertyChangedName) == 0 || |
11178 | strcmp(pszFullyQualifiedName, g_PropertyChangedEventHandlerName) == 0 || |
11179 | strcmp(pszFullyQualifiedName, g_ICommandName) == 0) |
11180 | { |
11181 | bmtProp->fNeedsRCWPerTypeData = true; |
11182 | } |
11183 | } |
11184 | } |
11185 | |
11186 | // Check to see if the type is a COM event interface (classic COM interop only). |
11187 | if (IsInterface() && !GetHalfBakedClass()->IsProjectedFromWinRT()) |
11188 | { |
11189 | HRESULT hr = pMDImport->GetCustomAttributeByName(GetCl(), INTEROP_COMEVENTINTERFACE_TYPE, NULL, NULL); |
11190 | if (hr == S_OK) |
11191 | { |
11192 | bmtProp->fComEventItfType = true; |
11193 | } |
11194 | } |
11195 | #endif // FEATURE_COMINTEROP |
11196 | } |
11197 | |
11198 | #ifdef FEATURE_READYTORUN |
11199 | //******************************************************************************* |
11200 | VOID MethodTableBuilder::CheckLayoutDependsOnOtherModules(MethodTable * pDependencyMT) |
11201 | { |
11202 | STANDARD_VM_CONTRACT; |
11203 | |
11204 | // These cases are expected to be handled by the caller |
11205 | _ASSERTE(!(pDependencyMT == g_pObjectClass || pDependencyMT->IsTruePrimitive() || ((g_pEnumClass != NULL) && pDependencyMT->IsEnum()))); |
11206 | |
11207 | // |
11208 | // WARNING: Changes in this algorithm are potential ReadyToRun breaking changes !!! |
11209 | // |
11210 | // Track whether the field layout of this type depends on information outside its containing module |
11211 | // |
11212 | // It is a stronger condition than MethodTable::IsInheritanceChainLayoutFixedInCurrentVersionBubble(). |
11213 | // It has to remain fixed across versioning changes in the module dependencies. In particular, it does |
11214 | // not take the NonVersionable attribute into account. Otherwise, adding the NonVersionable attribute to an existing |
11215 | // type would be a ReadyToRun incompatible change. |
11216 | // |
11217 | if (pDependencyMT->GetModule() == GetModule()) |
11218 | { |
11219 | if (!pDependencyMT->GetClass()->HasLayoutDependsOnOtherModules()) |
11220 | return; |
11221 | } |
11222 | |
11223 | GetHalfBakedClass()->SetHasLayoutDependsOnOtherModules(); |
11224 | } |
11225 | |
11226 | BOOL MethodTableBuilder::NeedsAlignedBaseOffset() |
11227 | { |
11228 | STANDARD_VM_CONTRACT; |
11229 | |
11230 | // |
11231 | // WARNING: Changes in this algorithm are potential ReadyToRun breaking changes !!! |
11232 | // |
11233 | // This method returns whether the type needs aligned base offset in order to have layout resilient to |
11234 | // base class layout changes. |
11235 | // |
11236 | if (IsValueClass()) |
11237 | return FALSE; |
11238 | |
11239 | // Always use the ReadyToRun field layout algorithm if the source IL image was ReadyToRun, regardless of |
11240 | // whether ReadyToRun is actually enabled for the module. This is required to allow mixing and matching |
11241 | // ReadyToRun images with NGen. |
11242 | if (!GetModule()->GetFile()->IsILImageReadyToRun()) |
11243 | { |
11244 | // Always use ReadyToRun field layout algorithm to produce ReadyToRun images |
11245 | if (!IsReadyToRunCompilation()) |
11246 | return FALSE; |
11247 | } |
11248 | |
11249 | MethodTable * pParentMT = GetParentMethodTable(); |
11250 | |
11251 | // Trivial parents |
11252 | if (pParentMT == NULL || pParentMT == g_pObjectClass) |
11253 | return FALSE; |
11254 | |
11255 | if (pParentMT->GetModule() == GetModule()) |
11256 | { |
11257 | if (!pParentMT->GetClass()->HasLayoutDependsOnOtherModules()) |
11258 | return FALSE; |
11259 | } |
11260 | |
11261 | return TRUE; |
11262 | } |
11263 | #endif // FEATURE_READYTORUN |
11264 | |
11265 | //******************************************************************************* |
11266 | // |
11267 | // Used by BuildMethodTable |
11268 | // |
11269 | // Set the HasFinalizer and HasCriticalFinalizer flags |
11270 | // |
11271 | VOID MethodTableBuilder::SetFinalizationSemantics() |
11272 | { |
11273 | STANDARD_VM_CONTRACT; |
11274 | |
11275 | if (g_pObjectFinalizerMD && !IsInterface() && !IsValueClass()) |
11276 | { |
11277 | WORD slot = g_pObjectFinalizerMD->GetSlot(); |
11278 | |
11279 | // Objects not derived from Object will get marked as having a finalizer, if they have |
11280 | // sufficient virtual methods. This will only be an issue if they can be allocated |
11281 | // in the GC heap (which will cause all sorts of other problems). |
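 | // Illustrative example: a C# class that declares a finalizer ~T() overrides Object.Finalize, |
 | // so the MethodDesc sitting in the Finalize slot is no longer g_pObjectFinalizerMD and the |
 | // check below marks the type as having a finalizer. |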
11282 | if (slot < bmtVT->cVirtualSlots && (*bmtVT)[slot].Impl().GetMethodDesc() != g_pObjectFinalizerMD) |
11283 | { |
11284 | GetHalfBakedMethodTable()->SetHasFinalizer(); |
11285 | |
11286 | // The need for a critical finalizer can be inherited from a parent. |
11287 | // Since we set this automatically for CriticalFinalizerObject |
11288 | // elsewhere, the code below is the means by which any derived class |
11289 | // picks up the attribute. |
11290 | if (HasParent() && GetParentMethodTable()->HasCriticalFinalizer()) |
11291 | { |
11292 | GetHalfBakedMethodTable()->SetHasCriticalFinalizer(); |
11293 | } |
11294 | } |
11295 | } |
11296 | } |
11297 | |
11298 | //******************************************************************************* |
11299 | // |
11300 | // Used by BuildMethodTable |
11301 | // |
11302 | // Perform relevant GC calculations for value classes |
11303 | // |
11304 | VOID MethodTableBuilder::HandleGCForValueClasses(MethodTable ** pByValueClassCache) |
11305 | { |
11306 | STANDARD_VM_CONTRACT; |
11307 | |
11308 | DWORD i; |
11309 | |
11310 | EEClass *pClass = GetHalfBakedClass(); |
11311 | MethodTable *pMT = GetHalfBakedMethodTable(); |
11312 | |
11313 | FieldDesc *pFieldDescList = pClass->GetFieldDescList(); |
11314 | |
11315 | // Note that for value classes, the following calculation is only appropriate |
11316 | // when the instance is in its "boxed" state. |
11317 | #ifdef FEATURE_COLLECTIBLE_TYPES |
11318 | if (bmtFP->NumGCPointerSeries == 0 && pMT->Collectible()) |
11319 | { |
// For collectible types, insert an empty GC series
11321 | CGCDescSeries *pSeries; |
11322 | |
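// Series sizes are stored biased by the base size (see gcdesc.h), so a zero-length series for a
// collectible type is encoded as 0 - BaseSize below.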
11323 | CGCDesc::Init( (PVOID) pMT, 1); |
11324 | pSeries = ((CGCDesc*)pMT)->GetLowestSeries(); |
11325 | pSeries->SetSeriesSize( (size_t) (0) - (size_t) pMT->GetBaseSize()); |
11326 | pSeries->SetSeriesOffset(OBJECT_SIZE); |
11327 | } |
11328 | else |
11329 | #endif // FEATURE_COLLECTIBLE_TYPES |
11330 | if (bmtFP->NumGCPointerSeries != 0) |
11331 | { |
11332 | CGCDescSeries *pSeries; |
11333 | CGCDescSeries *pHighest; |
11334 | |
11335 | pMT->SetContainsPointers(); |
11336 | |
11337 | // Copy the pointer series map from the parent |
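// The GC descriptor is stored at negative offsets from the MethodTable pointer, which is why both the
// source and destination of the copy are computed by subtracting the descriptor size from the MethodTable.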
11338 | CGCDesc::Init( (PVOID) pMT, bmtFP->NumGCPointerSeries ); |
11339 | if (bmtParent->NumParentPointerSeries != 0) |
11340 | { |
11341 | size_t ParentGCSize = CGCDesc::ComputeSize(bmtParent->NumParentPointerSeries); |
11342 | memcpy( (PVOID) (((BYTE*) pMT) - ParentGCSize), |
11343 | (PVOID) (((BYTE*) GetParentMethodTable()) - ParentGCSize), |
11344 | ParentGCSize - sizeof(size_t) // sizeof(size_t) is the NumSeries count |
11345 | ); |
11346 | |
11347 | } |
11348 | |
// Build the pointer series map for the pointers in this instance
11350 | pSeries = ((CGCDesc*)pMT)->GetLowestSeries(); |
11351 | if (bmtFP->NumInstanceGCPointerFields) |
11352 | { |
11353 | // See gcdesc.h for an explanation of why we adjust by subtracting BaseSize |
11354 | pSeries->SetSeriesSize( (size_t) (bmtFP->NumInstanceGCPointerFields * TARGET_POINTER_SIZE) - (size_t) pMT->GetBaseSize()); |
11355 | pSeries->SetSeriesOffset(bmtFP->GCPointerFieldStart + OBJECT_SIZE); |
11356 | pSeries++; |
11357 | } |
11358 | |
11359 | // Insert GC info for fields which are by-value classes |
11360 | for (i = 0; i < bmtEnumFields->dwNumInstanceFields; i++) |
11361 | { |
11362 | if (pFieldDescList[i].IsByValue()) |
11363 | { |
11364 | MethodTable *pByValueMT = pByValueClassCache[i]; |
11365 | |
11366 | if (pByValueMT->ContainsPointers()) |
11367 | { |
// Offset of the by-value class within the class we are building; does NOT include Object
11369 | DWORD dwCurrentOffset = pFieldDescList[i].GetOffset_NoLogging(); |
11370 | |
11371 | // The by value class may have more than one pointer series |
11372 | CGCDescSeries * pByValueSeries = CGCDesc::GetCGCDescFromMT(pByValueMT)->GetLowestSeries(); |
11373 | SIZE_T dwNumByValueSeries = CGCDesc::GetCGCDescFromMT(pByValueMT)->GetNumSeries(); |
11374 | |
11375 | for (SIZE_T j = 0; j < dwNumByValueSeries; j++) |
11376 | { |
11377 | size_t cbSeriesSize; |
11378 | size_t cbSeriesOffset; |
11379 | |
11380 | _ASSERTE(pSeries <= CGCDesc::GetCGCDescFromMT(pMT)->GetHighestSeries()); |
11381 | |
11382 | cbSeriesSize = pByValueSeries->GetSeriesSize(); |
11383 | |
11384 | // Add back the base size of the by value class, since it's being transplanted to this class |
11385 | cbSeriesSize += pByValueMT->GetBaseSize(); |
11386 | |
11387 | // Subtract the base size of the class we're building |
11388 | cbSeriesSize -= pMT->GetBaseSize(); |
11389 | |
11390 | // Set current series we're building |
11391 | pSeries->SetSeriesSize(cbSeriesSize); |
11392 | |
11393 | // Get offset into the value class of the first pointer field (includes a +Object) |
11394 | cbSeriesOffset = pByValueSeries->GetSeriesOffset(); |
11395 | |
11396 | // Add it to the offset of the by value class in our class |
11397 | cbSeriesOffset += dwCurrentOffset; |
11398 | |
11399 | pSeries->SetSeriesOffset(cbSeriesOffset); // Offset of field |
11400 | pSeries++; |
11401 | pByValueSeries++; |
11402 | } |
11403 | } |
11404 | } |
11405 | } |
11406 | |
// Adjust the inherited series - since the base size has increased by "# new field instance bytes", we need to
// subtract that from all the inherited series (since a series always has BaseSize subtracted from it - see gcdesc.h)
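// For example, if the parent's base size was 24 bytes and this type's base size is 40 bytes, each
// remaining (inherited) series' stored size is reduced by 16 bytes to keep the bias correct.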
11409 | pHighest = CGCDesc::GetCGCDescFromMT(pMT)->GetHighestSeries(); |
11410 | while (pSeries <= pHighest) |
11411 | { |
11412 | CONSISTENCY_CHECK(CheckPointer(GetParentMethodTable())); |
11413 | pSeries->SetSeriesSize( pSeries->GetSeriesSize() - ((size_t) pMT->GetBaseSize() - (size_t) GetParentMethodTable()->GetBaseSize()) ); |
11414 | pSeries++; |
11415 | } |
11416 | |
11417 | _ASSERTE(pSeries-1 <= CGCDesc::GetCGCDescFromMT(pMT)->GetHighestSeries()); |
11418 | } |
11419 | |
11420 | } |
11421 | |
11422 | //******************************************************************************* |
11423 | // |
11424 | // Used by BuildMethodTable |
11425 | // |
11426 | // Check for the presence of type equivalence. If present, make sure |
11427 | // it is permitted to be on this type. |
11428 | // |
11429 | |
11430 | void MethodTableBuilder::CheckForTypeEquivalence( |
11431 | WORD cBuildingInterfaceList, |
11432 | BuildingInterfaceInfo_t *pBuildingInterfaceList) |
11433 | { |
11434 | STANDARD_VM_CONTRACT; |
11435 | |
11436 | #ifdef FEATURE_TYPEEQUIVALENCE |
11437 | bmtProp->fIsTypeEquivalent = !!IsTypeDefEquivalent(GetCl(), GetModule()); |
11438 | |
11439 | if (bmtProp->fIsTypeEquivalent) |
11440 | { |
11441 | BOOL comImportOrEventInterface = IsComImport(); |
11442 | #ifdef FEATURE_COMINTEROP |
11443 | comImportOrEventInterface = comImportOrEventInterface || bmtProp->fComEventItfType; |
11444 | #endif // FEATURE_COMINTEROP |
11445 | |
11446 | BOOL fTypeEquivalentNotPermittedDueToType = !((comImportOrEventInterface && IsInterface()) || IsValueClass() || IsDelegate()); |
11447 | BOOL fTypeEquivalentNotPermittedDueToGenerics = bmtGenerics->HasInstantiation(); |
11448 | |
11449 | if (fTypeEquivalentNotPermittedDueToType || fTypeEquivalentNotPermittedDueToGenerics) |
11450 | { |
11451 | BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTBADTYPE); |
11452 | } |
11453 | |
11454 | GetHalfBakedClass()->SetIsEquivalentType(); |
11455 | } |
11456 | |
11457 | bmtProp->fHasTypeEquivalence = bmtProp->fIsTypeEquivalent; |
11458 | |
11459 | if (!bmtProp->fHasTypeEquivalence) |
11460 | { |
// The fHasTypeEquivalence flag is inherited from interfaces so that we can quickly detect
// types that implement type-equivalent interfaces
11463 | for (WORD i = 0; i < cBuildingInterfaceList; i++) |
11464 | { |
11465 | MethodTable *pItfMT = pBuildingInterfaceList[i].m_pMethodTable; |
11466 | if (pItfMT->HasTypeEquivalence()) |
11467 | { |
11468 | bmtProp->fHasTypeEquivalence = true; |
11469 | break; |
11470 | } |
11471 | } |
11472 | } |
11473 | |
11474 | if (!bmtProp->fHasTypeEquivalence) |
11475 | { |
// The fHasTypeEquivalence flag is "inherited" from generic arguments so that we can quickly detect
// types like List<Str> where Str is a structure with the TypeIdentifierAttribute.
11478 | if (bmtGenerics->HasInstantiation() && !bmtGenerics->IsTypicalTypeDefinition()) |
11479 | { |
11480 | Instantiation inst = bmtGenerics->GetInstantiation(); |
11481 | for (DWORD i = 0; i < inst.GetNumArgs(); i++) |
11482 | { |
11483 | if (inst[i].HasTypeEquivalence()) |
11484 | { |
11485 | bmtProp->fHasTypeEquivalence = true; |
11486 | break; |
11487 | } |
11488 | } |
11489 | } |
11490 | } |
11491 | #endif //FEATURE_TYPEEQUIVALENCE |
11492 | } |
11493 | |
11494 | //******************************************************************************* |
11495 | // |
11496 | // Used by BuildMethodTable |
11497 | // |
11498 | // Before we make the final leap, make sure we've allocated all memory needed to |
11499 | // fill out the RID maps. |
11500 | // |
11501 | VOID MethodTableBuilder::EnsureRIDMapsCanBeFilled() |
11502 | { |
11503 | STANDARD_VM_CONTRACT; |
11504 | |
11505 | |
11506 | DWORD i; |
11507 | |
11508 | |
// Rather than call Ensure***CanBeStored() hundreds of times, we
// will call it once on the largest token we find. This relies
// on the invariant that RID maps do not use any kind of sparse
// allocation.
11513 | |
11514 | { |
11515 | mdMethodDef largest = mdMethodDefNil; |
11516 | |
11517 | DeclaredMethodIterator it(*this); |
11518 | while (it.Next()) |
11519 | { |
11520 | if (it.Token() > largest) |
11521 | { |
11522 | largest = it.Token(); |
11523 | } |
11524 | } |
11525 | if ( largest != mdMethodDefNil ) |
11526 | { |
11527 | GetModule()->EnsureMethodDefCanBeStored(largest); |
11528 | } |
11529 | } |
11530 | |
11531 | { |
11532 | mdFieldDef largest = mdFieldDefNil; |
11533 | |
11534 | for (i = 0; i < bmtMetaData->cFields; i++) |
11535 | { |
11536 | if (bmtMetaData->pFields[i] > largest) |
11537 | { |
11538 | largest = bmtMetaData->pFields[i]; |
11539 | } |
11540 | } |
11541 | if ( largest != mdFieldDefNil ) |
11542 | { |
11543 | GetModule()->EnsureFieldDefCanBeStored(largest); |
11544 | } |
11545 | } |
11546 | } |
11547 | |
11548 | #ifdef FEATURE_COMINTEROP |
11549 | //******************************************************************************* |
11550 | void MethodTableBuilder::GetCoClassAttribInfo() |
11551 | { |
11552 | STANDARD_VM_CONTRACT; |
11553 | |
11554 | if (!GetHalfBakedClass()->IsProjectedFromWinRT()) // ignore classic COM interop CA on WinRT interfaces |
11555 | { |
11556 | // Retrieve the CoClassAttribute CA. |
11557 | HRESULT hr = GetMDImport()->GetCustomAttributeByName(GetCl(), INTEROP_COCLASS_TYPE, NULL, NULL); |
11558 | if (hr == S_OK) |
11559 | { |
11560 | // COM class interfaces may lazily populate the m_pCoClassForIntf field of EEClass. This field is |
11561 | // optional so we must ensure the optional field descriptor has been allocated. |
11562 | EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap()); |
11563 | SetIsComClassInterface(); |
11564 | } |
11565 | } |
11566 | } |
11567 | #endif // FEATURE_COMINTEROP |
11568 | |
11569 | //******************************************************************************* |
11570 | void MethodTableBuilder::bmtMethodImplInfo::AddMethodImpl( |
11571 | bmtMDMethod * pImplMethod, bmtMethodHandle declMethod, mdToken declToken, |
11572 | StackingAllocator * pStackingAllocator) |
11573 | { |
11574 | STANDARD_VM_CONTRACT; |
11575 | |
11576 | CONSISTENCY_CHECK(CheckPointer(pImplMethod)); |
11577 | CONSISTENCY_CHECK(!declMethod.IsNull()); |
11578 | if (pIndex >= cMaxIndex) |
11579 | { |
11580 | DWORD newEntriesCount = 0; |
11581 | |
11582 | if (!ClrSafeInt<DWORD>::multiply(cMaxIndex, 2, newEntriesCount)) |
11583 | ThrowHR(COR_E_OVERFLOW); |
11584 | |
11585 | if (newEntriesCount == 0) |
11586 | newEntriesCount = 10; |
11587 | |
// If we have to grow this array, we will not free the old array before we clean up the BuildMethodTable operation
// because this is a stacking allocator. However, the old array will be freed when the whole stacking allocator is freed.
11590 | Entry *rgEntriesNew = new (pStackingAllocator) Entry[newEntriesCount]; |
11591 | memcpy(rgEntriesNew, rgEntries, sizeof(Entry) * cMaxIndex); |
11592 | |
11593 | // Start using newly allocated array. |
11594 | rgEntries = rgEntriesNew; |
11595 | cMaxIndex = newEntriesCount; |
11596 | } |
11597 | rgEntries[pIndex++] = Entry(pImplMethod, declMethod, declToken); |
11598 | } |
11599 | |
11600 | //******************************************************************************* |
// Returns TRUE if tok acts as a body for any methodImpl entry; FALSE otherwise.
11602 | BOOL MethodTableBuilder::bmtMethodImplInfo::IsBody(mdToken tok) |
11603 | { |
11604 | LIMITED_METHOD_CONTRACT; |
11605 | CONSISTENCY_CHECK(TypeFromToken(tok) == mdtMethodDef); |
11606 | for (DWORD i = 0; i < pIndex; i++) |
11607 | { |
11608 | if (GetBodyMethodDesc(i)->GetMemberDef() == tok) |
11609 | { |
11610 | return TRUE; |
11611 | } |
11612 | } |
11613 | return FALSE; |
11614 | } |
11615 | |
11616 | //******************************************************************************* |
11617 | BYTE * |
11618 | MethodTableBuilder::AllocateFromHighFrequencyHeap(S_SIZE_T cbMem) |
11619 | { |
11620 | CONTRACTL |
11621 | { |
11622 | THROWS; |
11623 | GC_NOTRIGGER; |
11624 | MODE_ANY; |
11625 | } |
11626 | CONTRACTL_END; |
11627 | return (BYTE *)GetMemTracker()->Track( |
11628 | GetLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(cbMem)); |
11629 | } |
11630 | |
11631 | //******************************************************************************* |
11632 | BYTE * |
11633 | MethodTableBuilder::AllocateFromLowFrequencyHeap(S_SIZE_T cbMem) |
11634 | { |
11635 | CONTRACTL |
11636 | { |
11637 | THROWS; |
11638 | GC_NOTRIGGER; |
11639 | MODE_ANY; |
11640 | } |
11641 | CONTRACTL_END; |
11642 | return (BYTE *)GetMemTracker()->Track( |
11643 | GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem(cbMem)); |
11644 | } |
11645 | |
11646 | //------------------------------------------------------------------------------- |
// Make a best-effort attempt to obtain an image name for use in an error message.
//
// This routine must expect to be called before this object is fully loaded.
// It can return an empty string if the name isn't available or the object isn't initialized
// enough to get a name, but it must not crash.
11652 | //------------------------------------------------------------------------------- |
11653 | LPCWSTR MethodTableBuilder::GetPathForErrorMessages() |
11654 | { |
11655 | STANDARD_VM_CONTRACT; |
11656 | |
11657 | return GetModule()->GetPathForErrorMessages(); |
11658 | } |
11659 | |
11660 | BOOL MethodTableBuilder::ChangesImplementationOfVirtualSlot(SLOT_INDEX idx) |
11661 | { |
11662 | STANDARD_VM_CONTRACT; |
11663 | |
11664 | BOOL fChangesImplementation = TRUE; |
11665 | |
11666 | _ASSERTE(idx < bmtVT->cVirtualSlots); |
11667 | |
11668 | if (HasParent() && idx < GetParentMethodTable()->GetNumVirtuals()) |
11669 | { |
11670 | _ASSERTE(idx < bmtParent->pSlotTable->GetSlotCount()); |
11671 | bmtMethodHandle VTImpl = (*bmtVT)[idx].Impl(); |
11672 | bmtMethodHandle ParentImpl = (*bmtParent)[idx].Impl(); |
11673 | |
11674 | fChangesImplementation = VTImpl != ParentImpl; |
11675 | |
11676 | // See code:MethodTableBuilder::SetupMethodTable2 and its logic |
11677 | // for handling MethodImpl's on parent classes which affect non interface |
11678 | // methods. |
11679 | if (!fChangesImplementation && (ParentImpl.GetSlotIndex() != idx)) |
11680 | fChangesImplementation = TRUE; |
11681 | } |
11682 | |
11683 | return fChangesImplementation; |
11684 | } |
11685 | |
11686 | // Must be called prior to setting the value of any optional field on EEClass (on a debug build an assert will |
11687 | // fire if this invariant is violated). |
11688 | void MethodTableBuilder::EnsureOptionalFieldsAreAllocated(EEClass *pClass, AllocMemTracker *pamTracker, LoaderHeap *pHeap) |
11689 | { |
11690 | STANDARD_VM_CONTRACT; |
11691 | |
11692 | if (pClass->HasOptionalFields()) |
11693 | return; |
11694 | |
11695 | EEClassOptionalFields *pOptFields = (EEClassOptionalFields*) |
11696 | pamTracker->Track(pHeap->AllocMem(S_SIZE_T(sizeof(EEClassOptionalFields)))); |
11697 | |
11698 | // Initialize default values for all optional fields. |
11699 | pOptFields->Init(); |
11700 | |
11701 | // Attach optional fields to the class. |
11702 | pClass->AttachOptionalFields(pOptFields); |
11703 | } |
11704 | |
11705 | //--------------------------------------------------------------------------------------- |
11706 | // |
11707 | // Gather information about a generic type |
11708 | // - number of parameters |
11709 | // - variance annotations |
11710 | // - dictionaries |
11711 | // - sharability |
11712 | // |
11713 | //static |
11714 | void |
11715 | MethodTableBuilder::GatherGenericsInfo( |
11716 | Module * pModule, |
11717 | mdTypeDef cl, |
11718 | Instantiation inst, |
11719 | bmtGenericsInfo * bmtGenericsInfo) |
11720 | { |
11721 | CONTRACTL |
11722 | { |
11723 | STANDARD_VM_CHECK; |
11724 | PRECONDITION(GetThread() != NULL); |
11725 | PRECONDITION(CheckPointer(pModule)); |
11726 | PRECONDITION(CheckPointer(bmtGenericsInfo)); |
11727 | } |
11728 | CONTRACTL_END; |
11729 | |
11730 | IMDInternalImport * pInternalImport = pModule->GetMDImport(); |
11731 | |
11732 | // Enumerate the formal type parameters |
11733 | HENUMInternal hEnumGenericPars; |
11734 | HRESULT hr = pInternalImport->EnumInit(mdtGenericParam, cl, &hEnumGenericPars); |
11735 | if (FAILED(hr)) |
11736 | pModule->GetAssembly()->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
11737 | |
11738 | DWORD numGenericArgs = pInternalImport->EnumGetCount(&hEnumGenericPars); |
11739 | |
// Work out what kind of EEClass we're creating w.r.t. generics. If there
// are no generics involved, this will be VMFLAG_NONGENERIC.
11742 | BOOL fHasVariance = FALSE; |
11743 | if (numGenericArgs > 0) |
11744 | { |
11745 | // Generic type verification |
11746 | { |
11747 | DWORD dwAttr; |
11748 | mdToken tkParent; |
11749 | if (FAILED(pInternalImport->GetTypeDefProps(cl, &dwAttr, &tkParent))) |
11750 | { |
11751 | pModule->GetAssembly()->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
11752 | } |
11753 | // A generic with explicit layout is not allowed. |
11754 | if (IsTdExplicitLayout(dwAttr)) |
11755 | { |
11756 | pModule->GetAssembly()->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_EXPLICIT_GENERIC); |
11757 | } |
11758 | } |
11759 | |
11760 | bmtGenericsInfo->numDicts = 1; |
11761 | |
11762 | mdGenericParam tkTyPar; |
11763 | bmtGenericsInfo->pVarianceInfo = new (&GetThread()->m_MarshalAlloc) BYTE[numGenericArgs]; |
11764 | |
// If it has generic arguments but none have been specified, then load the instantiation at the formal type parameters
11766 | if (inst.IsEmpty()) |
11767 | { |
11768 | bmtGenericsInfo->fTypicalInstantiation = TRUE; |
11769 | S_UINT32 scbAllocSize = S_UINT32(numGenericArgs) * S_UINT32(sizeof(TypeHandle)); |
11770 | TypeHandle * genericArgs = (TypeHandle *) GetThread()->m_MarshalAlloc.Alloc(scbAllocSize); |
11771 | |
11772 | inst = Instantiation(genericArgs, numGenericArgs); |
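// The genericArgs array is filled in by the loop below: each formal type parameter gets a
// TypeVarTypeDesc-based TypeHandle stored into pDestInst.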
11773 | |
11774 | bmtGenericsInfo->fSharedByGenericInstantiations = FALSE; |
11775 | } |
11776 | else |
11777 | { |
11778 | bmtGenericsInfo->fTypicalInstantiation = FALSE; |
11779 | |
11780 | bmtGenericsInfo->fSharedByGenericInstantiations = TypeHandle::IsCanonicalSubtypeInstantiation(inst); |
11781 | _ASSERTE(bmtGenericsInfo->fSharedByGenericInstantiations == ClassLoader::IsSharableInstantiation(inst)); |
11782 | |
11783 | #ifdef _DEBUG |
11784 | // Set typical instantiation MethodTable |
11785 | { |
11786 | MethodTable * pTypicalInstantiationMT = pModule->LookupTypeDef(cl).AsMethodTable(); |
11787 | // Typical instantiation was already loaded by code:ClassLoader::LoadApproxTypeThrowing |
11788 | _ASSERTE(pTypicalInstantiationMT != NULL); |
11789 | bmtGenericsInfo->dbg_pTypicalInstantiationMT = pTypicalInstantiationMT; |
11790 | } |
11791 | #endif //_DEBUG |
11792 | } |
11793 | |
11794 | TypeHandle * pDestInst = (TypeHandle *)inst.GetRawArgs(); |
11795 | for (unsigned int i = 0; i < numGenericArgs; i++) |
11796 | { |
11797 | pInternalImport->EnumNext(&hEnumGenericPars, &tkTyPar); |
11798 | DWORD flags; |
11799 | if (FAILED(pInternalImport->GetGenericParamProps(tkTyPar, NULL, &flags, NULL, NULL, NULL))) |
11800 | { |
11801 | pModule->GetAssembly()->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
11802 | } |
11803 | |
11804 | if (bmtGenericsInfo->fTypicalInstantiation) |
11805 | { |
11806 | // code:Module.m_GenericParamToDescMap maps generic parameter RIDs to TypeVarTypeDesc |
11807 | // instances so that we do not leak by allocating them all over again, if the type |
11808 | // repeatedly fails to load. |
11809 | TypeVarTypeDesc *pTypeVarTypeDesc = pModule->LookupGenericParam(tkTyPar); |
11810 | if (pTypeVarTypeDesc == NULL) |
11811 | { |
// Do NOT use the alloc tracker for this memory as we need it to stay allocated even if the load fails.
11813 | void *mem = (void *)pModule->GetLoaderAllocator()->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(TypeVarTypeDesc))); |
11814 | pTypeVarTypeDesc = new (mem) TypeVarTypeDesc(pModule, cl, i, tkTyPar); |
11815 | |
// No race here - the row in the GenericParam table is owned exclusively by this type and we
// are holding a lock preventing other threads from concurrently loading it.
11818 | pModule->StoreGenericParamThrowing(tkTyPar, pTypeVarTypeDesc); |
11819 | } |
11820 | pDestInst[i] = TypeHandle(pTypeVarTypeDesc); |
11821 | } |
11822 | |
11823 | DWORD varianceAnnotation = flags & gpVarianceMask; |
11824 | bmtGenericsInfo->pVarianceInfo[i] = static_cast<BYTE>(varianceAnnotation); |
11825 | if (varianceAnnotation != gpNonVariant) |
11826 | { |
11827 | if (varianceAnnotation != gpContravariant && varianceAnnotation != gpCovariant) |
11828 | { |
11829 | pModule->GetAssembly()->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADVARIANCE); |
11830 | } |
11831 | else |
11832 | { |
11833 | fHasVariance = TRUE; |
11834 | } |
11835 | } |
11836 | } |
11837 | |
11838 | if (!fHasVariance) |
11839 | bmtGenericsInfo->pVarianceInfo = NULL; |
11840 | } |
11841 | else |
11842 | { |
11843 | bmtGenericsInfo->fTypicalInstantiation = FALSE; |
11844 | bmtGenericsInfo->fSharedByGenericInstantiations = FALSE; |
11845 | bmtGenericsInfo->numDicts = 0; |
11846 | } |
11847 | |
11848 | bmtGenericsInfo->fContainsGenericVariables = MethodTable::ComputeContainsGenericVariables(inst); |
11849 | |
11850 | SigTypeContext typeContext(inst, Instantiation()); |
11851 | bmtGenericsInfo->typeContext = typeContext; |
11852 | } // MethodTableBuilder::GatherGenericsInfo |
11853 | |
11854 | //--------------------------------------------------------------------------------------- |
11855 | // |
11856 | // This service is called for normal classes -- and for the pseudo class we invent to |
11857 | // hold the module's public members. |
11858 | // |
11859 | //static |
11860 | TypeHandle |
11861 | ClassLoader::CreateTypeHandleForTypeDefThrowing( |
11862 | Module * pModule, |
11863 | mdTypeDef cl, |
11864 | Instantiation inst, |
11865 | AllocMemTracker * pamTracker) |
11866 | { |
11867 | CONTRACT(TypeHandle) |
11868 | { |
11869 | STANDARD_VM_CHECK; |
11870 | PRECONDITION(GetThread() != NULL); |
11871 | PRECONDITION(CheckPointer(pModule)); |
11872 | POSTCONDITION(!RETVAL.IsNull()); |
11873 | POSTCONDITION(CheckPointer(RETVAL.GetMethodTable())); |
11874 | } |
11875 | CONTRACT_END; |
11876 | |
11877 | MethodTable * pMT = NULL; |
11878 | |
11879 | Thread * pThread = GetThread(); |
11880 | BEGIN_SO_INTOLERANT_CODE_FOR(pThread, DefaultEntryProbeAmount() * 2) |
11881 | |
11882 | MethodTable * pParentMethodTable = NULL; |
11883 | SigPointer parentInst; |
11884 | mdTypeDef tdEnclosing = mdTypeDefNil; |
11885 | DWORD cInterfaces; |
11886 | BuildingInterfaceInfo_t * pInterfaceBuildInfo = NULL; |
11887 | IMDInternalImport * pInternalImport = NULL; |
11888 | LayoutRawFieldInfo * pLayoutRawFieldInfos = NULL; |
11889 | MethodTableBuilder::bmtGenericsInfo genericsInfo; |
11890 | |
11891 | Assembly * pAssembly = pModule->GetAssembly(); |
11892 | pInternalImport = pModule->GetMDImport(); |
11893 | |
11894 | if (TypeFromToken(cl) != mdtTypeDef || !pInternalImport->IsValidToken(cl)) |
11895 | { |
11896 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
11897 | } |
11898 | |
11899 | // GetCheckpoint for the thread-based allocator |
11900 | // This checkpoint provides a scope for all transient allocations of data structures |
11901 | // used during class loading. |
11902 | // <NICE> Ideally a debug/checked build should pass around tokens indicating the Checkpoint |
11903 | // being used and check these dynamically </NICE> |
11904 | CheckPointHolder cph(pThread->m_MarshalAlloc.GetCheckpoint()); //hold checkpoint for autorelease |
11905 | |
11906 | // Gather up generics info |
11907 | MethodTableBuilder::GatherGenericsInfo(pModule, cl, inst, &genericsInfo); |
11908 | |
11909 | Module * pLoaderModule = pModule; |
11910 | if (!inst.IsEmpty()) |
11911 | { |
11912 | pLoaderModule = ClassLoader::ComputeLoaderModuleWorker( |
11913 | pModule, |
11914 | cl, |
11915 | inst, |
11916 | Instantiation()); |
11917 | pLoaderModule->GetLoaderAllocator()->EnsureInstantiation(pModule, inst); |
11918 | } |
11919 | |
11920 | LoaderAllocator * pAllocator = pLoaderModule->GetLoaderAllocator(); |
11921 | |
11922 | { |
11923 | // As this is loading a parent type, we are allowed to override the load type limit. |
11924 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); |
11925 | pParentMethodTable = LoadApproxParentThrowing(pModule, cl, &parentInst, &genericsInfo.typeContext); |
11926 | } |
11927 | |
11928 | if (pParentMethodTable != NULL) |
11929 | { |
// Since methods on System.Array assume the layout of arrays, we cannot allow
// subclassing of arrays; they are sealed from the user's point of view.
// Value types and enums should be sealed - disable inheriting from them (we cannot require the sealed
// flag because of AppCompat)
11934 | if (pParentMethodTable->IsSealed() || |
11935 | (pParentMethodTable == g_pArrayClass) || |
11936 | pParentMethodTable->IsValueType()) |
11937 | { |
11938 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_SEALEDPARENT); |
11939 | } |
11940 | |
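// Each generic type in the inheritance chain contributes a dictionary; the accumulated count
// must fit in a WORD, hence the FitsIn<WORD> check below.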
11941 | DWORD dwTotalDicts = genericsInfo.numDicts + pParentMethodTable->GetNumDicts(); |
11942 | if (!FitsIn<WORD>(dwTotalDicts)) |
11943 | { |
11944 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_TOOMANYGENERICARGS); |
11945 | } |
11946 | genericsInfo.numDicts = static_cast<WORD>(dwTotalDicts); |
11947 | } |
11948 | |
11949 | GetEnclosingClassThrowing(pInternalImport, pModule, cl, &tdEnclosing); |
11950 | |
11951 | BYTE nstructPackingSize = 0, nstructNLT = 0; |
11952 | BOOL fExplicitOffsets = FALSE; |
11953 | // NOTE: HasLayoutMetadata does not load classes |
11954 | BOOL fHasLayout = |
11955 | !genericsInfo.fContainsGenericVariables && |
11956 | HasLayoutMetadata( |
11957 | pModule->GetAssembly(), |
11958 | pInternalImport, |
11959 | cl, |
11960 | pParentMethodTable, |
11961 | &nstructPackingSize, |
11962 | &nstructNLT, |
11963 | &fExplicitOffsets); |
11964 | |
11965 | BOOL fIsEnum = ((g_pEnumClass != NULL) && (pParentMethodTable == g_pEnumClass)); |
11966 | |
// Enums may not have layout because they derive from g_pEnumClass, which has no layout;
// this is enforced by HasLayoutMetadata above
11969 | _ASSERTE(!(fIsEnum && fHasLayout)); |
11970 | |
// This is a delegate class if it derives from MulticastDelegate (we do not allow single-cast delegates)
11972 | BOOL fIsDelegate = pParentMethodTable && pParentMethodTable == g_pMulticastDelegateClass; |
11973 | |
// Create an EEClass entry for it, filling out a few fields, such as the parent class token
// (and the generic type, should we be creating an instantiation)
11976 | EEClass * pClass = MethodTableBuilder::CreateClass( |
11977 | pModule, |
11978 | cl, |
11979 | fHasLayout, |
11980 | fIsDelegate, |
11981 | fIsEnum, |
11982 | &genericsInfo, |
11983 | pAllocator, |
11984 | pamTracker); |
11985 | |
11986 | if ((pParentMethodTable != NULL) && (pParentMethodTable == g_pDelegateClass)) |
11987 | { |
// Note that we do not allow single-cast delegates
11989 | if (pModule->GetAssembly() != SystemDomain::SystemAssembly()) |
11990 | { |
11991 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, BFA_CANNOT_INHERIT_FROM_DELEGATE); |
11992 | } |
11993 | |
11994 | #ifdef _DEBUG |
// Only MulticastDelegate should inherit from Delegate
11996 | LPCUTF8 className; |
11997 | LPCUTF8 nameSpace; |
11998 | if (FAILED(pInternalImport->GetNameOfTypeDef(cl, &className, &nameSpace))) |
11999 | { |
12000 | className = nameSpace = "Invalid TypeDef record" ; |
12001 | } |
12002 | BAD_FORMAT_NOTHROW_ASSERT(strcmp(className, "MulticastDelegate" ) == 0); |
12003 | #endif |
12004 | } |
12005 | |
12006 | if (fIsDelegate) |
12007 | { |
12008 | if (!pClass->IsSealed()) |
12009 | { |
12010 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, BFA_DELEGATE_CLASS_NOTSEALED); |
12011 | } |
12012 | |
12013 | pClass->SetIsDelegate(); |
12014 | } |
12015 | |
12016 | if (tdEnclosing != mdTypeDefNil) |
12017 | { |
12018 | pClass->SetIsNested(); |
12019 | THROW_BAD_FORMAT_MAYBE(IsTdNested(pClass->GetProtection()), VLDTR_E_TD_ENCLNOTNESTED, pModule); |
12020 | } |
12021 | else if (IsTdNested(pClass->GetProtection())) |
12022 | { |
12023 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
12024 | } |
12025 | |
12026 | // We only permit generic interfaces and delegates to have variant type parameters |
12027 | if (genericsInfo.pVarianceInfo != NULL && !pClass->IsInterface() && !fIsDelegate) |
12028 | { |
12029 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_VARIANCE_CLASS); |
12030 | } |
12031 | |
12032 | // Now load all the interfaces |
12033 | HENUMInternalHolder hEnumInterfaceImpl(pInternalImport); |
12034 | hEnumInterfaceImpl.EnumInit(mdtInterfaceImpl, cl); |
12035 | |
12036 | cInterfaces = pInternalImport->EnumGetCount(&hEnumInterfaceImpl); |
12037 | |
12038 | if (cInterfaces != 0) |
12039 | { |
12040 | DWORD i; |
12041 | |
12042 | // Allocate the BuildingInterfaceList table |
12043 | pInterfaceBuildInfo = new (&GetThread()->m_MarshalAlloc) BuildingInterfaceInfo_t[cInterfaces]; |
12044 | |
12045 | mdInterfaceImpl ii; |
12046 | for (i = 0; pInternalImport->EnumNext(&hEnumInterfaceImpl, &ii); i++) |
12047 | { |
12048 | // Get properties on this interface |
12049 | mdTypeRef crInterface; |
12050 | if (FAILED(pInternalImport->GetTypeOfInterfaceImpl(ii, &crInterface))) |
12051 | { |
12052 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
12053 | } |
12054 | // validate the token |
12055 | mdToken crIntType = |
12056 | (RidFromToken(crInterface) && pInternalImport->IsValidToken(crInterface)) ? |
12057 | TypeFromToken(crInterface) : |
12058 | 0; |
12059 | switch (crIntType) |
12060 | { |
12061 | case mdtTypeDef: |
12062 | case mdtTypeRef: |
12063 | case mdtTypeSpec: |
12064 | break; |
12065 | default: |
12066 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_INTERFACENULL); |
12067 | } |
12068 | |
12069 | TypeHandle intType; |
12070 | |
12071 | { |
12072 | OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); |
12073 | intType = LoadApproxTypeThrowing(pModule, crInterface, NULL, &genericsInfo.typeContext); |
12074 | } |
12075 | |
12076 | pInterfaceBuildInfo[i].m_pMethodTable = intType.AsMethodTable(); |
12077 | if (pInterfaceBuildInfo[i].m_pMethodTable == NULL) |
12078 | { |
12079 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_INTERFACENULL); |
12080 | } |
12081 | |
12082 | // Ensure this is an interface |
12083 | if (!pInterfaceBuildInfo[i].m_pMethodTable->IsInterface()) |
12084 | { |
12085 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_NOTINTERFACE); |
12086 | } |
12087 | |
12088 | // Check interface for use of variant type parameters |
12089 | if ((genericsInfo.pVarianceInfo != NULL) && (TypeFromToken(crInterface) == mdtTypeSpec)) |
12090 | { |
12091 | ULONG cSig; |
12092 | PCCOR_SIGNATURE pSig; |
12093 | if (FAILED(pInternalImport->GetTypeSpecFromToken(crInterface, &pSig, &cSig))) |
12094 | { |
12095 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
12096 | } |
12097 | // Interfaces behave covariantly |
12098 | if (!EEClass::CheckVarianceInSig( |
12099 | genericsInfo.GetNumGenericArgs(), |
12100 | genericsInfo.pVarianceInfo, |
12101 | pModule, |
12102 | SigPointer(pSig, cSig), |
12103 | gpCovariant)) |
12104 | { |
12105 | pAssembly->ThrowTypeLoadException( |
12106 | pInternalImport, |
12107 | cl, |
12108 | IDS_CLASSLOAD_VARIANCE_IN_INTERFACE); |
12109 | } |
12110 | } |
12111 | } |
12112 | _ASSERTE(i == cInterfaces); |
12113 | } |
12114 | |
12115 | if (fHasLayout || |
/* Variant delegates should not have any instance fields of the variant
type parameter. For now, we just completely disallow all fields even
if they are non-variant or static, as it is not a useful scenario.
@TODO: A more logical place for this check would be in
MethodTableBuilder::EnumerateClassMembers() */
12121 | (fIsDelegate && genericsInfo.pVarianceInfo)) |
12122 | { |
12123 | // check for fields and variance |
12124 | ULONG cFields; |
12125 | HENUMInternalHolder hEnumField(pInternalImport); |
12126 | hEnumField.EnumInit(mdtFieldDef, cl); |
12127 | |
12128 | cFields = pInternalImport->EnumGetCount(&hEnumField); |
12129 | |
12130 | if ((cFields != 0) && fIsDelegate && (genericsInfo.pVarianceInfo != NULL)) |
12131 | { |
12132 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_VARIANCE_IN_DELEGATE); |
12133 | } |
12134 | |
12135 | if (fHasLayout) |
12136 | { |
12137 | // Though we fail on this condition, we should never run into it. |
12138 | CONSISTENCY_CHECK(nstructPackingSize != 0); |
12139 | // MD Val check: PackingSize |
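// PackingSize must be a power of two between 1 and 128; the (x & (x - 1)) expression below is the
// usual power-of-two test.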
12140 | if((nstructPackingSize == 0) || |
12141 | (nstructPackingSize > 128) || |
12142 | (nstructPackingSize & (nstructPackingSize-1))) |
12143 | { |
12144 | THROW_BAD_FORMAT_MAYBE(!"ClassLayout:Invalid PackingSize" , BFA_BAD_PACKING_SIZE, pModule); |
12145 | pAssembly->ThrowTypeLoadException(pInternalImport, cl, IDS_CLASSLOAD_BADFORMAT); |
12146 | } |
12147 | |
12148 | pLayoutRawFieldInfos = (LayoutRawFieldInfo *)GetThread()->m_MarshalAlloc.Alloc( |
12149 | (S_UINT32(1) + S_UINT32(cFields)) * S_UINT32(sizeof(LayoutRawFieldInfo))); |
12150 | |
12151 | { |
12152 | // Warning: this can load classes |
12153 | CONTRACT_VIOLATION(LoadsTypeViolation); |
12154 | |
// Set a flag that allows us to break deadlocks that are a result of the LoadsTypeViolation
12156 | ThreadStateNCStackHolder tsNC(TRUE, Thread::TSNC_LoadsTypeViolation); |
12157 | |
12158 | EEClassLayoutInfo::CollectLayoutFieldMetadataThrowing( |
12159 | cl, |
12160 | nstructPackingSize, |
12161 | nstructNLT, |
12162 | #ifdef FEATURE_COMINTEROP |
12163 | pClass->IsProjectedFromWinRT(), |
12164 | #endif // FEATURE_COMINTEROP |
12165 | fExplicitOffsets, |
12166 | pParentMethodTable, |
12167 | cFields, |
12168 | &hEnumField, |
12169 | pModule, |
12170 | &genericsInfo.typeContext, |
12171 | &(((LayoutEEClass *)pClass)->m_LayoutInfo), |
12172 | pLayoutRawFieldInfos, |
12173 | pAllocator, |
12174 | pamTracker); |
12175 | } |
12176 | } |
12177 | } |
12178 | |
12179 | // Resolve this class, given that we know now that all of its dependencies are loaded and resolved. |
12180 | // !!! This must be the last thing in this TRY block: if MethodTableBuilder succeeds, it has published the class |
12181 | // and there is no going back. |
12182 | MethodTableBuilder builder( |
12183 | NULL, |
12184 | pClass, |
12185 | &GetThread()->m_MarshalAlloc, |
12186 | pamTracker); |
12187 | |
12188 | pMT = builder.BuildMethodTableThrowing( |
12189 | pAllocator, |
12190 | pLoaderModule, |
12191 | pModule, |
12192 | cl, |
12193 | pInterfaceBuildInfo, |
12194 | pLayoutRawFieldInfos, |
12195 | pParentMethodTable, |
12196 | &genericsInfo, |
12197 | parentInst, |
12198 | (WORD)cInterfaces); |
12199 | |
12200 | END_SO_INTOLERANT_CODE; |
12201 | RETURN(TypeHandle(pMT)); |
12202 | } // ClassLoader::CreateTypeHandleForTypeDefThrowing |
12203 | |