// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// File: generics.cpp
//


//
// Helper functions for generics prototype
//

//
// ============================================================================

#include "common.h"
#include "method.hpp"
#include "field.h"
#include "eeconfig.h"
#include "generics.h"
#include "genericdict.h"
#include "stackprobe.h"
#include "typestring.h"
#include "typekey.h"
#include "dumpcommon.h"
#include "array.h"

#include "generics.inl"
#ifdef FEATURE_COMINTEROP
#include "winrttypenameconverter.h"
#endif // FEATURE_COMINTEROP

/* static */
TypeHandle ClassLoader::CanonicalizeGenericArg(TypeHandle thGenericArg)
{
    CONTRACT(TypeHandle)
    {
        NOTHROW;
        GC_NOTRIGGER;
        POSTCONDITION(CheckPointer(RETVAL));
    }
    CONTRACT_END

#if defined(FEATURE_SHARE_GENERIC_CODE)
    CorElementType et = thGenericArg.GetSignatureCorElementType();

    // Note that generic variables do not share.

    if (CorTypeInfo::IsObjRef_NoThrow(et))
        RETURN(TypeHandle(g_pCanonMethodTableClass));

    if (et == ELEMENT_TYPE_VALUETYPE)
    {
        // Don't share structs. But sharability must be propagated through
        // them (i.e. struct<object> * shares with struct<string> *)
        RETURN(TypeHandle(thGenericArg.GetCanonicalMethodTable()));
    }

    _ASSERTE(et != ELEMENT_TYPE_PTR && et != ELEMENT_TYPE_FNPTR);
    RETURN(thGenericArg);
#else
    RETURN(thGenericArg);
#endif // FEATURE_SHARE_GENERIC_CODE
}
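
// For illustration (MyStruct hypothetical; not compiled code), canonicalization
// maps type arguments as follows under FEATURE_SHARE_GENERIC_CODE:
//
//   CanonicalizeGenericArg(string)           == __Canon             (all object references share)
//   CanonicalizeGenericArg(int)              == int                 (value types do not share)
//   CanonicalizeGenericArg(MyStruct<string>) == MyStruct<__Canon>   (sharability propagates through
//                                                                    value-type instantiations)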

// Given the build-time ShareGenericCode setting, does the specified instantiation
// contain a type argument that is representation-sharable as a type parameter to a
// generic type or method?
/* static */ BOOL ClassLoader::IsSharableInstantiation(Instantiation inst)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        FORBID_FAULT;
    }
    CONTRACTL_END

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        if (CanonicalizeGenericArg(inst[i]).IsCanonicalSubtype())
            return TRUE;
    }
    return FALSE;
}
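
// For example (illustrative):
//
//   IsSharableInstantiation(<string>)      == TRUE    (string canonicalizes to __Canon)
//   IsSharableInstantiation(<int>)         == FALSE   (int canonicalizes to itself)
//   IsSharableInstantiation(<int, object>) == TRUE    (one sharable argument suffices)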

/* static */ BOOL ClassLoader::IsCanonicalGenericInstantiation(Instantiation inst)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        FORBID_FAULT;
    }
    CONTRACTL_END

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        if (CanonicalizeGenericArg(inst[i]) != inst[i])
            return FALSE;
    }
    return TRUE;
}
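
// For example (illustrative):
//
//   IsCanonicalGenericInstantiation(<__Canon>) == TRUE    (__Canon canonicalizes to itself)
//   IsCanonicalGenericInstantiation(<int>)     == TRUE    (value types are their own canonical form)
//   IsCanonicalGenericInstantiation(<string>)  == FALSE   (string canonicalizes to __Canon)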

/* static */ BOOL ClassLoader::IsTypicalSharedInstantiation(Instantiation inst)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        FORBID_FAULT;
    }
    CONTRACTL_END

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        if (inst[i] != TypeHandle(g_pCanonMethodTableClass))
            return FALSE;
    }
    return TRUE;
}
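
// For example (illustrative): <__Canon, __Canon> is a typical shared instantiation;
// <__Canon, int> and <string, string> are not, since every argument must be exactly __Canon.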

#ifndef DACCESS_COMPILE

TypeHandle ClassLoader::LoadCanonicalGenericInstantiation(TypeKey *pTypeKey,
                                                          LoadTypesFlag fLoadTypes/*=LoadTypes*/,
                                                          ClassLoadLevel level/*=CLASS_LOADED*/)
{
    CONTRACT(TypeHandle)
    {
        if (FORBIDGC_LOADER_USE_ENABLED()) NOTHROW; else THROWS;
        if (FORBIDGC_LOADER_USE_ENABLED()) GC_NOTRIGGER; else GC_TRIGGERS;
        if (FORBIDGC_LOADER_USE_ENABLED() || fLoadTypes != LoadTypes) { LOADS_TYPE(CLASS_LOAD_BEGIN); } else { LOADS_TYPE(level); }
        POSTCONDITION(CheckPointer(RETVAL, ((fLoadTypes == LoadTypes) ? NULL_NOT_OK : NULL_OK)));
        POSTCONDITION(RETVAL.IsNull() || RETVAL.CheckLoadLevel(level));
    }
    CONTRACT_END

    Instantiation inst = pTypeKey->GetInstantiation();
    DWORD ntypars = inst.GetNumArgs();

    // Canonicalize the type arguments.
    DWORD dwAllocSize = 0;
    if (!ClrSafeInt<DWORD>::multiply(ntypars, sizeof(TypeHandle), dwAllocSize))
        ThrowHR(COR_E_OVERFLOW);

    TypeHandle ret = TypeHandle();
    DECLARE_INTERIOR_STACK_PROBE;
#ifndef DACCESS_COMPILE
    if ((dwAllocSize/GetOsPageSize()+1) >= 2)
    {
        DO_INTERIOR_STACK_PROBE_FOR_NOTHROW_CHECK_THREAD((10+dwAllocSize/GetOsPageSize()+1), NO_FORBIDGC_LOADER_USE_ThrowSO(););
    }
#endif // DACCESS_COMPILE
    TypeHandle *repInst = (TypeHandle*) _alloca(dwAllocSize);

    for (DWORD i = 0; i < ntypars; i++)
    {
        repInst[i] = ClassLoader::CanonicalizeGenericArg(inst[i]);
    }

    // Load the canonical instantiation
    TypeKey canonKey(pTypeKey->GetModule(), pTypeKey->GetTypeToken(), Instantiation(repInst, ntypars));
    ret = ClassLoader::LoadConstructedTypeThrowing(&canonKey, fLoadTypes, level);

    END_INTERIOR_STACK_PROBE;
    RETURN(ret);
}
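
// For example (illustrative, MyClass hypothetical): given a type key for
// Dictionary<string, MyClass>, both reference-type arguments canonicalize to
// __Canon, so the shared Dictionary<__Canon, __Canon> instantiation is loaded.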

// Create a non-canonical instantiation of a generic type by copying the method
// table of the canonical instantiation.
//
/* static */
TypeHandle
ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
    TypeKey         *pTypeKey,
    AllocMemTracker *pamTracker)
{
    CONTRACT(TypeHandle)
    {
        STANDARD_VM_CHECK;
        PRECONDITION(CheckPointer(pTypeKey));
        PRECONDITION(CheckPointer(pamTracker));
        PRECONDITION(pTypeKey->HasInstantiation());
        PRECONDITION(ClassLoader::IsSharableInstantiation(pTypeKey->GetInstantiation()));
        PRECONDITION(!TypeHandle::IsCanonicalSubtypeInstantiation(pTypeKey->GetInstantiation()));
        POSTCONDITION(CheckPointer(RETVAL));
        POSTCONDITION(RETVAL.CheckMatchesKey(pTypeKey));
    }
    CONTRACT_END

    Module *pLoaderModule = ClassLoader::ComputeLoaderModule(pTypeKey);
    LoaderAllocator *pAllocator = pLoaderModule->GetLoaderAllocator();

    Instantiation inst = pTypeKey->GetInstantiation();
    pAllocator->EnsureInstantiation(pTypeKey->GetModule(), inst);
    DWORD ntypars = inst.GetNumArgs();

#ifdef _DEBUG
    if (LoggingOn(LF_CLASSLOADER, LL_INFO1000) || g_pConfig->BreakOnInstantiationEnabled())
    {
        StackSString debugTypeKeyName;
        TypeString::AppendTypeKeyDebug(debugTypeKeyName, pTypeKey);
        LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: New instantiation requested: %S\n", debugTypeKeyName.GetUnicode()));

        StackScratchBuffer buf;
        if (g_pConfig->ShouldBreakOnInstantiation(debugTypeKeyName.GetUTF8(buf)))
            CONSISTENCY_CHECK_MSGF(false, ("BreakOnInstantiation: typename '%s' ", debugTypeKeyName.GetUTF8(buf)));
    }
#endif // _DEBUG

    TypeHandle canonType;
    {
        OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS);
        canonType = ClassLoader::LoadCanonicalGenericInstantiation(pTypeKey, ClassLoader::LoadTypes, CLASS_LOAD_APPROXPARENTS);
    }

    // Now fabricate a method table
    MethodTable *pOldMT = canonType.AsMethodTable();

    // We only need true vtable entries as the rest can be found in the representative method table
    WORD cSlots = static_cast<WORD>(pOldMT->GetNumVirtuals());

    BOOL fContainsGenericVariables = MethodTable::ComputeContainsGenericVariables(inst);

    // These are all copied across from the old MT, i.e. don't depend on the
    // instantiation.
    BOOL fHasGenericsStaticsInfo = pOldMT->HasGenericsStaticsInfo();

#ifdef FEATURE_COMINTEROP
    BOOL fHasDynamicInterfaceMap = pOldMT->HasDynamicInterfaceMap();
    BOOL fHasRCWPerTypeData = pOldMT->HasRCWPerTypeData();
#else // FEATURE_COMINTEROP
    BOOL fHasDynamicInterfaceMap = FALSE;
    BOOL fHasRCWPerTypeData = FALSE;
#endif // FEATURE_COMINTEROP

    // Collectible types have some special restrictions
    if (pAllocator->IsCollectible())
    {
        if (pOldMT->HasFixedAddressVTStatics())
        {
            ClassLoader::ThrowTypeLoadException(pTypeKey, IDS_CLASSLOAD_COLLECTIBLEFIXEDVTATTR);
        }
    }

    // The number of bytes used for GC info
    size_t cbGC = pOldMT->ContainsPointers() ? ((CGCDesc*) pOldMT)->GetSize() : 0;

    // Bytes required for the method table itself, including the vtable indirection slots
    S_SIZE_T safe_cbMT = S_SIZE_T( cbGC ) + S_SIZE_T( sizeof(MethodTable) );
    safe_cbMT += MethodTable::GetNumVtableIndirections(cSlots) * sizeof(MethodTable::VTableIndir_t);
    if (safe_cbMT.IsOverflow())
    {
        ThrowHR(COR_E_OVERFLOW);
    }
    const size_t cbMT = safe_cbMT.Value();

    // After the optional members (see below) comes the duplicated interface map.
    // For dynamic interfaces the interface map area begins one word
    // before the location returned by GetInterfaceMap()
    WORD wNumInterfaces = static_cast<WORD>(pOldMT->GetNumInterfaces());
    DWORD cbIMap = pOldMT->GetInterfaceMapSize();
    InterfaceInfo_t *pOldIMap = (InterfaceInfo_t *)pOldMT->GetInterfaceMap();

    BOOL fHasGuidInfo = FALSE;
    BOOL fHasCCWTemplate = FALSE;

    Generics::DetermineCCWTemplateAndGUIDPresenceOnNonCanonicalMethodTable(pOldMT, fContainsGenericVariables, &fHasGuidInfo, &fHasCCWTemplate);

    DWORD dwMultipurposeSlotsMask = 0;
    dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasPerInstInfo;
    if (wNumInterfaces != 0)
        dwMultipurposeSlotsMask |= MethodTable::enum_flag_HasInterfaceMap;

    // NonVirtualSlots, DispatchMap and ModuleOverride multipurpose slots are used
    // from the canonical methodtable, so we do not need to store them here.

    // We need space for the optional members.
    DWORD cbOptional = MethodTable::GetOptionalMembersAllocationSize(dwMultipurposeSlotsMask,
                                                                     fHasGenericsStaticsInfo,
                                                                     fHasGuidInfo,
                                                                     fHasCCWTemplate,
                                                                     fHasRCWPerTypeData,
                                                                     pOldMT->HasTokenOverflow());

    // We need space for the PerInstInfo, i.e. the generic dictionary pointers...
    DWORD cbPerInst = sizeof(GenericsDictInfo) + pOldMT->GetPerInstInfoSize();

    // Finally we need space for the instantiation/dictionary for this type
    DWORD cbInstAndDict = pOldMT->GetInstAndDictSize();

    // Allocate from the high frequency heap of the correct domain
    S_SIZE_T allocSize = safe_cbMT;
    allocSize += cbOptional;
    allocSize += cbIMap;
    allocSize += cbPerInst;
    allocSize += cbInstAndDict;

    if (allocSize.IsOverflow())
    {
        ThrowHR(COR_E_OVERFLOW);
    }
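
    // The single allocation made below is laid out as follows (a summary of the
    // offset arithmetic used when wiring up the pieces further down):
    //
    //   pMemory                       GC descriptor, if any (cbGC)
    //   pMemory + cbGC    (== pMT)    MethodTable + vtable indirection slots
    //                                 (cbMT covers everything up to here)
    //   pMemory + cbMT                optional members (cbOptional)
    //     + cbOptional                interface map (cbIMap)
    //     + cbIMap                    GenericsDictInfo + per-inst dictionary pointers (cbPerInst)
    //     + cbPerInst                 instantiation + dictionary for this type (cbInstAndDict)
    //   end of block                  unshared vtable chunks, only if the canonical
    //                                 chunks cannot be shared (see below)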

#ifdef FEATURE_PREJIT
    Module *pComputedPZM = Module::ComputePreferredZapModule(pTypeKey);
    BOOL canShareVtableChunks = MethodTable::CanShareVtableChunksFrom(pOldMT, pLoaderModule, pComputedPZM);
#else
    BOOL canShareVtableChunks = MethodTable::CanShareVtableChunksFrom(pOldMT, pLoaderModule);
#endif // FEATURE_PREJIT

    SIZE_T offsetOfUnsharedVtableChunks = allocSize.Value();

    // We either share all of the canonical's virtual slots or none of them.
    // If none, we need to allocate space for the slots
    if (!canShareVtableChunks)
    {
        allocSize += S_SIZE_T( cSlots ) * S_SIZE_T( sizeof(MethodTable::VTableIndir2_t) );
    }

    if (allocSize.IsOverflow())
    {
        ThrowHR(COR_E_OVERFLOW);
    }

    BYTE *pMemory = (BYTE *) pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem( allocSize ));

    // Head of MethodTable memory
    MethodTable *pMT = (MethodTable*) (pMemory + cbGC);

    // Copy of GC info
    memcpy((BYTE*)pMT - cbGC, (BYTE*) pOldMT - cbGC, cbGC);

    // Allocate the private data block ("private" during runtime in the ngen'ed case)
    MethodTableWriteableData *pMTWriteableData = (MethodTableWriteableData *) (BYTE *)
        pamTracker->Track(pAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(MethodTableWriteableData))));
    // Note: Memory allocated on loader heap is zero filled
    pMT->SetWriteableData(pMTWriteableData);

    // This also disables IBC logging until the type is sufficiently initialized, so
    // it needs to be done early
    pMTWriteableData->SetIsNotFullyLoadedForBuildMethodTable();

    // <TODO> this is incredibly fragile. We should just construct the MT all over again. </TODO>
    pMT->CopyFlags(pOldMT);

    pMT->ClearFlag(MethodTable::enum_flag_MultipurposeSlotsMask);
    pMT->SetMultipurposeSlotsMask(dwMultipurposeSlotsMask);

    // Set generics flags
    pMT->ClearFlag(MethodTable::enum_flag_GenericsMask);
    pMT->SetFlag(MethodTable::enum_flag_GenericsMask_GenericInst);

    // Freshly allocated - does not need restore
    pMT->ClearFlag(MethodTable::enum_flag_IsZapped);
    pMT->ClearFlag(MethodTable::enum_flag_IsPreRestored);

    pMT->ClearFlag(MethodTable::enum_flag_HasIndirectParent);
    pMT->m_pParentMethodTable.SetValueMaybeNull(NULL);

    // No non-virtual slots
    pMT->ClearFlag(MethodTable::enum_flag_HasSingleNonVirtualSlot);

    pMT->SetBaseSize(pOldMT->GetBaseSize());
    pMT->SetParentMethodTable(pOldMT->GetParentMethodTable());
    pMT->SetCanonicalMethodTable(pOldMT);

    pMT->m_wNumInterfaces = pOldMT->m_wNumInterfaces;

#ifdef FEATURE_TYPEEQUIVALENCE
    if (pMT->IsInterface() && !pMT->HasTypeEquivalence())
    {
        // The fHasTypeEquivalence flag is "inherited" from generic arguments so we can quickly detect
        // types like IList<IFoo> where IFoo is an interface with the TypeIdentifierAttribute.
        for (DWORD i = 0; i < ntypars; i++)
        {
            if (inst[i].HasTypeEquivalence())
            {
                pMT->SetHasTypeEquivalence();
                break;
            }
        }
    }
#endif // FEATURE_TYPEEQUIVALENCE

    if (pOldMT->IsInterface() && IsImplicitInterfaceOfSZArray(pOldMT))
    {
        // Determine if we are creating an interface methodtable that may be used to dispatch through VSD
        // on an array object using a generic interface (such as IList<T>).
        // Please read comments in IsArray block of code:MethodTable::FindDispatchImpl.
        //
        // Arrays are special because we use the same method table (object[]) for all arrays of reference
        // classes (e.g. string[]). This means that the method table for an array is not a complete description
        // of the type of the array, and thus the target of something like IList<T>::IndexOf cannot be determined
        // simply by looking at the method table of T[] (which might be the method table of object[], if T is a
        // reference type).
        //
        // This is done to minimize the number of MethodTables, but as a side-effect of this optimization,
        // we end up using a domain-shared type (object[]) with a domain-specific dispatch token.
        // This is a problem because the same domain-specific dispatch token value can appear in
        // multiple unshared domains (VSD takes advantage of the fact that in general a shared type
        // cannot implement an unshared interface). This means that the same <token, object[]> pair
        // value can mean different things in different domains (since the token could represent
        // IList<Foo> in one domain and IEnumerable<Bar> in another). This is a problem because the
        // VSD polymorphic lookup mechanism relies on a process-wide cache table, and as a result
        // these duplicate values would collide if we didn't use fat dispatch tokens to ensure uniqueness
        // when the interface methodtable is not in the shared domain.
        //
        // Of note: there is also some interesting array-specific behaviour where if B inherits from A
        // and you have an array of B (B[]), then B[] implements IList<B> and IList<A>, but a dispatch
        // on an IList<A> reference results in a dispatch to SZArrayHelper<A> rather than
        // SZArrayHelper<B> (i.e., the variance implementation is not done like virtual methods).
        //
        // For example, if Sub inherits from Super, which inherits from Object, then
        // * Sub[] implements IList<Super>
        // * Sub[] implements IList<Sub>
        //
        // And as a result we have the following mappings:
        // * IList<Super>::IndexOf for Sub[] goes to SZArrayHelper<Super>::IndexOf
        // * IList<Sub>::IndexOf for Sub[] goes to SZArrayHelper<Sub>::IndexOf
        //
        pMT->SetRequiresFatDispatchTokens();
    }

    // Number of slots only includes vtable slots
    pMT->SetNumVirtuals(cSlots);

    // Fill out the vtable indirection slots
    MethodTable::VtableIndirectionSlotIterator it = pMT->IterateVtableIndirectionSlots();
    while (it.Next())
    {
        if (canShareVtableChunks)
        {
            // Share the canonical chunk
            it.SetIndirectionSlot(pOldMT->GetVtableIndirections()[it.GetIndex()].GetValueMaybeNull());
        }
        else
        {
            // Use the locally allocated chunk
            it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pMemory+offsetOfUnsharedVtableChunks));
            offsetOfUnsharedVtableChunks += it.GetSize();
        }
    }

    // If we are not sharing parent chunks, copy down the slot contents
    if (!canShareVtableChunks)
    {
        // Need to assign the slots one by one to filter out jump thunks
        for (DWORD i = 0; i < cSlots; i++)
        {
            pMT->SetSlot(i, pOldMT->GetRestoredSlot(i));
        }
    }

    // All remaining flags on the writeable data are initially false
    // for a dynamically generated instantiation.

    if (fContainsGenericVariables)
        pMT->SetContainsGenericVariables();

    if (fHasGenericsStaticsInfo)
        pMT->SetDynamicStatics(TRUE);


#ifdef FEATURE_COMINTEROP
    if (fHasCCWTemplate)
        pMT->SetHasCCWTemplate();
    if (fHasGuidInfo)
        pMT->SetHasGuidInfo();
#endif

    // Since we are fabricating a new MT based on an existing one, the per-inst info should
    // be non-null
    _ASSERTE(pOldMT->HasPerInstInfo());

    // Fill in the per-inst map pointer (which points to the array of generic dictionary pointers)
    pMT->SetPerInstInfo((MethodTable::PerInstInfoElem_t *) (pMemory + cbMT + cbOptional + cbIMap + sizeof(GenericsDictInfo)));
    _ASSERTE(FitsIn<WORD>(pOldMT->GetNumDicts()));
    _ASSERTE(FitsIn<WORD>(pOldMT->GetNumGenericArgs()));
    pMT->SetDictInfo(static_cast<WORD>(pOldMT->GetNumDicts()), static_cast<WORD>(pOldMT->GetNumGenericArgs()));

    // Fill in the last entry in the array of generic dictionary pointers ("per inst info").
    // The others are filled in by LoadExactParents, which copies down any inherited generic
    // dictionary pointers.
    Dictionary *pDict = (Dictionary*) (pMemory + cbMT + cbOptional + cbIMap + cbPerInst);
    MethodTable::PerInstInfoElem_t *pPInstInfo = (MethodTable::PerInstInfoElem_t *) (pMT->GetPerInstInfo() + (pOldMT->GetNumDicts()-1));
    pPInstInfo->SetValueMaybeNull(pDict);

    // Fill in the instantiation section of the generic dictionary. The remainder of the
    // generic dictionary will be zeroed, which is the correct initial state.
    TypeHandle *pInstDest = (TypeHandle *)pDict->GetInstantiation();
    for (DWORD iArg = 0; iArg < ntypars; iArg++)
    {
        pInstDest[iArg] = inst[iArg];
    }
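
    // For example (illustrative, MyType hypothetical): for MyType<string, int> the
    // dictionary now begins with { TypeHandle(string), TypeHandle(int) }; the slots
    // that follow stay zeroed until they are lazily populated at run time.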

    // Copy the interface map across
    InterfaceInfo_t *pInterfaceMap = (InterfaceInfo_t *)(pMemory + cbMT + cbOptional + (fHasDynamicInterfaceMap ? sizeof(DWORD_PTR) : 0));

#ifdef FEATURE_COMINTEROP
    // Extensible RCWs are prefixed with the count of dynamic interfaces.
    if (fHasDynamicInterfaceMap)
    {
        *(((DWORD_PTR *)pInterfaceMap) - 1) = 0;
    }
#endif // FEATURE_COMINTEROP

    for (WORD iItf = 0; iItf < wNumInterfaces; iItf++)
    {
        OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS);
        pInterfaceMap[iItf].SetMethodTable(pOldIMap[iItf].GetApproxMethodTable(pOldMT->GetLoaderModule()));
    }

    // Set the interface map pointer stored in the main section of the vtable (actually
    // an optional member) to point to the correct region within the newly
    // allocated method table.

    // Fill in the interface map pointer
    pMT->SetInterfaceMap(wNumInterfaces, pInterfaceMap);

    // Copy across extra flags for these interfaces as well. We may need additional memory for this.
    PVOID pExtraInterfaceInfo = NULL;
    SIZE_T cbExtraInterfaceInfo = MethodTable::GetExtraInterfaceInfoSize(wNumInterfaces);
    if (cbExtraInterfaceInfo)
        pExtraInterfaceInfo = pamTracker->Track(pAllocator->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(cbExtraInterfaceInfo)));

    // Call this even in the case where pExtraInterfaceInfo == NULL (certain cases are optimized and don't
    // require extra buffer space).
    pMT->InitializeExtraInterfaceInfo(pExtraInterfaceInfo);

    for (UINT32 i = 0; i < pOldMT->GetNumInterfaces(); i++)
    {
        if (pOldMT->IsInterfaceDeclaredOnClass(i))
            pMT->SetInterfaceDeclaredOnClass(i);
    }

    pMT->SetLoaderModule(pLoaderModule);
    pMT->SetLoaderAllocator(pAllocator);


#ifdef _DEBUG
    // Name for debugging
    StackSString debug_ClassNameString;
    TypeString::AppendTypeKey(debug_ClassNameString, pTypeKey, TypeString::FormatNamespace | TypeString::FormatAngleBrackets | TypeString::FormatFullInst);
    StackScratchBuffer debug_ClassNameBuffer;
    const char *debug_szClassNameBuffer = debug_ClassNameString.GetUTF8(debug_ClassNameBuffer);
    S_SIZE_T safeLen = S_SIZE_T(strlen(debug_szClassNameBuffer)) + S_SIZE_T(1);
    if (safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW);

    size_t len = safeLen.Value();
    char *debug_szClassName = (char *)pamTracker->Track(pAllocator->GetLowFrequencyHeap()->AllocMem(safeLen));
    strcpy_s(debug_szClassName, len, debug_szClassNameBuffer);
    pMT->SetDebugClassName(debug_szClassName);

    // Debugging information
    if (pOldMT->Debug_HasInjectedInterfaceDuplicates())
        pMT->Debug_SetHasInjectedInterfaceDuplicates();
#endif // _DEBUG

    // <NICE>This logic is identical to logic in class.cpp. Factor these out.</NICE>
    // No need to generate IDs for open types. However,
    // we still leave the optional member in the MethodTable holding the value -1 for the ID.
    if (fHasGenericsStaticsInfo)
    {
        FieldDesc *pStaticFieldDescs = NULL;

        if (pOldMT->GetNumStaticFields() != 0)
        {
            pStaticFieldDescs = (FieldDesc*) pamTracker->Track(pAllocator->GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(FieldDesc)) * S_SIZE_T(pOldMT->GetNumStaticFields())));
            FieldDesc *pOldFD = pOldMT->GetGenericsStaticFieldDescs();

            g_IBCLogger.LogFieldDescsAccess(pOldFD);

            for (DWORD i = 0; i < pOldMT->GetNumStaticFields(); i++)
            {
                pStaticFieldDescs[i].InitializeFrom(pOldFD[i], pMT);
            }
        }
        pMT->SetupGenericsStaticsInfo(pStaticFieldDescs);
    }


    // VTS info doesn't depend on the exact instantiation but we make a copy
    // anyway since we can't currently deal with the possibility of having a
    // cross-module pointer to the data block. Eventually we might be able to
    // tokenize this reference, but we should first determine whether there's
    // enough performance degradation to justify the extra complexity.

    pMT->SetCl(pOldMT->GetCl());

    // Check that we've set up the flags correctly on the new method table
    _ASSERTE(!fContainsGenericVariables == !pMT->ContainsGenericVariables());
    _ASSERTE(!fHasGenericsStaticsInfo == !pMT->HasGenericsStaticsInfo());
#ifdef FEATURE_COMINTEROP
    _ASSERTE(!fHasDynamicInterfaceMap == !pMT->HasDynamicInterfaceMap());
    _ASSERTE(!fHasRCWPerTypeData == !pMT->HasRCWPerTypeData());
    _ASSERTE(!fHasCCWTemplate == !pMT->HasCCWTemplate());
    _ASSERTE(!fHasGuidInfo == !pMT->HasGuidInfo());
#endif

    LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Replicated methodtable to create type %s\n", pMT->GetDebugClassName()));

#ifdef _DEBUG
    if (g_pConfig->ShouldDumpOnClassLoad(debug_szClassName))
    {
        LOG((LF_ALWAYS, LL_ALWAYS,
             "Method table summary for '%s' (instantiation):\n",
             pMT->GetDebugClassName()));
        pMT->Debug_DumpInterfaceMap("Approximate");
    }
#endif //_DEBUG

#ifdef FEATURE_PREJIT
    _ASSERTE(pComputedPZM == Module::GetPreferredZapModuleForMethodTable(pMT));
#endif //FEATURE_PREJIT

    // We never have non-virtual slots in this method table (see SetNumVirtuals above)
    _ASSERTE(!pMT->HasNonVirtualSlots());

    pMTWriteableData->SetIsRestoredForBuildMethodTable();

    RETURN(TypeHandle(pMT));
} // ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation

namespace Generics
{

BOOL CheckInstantiation(Instantiation inst)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
    }
    CONTRACTL_END

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        TypeHandle th = inst[i];
        if (th.IsNull())
        {
            return FALSE;
        }

        CorElementType type = th.GetSignatureCorElementType();
        if (CorTypeInfo::IsGenericVariable_NoThrow(type))
        {
            return TRUE;
        }

        g_IBCLogger.LogTypeMethodTableAccess(&th);

        if (   type == ELEMENT_TYPE_BYREF
            || type == ELEMENT_TYPE_TYPEDBYREF
            || type == ELEMENT_TYPE_VOID
            || type == ELEMENT_TYPE_PTR
            || type == ELEMENT_TYPE_FNPTR)
        {
            return FALSE;
        }

        MethodTable *pMT = th.GetMethodTable();
        if (pMT != NULL)
        {
            if (pMT->IsByRefLike())
            {
                return FALSE;
            }
        }
    }
    return TRUE;
}
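
// For example (illustrative): CheckInstantiation(<int>) and CheckInstantiation(<string>)
// return TRUE; <void>, <int*>, and byref-like arguments such as <Span<int>> return FALSE.
// An instantiation containing a generic variable (e.g. <!0>) returns TRUE as soon as the
// variable is encountered, without validating the remaining arguments.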

// Just records the owner and links to the previous graph.
RecursionGraph::RecursionGraph(RecursionGraph *pPrev, TypeHandle thOwner)
{
    LIMITED_METHOD_CONTRACT;

    m_pPrev = pPrev;
    m_thOwner = thOwner;

    m_pNodes = NULL;
}

RecursionGraph::~RecursionGraph()
{
    WRAPPER_NO_CONTRACT;
    if (m_pNodes != NULL)
        delete [] m_pNodes;
}

// Adds edges generated by the parent and implemented interfaces; returns TRUE iff
// an expanding cycle was found.
BOOL RecursionGraph::CheckForIllegalRecursion()
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        PRECONDITION(!m_thOwner.IsTypeDesc());
    }
    CONTRACTL_END;

    MethodTable *pMT = m_thOwner.AsMethodTable();

    Instantiation inst = pMT->GetInstantiation();

    // Initialize the node array.
    m_pNodes = new Node[inst.GetNumArgs()];

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        m_pNodes[i].SetSourceVar(inst[i].AsGenericVariable());
    }

    // Record edges generated by inheriting from the parent.
    MethodTable *pParentMT = pMT->GetParentMethodTable();
    if (pParentMT)
    {
        AddDependency(pParentMT);
    }

    // Record edges generated by implementing interfaces.
    MethodTable::InterfaceMapIterator it = pMT->IterateInterfaceMap();
    while (it.Next())
    {
        AddDependency(it.GetInterface());
    }

    // Check all owned nodes for expanding cycles. The edges recorded above must all
    // go from owned nodes, so it suffices to look only at these.
    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        if (HasExpandingCycle(&m_pNodes[i], &m_pNodes[i]))
            return TRUE;
    }

    return FALSE;
}
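
// Illustrative sketch (hypothetical C# definitions) of an expanding cycle that this
// check rejects, per the ECMA rule cited in AddDependency below:
//
//   class C<T> : D<T[]> { }   // T occurs inside (not as) the argument for U: expanding edge T => U
//   class D<U> : C<U>   { }   // U is the argument for T directly: non-expanding edge U -> T
//
// The cycle T => U -> T contains an expanding edge, so the closure of instantiations
// is infinite (C<int> needs D<int[]>, which needs C<int[]>, which needs D<int[][]>, ...)
// and loading must fail.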

// Returns TRUE iff the given type is already on the stack (in fact an analogue of
// code:TypeHandleList::Exists).
//
// static
BOOL RecursionGraph::HasSeenType(RecursionGraph *pDepGraph, TypeHandle thType)
{
    LIMITED_METHOD_CONTRACT;

    while (pDepGraph != NULL)
    {
        if (pDepGraph->m_thOwner == thType) return TRUE;
        pDepGraph = pDepGraph->m_pPrev;
    }
    return FALSE;
}

// Adds the specified MT as a dependency (parent or interface) of the owner.
void RecursionGraph::AddDependency(MethodTable *pMT, TypeHandleList *pExpansionVars /*= NULL*/)
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        PRECONDITION(pMT != NULL);
    }
    CONTRACTL_END

    // ECMA:
    // - If T appears as the actual type argument to be substituted for U in some referenced
    //   type D<..., U, ...>, add a non-expanding (->) edge from T to U.
    // - If T appears somewhere inside (but not as) the actual type argument to be substituted
    //   for U in referenced type D<..., U, ...>, add an expanding (=>) edge from T to U.

    // Non-generic dependencies are not interesting.
    if (!pMT->HasInstantiation())
        return;

    // Get the typical instantiation of pMT to figure out its type vars.
    TypeHandle thTypical = ClassLoader::LoadTypeDefThrowing(
        pMT->GetModule(), pMT->GetCl(),
        ClassLoader::ThrowIfNotFound,
        ClassLoader::PermitUninstDefOrRef, tdNoTypes,
        CLASS_LOAD_APPROXPARENTS);

    Instantiation inst = pMT->GetInstantiation();
    Instantiation typicalInst = thTypical.GetInstantiation();

    _ASSERTE(inst.GetNumArgs() == typicalInst.GetNumArgs());

    for (DWORD i = 0; i < inst.GetNumArgs(); i++)
    {
        TypeHandle thArg = inst[i];
        TypeHandle thVar = typicalInst[i];
        if (thArg.IsGenericVariable())
        {
            // Add a non-expanding edge from thArg to the i-th generic parameter of pMT.
            AddEdge(thArg.AsGenericVariable(), thVar.AsGenericVariable(), FALSE);

            // Process the backlog.
            TypeHandle thTo;
            TypeHandleList *pList = pExpansionVars;
            while (TypeHandleList::GetNext(&pList, &thTo))
            {
                AddEdge(thArg.AsGenericVariable(), thTo.AsGenericVariable(), TRUE);
            }
        }
        else
        {
            while (thArg.IsTypeDesc())
            {
                _ASSERTE(thArg.HasTypeParam());
                thArg = (static_cast<PTR_ParamTypeDesc>(thArg.AsTypeDesc()))->GetModifiedType();

                if (thArg.IsGenericVariable()) // : A<!T[]>
                {
                    // Add an expanding edge from thArg to the i-th parameter of pMT.
                    AddEdge(thArg.AsGenericVariable(), thVar.AsGenericVariable(), TRUE);
                    break;
                }
            }

            if (!thArg.IsTypeDesc()) // : A<B<!T>>
            {
                // We will add an expanding edge but we do not yet know from which variable(s).
                // Add the to-variable to the list and call recursively to inspect thArg's
                // instantiation.
                TypeHandleList newExpansionVars(thVar, pExpansionVars);
                AddDependency(thArg.AsMethodTable(), &newExpansionVars);
            }
        }
    }
}
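
// To illustrate the cases handled above (hypothetical definitions, !T denoting A's
// type variable):
//
//   class A<T> : B<T>      // !T substituted directly for B's U: non-expanding edge T -> U
//   class A<T> : B<T[]>    // !T inside a TypeDesc (array) argument: expanding edge T => U
//   class A<T> : B<C<T>>   // !T nested inside another instantiation: recurse into C<!T>
//                          // with U on the backlog; an expanding edge T => U is added
//                          // when !T is found as C's argument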

// Add an edge from pFromVar to pToVar - either non-expanding or expanding.
void RecursionGraph::AddEdge(TypeVarTypeDesc *pFromVar, TypeVarTypeDesc *pToVar, BOOL fExpanding)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
        PRECONDITION(pFromVar != NULL);
        PRECONDITION(pToVar != NULL);
    }
    CONTRACTL_END

    LOG((LF_CLASSLOADER, LL_INFO10000, "GENERICS: Adding %s edge: from %x(0x%x) to %x(0x%x) into recursion graph owned by MT: %x\n",
        (fExpanding ? "EXPANDING" : "NON-EXPANDING"),
        pFromVar->GetToken(), pFromVar->GetModule(),
        pToVar->GetToken(), pToVar->GetModule(),
        m_thOwner.AsMethodTable()));

    // Get the source node.
    Node *pNode = &m_pNodes[pFromVar->GetIndex()];
    _ASSERTE(pFromVar == pNode->GetSourceVar());

    // Add the edge.
    ULONG_PTR edge = (ULONG_PTR)pToVar;
    if (fExpanding) edge |= Node::EDGE_EXPANDING_FLAG;

    IfFailThrow(pNode->GetEdges()->Append((void *)edge));
}

// Recursive worker that checks whether this node is part of an expanding cycle.
BOOL RecursionGraph::HasExpandingCycle(Node *pCurrentNode, Node *pStartNode, BOOL fExpanded /*= FALSE*/)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        PRECONDITION(CheckPointer(pCurrentNode));
        PRECONDITION(CheckPointer(pStartNode));
    }
    CONTRACTL_END;

    // This method performs a modified DFS. We are not looking for just any cycle, but for a
    // cycle which contains at least one expanding edge. Therefore we:
    // 1) Pass around the fExpanded flag to indicate that we've seen an expanding edge.
    // 2) Explicitly check for returning to the starting point rather than to an arbitrary
    //    visited node.

    // Did we just find the cycle?
    if (fExpanded && pCurrentNode == pStartNode)
        return TRUE;

    // Have we been here before, or is this a dead end?
    if (pCurrentNode->IsVisited() || pCurrentNode->GetEdges()->GetCount() == 0)
        return FALSE;

    pCurrentNode->SetVisited();

    ArrayList::Iterator iter = pCurrentNode->GetEdges()->Iterate();
    while (iter.Next())
    {
        ULONG_PTR edge = (ULONG_PTR)iter.GetElement();

        BOOL fExpanding = (edge & Node::EDGE_EXPANDING_FLAG);

        TypeVarTypeDesc *pToVar = (TypeVarTypeDesc *)(edge & ~Node::EDGE_EXPANDING_FLAG);
        unsigned int dwIndex = pToVar->GetIndex();

        Node *pNode = NULL;
        RecursionGraph *pGraph = this;

        // Find the destination node.
        do
        {
            if (pGraph->m_pNodes != NULL &&
                dwIndex < pGraph->m_thOwner.GetNumGenericArgs() &&
                pGraph->m_pNodes[dwIndex].GetSourceVar() == pToVar)
            {
                pNode = &pGraph->m_pNodes[dwIndex];
                break;
            }
            pGraph = pGraph->m_pPrev;
        }
        while (pGraph != NULL);

        if (pNode != NULL)
        {
            // The new path is expanding if it was expanding already or if the edge we follow is expanding.
            if (HasExpandingCycle(pNode, pStartNode, fExpanded || fExpanding))
                return TRUE;
        }
    }

    pCurrentNode->ClearVisited();

    return FALSE;
}

} // namespace Generics

#endif // !DACCESS_COMPILE

namespace Generics
{

/*
 * GetExactInstantiationsOfMethodAndItsClassFromCallInformation
 *
 * This routine takes in the various pieces of information of a call site to managed code
 * and returns the exact instantiations for the method and the class on which the method is defined.
 *
 * Parameters:
 *    pRepMethod - A MethodDesc to the representative instantiation method.
 *    pThis - The OBJECTREF that is being passed to pRepMethod.
 *    pParamTypeArg - The extra argument passed to pRepMethod when pRepMethod is either
 *       RequiresInstMethodTableArg() or RequiresInstMethodDescArg().
 *    pSpecificClass - A pointer to a TypeHandle for storing the exact instantiation
 *       of the class on which pRepMethod is defined, based on the call information.
 *    pSpecificMethod - A pointer to a MethodDesc* for storing the exact instantiation
 *       of pRepMethod, based on the call information.
 *
 * Returns:
 *    TRUE if successful.
 *    FALSE if the exact TypeHandle & MethodDesc requested could not be determined. In this
 *       case, the SpecificClass may be correct, but only if the class is not a generic class.
 *
 */
BOOL GetExactInstantiationsOfMethodAndItsClassFromCallInformation(
    /* in */  MethodDesc *pRepMethod,
    /* in */  OBJECTREF pThis,
    /* in */  PTR_VOID pParamTypeArg,
    /* out*/  TypeHandle *pSpecificClass,
    /* out*/  MethodDesc **pSpecificMethod
    )
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
        CANNOT_TAKE_LOCK;
        PRECONDITION(CheckPointer(pRepMethod));
        SUPPORTS_DAC;
    }
    CONTRACTL_END;

    PTR_VOID pExactGenericArgsToken = NULL;

    if (pRepMethod->AcquiresInstMethodTableFromThis())
    {
        if (pThis != NULL)
        {
            // We could be missing the memory from a dump, or the target could have simply been corrupted.
            ALLOW_DATATARGET_MISSING_MEMORY(
                pExactGenericArgsToken = dac_cast<PTR_VOID>(pThis->GetMethodTable());
            );
        }
    }
    else
    {
        pExactGenericArgsToken = pParamTypeArg;
    }

    return GetExactInstantiationsOfMethodAndItsClassFromCallInformation(pRepMethod, pExactGenericArgsToken,
        pSpecificClass, pSpecificMethod);
}
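
// For example (illustrative): if pRepMethod is the canonical List<__Canon>.Add and
// pThis is a live List<string> instance, the exact instantiation is recovered from
// the method table of 'this', and the overload below reports List<string> as the
// specific class.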

BOOL GetExactInstantiationsOfMethodAndItsClassFromCallInformation(
    /* in */  MethodDesc *pRepMethod,
    /* in */  PTR_VOID pExactGenericArgsToken,
    /* out*/  TypeHandle *pSpecificClass,
    /* out*/  MethodDesc **pSpecificMethod
    )
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
        CANNOT_TAKE_LOCK;
        PRECONDITION(CheckPointer(pRepMethod));
        SUPPORTS_DAC;
    }
    CONTRACTL_END;

    //
    // Start with some decent default values.
    //
    MethodDesc *pMD = pRepMethod;
    MethodTable *pMT = pRepMethod->GetMethodTable();

    *pSpecificMethod = pMD;
    *pSpecificClass = pMT;

    if (!pRepMethod->IsSharedByGenericInstantiations())
    {
        return TRUE;
    }

    if (pExactGenericArgsToken == NULL)
    {
        return FALSE;
    }

    BOOL retVal = FALSE;

    // The following target memory reads will not necessarily succeed against dumps, and will throw on failure.
    EX_TRY_ALLOW_DATATARGET_MISSING_MEMORY
    {
        if (pRepMethod->RequiresInstMethodTableArg())
        {
            pMT = dac_cast<PTR_MethodTable>(pExactGenericArgsToken);
            retVal = TRUE;
        }
        else if (pRepMethod->RequiresInstMethodDescArg())
        {
            pMD = dac_cast<PTR_MethodDesc>(pExactGenericArgsToken);
            pMT = pMD->GetMethodTable();
            retVal = TRUE;
        }
        else if (pRepMethod->AcquiresInstMethodTableFromThis())
        {
            // The exact token might actually be a child class of the class containing
            // the specified function, so walk up the parent chain to make sure we return
            // an exact instantiation of the CORRECT parent class.
            pMT = pMD->GetExactDeclaringType(dac_cast<PTR_MethodTable>(pExactGenericArgsToken));
            _ASSERTE(pMT != NULL);
            retVal = TRUE;
        }
        else
        {
            _ASSERTE(!"Should not happen.");
        }
    }
    EX_END_CATCH_ALLOW_DATATARGET_MISSING_MEMORY

    *pSpecificMethod = pMD;
    *pSpecificClass = pMT;

    return retVal;
}
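
// To illustrate the three token kinds handled above (illustrative examples, based on
// the general shape of the shared-generics calling convention):
//
//   RequiresInstMethodTableArg()      - e.g. a static method on a shared generic type;
//                                       the token is the exact MethodTable.
//   RequiresInstMethodDescArg()       - e.g. a shared generic method M<T>;
//                                       the token is the exact instantiated MethodDesc.
//   AcquiresInstMethodTableFromThis() - e.g. an instance method on a shared generic
//                                       reference type; the token is the exact
//                                       MethodTable of 'this'.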

} // namespace Generics