// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
** Header: LoaderAllocator.hpp
**
** Purpose: Implements a collection of loader heaps
**
===========================================================*/

#ifndef __LoaderAllocator_h__
#define __LoaderAllocator_h__

class FuncPtrStubs;
#include "qcall.h"
#include "ilstubcache.h"

#define VPTRU_LoaderAllocator 0x3200

enum LoaderAllocatorType
{
    LAT_Invalid,
    LAT_Global,
    LAT_Assembly
};

class CLRPrivBinderAssemblyLoadContext;

// Iterator over the DomainAssemblies in the same ALC
class DomainAssemblyIterator
{
    DomainAssembly* pCurrentAssembly;
    DomainAssembly* pNextAssembly;

public:
    DomainAssemblyIterator(DomainAssembly* pFirstAssembly);

    bool end() const
    {
        return pCurrentAssembly == NULL;
    }

    operator DomainAssembly*() const
    {
        return pCurrentAssembly;
    }

    DomainAssembly* operator ->() const
    {
        return pCurrentAssembly;
    }

    void operator++();

    void operator++(int dummy)
    {
        this->operator++();
    }
};
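
// Illustrative sketch of how a DomainAssemblyIterator might be consumed; the
// variable names below are hypothetical and not part of this header:
//
//     DomainAssemblyIterator it = pId->GetDomainAssemblyIterator();
//     while (!it.end())
//     {
//         DomainAssembly* pDomainAssembly = it;   // implicit conversion operator
//         // ... inspect pDomainAssembly ...
//         it++;
//     }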

class LoaderAllocatorID
{

protected:
    LoaderAllocatorType m_type;
    union
    {
        DomainAssembly* m_pDomainAssembly;
        void* m_pValue;
    };

    VOID * GetValue();

public:
    LoaderAllocatorID(LoaderAllocatorType laType=LAT_Invalid, VOID* value = 0)
    {
        m_type = laType;
        m_pValue = value;
    };
    VOID Init();
    LoaderAllocatorType GetType();
    VOID AddDomainAssembly(DomainAssembly* pDomainAssembly);
    DomainAssemblyIterator GetDomainAssemblyIterator();
    BOOL Equals(LoaderAllocatorID* pId);
    COUNT_T Hash();
};

// Segmented stack to store freed handle indices
class SegmentedHandleIndexStack
{
    // Segment of the stack
    struct Segment
    {
        static const int Size = 64;

        Segment* m_prev;
        DWORD m_data[Size];
    };

    // Segment containing the TOS
    Segment * m_TOSSegment = NULL;
    // One spare segment kept around to avoid a rapid delete / new cycle when pops and
    // pushes oscillate across the boundary of two segments.
    Segment * m_freeSegment = NULL;
    // Index of the top of stack in the TOS segment
    int m_TOSIndex = Segment::Size;

public:

    // Push the value to the stack. If the push cannot be done due to OOM, returns false.
    inline bool Push(DWORD value);

    // Pop the value from the stack.
    inline DWORD Pop();

    // Check if the stack is empty.
    inline bool IsEmpty();
};
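
// Illustrative sketch of the intended push / pop discipline (the stack object and
// the index values below are hypothetical, not part of this header):
//
//     SegmentedHandleIndexStack freeIndexes;
//     if (!freeIndexes.Push(dwFreedHandleIndex))
//     {
//         // OOM: the freed index could not be recorded for reuse.
//     }
//     ...
//     if (!freeIndexes.IsEmpty())
//     {
//         DWORD dwRecycledIndex = freeIndexes.Pop();
//     }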

class StringLiteralMap;
class VirtualCallStubManager;
template <typename ELEMENT>
class ListLockEntryBase;
typedef ListLockEntryBase<void*> ListLockEntry;
class UMEntryThunkCache;

#ifdef FEATURE_COMINTEROP
class ComCallWrapperCache;
#endif // FEATURE_COMINTEROP

class LoaderAllocator
{
    VPTR_BASE_VTABLE_CLASS(LoaderAllocator)
    VPTR_UNIQUE(VPTRU_LoaderAllocator)
protected:

    //****************************************************************************************
    // #LoaderAllocator Heaps
    // Heaps for allocating data that persists for the life of the AppDomain.
    // Objects that are allocated frequently should be allocated into the HighFreq heap for
    // better page management.
    BYTE * m_InitialReservedMemForLoaderHeaps;
    BYTE m_LowFreqHeapInstance[sizeof(LoaderHeap)];
    BYTE m_HighFreqHeapInstance[sizeof(LoaderHeap)];
    BYTE m_StubHeapInstance[sizeof(LoaderHeap)];
    BYTE m_PrecodeHeapInstance[sizeof(CodeFragmentHeap)];
    PTR_LoaderHeap m_pLowFrequencyHeap;
    PTR_LoaderHeap m_pHighFrequencyHeap;
    PTR_LoaderHeap m_pStubHeap; // stubs for PInvoke, remoting, etc.
    PTR_CodeFragmentHeap m_pPrecodeHeap;
    PTR_LoaderHeap m_pExecutableHeap;
#ifdef FEATURE_READYTORUN
    PTR_CodeFragmentHeap m_pDynamicHelpersHeap;
#endif
    //****************************************************************************************
    OBJECTHANDLE m_hLoaderAllocatorObjectHandle;
    FuncPtrStubs * m_pFuncPtrStubs; // for GetMultiCallableAddrOfCode()
    // The LoaderAllocator-specific string literal map.
    StringLiteralMap *m_pStringLiteralMap;
    CrstExplicitInit m_crstLoaderAllocator;
    bool m_fGCPressure;
    bool m_fUnloaded;
    bool m_fTerminated;
    bool m_fMarked;
    int m_nGCCount;
    bool m_IsCollectible;

    // Pre-allocated blocks of heap for collectible assemblies. Each will be set to NULL as soon
    // as it is used. See code in GetVSDHeapInitialBlock and GetCodeHeapInitialBlock.
    BYTE * m_pVSDHeapInitialAlloc;
    BYTE * m_pCodeHeapInitialAlloc;

    // U->M thunks that are not associated with a delegate.
    // The cache is keyed by MethodDesc pointers.
    UMEntryThunkCache * m_pUMEntryThunkCache;

    // IL stub cache with a fabricated MethodTable parented by a random module in this LoaderAllocator.
    ILStubCache m_ILStubCache;

public:
    BYTE *GetVSDHeapInitialBlock(DWORD *pSize);
    BYTE *GetCodeHeapInitialBlock(const BYTE * loAddr, const BYTE * hiAddr, DWORD minimumSize, DWORD *pSize);

    BaseDomain *m_pDomain;

    // ExecutionManager caches
    void * m_pLastUsedCodeHeap;
    void * m_pLastUsedDynamicCodeHeap;
    void * m_pJumpStubCache;

    // LoaderAllocator GC Structures
    PTR_LoaderAllocator m_pLoaderAllocatorDestroyNext; // Used in LoaderAllocator GC process (during sweeping)
protected:
    void ClearMark();
    void Mark();
    bool Marked();

#ifdef FAT_DISPATCH_TOKENS
    struct DispatchTokenFatSHashTraits : public DefaultSHashTraits<DispatchTokenFat*>
    {
        typedef DispatchTokenFat* key_t;

        static key_t GetKey(element_t e)
            { return e; }

        static BOOL Equals(key_t k1, key_t k2)
            { return *k1 == *k2; }

        static count_t Hash(key_t k)
            { return (count_t)(size_t)*k; }
    };

    typedef SHash<DispatchTokenFatSHashTraits> FatTokenSet;
    SimpleRWLock *m_pFatTokenSetLock;
    FatTokenSet *m_pFatTokenSet;
#endif

#ifndef CROSSGEN_COMPILE
    VirtualCallStubManager *m_pVirtualCallStubManager;
#endif

private:
    typedef SHash<PtrSetSHashTraits<LoaderAllocator * > > LoaderAllocatorSet;

    LoaderAllocatorSet m_LoaderAllocatorReferences;
    Volatile<UINT32> m_cReferences;
    // This will be set by code:LoaderAllocator::Destroy (from the managed scout finalizer) and signals that
    // the assembly was collected.
    DomainAssembly * m_pFirstDomainAssemblyFromSameALCToDelete;

    BOOL CheckAddReference_Unlocked(LoaderAllocator *pOtherLA);

    static UINT64 cLoaderAllocatorsCreated;
    UINT64 m_nLoaderAllocator;

    struct FailedTypeInitCleanupListItem
    {
        SLink m_Link;
        ListLockEntry *m_pListLockEntry;
        explicit FailedTypeInitCleanupListItem(ListLockEntry *pListLockEntry)
            :
            m_pListLockEntry(pListLockEntry)
        {
        }
    };

    SList<FailedTypeInitCleanupListItem> m_failedTypeInitCleanupList;

    SegmentedHandleIndexStack m_freeHandleIndexesStack;
#ifdef FEATURE_COMINTEROP
    // The wrapper cache for this loader allocator - it has its own CCacheLineAllocator on a per-loader-allocator
    // basis so that the loader allocator can go away and eventually release the memory when all refs are gone.

    VolatilePtr<ComCallWrapperCache> m_pComCallWrapperCache;
    // Used for synchronizing creation of the m_pComCallWrapperCache.
    CrstExplicitInit m_ComCallWrapperCrst;
    // Hash table that maps a MethodTable to COM Interop compatibility data.
    PtrHashMap m_interopDataHash;
    // Used for synchronizing access to the m_interopDataHash.
    CrstExplicitInit m_InteropDataCrst;
#endif

#ifndef DACCESS_COMPILE

public:
    // CleanupFailedTypeInit is called from AppDomain.
    // This method accesses loader allocator state in a thread-unsafe manner.
    // It expects to be called only from Terminate.
    void CleanupFailedTypeInit();
#endif //!DACCESS_COMPILE

    // Collect unreferenced assemblies, remove them from the assembly list and return their loader allocator
    // list.
    static LoaderAllocator * GCLoaderAllocators_RemoveAssemblies(AppDomain * pAppDomain);

public:

    //
    // The scheme for ensuring that LoaderAllocators are destructed correctly is substantially
    // complicated by the requirement that LoaderAllocators eligible for destruction must be
    // destroyed as a group, because there may be ordering dependencies between their destructions.
    // Thus, while there must be a complete web of references keeping the LoaderAllocator alive in
    // managed memory, we must also have an analogous web in native memory to manage the specific
    // ordering requirements.
    //
    // Thus we have an extra garbage collector here to manage the native web of LoaderAllocator references.
    // Also, we have a reference count scheme so that LCG methods keep their associated LoaderAllocator
    // alive. LCG methods cannot be referenced by LoaderAllocators, so they do not need to participate
    // in the garbage collection scheme except by using AddRef/Release to adjust the root set of this
    // garbage collector.
    //

    //#AssemblyPhases
    // The phases of an unloadable assembly are:
    //
    // 1. Managed LoaderAllocator is alive.
    //    - The assembly is visible to the managed world, the managed scout is alive and has not been finalized yet.
    //      Note that the fact that the managed scout is in the finalizer queue is not important, as it can
    //      (and in certain cases has to) resurrect itself.
    //    Detection:
    //      code:IsAlive ... TRUE
    //      code:IsManagedScoutAlive ... TRUE
    //      code:DomainAssembly::GetExposedAssemblyObject ... non-NULL (may need to allocate GC object)
    //
    //      code:AddReferenceIfAlive ... TRUE (+ adds reference)
    //
    // 2. Managed scout is alive, managed LoaderAllocator is collected.
    //    - All managed objects related to this assembly (types, their instances, Assembly/AssemblyBuilder)
    //      are dead and/or about to disappear and cannot be recreated anymore. We are just waiting for the
    //      managed scout to run its finalizer.
    //    Detection:
    //      code:IsAlive ... TRUE
    //      code:IsManagedScoutAlive ... TRUE
    //      code:DomainAssembly::GetExposedAssemblyObject ... NULL (change from phase #1)
    //
    //      code:AddReferenceIfAlive ... TRUE (+ adds reference)
    //
    // 3. Native LoaderAllocator is alive, managed scout is collected.
    //    - The native LoaderAllocator can be kept alive via a native reference with a code:AddRef call, e.g.:
    //        * Reference from an LCG method,
    //        * Reference received from the assembly iterator code:AppDomain::AssemblyIterator::Next and/or
    //          held by code:CollectibleAssemblyHolder.
    //    - Other LoaderAllocators can have this LoaderAllocator in their reference lists
    //      (code:m_LoaderAllocatorReferences), but without a call to code:AddRef.
    //    - The LoaderAllocator cannot ever go back to phase #1 or #2, but it can skip this phase if there are
    //      no LCG method references keeping it alive at the time of managed scout finalization.
    //    Detection:
    //      code:IsAlive ... TRUE
    //      code:IsManagedScoutAlive ... FALSE (change from phase #2)
    //      code:DomainAssembly::GetExposedAssemblyObject ... NULL
    //
    //      code:AddReferenceIfAlive ... TRUE (+ adds reference)
    //
    // 4. LoaderAllocator is dead.
    //    - The managed scout was collected. No one holds a native reference with code:AddRef to this
    //      LoaderAllocator.
    //    - Other LoaderAllocators can have this LoaderAllocator in their reference lists
    //      (code:m_LoaderAllocatorReferences), but without a call to code:AddRef.
    //    - The LoaderAllocator cannot ever become alive again (i.e. go back to phase #3, #2 or #1).
    //    Detection:
    //      code:IsAlive ... FALSE (change from phase #3, #2 and #1)
    //
    //      code:AddReferenceIfAlive ... FALSE (change from phase #3, #2 and #1)
    //
    void AddReference();
    // Adds a reference if the native object is alive - see code:#AssemblyPhases.
    // Returns TRUE if the reference was added.
    BOOL AddReferenceIfAlive();
    BOOL Release();
    // Checks if the native object is alive - see code:#AssemblyPhases.
    BOOL IsAlive() { LIMITED_METHOD_DAC_CONTRACT; return (m_cReferences != (UINT32)0); }
    // Checks if the managed scout is alive - see code:#AssemblyPhases.
    BOOL IsManagedScoutAlive()
    {
        return (m_pFirstDomainAssemblyFromSameALCToDelete == NULL);
    }

    // Collect unreferenced assemblies and delete all their remaining resources.
    static void GCLoaderAllocators(LoaderAllocator* firstLoaderAllocator);

    UINT64 GetCreationNumber() { LIMITED_METHOD_DAC_CONTRACT; return m_nLoaderAllocator; }

    // Ensure this LoaderAllocator has a reference to another LoaderAllocator.
    BOOL EnsureReference(LoaderAllocator *pOtherLA);

    // Ensure this LoaderAllocator has a reference to every LoaderAllocator of the types
    // in an instantiation.
    BOOL EnsureInstantiation(Module *pDefiningModule, Instantiation inst);

    // Given typeId and slotNumber, GetDispatchToken will return a DispatchToken
    // representing <typeId, slotNumber>. If the typeId is big enough, this
    // method will automatically allocate a DispatchTokenFat and encapsulate it
    // in the return value.
    DispatchToken GetDispatchToken(UINT32 typeId, UINT32 slotNumber);

    // Same as GetDispatchToken, but returns an invalid DispatchToken when the
    // value doesn't exist or a transient exception (OOM, stack overflow) is
    // encountered. To check whether the token is valid, use DispatchToken::IsValid.
    DispatchToken TryLookupDispatchToken(UINT32 typeId, UINT32 slotNumber);
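
    // Illustrative sketch of checking the result of TryLookupDispatchToken, using
    // DispatchToken::IsValid as suggested by the comment above (the variable names
    // are hypothetical, not part of this header):
    //
    //     DispatchToken token = pLoaderAllocator->TryLookupDispatchToken(typeId, slotNumber);
    //     if (!token.IsValid())
    //     {
    //         // Not present (or a transient failure) - fall back to GetDispatchToken,
    //         // which may allocate a DispatchTokenFat.
    //         token = pLoaderAllocator->GetDispatchToken(typeId, slotNumber);
    //     }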

    virtual LoaderAllocatorID* Id() = 0;
    BOOL IsCollectible() { WRAPPER_NO_CONTRACT; return m_IsCollectible; }

#ifdef DACCESS_COMPILE
    void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
#endif

    PTR_LoaderHeap GetLowFrequencyHeap()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pLowFrequencyHeap;
    }

    PTR_LoaderHeap GetHighFrequencyHeap()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pHighFrequencyHeap;
    }

    PTR_LoaderHeap GetStubHeap()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pStubHeap;
    }

    PTR_CodeFragmentHeap GetPrecodeHeap()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pPrecodeHeap;
    }

    // The executable heap is intended to only be used by the global loader allocator.
    // It refers to executable memory that is not associated with a rangelist.
    PTR_LoaderHeap GetExecutableHeap()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pExecutableHeap;
    }

    PTR_CodeFragmentHeap GetDynamicHelpersHeap();

    FuncPtrStubs * GetFuncPtrStubs();

    FuncPtrStubs * GetFuncPtrStubsNoCreate()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pFuncPtrStubs;
    }

    OBJECTHANDLE GetLoaderAllocatorObjectHandle()
    {
        LIMITED_METHOD_CONTRACT;
        return m_hLoaderAllocatorObjectHandle;
    }

    LOADERALLOCATORREF GetExposedObject();

#ifndef DACCESS_COMPILE
    LOADERHANDLE AllocateHandle(OBJECTREF value);

    void SetHandleValue(LOADERHANDLE handle, OBJECTREF value);
    OBJECTREF CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF value, OBJECTREF compare);
    void FreeHandle(LOADERHANDLE handle);
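
    // Illustrative sketch of the handle lifecycle exposed above (the variable names
    // below are hypothetical, not part of this header):
    //
    //     LOADERHANDLE hObject = pLoaderAllocator->AllocateHandle(objRef);
    //     ...
    //     OBJECTREF current = pLoaderAllocator->GetHandleValue(hObject);
    //     pLoaderAllocator->SetHandleValue(hObject, newObjRef);
    //     ...
    //     pLoaderAllocator->FreeHandle(hObject);   // release the handle slot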

    // The default implementation is a no-op. Only collectible loader allocators implement this method.
    virtual void RegisterHandleForCleanup(OBJECTHANDLE /* objHandle */) { }
    virtual void CleanupHandles() { }

    void RegisterFailedTypeInitForCleanup(ListLockEntry *pListLockEntry);
#endif // !defined(DACCESS_COMPILE)


    // This function is only safe to call if the handle is known to be a handle in a collectible
    // LoaderAllocator, the handle is allocated, and the LoaderAllocator has not been collected.
    FORCEINLINE OBJECTREF GetHandleValueFastCannotFailType2(LOADERHANDLE handle);

    // These functions are designed for maximum-performance access to handle values.
    // GetHandleValueFast handles the scenario where a loader allocator pointer does not
    // need to be acquired to do the handle lookup, and GetHandleValueFastPhase2 handles
    // the scenario where the LoaderAllocator pointer is required.
    // Do not use these functions directly - use the GET_LOADERHANDLE_VALUE_FAST macro instead.
    FORCEINLINE static BOOL GetHandleValueFast(LOADERHANDLE handle, OBJECTREF *pValue);
    FORCEINLINE BOOL GetHandleValueFastPhase2(LOADERHANDLE handle, OBJECTREF *pValue);

#define GET_LOADERHANDLE_VALUE_FAST(pLoaderAllocator, handle, pRetVal)          \
    do {                                                                        \
        LOADERHANDLE __handle__ = handle;                                       \
        if (!LoaderAllocator::GetHandleValueFast(__handle__, pRetVal) &&        \
            !pLoaderAllocator->GetHandleValueFastPhase2(__handle__, pRetVal))   \
        {                                                                       \
            *(pRetVal) = NULL;                                                  \
        }                                                                       \
    } while (0)
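
    // Illustrative use of the macro above (the variables are hypothetical, not part
    // of this header):
    //
    //     OBJECTREF refValue = NULL;
    //     GET_LOADERHANDLE_VALUE_FAST(pLoaderAllocator, handle, &refValue);
    //     // refValue now holds the object stored in the handle, or NULL if neither
    //     // fast-path lookup could resolve it.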

    OBJECTREF GetHandleValue(LOADERHANDLE handle);

    LoaderAllocator();
    virtual ~LoaderAllocator();
    BaseDomain *GetDomain() { LIMITED_METHOD_CONTRACT; return m_pDomain; }
    virtual BOOL CanUnload() = 0;
    void Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory = NULL);
    void Terminate();
    virtual void ReleaseManagedAssemblyLoadContext() {}

    SIZE_T EstimateSize();

    void SetupManagedTracking(LOADERALLOCATORREF *pLoaderAllocatorKeepAlive);
    void ActivateManagedTracking();

    // Unloaded in this context means that there is no managed code running against this loader allocator.
    // This flag is used by the debugger to filter out methods in modules that are being destructed.
    bool IsUnloaded() { LIMITED_METHOD_CONTRACT; return m_fUnloaded; }
    void SetIsUnloaded() { LIMITED_METHOD_CONTRACT; m_fUnloaded = true; }

    void SetGCRefPoint(int gccounter)
    {
        LIMITED_METHOD_CONTRACT;
        m_nGCCount = gccounter;
    }
    int GetGCRefPoint()
    {
        LIMITED_METHOD_CONTRACT;
        return m_nGCCount;
    }

    static BOOL QCALLTYPE Destroy(QCall::LoaderAllocatorHandle pLoaderAllocator);

    //****************************************************************************************
    // Methods to retrieve a pointer to the COM+ string STRINGREF for a string constant.
    // If the string is not currently in the hash table, it will be added, and if the
    // copy-string flag is set, the string will be copied before it is inserted.
    STRINGREF *GetStringObjRefPtrFromUnicodeString(EEStringData *pStringData);
    void LazyInitStringLiteralMap();
    STRINGREF *IsStringInterned(STRINGREF *pString);
    STRINGREF *GetOrInternString(STRINGREF *pString);
    void CleanupStringLiteralMap();
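
    // Illustrative sketch of interning through this LoaderAllocator's string literal map.
    // The variable names are hypothetical, and the assumption that IsStringInterned returns
    // NULL for a string that has not been interned is an inference from its signature:
    //
    //     STRINGREF *ppInterned = pLoaderAllocator->IsStringInterned(&strRef);
    //     if (ppInterned == NULL)
    //     {
    //         // Not interned yet; add it (or get the canonical entry) for this allocator.
    //         ppInterned = pLoaderAllocator->GetOrInternString(&strRef);
    //     }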

    void InitVirtualCallStubManager(BaseDomain *pDomain);
    void UninitVirtualCallStubManager();

#ifndef CROSSGEN_COMPILE
    inline VirtualCallStubManager *GetVirtualCallStubManager()
    {
        LIMITED_METHOD_CONTRACT;
        return m_pVirtualCallStubManager;
    }

    UMEntryThunkCache *GetUMEntryThunkCache();

#endif

    static LoaderAllocator* GetLoaderAllocator(ILStubCache* pILStubCache)
    {
        return CONTAINING_RECORD(pILStubCache, LoaderAllocator, m_ILStubCache);
    }

    ILStubCache* GetILStubCache()
    {
        LIMITED_METHOD_CONTRACT;
        return &m_ILStubCache;
    }

#ifdef FEATURE_COMINTEROP

    ComCallWrapperCache * GetComCallWrapperCache();

    void ResetComCallWrapperCache()
    {
        LIMITED_METHOD_CONTRACT;
        m_pComCallWrapperCache = NULL;
    }

#ifndef DACCESS_COMPILE

    // Look up interop data for a method table.
    // Returns the data pointer if present, NULL otherwise.
    InteropMethodTableData *LookupComInteropData(MethodTable *pMT);

    // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry.
    BOOL InsertComInteropData(MethodTable* pMT, InteropMethodTableData *pData);

#endif // DACCESS_COMPILE

#endif // FEATURE_COMINTEROP

}; // class LoaderAllocator

typedef VPTR(LoaderAllocator) PTR_LoaderAllocator;

class GlobalLoaderAllocator : public LoaderAllocator
{
    VPTR_VTABLE_CLASS(GlobalLoaderAllocator, LoaderAllocator)
    VPTR_UNIQUE(VPTRU_LoaderAllocator+1)

    BYTE m_ExecutableHeapInstance[sizeof(LoaderHeap)];

protected:
    LoaderAllocatorID m_Id;

public:
    void Init(BaseDomain *pDomain);
    GlobalLoaderAllocator() : m_Id(LAT_Global, (void*)1) { LIMITED_METHOD_CONTRACT; };
    virtual LoaderAllocatorID* Id();
    virtual BOOL CanUnload();
};

typedef VPTR(GlobalLoaderAllocator) PTR_GlobalLoaderAllocator;

class ShuffleThunkCache;

class AssemblyLoaderAllocator : public LoaderAllocator
{
    VPTR_VTABLE_CLASS(AssemblyLoaderAllocator, LoaderAllocator)
    VPTR_UNIQUE(VPTRU_LoaderAllocator+3)

protected:
    LoaderAllocatorID m_Id;
    ShuffleThunkCache* m_pShuffleThunkCache;
public:
    virtual LoaderAllocatorID* Id();
    AssemblyLoaderAllocator() : m_Id(LAT_Assembly), m_pShuffleThunkCache(NULL)
#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
        , m_binderToRelease(NULL)
#endif
    { LIMITED_METHOD_CONTRACT; }
    void Init(AppDomain *pAppDomain);
    virtual BOOL CanUnload();

    void SetCollectible();

    void AddDomainAssembly(DomainAssembly *pDomainAssembly)
    {
        WRAPPER_NO_CONTRACT;
        m_Id.AddDomainAssembly(pDomainAssembly);
    }

    ShuffleThunkCache* GetShuffleThunkCache()
    {
        return m_pShuffleThunkCache;
    }

#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
    virtual void RegisterHandleForCleanup(OBJECTHANDLE objHandle);
    virtual void CleanupHandles();
    CLRPrivBinderAssemblyLoadContext* GetBinder()
    {
        return m_binderToRelease;
    }
    virtual ~AssemblyLoaderAllocator();
    void RegisterBinder(CLRPrivBinderAssemblyLoadContext* binderToRelease);
    virtual void ReleaseManagedAssemblyLoadContext();
#endif // !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)

private:
    struct HandleCleanupListItem
    {
        SLink m_Link;
        OBJECTHANDLE m_handle;
        explicit HandleCleanupListItem(OBJECTHANDLE handle)
            :
            m_handle(handle)
        {
        }
    };

    SList<HandleCleanupListItem> m_handleCleanupList;
#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
    CLRPrivBinderAssemblyLoadContext* m_binderToRelease;
#endif
};

typedef VPTR(AssemblyLoaderAllocator) PTR_AssemblyLoaderAllocator;


#include "loaderallocator.inl"

#endif // __LoaderAllocator_h__