1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
//
// ThreadStatics.cpp
//
// Support for thread-local statics: the per-thread ThreadLocalBlock and the
// per-module ThreadLocalModules it tracks.
//
10 | |
11 | |
12 | #include "common.h" |
13 | |
14 | #include "threadstatics.h" |
15 | #include "field.h" |
16 | |
17 | |
18 | #ifndef DACCESS_COMPILE |
19 | |
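// Frees the ThreadLocalModule stored at index i of this thread's TLM table. The slot is
// cleared under the TLM table lock; if the thread is shutting down, the GC and non-GC
// static handles of collectible dynamic entries are released back to their LoaderAllocator
// before the entries and the TLM itself are deleted.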
20 | void ThreadLocalBlock::FreeTLM(SIZE_T i, BOOL isThreadShuttingdown) |
21 | { |
22 | CONTRACTL |
23 | { |
24 | NOTHROW; |
25 | GC_NOTRIGGER; |
26 | SO_TOLERANT; |
27 | MODE_ANY; |
28 | } |
29 | CONTRACTL_END; |
30 | |
31 | PTR_ThreadLocalModule pThreadLocalModule; |
32 | |
33 | { |
34 | SpinLock::Holder lock(&m_TLMTableLock); |
35 | |
36 | if ((m_pTLMTable == NULL) || (i >= m_TLMTableSize)) |
37 | { |
38 | return; |
39 | } |
40 | pThreadLocalModule = m_pTLMTable[i].pTLM; |
41 | m_pTLMTable[i].pTLM = NULL; |
42 | } |
43 | |
44 | if (pThreadLocalModule != NULL) |
45 | { |
46 | if (pThreadLocalModule->m_pDynamicClassTable != NULL) |
47 | { |
48 | for (DWORD k = 0; k < pThreadLocalModule->m_aDynamicEntries; ++k) |
49 | { |
50 | if (pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry != NULL) |
51 | { |
52 | if (isThreadShuttingdown && (pThreadLocalModule->m_pDynamicClassTable[k].m_dwFlags & ClassInitFlags::COLLECTIBLE_FLAG)) |
53 | { |
54 | ThreadLocalModule::CollectibleDynamicEntry *entry = (ThreadLocalModule::CollectibleDynamicEntry*)pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry; |
55 | PTR_LoaderAllocator pLoaderAllocator = entry->m_pLoaderAllocator; |
56 | |
57 | if (entry->m_hGCStatics != NULL) |
58 | { |
59 | pLoaderAllocator->FreeHandle(entry->m_hGCStatics); |
60 | } |
61 | if (entry->m_hNonGCStatics != NULL) |
62 | { |
63 | pLoaderAllocator->FreeHandle(entry->m_hNonGCStatics); |
64 | } |
65 | } |
66 | delete pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry; |
67 | pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry = NULL; |
68 | } |
69 | } |
70 | delete pThreadLocalModule->m_pDynamicClassTable; |
71 | pThreadLocalModule->m_pDynamicClassTable = NULL; |
72 | } |
73 | |
74 | delete pThreadLocalModule; |
75 | } |
76 | } |
77 | |
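// Frees every ThreadLocalModule in this thread's TLM table (treating the thread as
// shutting down, so collectible handles are released), then frees the table itself,
// the thread-static handle table, and any pinning handles that were created.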
78 | void ThreadLocalBlock::FreeTable() |
79 | { |
80 | CONTRACTL |
81 | { |
82 | NOTHROW; |
83 | GC_NOTRIGGER; |
84 | SO_INTOLERANT; |
85 | MODE_COOPERATIVE; |
86 | } |
87 | CONTRACTL_END; |
88 | // Free the TLM table |
89 | if (m_pTLMTable != NULL) |
90 | { |
91 | // Iterate over the table and free each TLM |
92 | for (SIZE_T i = 0; i < m_TLMTableSize; ++i) |
93 | { |
94 | if (m_pTLMTable[i].pTLM != NULL) |
95 | { |
                FreeTLM(i, TRUE /* isThreadShuttingdown */);
97 | } |
98 | } |
99 | |
100 | SpinLock::Holder lock(&m_TLMTableLock); |
101 | |
102 | // Free the table itself |
103 | delete m_pTLMTable; |
104 | m_pTLMTable = NULL; |
105 | } |
106 | |
107 | // Set table size to zero |
108 | m_TLMTableSize = 0; |
109 | |
110 | // Free the ThreadStaticHandleTable |
111 | if (m_pThreadStaticHandleTable != NULL) |
112 | { |
113 | delete m_pThreadStaticHandleTable; |
114 | m_pThreadStaticHandleTable = NULL; |
115 | } |
116 | |
117 | // Free any pinning handles we may have created |
118 | FreePinningHandles(); |
119 | } |
120 | |
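// Grows the TLM table, if necessary, so that it has a slot for the given ModuleIndex.
// The new size starts at max(16, current size) and doubles until it covers the index;
// existing entries are copied under the TLM table lock and the old table is freed.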
121 | void ThreadLocalBlock::EnsureModuleIndex(ModuleIndex index) |
122 | { |
123 | CONTRACTL { |
124 | THROWS; |
125 | GC_NOTRIGGER; |
126 | } CONTRACTL_END; |
127 | |
128 | if (m_TLMTableSize > index.m_dwIndex) |
129 | { |
130 | _ASSERTE(m_pTLMTable != NULL); |
131 | return; |
132 | } |
133 | |
134 | SIZE_T aModuleIndices = max(16, m_TLMTableSize); |
135 | while (aModuleIndices <= index.m_dwIndex) |
136 | { |
137 | aModuleIndices *= 2; |
138 | } |
139 | |
    // If this allocation fails we throw; nothing has been modified yet,
    // so the existing table remains usable
142 | PTR_TLMTableEntry pNewModuleSlots = (PTR_TLMTableEntry) (void*) new BYTE[sizeof(TLMTableEntry) * aModuleIndices]; |
143 | |
144 | // Zero out the new TLM table |
    memset(pNewModuleSlots, 0, sizeof(TLMTableEntry) * aModuleIndices);
146 | |
147 | PTR_TLMTableEntry pOldModuleSlots = m_pTLMTable; |
148 | |
149 | { |
150 | SpinLock::Holder lock(&m_TLMTableLock); |
151 | |
152 | if (m_pTLMTable != NULL) |
153 | { |
154 | memcpy(pNewModuleSlots, m_pTLMTable, sizeof(TLMTableEntry) * m_TLMTableSize); |
155 | } |
156 | else |
157 | { |
158 | _ASSERTE(m_TLMTableSize == 0); |
159 | } |
160 | |
161 | m_pTLMTable = pNewModuleSlots; |
162 | m_TLMTableSize = aModuleIndices; |
163 | } |
164 | |
165 | if (pOldModuleSlots != NULL) |
166 | delete pOldModuleSlots; |
167 | } |
168 | |
169 | #endif |
170 | |
171 | void ThreadLocalBlock::SetModuleSlot(ModuleIndex index, PTR_ThreadLocalModule pLocalModule) |
172 | { |
173 | CONTRACTL { |
174 | NOTHROW; |
175 | GC_NOTRIGGER; |
176 | } CONTRACTL_END; |
177 | |
178 | // This method will not grow the table. You need to grow |
179 | // the table explicitly before calling SetModuleSlot() |
180 | |
181 | _ASSERTE(index.m_dwIndex < m_TLMTableSize); |
182 | |
183 | m_pTLMTable[index.m_dwIndex].pTLM = pLocalModule; |
184 | } |
185 | |
186 | #ifdef DACCESS_COMPILE |
187 | |
188 | void |
189 | ThreadLocalModule::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) |
190 | { |
191 | SUPPORTS_DAC; |
192 | |
193 | // Enumerate the ThreadLocalModule itself. TLMs are allocated to be larger than |
194 | // sizeof(ThreadLocalModule) to make room for ClassInit flags and non-GC statics. |
195 | // "DAC_ENUM_DTHIS()" probably does not account for this, so we might not enumerate |
196 | // all of the ClassInit flags and non-GC statics. |
197 | DAC_ENUM_DTHIS(); |
198 | |
199 | if (m_pDynamicClassTable != NULL) |
200 | { |
201 | DacEnumMemoryRegion(dac_cast<TADDR>(m_pDynamicClassTable), |
202 | m_aDynamicEntries * sizeof(DynamicClassInfo)); |
203 | |
204 | for (SIZE_T i = 0; i < m_aDynamicEntries; i++) |
205 | { |
206 | PTR_DynamicEntry entry = dac_cast<PTR_DynamicEntry>(m_pDynamicClassTable[i].m_pDynamicEntry); |
207 | if (entry.IsValid()) |
208 | { |
209 | entry.EnumMem(); |
210 | } |
211 | } |
212 | } |
213 | } |
214 | |
215 | void |
216 | ThreadLocalBlock::EnumMemoryRegions(CLRDataEnumMemoryFlags flags) |
217 | { |
218 | SUPPORTS_DAC; |
219 | |
220 | // Enumerate the ThreadLocalBlock itself |
221 | DAC_ENUM_DTHIS(); |
222 | |
223 | if (m_pTLMTable.IsValid()) |
224 | { |
225 | DacEnumMemoryRegion(dac_cast<TADDR>(m_pTLMTable), |
226 | m_TLMTableSize * sizeof(TADDR)); |
227 | |
228 | for (SIZE_T i = 0; i < m_TLMTableSize; i++) |
229 | { |
230 | PTR_ThreadLocalModule domMod = m_pTLMTable[i].pTLM; |
231 | if (domMod.IsValid()) |
232 | { |
233 | domMod->EnumMemoryRegions(flags); |
234 | } |
235 | } |
236 | } |
237 | } |
238 | |
239 | #endif |
240 | |
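// Returns the class-initialization flags this TLM tracks for pMT: for classes with
// dynamic statics they come from the dynamic class table (0 if the table has no entry
// for the class yet), otherwise from the precomputed statics class data at iClassIndex.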
241 | DWORD ThreadLocalModule::GetClassFlags(MethodTable* pMT, DWORD iClassIndex) // iClassIndex defaults to (DWORD)-1 |
242 | { |
243 | CONTRACTL { |
244 | NOTHROW; |
245 | GC_NOTRIGGER; |
246 | SO_TOLERANT; |
247 | } CONTRACTL_END; |
248 | |
249 | if (pMT->IsDynamicStatics()) |
250 | { |
251 | DWORD dynamicClassID = pMT->GetModuleDynamicEntryID(); |
        if (m_aDynamicEntries <= dynamicClassID)
            return 0;
254 | return (m_pDynamicClassTable[dynamicClassID].m_dwFlags); |
255 | } |
256 | else |
257 | { |
258 | if (iClassIndex == (DWORD)-1) |
259 | iClassIndex = pMT->GetClassIndex(); |
260 | return GetPrecomputedStaticsClassData()[iClassIndex]; |
261 | } |
262 | } |
263 | |
264 | #ifndef DACCESS_COMPILE |
265 | |
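// ORs dwFlags into the class-initialization flags this TLM tracks for pMT, growing the
// dynamic class table first if the class uses dynamic statics.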
266 | void ThreadLocalModule::SetClassFlags(MethodTable* pMT, DWORD dwFlags) |
267 | { |
268 | CONTRACTL { |
269 | THROWS; |
270 | GC_NOTRIGGER; |
271 | } CONTRACTL_END; |
272 | |
273 | if (pMT->IsDynamicStatics()) |
274 | { |
275 | DWORD dwID = pMT->GetModuleDynamicEntryID(); |
276 | EnsureDynamicClassIndex(dwID); |
277 | m_pDynamicClassTable[dwID].m_dwFlags |= dwFlags; |
278 | } |
279 | else |
280 | { |
281 | GetPrecomputedStaticsClassData()[pMT->GetClassIndex()] |= dwFlags; |
282 | } |
283 | } |
284 | |
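// Records a pinning handle created for a boxed thread static so it can be destroyed
// when the thread dies (see FreePinningHandles).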
285 | void ThreadLocalBlock::AddPinningHandleToList(OBJECTHANDLE oh) |
286 | { |
287 | CONTRACTL |
288 | { |
289 | THROWS; |
290 | GC_NOTRIGGER; |
291 | SO_TOLERANT; |
292 | MODE_ANY; |
293 | } |
294 | CONTRACTL_END; |
295 | ObjectHandleList::NodeType* pNewNode = new ObjectHandleList::NodeType(oh); |
296 | m_PinningHandleList.LinkHead(pNewNode); |
297 | } |
298 | |
299 | void ThreadLocalBlock::FreePinningHandles() |
300 | { |
301 | CONTRACTL |
302 | { |
303 | NOTHROW; |
304 | GC_NOTRIGGER; |
305 | SO_TOLERANT; |
306 | MODE_ANY; |
307 | } |
308 | CONTRACTL_END; |
309 | // Destroy all pinning handles in the list, and free the nodes |
310 | ObjectHandleList::NodeType* pHandleNode; |
311 | while ((pHandleNode = m_PinningHandleList.UnlinkHead()) != NULL) |
312 | { |
313 | DestroyPinningHandle(pHandleNode->data); |
314 | delete pHandleNode; |
315 | } |
316 | } |
317 | |
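// Allocates the handles backing pModule's precomputed GC thread statics and stores the
// base handle in the ThreadLocalModule. If the module has no GC thread statics, no
// handles are allocated.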
318 | void ThreadLocalBlock::AllocateThreadStaticHandles(Module * pModule, PTR_ThreadLocalModule pThreadLocalModule) |
319 | { |
320 | CONTRACTL |
321 | { |
322 | THROWS; |
323 | GC_TRIGGERS; |
324 | } |
325 | CONTRACTL_END; |
326 | |
327 | _ASSERTE(pThreadLocalModule->GetPrecomputedGCStaticsBaseHandleAddress() != NULL); |
328 | _ASSERTE(pThreadLocalModule->GetPrecomputedGCStaticsBaseHandle() == NULL); |
329 | |
330 | if (pModule->GetNumGCThreadStaticHandles() > 0) |
331 | { |
332 | AllocateStaticFieldObjRefPtrs(pModule->GetNumGCThreadStaticHandles(), |
333 | pThreadLocalModule->GetPrecomputedGCStaticsBaseHandleAddress()); |
334 | |
335 | // We should throw if we fail to allocate and never hit this assert |
336 | _ASSERTE(pThreadLocalModule->GetPrecomputedGCStaticsBaseHandle() != NULL); |
337 | _ASSERTE(pThreadLocalModule->GetPrecomputedGCStaticsBasePointer() != NULL); |
338 | } |
339 | } |
340 | |
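// Allocates nRequested object-reference handles from this thread's static handle table,
// creating the table on first use. If ppLazyAllocate already holds a handle it is simply
// returned; otherwise the newly allocated handle is stored through ppLazyAllocate.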
341 | OBJECTHANDLE ThreadLocalBlock::AllocateStaticFieldObjRefPtrs(int nRequested, OBJECTHANDLE * ppLazyAllocate) |
342 | { |
343 | CONTRACTL |
344 | { |
345 | THROWS; |
346 | GC_TRIGGERS; |
347 | MODE_COOPERATIVE; |
348 | PRECONDITION((nRequested > 0)); |
349 | INJECT_FAULT(COMPlusThrowOM();); |
350 | } |
351 | CONTRACTL_END; |
352 | |
353 | if (ppLazyAllocate && *ppLazyAllocate) |
354 | { |
355 | // Allocation already happened |
356 | return *ppLazyAllocate; |
357 | } |
358 | |
    // Make sure the thread static handle table is initialized.
360 | if (!m_pThreadStaticHandleTable) |
361 | InitThreadStaticHandleTable(); |
362 | |
363 | // Allocate the handles. |
364 | OBJECTHANDLE result = m_pThreadStaticHandleTable->AllocateHandles(nRequested); |
365 | |
366 | if (ppLazyAllocate) |
367 | { |
368 | *ppLazyAllocate = result; |
369 | } |
370 | |
371 | return result; |
372 | } |
373 | |
374 | void ThreadLocalBlock::InitThreadStaticHandleTable() |
375 | { |
376 | CONTRACTL |
377 | { |
378 | THROWS; |
379 | GC_NOTRIGGER; |
380 | MODE_ANY; |
381 | PRECONDITION(m_pThreadStaticHandleTable==NULL); |
382 | INJECT_FAULT(COMPlusThrowOM();); |
383 | } |
384 | CONTRACTL_END; |
385 | |
386 | // If the allocation fails this will throw; callers need |
387 | // to account for this possibility |
388 | m_pThreadStaticHandleTable = new ThreadStaticHandleTable(GetAppDomain()); |
389 | } |
390 | |
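// Allocates the boxed instances for pMT's value-type thread statics and writes the
// references into this thread's GC statics area. Boxes for classes with
// FixedAddressVTStatics are pinned, and the pinning handles are recorded on this
// ThreadLocalBlock so they can be released at thread shutdown.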
391 | void ThreadLocalBlock::AllocateThreadStaticBoxes(MethodTable * pMT) |
392 | { |
393 | CONTRACTL |
394 | { |
395 | THROWS; |
396 | GC_TRIGGERS; |
397 | MODE_COOPERATIVE; |
398 | PRECONDITION(pMT->GetNumBoxedThreadStatics() > 0); |
399 | INJECT_FAULT(COMPlusThrowOM();); |
400 | } |
401 | CONTRACTL_END; |
402 | |
403 | FieldDesc *pField = pMT->HasGenericsStaticsInfo() ? |
404 | pMT->GetGenericsStaticFieldDescs() : (pMT->GetApproxFieldDescListRaw() + pMT->GetNumIntroducedInstanceFields()); |
405 | |
406 | // Move pField to point to the list of thread statics |
407 | pField += pMT->GetNumStaticFields() - pMT->GetNumThreadStaticFields(); |
408 | |
409 | FieldDesc *pFieldEnd = pField + pMT->GetNumThreadStaticFields(); |
410 | |
411 | while (pField < pFieldEnd) |
412 | { |
413 | _ASSERTE(pField->IsThreadStatic()); |
414 | |
415 | // We only care about thread statics which are value types |
416 | if (pField->IsByValue()) |
417 | { |
418 | TypeHandle th = pField->GetFieldTypeHandleThrowing(); |
419 | MethodTable* pFieldMT = th.GetMethodTable(); |
420 | |
421 | // AllocateStaticBox will pin this object if this class is FixedAddressVTStatics. |
422 | // We save this pinning handle in a list attached to the ThreadLocalBlock. When |
423 | // the thread dies, we release all the pinning handles in the list. |
424 | |
425 | OBJECTHANDLE handle; |
426 | OBJECTREF obj = MethodTable::AllocateStaticBox(pFieldMT, pMT->HasFixedAddressVTStatics(), &handle); |
427 | |
428 | PTR_BYTE pStaticBase = pMT->GetGCThreadStaticsBasePointer(); |
429 | _ASSERTE(pStaticBase != NULL); |
430 | |
431 | SetObjectReference( (OBJECTREF*)(pStaticBase + pField->GetOffset()), obj, GetAppDomain() ); |
432 | |
433 | // If we created a pinning handle, save it to the list |
434 | if (handle != NULL) |
435 | AddPinningHandleToList(handle); |
436 | } |
437 | |
438 | pField++; |
439 | } |
440 | } |
441 | |
442 | #endif |
443 | |
444 | #ifndef DACCESS_COMPILE |
445 | |
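// Grows the dynamic class table, if necessary, so that it has an entry for dynamic class
// ID dwID. The new size starts at max(16, current size) and doubles until it covers the
// ID; existing entries are copied and the old table is freed.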
446 | void ThreadLocalModule::EnsureDynamicClassIndex(DWORD dwID) |
447 | { |
448 | CONTRACTL |
449 | { |
450 | THROWS; |
451 | GC_NOTRIGGER; |
452 | MODE_ANY; |
453 | INJECT_FAULT(COMPlusThrowOM();); |
454 | } |
455 | CONTRACTL_END; |
456 | |
457 | if (dwID < m_aDynamicEntries) |
458 | { |
459 | _ASSERTE(m_pDynamicClassTable != NULL); |
460 | return; |
461 | } |
462 | |
463 | SIZE_T aDynamicEntries = max(16, m_aDynamicEntries); |
464 | while (aDynamicEntries <= dwID) |
465 | { |
466 | aDynamicEntries *= 2; |
467 | } |
468 | |
469 | DynamicClassInfo* pNewDynamicClassTable; |
470 | |
    // If this allocation fails we throw; nothing has been modified yet,
    // so the existing table remains usable
473 | pNewDynamicClassTable = (DynamicClassInfo*)(void*)new BYTE[sizeof(DynamicClassInfo) * aDynamicEntries]; |
474 | |
475 | // Zero out the dynamic class table |
476 | memset(pNewDynamicClassTable, 0, sizeof(DynamicClassInfo) * aDynamicEntries); |
477 | |
    // m_pDynamicClassTable may well always be non-NULL at this point, but check to be safe
479 | if (m_pDynamicClassTable != NULL) |
480 | { |
481 | memcpy(pNewDynamicClassTable, m_pDynamicClassTable, sizeof(DynamicClassInfo) * m_aDynamicEntries); |
482 | } |
483 | else |
484 | { |
485 | _ASSERTE(m_aDynamicEntries == 0); |
486 | } |
487 | |
488 | _ASSERTE(m_aDynamicEntries%2 == 0); |
489 | |
490 | DynamicClassInfo* pOldDynamicClassTable = m_pDynamicClassTable; |
491 | |
492 | m_pDynamicClassTable = pNewDynamicClassTable; |
493 | m_aDynamicEntries = aDynamicEntries; |
494 | |
495 | if (pOldDynamicClassTable != NULL) |
496 | delete pOldDynamicClassTable; |
497 | } |
498 | |
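// Allocates this thread's DynamicEntry for a class that uses dynamic statics. For
// non-collectible classes this is a NormalDynamicEntry with the non-GC statics blob
// allocated inline; for collectible classes it is a CollectibleDynamicEntry whose non-GC
// and GC statics live in LoaderAllocator handles so they can go away with the class.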
499 | void ThreadLocalModule::AllocateDynamicClass(MethodTable *pMT) |
500 | { |
501 | CONTRACTL |
502 | { |
503 | THROWS; |
504 | GC_TRIGGERS; |
505 | MODE_COOPERATIVE; |
506 | INJECT_FAULT(COMPlusThrowOM();); |
507 | } |
508 | CONTRACTL_END; |
509 | |
510 | _ASSERTE(!pMT->IsSharedByGenericInstantiations()); |
511 | _ASSERTE(pMT->IsDynamicStatics()); |
512 | |
513 | DWORD dwID = pMT->GetModuleDynamicEntryID(); |
514 | |
515 | EnsureDynamicClassIndex(dwID); |
516 | |
517 | _ASSERTE(m_aDynamicEntries > dwID); |
518 | |
519 | EEClass *pClass = pMT->GetClass(); |
520 | DWORD dwStaticBytes = pClass->GetNonGCThreadStaticFieldBytes(); |
521 | DWORD dwNumHandleStatics = pClass->GetNumHandleThreadStatics(); |
522 | |
523 | _ASSERTE(!IsClassAllocated(pMT)); |
524 | _ASSERTE(!IsClassInitialized(pMT)); |
525 | _ASSERTE(!IsClassInitError(pMT)); |
526 | |
527 | DynamicEntry *pDynamicStatics = m_pDynamicClassTable[dwID].m_pDynamicEntry; |
528 | |
    // We need this check because a class may have a cctor but no statics
530 | if (dwStaticBytes > 0 || dwNumHandleStatics > 0) |
531 | { |
532 | if (pDynamicStatics == NULL) |
533 | { |
534 | SIZE_T dynamicEntrySize; |
535 | if (pMT->Collectible()) |
536 | { |
537 | dynamicEntrySize = sizeof(CollectibleDynamicEntry); |
538 | } |
539 | else |
540 | { |
541 | dynamicEntrySize = DynamicEntry::GetOffsetOfDataBlob() + dwStaticBytes; |
542 | } |
543 | |
544 | // If this allocation fails, we will throw |
545 | pDynamicStatics = (DynamicEntry*)new BYTE[dynamicEntrySize]; |
546 | |
547 | #ifdef FEATURE_64BIT_ALIGNMENT |
            // The memory block has to be aligned at MAX_PRIMITIVE_FIELD_SIZE to guarantee alignment of statics
549 | static_assert_no_msg(sizeof(NormalDynamicEntry) % MAX_PRIMITIVE_FIELD_SIZE == 0); |
550 | _ASSERTE(IS_ALIGNED(pDynamicStatics, MAX_PRIMITIVE_FIELD_SIZE)); |
551 | #endif |
552 | |
553 | // Zero out the new DynamicEntry |
554 | memset((BYTE*)pDynamicStatics, 0, dynamicEntrySize); |
555 | |
556 | if (pMT->Collectible()) |
557 | { |
558 | ((CollectibleDynamicEntry*)pDynamicStatics)->m_pLoaderAllocator = pMT->GetLoaderAllocator(); |
559 | } |
560 | |
561 | // Save the DynamicEntry in the DynamicClassTable |
562 | m_pDynamicClassTable[dwID].m_pDynamicEntry = pDynamicStatics; |
563 | } |
564 | |
565 | if (pMT->Collectible() && (dwStaticBytes != 0)) |
566 | { |
567 | OBJECTREF nongcStaticsArray = NULL; |
568 | GCPROTECT_BEGIN(nongcStaticsArray); |
569 | #ifdef FEATURE_64BIT_ALIGNMENT |
570 | // Allocate memory with extra alignment only if it is really necessary |
571 | if (dwStaticBytes >= MAX_PRIMITIVE_FIELD_SIZE) |
572 | nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_I8, (dwStaticBytes + (sizeof(CLR_I8) - 1)) / (sizeof(CLR_I8))); |
573 | else |
574 | #endif |
575 | nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_U1, dwStaticBytes); |
576 | |
577 | ((CollectibleDynamicEntry *)pDynamicStatics)->m_hNonGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(nongcStaticsArray); |
578 | GCPROTECT_END(); |
579 | } |
580 | |
581 | if (dwNumHandleStatics > 0) |
582 | { |
583 | if (!pMT->Collectible()) |
584 | { |
585 | GetThread()->m_ThreadLocalBlock.AllocateStaticFieldObjRefPtrs(dwNumHandleStatics, |
586 | &((NormalDynamicEntry *)pDynamicStatics)->m_pGCStatics); |
587 | } |
588 | else |
589 | { |
590 | OBJECTREF gcStaticsArray = NULL; |
591 | GCPROTECT_BEGIN(gcStaticsArray); |
592 | gcStaticsArray = AllocateObjectArray(dwNumHandleStatics, g_pObjectClass); |
593 | ((CollectibleDynamicEntry *)pDynamicStatics)->m_hGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(gcStaticsArray); |
594 | GCPROTECT_END(); |
595 | } |
596 | } |
597 | } |
598 | } |
599 | |
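// Sets up this TLM's per-thread storage for pMT: allocates its dynamic entry if it uses
// dynamic statics, records the collectible flag, allocates boxes for its value-type
// thread statics, and finally marks the class as allocated.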
600 | void ThreadLocalModule::PopulateClass(MethodTable *pMT) |
601 | { |
602 | CONTRACTL |
603 | { |
604 | THROWS; |
605 | GC_TRIGGERS; |
606 | MODE_COOPERATIVE; |
607 | INJECT_FAULT(COMPlusThrowOM();); |
608 | } |
609 | CONTRACTL_END; |
610 | |
611 | _ASSERTE(this != NULL); |
612 | _ASSERTE(pMT != NULL); |
613 | _ASSERTE(!IsClassAllocated(pMT)); |
614 | |
615 | // If this is a dynamic class then we need to allocate |
616 | // an entry in our dynamic class table |
617 | if (pMT->IsDynamicStatics()) |
618 | AllocateDynamicClass(pMT); |
619 | |
620 | if (pMT->Collectible()) |
621 | { |
622 | SetClassFlags(pMT, ClassInitFlags::COLLECTIBLE_FLAG); |
623 | } |
624 | |
    // We need to allocate boxes for any value-type statics that are not
    // primitives or enums, because these statics may contain references
    // to objects on the GC heap
628 | if (pMT->GetNumBoxedThreadStatics() > 0) |
629 | { |
630 | PTR_ThreadLocalBlock pThreadLocalBlock = ThreadStatics::GetCurrentTLB(); |
631 | _ASSERTE(pThreadLocalBlock != NULL); |
632 | pThreadLocalBlock->AllocateThreadStaticBoxes(pMT); |
633 | } |
634 | |
635 | // Mark the class as allocated |
636 | SetClassAllocated(pMT); |
637 | } |
638 | |
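// Allocates a ThreadLocalModule for pModule, allocates its precomputed GC thread static
// handles, and publishes it in pThreadLocalBlock's table at the given ModuleIndex.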
639 | PTR_ThreadLocalModule ThreadStatics::AllocateAndInitTLM(ModuleIndex index, PTR_ThreadLocalBlock pThreadLocalBlock, Module * pModule) //static |
640 | { |
641 | CONTRACTL |
642 | { |
643 | THROWS; |
644 | GC_TRIGGERS; |
645 | MODE_COOPERATIVE; |
646 | INJECT_FAULT(COMPlusThrowOM();); |
647 | } |
648 | CONTRACTL_END; |
649 | |
    _ASSERTE(pThreadLocalBlock != NULL);
    _ASSERTE(pModule != NULL);

    pThreadLocalBlock->EnsureModuleIndex(index);
654 | |
655 | NewHolder<ThreadLocalModule> pThreadLocalModule = AllocateTLM(pModule); |
656 | |
657 | pThreadLocalBlock->AllocateThreadStaticHandles(pModule, pThreadLocalModule); |
658 | |
659 | pThreadLocalBlock->SetModuleSlot(index, pThreadLocalModule); |
660 | pThreadLocalModule.SuppressRelease(); |
661 | |
662 | return pThreadLocalModule; |
663 | } |
664 | |
665 | |
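// Returns the calling thread's ThreadLocalModule for the given module, creating it and
// adding it to the thread's ThreadLocalBlock if it does not exist yet.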
666 | PTR_ThreadLocalModule ThreadStatics::GetTLM(ModuleIndex index, Module * pModule) //static |
667 | { |
668 | CONTRACTL |
669 | { |
670 | THROWS; |
671 | GC_TRIGGERS; |
672 | MODE_COOPERATIVE; |
673 | } |
674 | CONTRACTL_END; |
675 | |
676 | // Get the TLM if it already exists |
677 | PTR_ThreadLocalModule pThreadLocalModule = ThreadStatics::GetTLMIfExists(index); |
678 | |
679 | // If the TLM does not exist, create it now |
680 | if (pThreadLocalModule == NULL) |
681 | { |
682 | // Get the current ThreadLocalBlock |
683 | PTR_ThreadLocalBlock pThreadLocalBlock = ThreadStatics::GetCurrentTLB(); |
684 | _ASSERTE(pThreadLocalBlock != NULL); |
685 | |
686 | // Allocate and initialize the TLM, and add it to the TLB's table |
687 | pThreadLocalModule = AllocateAndInitTLM(index, pThreadLocalBlock, pModule); |
688 | } |
689 | |
690 | return pThreadLocalModule; |
691 | } |
692 | |
693 | PTR_ThreadLocalModule ThreadStatics::GetTLM(MethodTable * pMT) //static |
694 | { |
695 | Module * pModule = pMT->GetModuleForStatics(); |
696 | return GetTLM(pModule->GetModuleIndex(), pModule); |
697 | } |
698 | |
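// Allocates and zero-initializes the memory for a module's ThreadLocalModule. The block
// is larger than sizeof(ThreadLocalModule) so that it can also hold the class-init flags
// and the precomputed non-GC statics for the module.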
699 | PTR_ThreadLocalModule ThreadStatics::AllocateTLM(Module * pModule) |
700 | { |
701 | CONTRACTL |
702 | { |
703 | THROWS; |
704 | GC_NOTRIGGER; |
705 | MODE_ANY; |
706 | INJECT_FAULT(COMPlusThrowOM();); |
707 | } |
708 | CONTRACTL_END; |
709 | |
710 | |
711 | SIZE_T size = pModule->GetThreadLocalModuleSize(); |
712 | |
713 | _ASSERTE(size >= ThreadLocalModule::OffsetOfDataBlob()); |
714 | |
715 | PTR_ThreadLocalModule pThreadLocalModule = (ThreadLocalModule*)new BYTE[size]; |
716 | |
    // We guarantee alignment for 64-bit regular thread statics on 32-bit platforms, even
    // without FEATURE_64BIT_ALIGNMENT, for performance reasons. The memory block therefore
    // has to be aligned at MAX_PRIMITIVE_FIELD_SIZE to guarantee alignment of the statics.
720 | _ASSERTE(IS_ALIGNED(pThreadLocalModule, MAX_PRIMITIVE_FIELD_SIZE)); |
721 | |
722 | // Zero out the part of memory where the TLM resides |
723 | memset(pThreadLocalModule, 0, size); |
724 | |
725 | return pThreadLocalModule; |
726 | } |
727 | |
728 | #endif |
729 | |