// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// precode.cpp
//

//
// Stub that runs before the actual native code
//


#include "common.h"

#ifdef FEATURE_PREJIT
#include "compile.h"
#endif

#ifdef FEATURE_PERFMAP
#include "perfmap.h"
#endif

//==========================================================================================
// class Precode
//==========================================================================================
BOOL Precode::IsValidType(PrecodeType t)
{
    LIMITED_METHOD_CONTRACT;
    SUPPORTS_DAC;

    switch (t) {
    case PRECODE_STUB:
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    case PRECODE_NDIRECT_IMPORT:
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
#endif // HAS_THISPTR_RETBUF_PRECODE
        return TRUE;
    default:
        return FALSE;
    }
}

SIZE_T Precode::SizeOf(PrecodeType t)
{
    LIMITED_METHOD_CONTRACT;
    SUPPORTS_DAC;

    switch (t)
    {
    case PRECODE_STUB:
        return sizeof(StubPrecode);
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    case PRECODE_NDIRECT_IMPORT:
        return sizeof(NDirectImportPrecode);
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        return sizeof(FixupPrecode);
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
        return sizeof(ThisPtrRetBufPrecode);
#endif // HAS_THISPTR_RETBUF_PRECODE

    default:
        UnexpectedPrecodeType("Precode::SizeOf", t);
        break;
    }
    return 0;
}

// Note: This is the immediate target of the precode. It does not follow the jump stub if there is one.
PCODE Precode::GetTarget()
{
    LIMITED_METHOD_CONTRACT;
    SUPPORTS_DAC;

    PCODE target = NULL;

    PrecodeType precodeType = GetType();
    switch (precodeType)
    {
    case PRECODE_STUB:
        target = AsStubPrecode()->GetTarget();
        break;
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        target = AsFixupPrecode()->GetTarget();
        break;
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
        target = AsThisPtrRetBufPrecode()->GetTarget();
        break;
#endif // HAS_THISPTR_RETBUF_PRECODE

    default:
        UnexpectedPrecodeType("Precode::GetTarget", precodeType);
        break;
    }
    return target;
}
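
// A minimal usage sketch (illustrative only, not part of this file; it
// assumes the caller already has a Precode*, e.g. obtained from the owning
// MethodDesc):
//
//   PCODE rawTarget = pPrecode->GetTarget();       // immediate target only
//   if (pPrecode->IsPointingToPrestub(rawTarget))
//   {
//       // No native code has been published through this precode yet.
//   }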

MethodDesc* Precode::GetMethodDesc(BOOL fSpeculative /*= FALSE*/)
{
    CONTRACTL {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
        SUPPORTS_DAC;
    } CONTRACTL_END;

    TADDR pMD = NULL;

    PrecodeType precodeType = GetType();
    switch (precodeType)
    {
    case PRECODE_STUB:
        pMD = AsStubPrecode()->GetMethodDesc();
        break;
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    case PRECODE_NDIRECT_IMPORT:
        pMD = AsNDirectImportPrecode()->GetMethodDesc();
        break;
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        pMD = AsFixupPrecode()->GetMethodDesc();
        break;
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
        pMD = AsThisPtrRetBufPrecode()->GetMethodDesc();
        break;
#endif // HAS_THISPTR_RETBUF_PRECODE

    default:
        break;
    }

    if (pMD == NULL)
    {
        if (fSpeculative)
            return NULL;
        else
            UnexpectedPrecodeType("Precode::GetMethodDesc", precodeType);
    }

    // GetMethodDesc() on the platform-specific precode types returns TADDR. It should return
    // PTR_MethodDesc instead; returning TADDR is a workaround for a cyclic dependency between headers.
    // Once the factoring of the headers is cleaned up, we should be able to get rid of it.

    // For speculative calls, pMD can be garbage that causes IBC logging to crash
    if (!fSpeculative)
        g_IBCLogger.LogMethodPrecodeAccess((PTR_MethodDesc)pMD);

    return (PTR_MethodDesc)pMD;
}
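
// A sketch of the speculative lookup pattern (illustrative only): callers
// that walk arbitrary addresses cannot assume they hold a real precode, so
// they pass fSpeculative = TRUE and treat NULL as "not a precode I
// recognize" instead of letting UnexpectedPrecodeType fire:
//
//   MethodDesc * pMD = pPrecode->GetMethodDesc(TRUE /* fSpeculative */);
//   if (pMD == NULL)
//   {
//       // Not a recognized precode; fall back to other lookup paths.
//   }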

BOOL Precode::IsCorrectMethodDesc(MethodDesc * pMD)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
        MODE_ANY;
    }
    CONTRACTL_END;
    MethodDesc * pMDfromPrecode = GetMethodDesc(TRUE);

    if (pMDfromPrecode == pMD)
        return TRUE;

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (pMDfromPrecode == NULL)
    {
        // We do not keep track of the MethodDesc in every kind of fixup precode
        if (GetType() == PRECODE_FIXUP)
            return TRUE;
    }
#endif // HAS_FIXUP_PRECODE_CHUNKS

    return FALSE;
}

BOOL Precode::IsPointingToPrestub(PCODE target)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
        MODE_ANY;
    }
    CONTRACTL_END;

    if (IsPointingTo(target, GetPreStubEntryPoint()))
        return TRUE;

#ifdef HAS_FIXUP_PRECODE
    if (IsPointingTo(target, GetEEFuncEntryPoint(PrecodeFixupThunk)))
        return TRUE;
#endif

#ifdef FEATURE_PREJIT
    Module *pZapModule = GetMethodDesc()->GetZapModule();
    if (pZapModule != NULL)
    {
        if (IsPointingTo(target, pZapModule->GetPrestubJumpStub()))
            return TRUE;

#ifdef HAS_FIXUP_PRECODE
        if (IsPointingTo(target, pZapModule->GetPrecodeFixupJumpStub()))
            return TRUE;
#endif
    }
#endif // FEATURE_PREJIT

    return FALSE;
}
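
// Summarizing the checks above: "pointing to the prestub" covers up to four
// distinct targets depending on the build: the prestub itself, the precode
// fixup thunk, and, for NGen'ed code, the per-module prestub and precode
// fixup jump stubs. A usage sketch (illustrative only):
//
//   if (pPrecode->IsPointingToPrestub(pPrecode->GetTarget()))
//   {
//       // The method still routes through the prestub.
//   }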

// If addr is a patched fixup precode, returns the address that it points to. Otherwise returns NULL.
PCODE Precode::TryToSkipFixupPrecode(PCODE addr)
{
    CONTRACTL {
        NOTHROW;
        GC_NOTRIGGER;
        SO_TOLERANT;
    } CONTRACTL_END;

    PCODE pTarget = NULL;

#if defined(FEATURE_PREJIT) && defined(HAS_FIXUP_PRECODE)
    // Early out for common cases
    if (!FixupPrecode::IsFixupPrecodeByASM(addr))
        return NULL;

    // This optimization only makes sense for NGen'ed code.
    Module * pModule = ExecutionManager::FindZapModule(addr);
    if (pModule == NULL)
        return NULL;

    // Verify that the address is in the precode section
    if (!pModule->IsZappedPrecode(addr))
        return NULL;

    pTarget = GetPrecodeFromEntryPoint(addr)->GetTarget();

    // Verify that the target is in the code section
    if (!pModule->IsZappedCode(pTarget))
        return NULL;

#if defined(_DEBUG)
    MethodDesc * pMD_orig = MethodTable::GetMethodDescForSlotAddress(addr);
    MethodDesc * pMD_direct = MethodTable::GetMethodDescForSlotAddress(pTarget);

    // Both the original and the direct entrypoint should map to the same MethodDesc.
    // Some FCalls are remapped to private methods (see System.String.CtorCharArrayStartLength).
    _ASSERTE((pMD_orig == pMD_direct) || pMD_orig->IsRuntimeSupplied());
#endif

#endif // defined(FEATURE_PREJIT) && defined(HAS_FIXUP_PRECODE)

    return pTarget;
}
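
// Worked example (illustrative): for an NGen'ed method whose recorded entry
// point is a fixup precode already patched to native code in the same image,
// this lets callers bypass one indirection:
//
//   addr --(patched fixup precode jmp)--> pTarget (native code, same image)
//
//   PCODE direct = Precode::TryToSkipFixupPrecode(addr);
//   PCODE codeToUse = (direct != NULL) ? direct : addr;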

Precode* Precode::GetPrecodeForTemporaryEntryPoint(TADDR temporaryEntryPoints, int index)
{
    WRAPPER_NO_CONTRACT;
    PrecodeType t = PTR_Precode(temporaryEntryPoints)->GetType();
#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (t == PRECODE_FIXUP)
    {
        return PTR_Precode(temporaryEntryPoints + index * sizeof(FixupPrecode));
    }
#endif
    SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
    return PTR_Precode(temporaryEntryPoints + index * oneSize);
}
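
// Note (restating the code above): fixup precodes in a chunk are densely
// packed, so the index-th temporary entry point sits at a fixed stride of
// sizeof(FixupPrecode) from the start of the block; all other precode types
// use the per-type SizeOfTemporaryEntryPoint(t) stride instead.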

SIZE_T Precode::SizeOfTemporaryEntryPoints(PrecodeType t, bool preallocateJumpStubs, int count)
{
    WRAPPER_NO_CONTRACT;
    SUPPORTS_DAC;

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (t == PRECODE_FIXUP)
    {
        SIZE_T size = count * sizeof(FixupPrecode) + sizeof(PTR_MethodDesc);

#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
        if (preallocateJumpStubs)
        {
            // For dynamic methods, space for jump stubs is allocated along with the precodes as part of the temporary entry
            // points block. The first jump stub begins immediately after the PTR_MethodDesc. Aside from a jump stub per
            // precode, an additional shared precode fixup jump stub is also allocated (see
            // GetDynamicMethodPrecodeFixupJumpStub()).
            size += ((SIZE_T)count + 1) * BACK_TO_BACK_JUMP_ALLOCATE_SIZE;
        }
#else // !FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
        _ASSERTE(!preallocateJumpStubs);
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS

        return size;
    }
    else
    {
        _ASSERTE(!preallocateJumpStubs);
    }
#endif
    SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
    return count * oneSize;
}
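
// Worked size computation (illustrative, following the layout described in
// the comment above): for a chunk of 3 LCG methods using fixup precodes with
// preallocated jump stubs, the temporary entry point block occupies
//
//   3 * sizeof(FixupPrecode)                    // one precode per method
//   + sizeof(PTR_MethodDesc)                    // shared MethodDesc pointer
//   + (3 + 1) * BACK_TO_BACK_JUMP_ALLOCATE_SIZE // a jump stub per precode,
//                                               // plus the shared precode
//                                               // fixup jump stub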

SIZE_T Precode::SizeOfTemporaryEntryPoints(TADDR temporaryEntryPoints, int count)
{
    WRAPPER_NO_CONTRACT;
    SUPPORTS_DAC;

    PrecodeType precodeType = PTR_Precode(temporaryEntryPoints)->GetType();
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
    bool preallocateJumpStubs =
        precodeType == PRECODE_FIXUP &&
        ((PTR_MethodDesc)((PTR_FixupPrecode)temporaryEntryPoints)->GetMethodDesc())->IsLCGMethod();
#else // !FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
    bool preallocateJumpStubs = false;
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
    return SizeOfTemporaryEntryPoints(precodeType, preallocateJumpStubs, count);
}

#ifndef DACCESS_COMPILE

Precode* Precode::Allocate(PrecodeType t, MethodDesc* pMD,
                           LoaderAllocator * pLoaderAllocator,
                           AllocMemTracker * pamTracker)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
        MODE_ANY;
    }
    CONTRACTL_END;

    SIZE_T size;

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (t == PRECODE_FIXUP)
    {
        size = sizeof(FixupPrecode) + sizeof(PTR_MethodDesc);
    }
    else
#endif
    {
        size = Precode::SizeOf(t);
    }

    Precode* pPrecode = (Precode*)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(size, AlignOf(t)));
    pPrecode->Init(t, pMD, pLoaderAllocator);

#ifndef CROSSGEN_COMPILE
    ClrFlushInstructionCache(pPrecode, size);
#endif

    return pPrecode;
}
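
// A minimal allocation sketch (illustrative only; locking and error handling
// are omitted, the choice of PRECODE_STUB is arbitrary, and
// AllocMemTracker::SuppressRelease() is declared outside this file):
//
//   AllocMemTracker amTracker;
//   Precode* pPrecode = Precode::Allocate(PRECODE_STUB, pMD,
//                                         pMD->GetLoaderAllocatorForCode(),
//                                         &amTracker);
//   // Once the precode is safely reachable, keep the allocation alive:
//   amTracker.SuppressRelease();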

void Precode::Init(PrecodeType t, MethodDesc* pMD, LoaderAllocator *pLoaderAllocator)
{
    LIMITED_METHOD_CONTRACT;

    switch (t) {
    case PRECODE_STUB:
        ((StubPrecode*)this)->Init(pMD, pLoaderAllocator);
        break;
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    case PRECODE_NDIRECT_IMPORT:
        ((NDirectImportPrecode*)this)->Init(pMD, pLoaderAllocator);
        break;
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        ((FixupPrecode*)this)->Init(pMD, pLoaderAllocator);
        break;
#endif // HAS_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
        ((ThisPtrRetBufPrecode*)this)->Init(pMD, pLoaderAllocator);
        break;
#endif // HAS_THISPTR_RETBUF_PRECODE
    default:
        UnexpectedPrecodeType("Precode::Init", t);
        break;
    }

    _ASSERTE(IsValidType(GetType()));
}

void Precode::ResetTargetInterlocked()
{
    WRAPPER_NO_CONTRACT;

    PrecodeType precodeType = GetType();
    switch (precodeType)
    {
    case PRECODE_STUB:
        AsStubPrecode()->ResetTargetInterlocked();
        break;

#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        AsFixupPrecode()->ResetTargetInterlocked();
        break;
#endif // HAS_FIXUP_PRECODE

    default:
        UnexpectedPrecodeType("Precode::ResetTargetInterlocked", precodeType);
        break;
    }

    // Although executable code is modified on x86/x64, a FlushInstructionCache() is not necessary on those platforms due to the
    // interlocked operation above (see ClrFlushInstructionCache())
}

BOOL Precode::SetTargetInterlocked(PCODE target, BOOL fOnlyRedirectFromPrestub)
{
    WRAPPER_NO_CONTRACT;
    _ASSERTE(!IsPointingToPrestub(target));

    PCODE expected = GetTarget();
    BOOL ret = FALSE;

    if (fOnlyRedirectFromPrestub && !IsPointingToPrestub(expected))
        return FALSE;

    g_IBCLogger.LogMethodPrecodeWriteAccess(GetMethodDesc());

    PrecodeType precodeType = GetType();
    switch (precodeType)
    {
    case PRECODE_STUB:
        ret = AsStubPrecode()->SetTargetInterlocked(target, expected);
        break;

#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        ret = AsFixupPrecode()->SetTargetInterlocked(target, expected);
        break;
#endif // HAS_FIXUP_PRECODE

#ifdef HAS_THISPTR_RETBUF_PRECODE
    case PRECODE_THISPTR_RETBUF:
        ret = AsThisPtrRetBufPrecode()->SetTargetInterlocked(target, expected);
        break;
#endif // HAS_THISPTR_RETBUF_PRECODE

    default:
        UnexpectedPrecodeType("Precode::SetTargetInterlocked", precodeType);
        break;
    }

    // Although executable code is modified on x86/x64, a FlushInstructionCache() is not necessary on those platforms due to the
    // interlocked operation above (see ClrFlushInstructionCache())

    return ret;
}
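
// Typical publish sequence (illustrative only): once the JIT produces native
// code for a method, its precode is redirected away from the prestub. The
// compare-exchange against the previously read target makes the race between
// two threads compiling the same method benign:
//
//   PCODE pNativeCode = ...;  // hypothetical result of jitting the method
//   if (!pPrecode->SetTargetInterlocked(pNativeCode, TRUE /* fOnlyRedirectFromPrestub */))
//   {
//       // Another thread published a target first; its code wins.
//   }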

void Precode::Reset()
{
    WRAPPER_NO_CONTRACT;

    MethodDesc* pMD = GetMethodDesc();
    Init(GetType(), pMD, pMD->GetLoaderAllocatorForCode());
    ClrFlushInstructionCache(this, SizeOf());
}

/* static */
TADDR Precode::AllocateTemporaryEntryPoints(MethodDescChunk * pChunk,
                                            LoaderAllocator * pLoaderAllocator,
                                            AllocMemTracker * pamTracker)
{
    WRAPPER_NO_CONTRACT;

    MethodDesc* pFirstMD = pChunk->GetFirstMethodDesc();

    int count = pChunk->GetCount();

    PrecodeType t = PRECODE_STUB;
    bool preallocateJumpStubs = false;

#ifdef HAS_FIXUP_PRECODE
    // Default to the faster fixup precode if possible
    if (!pFirstMD->RequiresMethodDescCallingConvention(count > 1))
    {
        t = PRECODE_FIXUP;

#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
        if (pFirstMD->IsLCGMethod())
        {
            preallocateJumpStubs = true;
        }
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
    }
    else
    {
        _ASSERTE(!pFirstMD->IsLCGMethod());
    }
#endif // HAS_FIXUP_PRECODE

    SIZE_T totalSize = SizeOfTemporaryEntryPoints(t, preallocateJumpStubs, count);

#ifdef HAS_COMPACT_ENTRYPOINTS
    // Note that these are just best guesses to save memory. If we guessed wrong,
    // we will allocate a new, exactly-typed precode in GetOrCreatePrecode.
    BOOL fForcedPrecode = pFirstMD->RequiresStableEntryPoint(count > 1);

#ifdef _TARGET_ARM_
    if (pFirstMD->RequiresMethodDescCallingConvention(count > 1)
        || count >= MethodDescChunk::GetCompactEntryPointMaxCount())
    {
        // We do not pass the method desc in a scratch register
        fForcedPrecode = TRUE;
    }
#endif // _TARGET_ARM_

    if (!fForcedPrecode && (totalSize > MethodDescChunk::SizeOfCompactEntryPoints(count)))
        return NULL;
#endif

    TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(totalSize, AlignOf(t)));

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (t == PRECODE_FIXUP)
    {
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
        PCODE precodeFixupJumpStub = NULL;
        if (preallocateJumpStubs)
        {
            // Emit the jump for the precode fixup jump stub now. This jump stub immediately follows the MethodDesc (see
            // GetDynamicMethodPrecodeFixupJumpStub()).
            precodeFixupJumpStub = temporaryEntryPoints + count * sizeof(FixupPrecode) + sizeof(PTR_MethodDesc);
#ifndef CROSSGEN_COMPILE
            emitBackToBackJump((LPBYTE)precodeFixupJumpStub, (LPVOID)GetEEFuncEntryPoint(PrecodeFixupThunk));
#endif // !CROSSGEN_COMPILE
        }
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS

        TADDR entryPoint = temporaryEntryPoints;
        MethodDesc * pMD = pChunk->GetFirstMethodDesc();
        for (int i = 0; i < count; i++)
        {
            ((FixupPrecode *)entryPoint)->Init(pMD, pLoaderAllocator, pMD->GetMethodDescIndex(), (count - 1) - i);

#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
            _ASSERTE(
                !preallocateJumpStubs ||
                !pMD->IsLCGMethod() ||
                ((FixupPrecode *)entryPoint)->GetDynamicMethodPrecodeFixupJumpStub() == precodeFixupJumpStub);
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS

            _ASSERTE((Precode *)entryPoint == GetPrecodeForTemporaryEntryPoint(temporaryEntryPoints, i));
            entryPoint += sizeof(FixupPrecode);

            pMD = (MethodDesc *)(dac_cast<TADDR>(pMD) + pMD->SizeOf());
        }

#ifdef FEATURE_PERFMAP
        PerfMap::LogStubs(__FUNCTION__, "PRECODE_FIXUP", (PCODE)temporaryEntryPoints, count * sizeof(FixupPrecode));
#endif
        ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, count * sizeof(FixupPrecode));

        return temporaryEntryPoints;
    }
#endif

    SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
    TADDR entryPoint = temporaryEntryPoints;
    MethodDesc * pMD = pChunk->GetFirstMethodDesc();
    for (int i = 0; i < count; i++)
    {
        ((Precode *)entryPoint)->Init(t, pMD, pLoaderAllocator);

        _ASSERTE((Precode *)entryPoint == GetPrecodeForTemporaryEntryPoint(temporaryEntryPoints, i));
        entryPoint += oneSize;

        pMD = (MethodDesc *)(dac_cast<TADDR>(pMD) + pMD->SizeOf());
    }

#ifdef FEATURE_PERFMAP
    PerfMap::LogStubs(__FUNCTION__, "PRECODE_STUB", (PCODE)temporaryEntryPoints, count * oneSize);
#endif

    ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, count * oneSize);

    return temporaryEntryPoints;
}
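
// Resulting layout of the fixup-precode block built above (jump stubs are
// present only when preallocateJumpStubs is true):
//
//   +-----------------+-----------------+     +-------------------+
//   | FixupPrecode[0] | FixupPrecode[1] | ... | FixupPrecode[n-1] |
//   +-----------------+-----------------+-----+-------------------+
//   | PTR_MethodDesc (base pointer; combined with each precode's  |
//   | stored index to recover the owning MethodDesc)              |
//   +-------------------------------------------------------------+
//   | shared precode fixup jump stub | per-precode jump stubs ... |
//   +-------------------------------------------------------------+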

#ifdef FEATURE_NATIVE_IMAGE_GENERATION

static DataImage::ItemKind GetPrecodeItemKind(DataImage * image, MethodDesc * pMD, BOOL fIsPrebound = FALSE)
{
    STANDARD_VM_CONTRACT;

    DataImage::ItemKind kind = DataImage::ITEM_METHOD_PRECODE_COLD_WRITEABLE;

    DWORD flags = image->GetMethodProfilingFlags(pMD);

    if (flags & (1 << WriteMethodPrecode))
    {
        kind = fIsPrebound ? DataImage::ITEM_METHOD_PRECODE_HOT : DataImage::ITEM_METHOD_PRECODE_HOT_WRITEABLE;
    }
    else if (flags & (1 << ReadMethodPrecode))
    {
        kind = DataImage::ITEM_METHOD_PRECODE_HOT;
    }
    else if (
        fIsPrebound ||
        // Generic method definitions get a precode to make GetMethodDescForSlot work.
        // This precode should never be written to.
        pMD->ContainsGenericVariables() ||
        // Interface MDs are run only for remoting and COM interop, which is pretty rare. Make them cold.
        pMD->IsInterface()
        )
    {
        kind = DataImage::ITEM_METHOD_PRECODE_COLD;
    }

    return kind;
}
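
// In summary (restating the logic above): profiling data drives placement. A
// precode observed being written goes into a hot writeable section (or plain
// hot when prebound), one only observed being read goes into the hot
// read-only section, prebound/generic-definition/interface precodes default
// to cold read-only, and everything else lands in cold writeable.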

void Precode::Save(DataImage *image)
{
    STANDARD_VM_CONTRACT;

    MethodDesc * pMD = GetMethodDesc();
    PrecodeType t = GetType();

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    _ASSERTE(GetType() != PRECODE_FIXUP);
#endif

#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
    // StubPrecode may have straddlers (relocations crossing pages) on x86 and x64. We need
    // to insert padding to eliminate them. To do that, we need to save these using a custom ZapNode, which can only
    // be implemented in dataimage.cpp or the zapper due to the factoring of the header files.
    BOOL fIsPrebound = IsPrebound(image);
    image->SavePrecode(this,
        pMD,
        t,
        GetPrecodeItemKind(image, pMD, fIsPrebound),
        fIsPrebound);
#else
    _ASSERTE(FitsIn<ULONG>(SizeOf(t)));
    image->StoreStructure((void*)GetStart(),
        static_cast<ULONG>(SizeOf(t)),
        GetPrecodeItemKind(image, pMD, IsPrebound(image)),
        AlignOf(t));
#endif // _TARGET_X86_ || _TARGET_AMD64_
}

void Precode::Fixup(DataImage *image, MethodDesc * pMD)
{
    STANDARD_VM_CONTRACT;

    PrecodeType precodeType = GetType();

#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
#if defined(HAS_FIXUP_PRECODE)
    if (precodeType == PRECODE_FIXUP)
    {
        AsFixupPrecode()->Fixup(image, pMD);
    }
#endif
#else // _TARGET_X86_ || _TARGET_AMD64_
    ZapNode * pCodeNode = NULL;

    if (IsPrebound(image))
    {
        pCodeNode = image->GetCodeAddress(pMD);
    }

    switch (precodeType) {
    case PRECODE_STUB:
        AsStubPrecode()->Fixup(image);
        break;
#ifdef HAS_NDIRECT_IMPORT_PRECODE
    case PRECODE_NDIRECT_IMPORT:
        AsNDirectImportPrecode()->Fixup(image);
        break;
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
    case PRECODE_FIXUP:
        AsFixupPrecode()->Fixup(image, pMD);
        break;
#endif // HAS_FIXUP_PRECODE
    default:
        UnexpectedPrecodeType("Precode::Fixup", precodeType);
        break;
    }
#endif // _TARGET_X86_ || _TARGET_AMD64_
}

BOOL Precode::IsPrebound(DataImage *image)
{
    WRAPPER_NO_CONTRACT;

    return FALSE;
}

void Precode::SaveChunk::Save(DataImage* image, MethodDesc * pMD)
{
    STANDARD_VM_CONTRACT;

    PrecodeType precodeType = pMD->GetPrecodeType();

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (precodeType == PRECODE_FIXUP)
    {
        m_rgPendingChunk.Append(pMD);
        return;
    }
#endif // HAS_FIXUP_PRECODE_CHUNKS

    SIZE_T size = Precode::SizeOf(precodeType);
    Precode* pPrecode = (Precode *)new (image->GetHeap()) BYTE[size];
    pPrecode->Init(precodeType, pMD, NULL);
    pPrecode->Save(image);

    // Alias the temporary entrypoint
    image->RegisterSurrogate(pMD, pPrecode);
}

#ifdef HAS_FIXUP_PRECODE_CHUNKS
static void SaveFixupPrecodeChunk(DataImage * image, MethodDesc ** rgMD, COUNT_T count, DataImage::ItemKind kind)
{
    STANDARD_VM_CONTRACT;

    ULONG size = sizeof(FixupPrecode) * count + sizeof(PTR_MethodDesc);
    FixupPrecode * pBase = (FixupPrecode *)new (image->GetHeap()) BYTE[size];

    ZapStoredStructure * pNode = image->StoreStructure(NULL, size, kind,
        Precode::AlignOf(PRECODE_FIXUP));

    for (COUNT_T i = 0; i < count; i++)
    {
        MethodDesc * pMD = rgMD[i];
        FixupPrecode * pPrecode = pBase + i;

        pPrecode->InitForSave((count - 1) - i);

        image->BindPointer(pPrecode, pNode, i * sizeof(FixupPrecode));

        // Alias the temporary entrypoint
        image->RegisterSurrogate(pMD, pPrecode);
    }

    image->CopyData(pNode, pBase, size);
}
#endif // HAS_FIXUP_PRECODE_CHUNKS

void Precode::SaveChunk::Flush(DataImage * image)
{
    STANDARD_VM_CONTRACT;

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (m_rgPendingChunk.GetCount() == 0)
        return;

    // Sort the MethodDescs by item kind for hot/cold splitting
    struct SortMethodDesc : CQuickSort< MethodDesc * >
    {
        DataImage * m_image;

        SortMethodDesc(DataImage *image, MethodDesc **pBase, SSIZE_T iCount)
            : CQuickSort< MethodDesc * >(pBase, iCount),
            m_image(image)
        {
        }

        int Compare(MethodDesc ** ppMD1, MethodDesc ** ppMD2)
        {
            MethodDesc * pMD1 = *ppMD1;
            MethodDesc * pMD2 = *ppMD2;

            // Compare item kind
            DataImage::ItemKind kind1 = GetPrecodeItemKind(m_image, pMD1);
            DataImage::ItemKind kind2 = GetPrecodeItemKind(m_image, pMD2);

            return kind1 - kind2;
        }
    };

    SortMethodDesc sort(image, &(m_rgPendingChunk[0]), m_rgPendingChunk.GetCount());
    sort.Sort();

    DataImage::ItemKind pendingKind = DataImage::ITEM_METHOD_PRECODE_COLD_WRITEABLE;
    COUNT_T pendingCount = 0;

    COUNT_T i;
    for (i = 0; i < m_rgPendingChunk.GetCount(); i++)
    {
        MethodDesc * pMD = m_rgPendingChunk[i];

        DataImage::ItemKind kind = GetPrecodeItemKind(image, pMD);
        if (kind != pendingKind)
        {
            if (pendingCount != 0)
                SaveFixupPrecodeChunk(image, &(m_rgPendingChunk[i-pendingCount]), pendingCount, pendingKind);

            pendingKind = kind;
            pendingCount = 0;
        }

        pendingCount++;
    }

    // Flush the remaining items
    SaveFixupPrecodeChunk(image, &(m_rgPendingChunk[i-pendingCount]), pendingCount, pendingKind);
#endif // HAS_FIXUP_PRECODE_CHUNKS
}
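
// Worked example of the batching above (illustrative; kinds abbreviated):
// after sorting, a pending-chunk array whose item kinds come out as
//
//   [HOT, HOT, HOT_WRITEABLE, COLD_WRITEABLE, COLD_WRITEABLE]
//
// is flushed as three calls to SaveFixupPrecodeChunk: two HOT precodes, then
// one HOT_WRITEABLE precode, then two COLD_WRITEABLE precodes (the call after
// the loop handles the final run).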

#endif // FEATURE_NATIVE_IMAGE_GENERATION

#endif // !DACCESS_COMPILE


#ifdef DACCESS_COMPILE
void Precode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
    SUPPORTS_DAC;
    PrecodeType t = GetType();

#ifdef HAS_FIXUP_PRECODE_CHUNKS
    if (t == PRECODE_FIXUP)
    {
        AsFixupPrecode()->EnumMemoryRegions(flags);
        return;
    }
#endif

    DacEnumMemoryRegion(GetStart(), SizeOf(t));
}
#endif
