1// Licensed to the .NET Foundation under one or more agreements.
2// The .NET Foundation licenses this file to you under the MIT license.
3// See the LICENSE file in the project root for more information.
4// ===========================================================================
5// File: CodeVersion.cpp
6//
7// ===========================================================================
8
9#include "common.h"
10#include "codeversion.h"
11
12#ifdef FEATURE_CODE_VERSIONING
13#include "threadsuspend.h"
14#include "methoditer.h"
15#include "../debug/ee/debugger.h"
16#include "../debug/ee/walker.h"
17#include "../debug/ee/controller.h"
18#endif // FEATURE_CODE_VERSIONING
19
20#ifndef FEATURE_CODE_VERSIONING
21
22//
23// When not using code versioning we've got a minimal implementation of
24// NativeCodeVersion that simply wraps a MethodDesc* with no additional
25// versioning information
26//
27
28NativeCodeVersion::NativeCodeVersion(const NativeCodeVersion & rhs) : m_pMethod(rhs.m_pMethod) {}
29NativeCodeVersion::NativeCodeVersion(PTR_MethodDesc pMethod) : m_pMethod(pMethod) {}
30BOOL NativeCodeVersion::IsNull() const { return m_pMethod == NULL; }
31PTR_MethodDesc NativeCodeVersion::GetMethodDesc() const { return m_pMethod; }
32PCODE NativeCodeVersion::GetNativeCode() const { return m_pMethod->GetNativeCode(); }
33NativeCodeVersionId NativeCodeVersion::GetVersionId() const { return 0; }
ReJITID NativeCodeVersion::GetILCodeVersionId() const { return 0; }
35ILCodeVersion NativeCodeVersion::GetILCodeVersion() const { return ILCodeVersion(m_pMethod); }
36#ifndef DACCESS_COMPILE
37BOOL NativeCodeVersion::SetNativeCodeInterlocked(PCODE pCode, PCODE pExpected) { return m_pMethod->SetNativeCodeInterlocked(pCode, pExpected); }
38#endif
39bool NativeCodeVersion::operator==(const NativeCodeVersion & rhs) const { return m_pMethod == rhs.m_pMethod; }
40bool NativeCodeVersion::operator!=(const NativeCodeVersion & rhs) const { return !operator==(rhs); }
41
42
43#else // FEATURE_CODE_VERSIONING
44
45
// This HRESULT is only used as a private implementation detail. If it escapes through public APIs
// it is a bug. Corerror.xml has a comment reserving this value for our use, but the value doesn't
// appear in the public headers.
49
50#define CORPROF_E_RUNTIME_SUSPEND_REQUIRED 0x80131381
51
52#ifndef DACCESS_COMPILE
53NativeCodeVersionNode::NativeCodeVersionNode(
54 NativeCodeVersionId id,
55 MethodDesc* pMethodDesc,
56 ReJITID parentId,
57 NativeCodeVersion::OptimizationTier optimizationTier)
58 :
59 m_pNativeCode(NULL),
60 m_pMethodDesc(pMethodDesc),
61 m_parentId(parentId),
62 m_pNextMethodDescSibling(NULL),
63 m_id(id),
64#ifdef FEATURE_TIERED_COMPILATION
65 m_optTier(optimizationTier),
66#endif
67 m_flags(0)
68{}
69#endif
70
71#ifdef DEBUG
72BOOL NativeCodeVersionNode::LockOwnedByCurrentThread() const
73{
74 LIMITED_METHOD_DAC_CONTRACT;
75 return GetMethodDesc()->GetCodeVersionManager()->LockOwnedByCurrentThread();
76}
77#endif //DEBUG
78
79PTR_MethodDesc NativeCodeVersionNode::GetMethodDesc() const
80{
81 LIMITED_METHOD_DAC_CONTRACT;
82 return m_pMethodDesc;
83}
84
85PCODE NativeCodeVersionNode::GetNativeCode() const
86{
87 LIMITED_METHOD_DAC_CONTRACT;
88 return m_pNativeCode;
89}
90
91ReJITID NativeCodeVersionNode::GetILVersionId() const
92{
93 LIMITED_METHOD_DAC_CONTRACT;
94 return m_parentId;
95}
96
97ILCodeVersion NativeCodeVersionNode::GetILCodeVersion() const
98{
99 LIMITED_METHOD_DAC_CONTRACT;
100#ifdef DEBUG
101 if (GetILVersionId() != 0)
102 {
103 _ASSERTE(LockOwnedByCurrentThread());
104 }
105#endif
106 PTR_MethodDesc pMD = GetMethodDesc();
107 return pMD->GetCodeVersionManager()->GetILCodeVersion(pMD, GetILVersionId());
108}
109
110NativeCodeVersionId NativeCodeVersionNode::GetVersionId() const
111{
112 LIMITED_METHOD_DAC_CONTRACT;
113 return m_id;
114}
115
116#ifndef DACCESS_COMPILE
117BOOL NativeCodeVersionNode::SetNativeCodeInterlocked(PCODE pCode, PCODE pExpected)
118{
119 LIMITED_METHOD_CONTRACT;
120 return FastInterlockCompareExchangePointer(&m_pNativeCode,
121 (TADDR&)pCode, (TADDR&)pExpected) == (TADDR&)pExpected;
122}
123#endif
124
125BOOL NativeCodeVersionNode::IsActiveChildVersion() const
126{
127 LIMITED_METHOD_DAC_CONTRACT;
128 _ASSERTE(LockOwnedByCurrentThread());
129 return (m_flags & IsActiveChildFlag) != 0;
130}
131
132#ifndef DACCESS_COMPILE
133void NativeCodeVersionNode::SetActiveChildFlag(BOOL isActive)
134{
135 LIMITED_METHOD_CONTRACT;
136 _ASSERTE(LockOwnedByCurrentThread());
137 if (isActive)
138 {
139 m_flags |= IsActiveChildFlag;
140 }
141 else
142 {
143 m_flags &= ~IsActiveChildFlag;
144 }
145}
146#endif
147
148
149#ifdef FEATURE_TIERED_COMPILATION
150NativeCodeVersion::OptimizationTier NativeCodeVersionNode::GetOptimizationTier() const
151{
152 LIMITED_METHOD_DAC_CONTRACT;
153 return m_optTier;
154}
155#endif // FEATURE_TIERED_COMPILATION
156
157NativeCodeVersion::NativeCodeVersion() :
158 m_storageKind(StorageKind::Unknown)
159{}
160
161NativeCodeVersion::NativeCodeVersion(const NativeCodeVersion & rhs) :
162 m_storageKind(rhs.m_storageKind)
163{
164 if(m_storageKind == StorageKind::Explicit)
165 {
166 m_pVersionNode = rhs.m_pVersionNode;
167 }
168 else if(m_storageKind == StorageKind::Synthetic)
169 {
170 m_synthetic = rhs.m_synthetic;
171 }
172}
173
174NativeCodeVersion::NativeCodeVersion(PTR_NativeCodeVersionNode pVersionNode) :
175 m_storageKind(pVersionNode != NULL ? StorageKind::Explicit : StorageKind::Unknown),
176 m_pVersionNode(pVersionNode)
177{}
178
179NativeCodeVersion::NativeCodeVersion(PTR_MethodDesc pMethod) :
180 m_storageKind(pMethod != NULL ? StorageKind::Synthetic : StorageKind::Unknown)
181{
182 LIMITED_METHOD_DAC_CONTRACT;
183 m_synthetic.m_pMethodDesc = pMethod;
184}
185
186BOOL NativeCodeVersion::IsNull() const
187{
188 LIMITED_METHOD_DAC_CONTRACT;
189 return m_storageKind == StorageKind::Unknown;
190}
191
192BOOL NativeCodeVersion::IsDefaultVersion() const
193{
194 LIMITED_METHOD_DAC_CONTRACT;
195 return m_storageKind == StorageKind::Synthetic;
196}
197
198PTR_MethodDesc NativeCodeVersion::GetMethodDesc() const
199{
200 LIMITED_METHOD_DAC_CONTRACT;
201 if (m_storageKind == StorageKind::Explicit)
202 {
203 return AsNode()->GetMethodDesc();
204 }
205 else
206 {
207 return m_synthetic.m_pMethodDesc;
208 }
209}
210
211PCODE NativeCodeVersion::GetNativeCode() const
212{
213 LIMITED_METHOD_DAC_CONTRACT;
214 if (m_storageKind == StorageKind::Explicit)
215 {
216 return AsNode()->GetNativeCode();
217 }
218 else
219 {
220 return GetMethodDesc()->GetNativeCode();
221 }
222}
223
224ReJITID NativeCodeVersion::GetILCodeVersionId() const
225{
226 LIMITED_METHOD_DAC_CONTRACT;
227 if (m_storageKind == StorageKind::Explicit)
228 {
229 return AsNode()->GetILVersionId();
230 }
231 else
232 {
233 return 0;
234 }
235}
236
237ILCodeVersion NativeCodeVersion::GetILCodeVersion() const
238{
239 LIMITED_METHOD_DAC_CONTRACT;
240 if (m_storageKind == StorageKind::Explicit)
241 {
242 return AsNode()->GetILCodeVersion();
243 }
244 else
245 {
246 PTR_MethodDesc pMethod = GetMethodDesc();
247 return ILCodeVersion(dac_cast<PTR_Module>(pMethod->GetModule()), pMethod->GetMemberDef());
248 }
249}
250
251NativeCodeVersionId NativeCodeVersion::GetVersionId() const
252{
253 LIMITED_METHOD_DAC_CONTRACT;
254 if (m_storageKind == StorageKind::Explicit)
255 {
256 return AsNode()->GetVersionId();
257 }
258 else
259 {
260 return 0;
261 }
262}
263
264#ifndef DACCESS_COMPILE
265BOOL NativeCodeVersion::SetNativeCodeInterlocked(PCODE pCode, PCODE pExpected)
266{
267 LIMITED_METHOD_CONTRACT;
268 if (m_storageKind == StorageKind::Explicit)
269 {
270 return AsNode()->SetNativeCodeInterlocked(pCode, pExpected);
271 }
272 else
273 {
274 return GetMethodDesc()->SetNativeCodeInterlocked(pCode, pExpected);
275 }
276}
277#endif
278
279BOOL NativeCodeVersion::IsActiveChildVersion() const
280{
281 LIMITED_METHOD_DAC_CONTRACT;
282 if (m_storageKind == StorageKind::Explicit)
283 {
284 return AsNode()->IsActiveChildVersion();
285 }
286 else
287 {
288 MethodDescVersioningState* pMethodVersioningState = GetMethodDescVersioningState();
289 if (pMethodVersioningState == NULL)
290 {
291 return TRUE;
292 }
293 return pMethodVersioningState->IsDefaultVersionActiveChild();
294 }
295}
296
297PTR_MethodDescVersioningState NativeCodeVersion::GetMethodDescVersioningState() const
298{
299 LIMITED_METHOD_DAC_CONTRACT;
300 PTR_MethodDesc pMethodDesc = GetMethodDesc();
301 CodeVersionManager* pCodeVersionManager = pMethodDesc->GetCodeVersionManager();
302 return pCodeVersionManager->GetMethodDescVersioningState(pMethodDesc);
303}
304
305#ifndef DACCESS_COMPILE
306void NativeCodeVersion::SetActiveChildFlag(BOOL isActive)
307{
308 LIMITED_METHOD_DAC_CONTRACT;
309 if (m_storageKind == StorageKind::Explicit)
310 {
311 AsNode()->SetActiveChildFlag(isActive);
312 }
313 else
314 {
315 MethodDescVersioningState* pMethodVersioningState = GetMethodDescVersioningState();
316 pMethodVersioningState->SetDefaultVersionActiveChildFlag(isActive);
317 }
318}
319
320MethodDescVersioningState* NativeCodeVersion::GetMethodDescVersioningState()
321{
322 LIMITED_METHOD_DAC_CONTRACT;
323 MethodDesc* pMethodDesc = GetMethodDesc();
324 CodeVersionManager* pCodeVersionManager = pMethodDesc->GetCodeVersionManager();
325 return pCodeVersionManager->GetMethodDescVersioningState(pMethodDesc);
326}
327#endif
328
329#ifdef FEATURE_TIERED_COMPILATION
330NativeCodeVersion::OptimizationTier NativeCodeVersion::GetOptimizationTier() const
331{
332 LIMITED_METHOD_DAC_CONTRACT;
333 if (m_storageKind == StorageKind::Explicit)
334 {
335 return AsNode()->GetOptimizationTier();
336 }
337 else
338 {
339 return TieredCompilationManager::GetInitialOptimizationTier(GetMethodDesc());
340 }
341}
342#endif
343
344PTR_NativeCodeVersionNode NativeCodeVersion::AsNode() const
345{
346 LIMITED_METHOD_DAC_CONTRACT;
347 if (m_storageKind == StorageKind::Explicit)
348 {
349 return m_pVersionNode;
350 }
351 else
352 {
353 return NULL;
354 }
355}
356
357#ifndef DACCESS_COMPILE
358PTR_NativeCodeVersionNode NativeCodeVersion::AsNode()
359{
360 LIMITED_METHOD_CONTRACT;
361 if (m_storageKind == StorageKind::Explicit)
362 {
363 return m_pVersionNode;
364 }
365 else
366 {
367 return NULL;
368 }
369}
370#endif
371
372bool NativeCodeVersion::operator==(const NativeCodeVersion & rhs) const
373{
374 LIMITED_METHOD_DAC_CONTRACT;
375 if (m_storageKind == StorageKind::Explicit)
376 {
377 return (rhs.m_storageKind == StorageKind::Explicit) &&
378 (rhs.AsNode() == AsNode());
379 }
380 else if (m_storageKind == StorageKind::Synthetic)
381 {
382 return (rhs.m_storageKind == StorageKind::Synthetic) &&
383 (m_synthetic.m_pMethodDesc == rhs.m_synthetic.m_pMethodDesc);
384 }
385 else
386 {
387 return rhs.m_storageKind == StorageKind::Unknown;
388 }
389}
390bool NativeCodeVersion::operator!=(const NativeCodeVersion & rhs) const
391{
392 LIMITED_METHOD_DAC_CONTRACT;
393 return !operator==(rhs);
394}
395
396NativeCodeVersionCollection::NativeCodeVersionCollection(PTR_MethodDesc pMethodDescFilter, ILCodeVersion ilCodeFilter) :
397 m_pMethodDescFilter(pMethodDescFilter),
398 m_ilCodeFilter(ilCodeFilter)
399{
400}
401
402NativeCodeVersionIterator NativeCodeVersionCollection::Begin()
403{
404 LIMITED_METHOD_DAC_CONTRACT;
405 return NativeCodeVersionIterator(this);
406}
407NativeCodeVersionIterator NativeCodeVersionCollection::End()
408{
409 LIMITED_METHOD_DAC_CONTRACT;
410 return NativeCodeVersionIterator(NULL);
411}
412
413NativeCodeVersionIterator::NativeCodeVersionIterator(NativeCodeVersionCollection* pNativeCodeVersionCollection) :
414 m_stage(IterationStage::Initial),
415 m_pCollection(pNativeCodeVersionCollection),
416 m_pLinkedListCur(dac_cast<PTR_NativeCodeVersionNode>(nullptr))
417{
418 LIMITED_METHOD_DAC_CONTRACT;
419 First();
420}
421void NativeCodeVersionIterator::First()
422{
423 LIMITED_METHOD_DAC_CONTRACT;
424 if (m_pCollection == NULL)
425 {
426 m_stage = IterationStage::End;
427 }
428 Next();
429}
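// Advances the iterator. The enumeration has two sources: first the synthetic default
// native code version (only when the IL filter is null or is itself the default IL
// version), then the explicit NativeCodeVersionNode linked list hanging off the
// MethodDescVersioningState, skipping nodes whose parent IL version id does not match
// the filter (when one is supplied).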
430void NativeCodeVersionIterator::Next()
431{
432 LIMITED_METHOD_DAC_CONTRACT;
433 if (m_stage == IterationStage::Initial)
434 {
435 ILCodeVersion ilCodeFilter = m_pCollection->m_ilCodeFilter;
436 m_stage = IterationStage::ImplicitCodeVersion;
437 if (ilCodeFilter.IsNull() || ilCodeFilter.IsDefaultVersion())
438 {
439 m_cur = NativeCodeVersion(m_pCollection->m_pMethodDescFilter);
440 return;
441 }
442 }
443 if (m_stage == IterationStage::ImplicitCodeVersion)
444 {
445 m_stage = IterationStage::LinkedList;
446 CodeVersionManager* pCodeVersionManager = m_pCollection->m_pMethodDescFilter->GetCodeVersionManager();
447 MethodDescVersioningState* pMethodDescVersioningState = pCodeVersionManager->GetMethodDescVersioningState(m_pCollection->m_pMethodDescFilter);
448 if (pMethodDescVersioningState == NULL)
449 {
450 m_pLinkedListCur = NULL;
451 }
452 else
453 {
454 ILCodeVersion ilCodeFilter = m_pCollection->m_ilCodeFilter;
455 m_pLinkedListCur = pMethodDescVersioningState->GetFirstVersionNode();
456 while (m_pLinkedListCur != NULL && !ilCodeFilter.IsNull() && ilCodeFilter.GetVersionId() != m_pLinkedListCur->GetILVersionId())
457 {
458 m_pLinkedListCur = m_pLinkedListCur->m_pNextMethodDescSibling;
459 }
460 }
461 if (m_pLinkedListCur != NULL)
462 {
463 m_cur = NativeCodeVersion(m_pLinkedListCur);
464 return;
465 }
466 }
467 if (m_stage == IterationStage::LinkedList)
468 {
469 if (m_pLinkedListCur != NULL)
470 {
471 ILCodeVersion ilCodeFilter = m_pCollection->m_ilCodeFilter;
472 do
473 {
474 m_pLinkedListCur = m_pLinkedListCur->m_pNextMethodDescSibling;
475 } while (m_pLinkedListCur != NULL && !ilCodeFilter.IsNull() && ilCodeFilter.GetVersionId() != m_pLinkedListCur->GetILVersionId());
476 }
477 if (m_pLinkedListCur != NULL)
478 {
479 m_cur = NativeCodeVersion(m_pLinkedListCur);
480 return;
481 }
482 else
483 {
484 m_stage = IterationStage::End;
485 m_cur = NativeCodeVersion();
486 }
487 }
488}
489const NativeCodeVersion & NativeCodeVersionIterator::Get() const
490{
491 LIMITED_METHOD_DAC_CONTRACT;
492 return m_cur;
493}
494bool NativeCodeVersionIterator::Equal(const NativeCodeVersionIterator &i) const
495{
496 LIMITED_METHOD_DAC_CONTRACT;
497 return m_cur == i.m_cur;
498}
499
500ILCodeVersionNode::ILCodeVersionNode() :
501 m_pModule(dac_cast<PTR_Module>(nullptr)),
502 m_methodDef(0),
503 m_rejitId(0),
504 m_pNextILVersionNode(dac_cast<PTR_ILCodeVersionNode>(nullptr)),
505 m_rejitState(ILCodeVersion::kStateRequested),
506 m_pIL(),
507 m_jitFlags(0)
508{
509 m_pIL.Store(dac_cast<PTR_COR_ILMETHOD>(nullptr));
510}
511
512#ifndef DACCESS_COMPILE
513ILCodeVersionNode::ILCodeVersionNode(Module* pModule, mdMethodDef methodDef, ReJITID id) :
514 m_pModule(pModule),
515 m_methodDef(methodDef),
516 m_rejitId(id),
517 m_pNextILVersionNode(dac_cast<PTR_ILCodeVersionNode>(nullptr)),
518 m_rejitState(ILCodeVersion::kStateRequested),
519 m_pIL(nullptr),
520 m_jitFlags(0)
521{}
522#endif
523
524#ifdef DEBUG
525BOOL ILCodeVersionNode::LockOwnedByCurrentThread() const
526{
527 LIMITED_METHOD_DAC_CONTRACT;
528 return GetModule()->GetCodeVersionManager()->LockOwnedByCurrentThread();
529}
530#endif //DEBUG
531
532PTR_Module ILCodeVersionNode::GetModule() const
533{
534 LIMITED_METHOD_DAC_CONTRACT;
535 return m_pModule;
536}
537
538mdMethodDef ILCodeVersionNode::GetMethodDef() const
539{
540 LIMITED_METHOD_DAC_CONTRACT;
541 return m_methodDef;
542}
543
544ReJITID ILCodeVersionNode::GetVersionId() const
545{
546 LIMITED_METHOD_DAC_CONTRACT;
547 return m_rejitId;
548}
549
550ILCodeVersion::RejitFlags ILCodeVersionNode::GetRejitState() const
551{
552 LIMITED_METHOD_DAC_CONTRACT;
553 return m_rejitState.Load();
554}
555
556PTR_COR_ILMETHOD ILCodeVersionNode::GetIL() const
557{
558 LIMITED_METHOD_DAC_CONTRACT;
559 return dac_cast<PTR_COR_ILMETHOD>(m_pIL.Load());
560}
561
562DWORD ILCodeVersionNode::GetJitFlags() const
563{
564 LIMITED_METHOD_DAC_CONTRACT;
565 return m_jitFlags.Load();
566}
567
568const InstrumentedILOffsetMapping* ILCodeVersionNode::GetInstrumentedILMap() const
569{
570 LIMITED_METHOD_DAC_CONTRACT;
571 _ASSERTE(LockOwnedByCurrentThread());
572 return &m_instrumentedILMap;
573}
574
575PTR_ILCodeVersionNode ILCodeVersionNode::GetNextILVersionNode() const
576{
577 LIMITED_METHOD_DAC_CONTRACT;
578 _ASSERTE(LockOwnedByCurrentThread());
579 return m_pNextILVersionNode;
580}
581
582#ifndef DACCESS_COMPILE
583void ILCodeVersionNode::SetRejitState(ILCodeVersion::RejitFlags newState)
584{
585 LIMITED_METHOD_CONTRACT;
586 m_rejitState.Store(newState);
587}
588
589void ILCodeVersionNode::SetIL(COR_ILMETHOD* pIL)
590{
591 LIMITED_METHOD_CONTRACT;
592 m_pIL.Store(pIL);
593}
594
595void ILCodeVersionNode::SetJitFlags(DWORD flags)
596{
597 LIMITED_METHOD_CONTRACT;
598 m_jitFlags.Store(flags);
599}
600
601void ILCodeVersionNode::SetInstrumentedILMap(SIZE_T cMap, COR_IL_MAP * rgMap)
602{
603 LIMITED_METHOD_CONTRACT;
604 _ASSERTE(LockOwnedByCurrentThread());
605 m_instrumentedILMap.SetMappingInfo(cMap, rgMap);
606}
607
608void ILCodeVersionNode::SetNextILVersionNode(ILCodeVersionNode* pNextILVersionNode)
609{
610 LIMITED_METHOD_CONTRACT;
611 _ASSERTE(LockOwnedByCurrentThread());
612 m_pNextILVersionNode = pNextILVersionNode;
613}
614#endif
615
616ILCodeVersion::ILCodeVersion() :
617 m_storageKind(StorageKind::Unknown)
618{}
619
620ILCodeVersion::ILCodeVersion(const ILCodeVersion & ilCodeVersion) :
621 m_storageKind(ilCodeVersion.m_storageKind)
622{
623 if(m_storageKind == StorageKind::Explicit)
624 {
625 m_pVersionNode = ilCodeVersion.m_pVersionNode;
626 }
627 else if(m_storageKind == StorageKind::Synthetic)
628 {
629 m_synthetic = ilCodeVersion.m_synthetic;
630 }
631}
632
633ILCodeVersion::ILCodeVersion(PTR_ILCodeVersionNode pILCodeVersionNode) :
634 m_storageKind(pILCodeVersionNode != NULL ? StorageKind::Explicit : StorageKind::Unknown),
635 m_pVersionNode(pILCodeVersionNode)
636{}
637
638ILCodeVersion::ILCodeVersion(PTR_Module pModule, mdMethodDef methodDef) :
639 m_storageKind(pModule != NULL ? StorageKind::Synthetic : StorageKind::Unknown)
640{
641 LIMITED_METHOD_DAC_CONTRACT;
642 m_synthetic.m_pModule = pModule;
643 m_synthetic.m_methodDef = methodDef;
644}
645
646bool ILCodeVersion::operator==(const ILCodeVersion & rhs) const
647{
648 LIMITED_METHOD_DAC_CONTRACT;
649 if (m_storageKind == StorageKind::Explicit)
650 {
651 return (rhs.m_storageKind == StorageKind::Explicit) &&
652 (AsNode() == rhs.AsNode());
653 }
654 else if (m_storageKind == StorageKind::Synthetic)
655 {
656 return (rhs.m_storageKind == StorageKind::Synthetic) &&
657 (m_synthetic.m_pModule == rhs.m_synthetic.m_pModule) &&
658 (m_synthetic.m_methodDef == rhs.m_synthetic.m_methodDef);
659 }
660 else
661 {
662 return rhs.m_storageKind == StorageKind::Unknown;
663 }
664}
665
666BOOL ILCodeVersion::HasDefaultIL() const
667{
668 LIMITED_METHOD_CONTRACT;
669
670 return (m_storageKind == StorageKind::Synthetic) || (AsNode()->GetIL() == NULL);
671}
672
673BOOL ILCodeVersion::IsNull() const
674{
675 LIMITED_METHOD_DAC_CONTRACT;
676 return m_storageKind == StorageKind::Unknown;
677}
678
679BOOL ILCodeVersion::IsDefaultVersion() const
680{
681 LIMITED_METHOD_DAC_CONTRACT;
682 return m_storageKind == StorageKind::Synthetic;
683}
684
685PTR_Module ILCodeVersion::GetModule() const
686{
687 LIMITED_METHOD_DAC_CONTRACT;
688 if (m_storageKind == StorageKind::Explicit)
689 {
690 return AsNode()->GetModule();
691 }
692 else
693 {
694 return m_synthetic.m_pModule;
695 }
696}
697
698mdMethodDef ILCodeVersion::GetMethodDef() const
699{
700 LIMITED_METHOD_DAC_CONTRACT;
701 if (m_storageKind == StorageKind::Explicit)
702 {
703 return AsNode()->GetMethodDef();
704 }
705 else
706 {
707 return m_synthetic.m_methodDef;
708 }
709}
710
711ReJITID ILCodeVersion::GetVersionId() const
712{
713 LIMITED_METHOD_DAC_CONTRACT;
714 if (m_storageKind == StorageKind::Explicit)
715 {
716 return AsNode()->GetVersionId();
717 }
718 else
719 {
720 return 0;
721 }
722}
723
724NativeCodeVersionCollection ILCodeVersion::GetNativeCodeVersions(PTR_MethodDesc pClosedMethodDesc) const
725{
726 LIMITED_METHOD_DAC_CONTRACT;
727 return NativeCodeVersionCollection(pClosedMethodDesc, *this);
728}
729
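// Returns the native code version that is currently the active child of this IL code
// version for the given closed MethodDesc, or a null NativeCodeVersion if no child
// is marked active.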
730NativeCodeVersion ILCodeVersion::GetActiveNativeCodeVersion(PTR_MethodDesc pClosedMethodDesc) const
731{
732 LIMITED_METHOD_DAC_CONTRACT;
733 NativeCodeVersionCollection versions = GetNativeCodeVersions(pClosedMethodDesc);
734 for (NativeCodeVersionIterator cur = versions.Begin(), end = versions.End(); cur != end; cur++)
735 {
736 if (cur->IsActiveChildVersion())
737 {
738 return *cur;
739 }
740 }
741 return NativeCodeVersion();
742}
743
744ILCodeVersion::RejitFlags ILCodeVersion::GetRejitState() const
745{
746 LIMITED_METHOD_DAC_CONTRACT;
747 if (m_storageKind == StorageKind::Explicit)
748 {
749 return AsNode()->GetRejitState();
750 }
751 else
752 {
753 return ILCodeVersion::kStateActive;
754 }
755}
756
757PTR_COR_ILMETHOD ILCodeVersion::GetIL() const
758{
759 CONTRACTL
760 {
761 THROWS; //GetILHeader throws
762 GC_NOTRIGGER;
763 FORBID_FAULT;
764 MODE_ANY;
765 }
766 CONTRACTL_END
767
768 PTR_COR_ILMETHOD pIL = NULL;
769 if (m_storageKind == StorageKind::Explicit)
770 {
771 pIL = AsNode()->GetIL();
772 }
773
774 // For the default code version we always fetch the globally stored default IL for a method
775 //
776 // In the non-default code version we assume NULL is the equivalent of explicitly requesting to
777 // re-use the default IL. Ideally there would be no reason to create a new version that re-uses
778 // the default IL (just use the default code version for that) but we do it here for compat. We've
779 // got some profilers that use ReJIT to create a new code version and then instead of calling
780 // ICorProfilerFunctionControl::SetILFunctionBody they call ICorProfilerInfo::SetILFunctionBody.
    // This mutates the default IL so that it is now correct for their new code version. Of course this
    // also overwrites the previous default IL, so the default code version's GetIL() is now out of sync
    // with the jitted code. In the majority of cases we never re-read the IL after the initial
    // jitting, so this issue goes unnoticed.
785 //
786 // If changing the default IL after it is in use becomes more problematic in the future we would
787 // need to add enforcement that prevents profilers from using ICorProfilerInfo::SetILFunctionBody
    // that way and coordinate with them, because it is a breaking change for any profiler currently doing it.
789 if(pIL == NULL)
790 {
791 PTR_Module pModule = GetModule();
792 PTR_MethodDesc pMethodDesc = dac_cast<PTR_MethodDesc>(pModule->LookupMethodDef(GetMethodDef()));
793 if (pMethodDesc != NULL)
794 {
795 pIL = dac_cast<PTR_COR_ILMETHOD>(pMethodDesc->GetILHeader(TRUE));
796 }
797 }
798
799 return pIL;
800}
801
802PTR_COR_ILMETHOD ILCodeVersion::GetILNoThrow() const
803{
804 LIMITED_METHOD_DAC_CONTRACT;
805 PTR_COR_ILMETHOD ret;
806 EX_TRY
807 {
808 ret = GetIL();
809 }
810 EX_CATCH
811 {
812 ret = NULL;
813 }
814 EX_END_CATCH(RethrowTerminalExceptions);
815 return ret;
816}
817
818DWORD ILCodeVersion::GetJitFlags() const
819{
820 LIMITED_METHOD_DAC_CONTRACT;
821 if (m_storageKind == StorageKind::Explicit)
822 {
823 return AsNode()->GetJitFlags();
824 }
825 else
826 {
827 return 0;
828 }
829}
830
831const InstrumentedILOffsetMapping* ILCodeVersion::GetInstrumentedILMap() const
832{
833 LIMITED_METHOD_DAC_CONTRACT;
834 if (m_storageKind == StorageKind::Explicit)
835 {
836 return AsNode()->GetInstrumentedILMap();
837 }
838 else
839 {
840 return NULL;
841 }
842}
843
844#ifndef DACCESS_COMPILE
845void ILCodeVersion::SetRejitState(RejitFlags newState)
846{
847 LIMITED_METHOD_CONTRACT;
848 AsNode()->SetRejitState(newState);
849}
850
851void ILCodeVersion::SetIL(COR_ILMETHOD* pIL)
852{
853 LIMITED_METHOD_CONTRACT;
854 AsNode()->SetIL(pIL);
855}
856
857void ILCodeVersion::SetJitFlags(DWORD flags)
858{
859 LIMITED_METHOD_CONTRACT;
860 AsNode()->SetJitFlags(flags);
861}
862
863void ILCodeVersion::SetInstrumentedILMap(SIZE_T cMap, COR_IL_MAP * rgMap)
864{
865 LIMITED_METHOD_CONTRACT;
866 AsNode()->SetInstrumentedILMap(cMap, rgMap);
867}
868
869HRESULT ILCodeVersion::AddNativeCodeVersion(
870 MethodDesc* pClosedMethodDesc,
871 NativeCodeVersion::OptimizationTier optimizationTier,
872 NativeCodeVersion* pNativeCodeVersion)
873{
874 LIMITED_METHOD_CONTRACT;
875 CodeVersionManager* pManager = GetModule()->GetCodeVersionManager();
876 HRESULT hr = pManager->AddNativeCodeVersion(*this, pClosedMethodDesc, optimizationTier, pNativeCodeVersion);
877 if (FAILED(hr))
878 {
879 _ASSERTE(hr == E_OUTOFMEMORY);
880 return hr;
881 }
882 return S_OK;
883}
884
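// Returns the active native code version for pClosedMethodDesc, creating one at the
// initial optimization tier if this IL code version has no native children yet. The
// first child added becomes active automatically.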
885HRESULT ILCodeVersion::GetOrCreateActiveNativeCodeVersion(MethodDesc* pClosedMethodDesc, NativeCodeVersion* pActiveNativeCodeVersion)
886{
887 LIMITED_METHOD_CONTRACT;
888 HRESULT hr = S_OK;
889 NativeCodeVersion activeNativeChild = GetActiveNativeCodeVersion(pClosedMethodDesc);
890 if (activeNativeChild.IsNull())
891 {
892 NativeCodeVersion::OptimizationTier optimizationTier =
893 TieredCompilationManager::GetInitialOptimizationTier(pClosedMethodDesc);
894 if (FAILED(hr = AddNativeCodeVersion(pClosedMethodDesc, optimizationTier, &activeNativeChild)))
895 {
896 _ASSERTE(hr == E_OUTOFMEMORY);
897 return hr;
898 }
899 }
900 // The first added child should automatically become active
901 _ASSERTE(GetActiveNativeCodeVersion(pClosedMethodDesc) == activeNativeChild);
902 *pActiveNativeCodeVersion = activeNativeChild;
903 return S_OK;
904}
905
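// Makes activeNativeCodeVersion the active child of this IL code version, clearing the
// flag on the previously active child. If this IL code version is the active one for the
// method, the newly activated native code is also published as the method's code body.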
906HRESULT ILCodeVersion::SetActiveNativeCodeVersion(NativeCodeVersion activeNativeCodeVersion, BOOL fEESuspended)
907{
908 LIMITED_METHOD_CONTRACT;
909 HRESULT hr = S_OK;
910 MethodDesc* pMethodDesc = activeNativeCodeVersion.GetMethodDesc();
911 NativeCodeVersion prevActiveVersion = GetActiveNativeCodeVersion(pMethodDesc);
912 if (prevActiveVersion == activeNativeCodeVersion)
913 {
914 //nothing to do, this version is already active
915 return S_OK;
916 }
917
918 if (!prevActiveVersion.IsNull())
919 {
920 prevActiveVersion.SetActiveChildFlag(FALSE);
921 }
922 activeNativeCodeVersion.SetActiveChildFlag(TRUE);
923
924 // If needed update the published code body for this method
925 CodeVersionManager* pCodeVersionManager = GetModule()->GetCodeVersionManager();
926 if (pCodeVersionManager->GetActiveILCodeVersion(GetModule(), GetMethodDef()) == *this)
927 {
928 if (FAILED(hr = pCodeVersionManager->PublishNativeCodeVersion(pMethodDesc, activeNativeCodeVersion, fEESuspended)))
929 {
930 return hr;
931 }
932 }
933
934 return S_OK;
935}
936
937ILCodeVersionNode* ILCodeVersion::AsNode()
938{
939 LIMITED_METHOD_CONTRACT;
940 //This is dangerous - NativeCodeVersion coerces non-explicit versions to NULL but ILCodeVersion assumes the caller
941 //will never invoke AsNode() on a non-explicit node. Asserting for now as a minimal fix, but we should revisit this.
942 _ASSERTE(m_storageKind == StorageKind::Explicit);
943 return m_pVersionNode;
944}
945#endif //DACCESS_COMPILE
946
947PTR_ILCodeVersionNode ILCodeVersion::AsNode() const
948{
949 LIMITED_METHOD_DAC_CONTRACT;
950 //This is dangerous - NativeCodeVersion coerces non-explicit versions to NULL but ILCodeVersion assumes the caller
951 //will never invoke AsNode() on a non-explicit node. Asserting for now as a minimal fix, but we should revisit this.
952 _ASSERTE(m_storageKind == StorageKind::Explicit);
953 return m_pVersionNode;
954}
955
956ILCodeVersionCollection::ILCodeVersionCollection(PTR_Module pModule, mdMethodDef methodDef) :
957 m_pModule(pModule),
958 m_methodDef(methodDef)
959{}
960
961ILCodeVersionIterator ILCodeVersionCollection::Begin()
962{
963 LIMITED_METHOD_DAC_CONTRACT;
964 return ILCodeVersionIterator(this);
965}
966
967ILCodeVersionIterator ILCodeVersionCollection::End()
968{
969 LIMITED_METHOD_DAC_CONTRACT;
970 return ILCodeVersionIterator(NULL);
971}
972
973ILCodeVersionIterator::ILCodeVersionIterator(const ILCodeVersionIterator & iter) :
974 m_stage(iter.m_stage),
975 m_cur(iter.m_cur),
976 m_pLinkedListCur(iter.m_pLinkedListCur),
977 m_pCollection(iter.m_pCollection)
978{}
979
980ILCodeVersionIterator::ILCodeVersionIterator(ILCodeVersionCollection* pCollection) :
981 m_stage(pCollection != NULL ? IterationStage::Initial : IterationStage::End),
982 m_pLinkedListCur(dac_cast<PTR_ILCodeVersionNode>(nullptr)),
983 m_pCollection(pCollection)
984{
985 LIMITED_METHOD_DAC_CONTRACT;
986 First();
987}
988
989const ILCodeVersion & ILCodeVersionIterator::Get() const
990{
991 LIMITED_METHOD_DAC_CONTRACT;
992 return m_cur;
993}
994
995void ILCodeVersionIterator::First()
996{
997 LIMITED_METHOD_DAC_CONTRACT;
998 Next();
999}
1000
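// Advances the iterator: the implicit default IL code version for the method comes
// first, followed by the explicit ILCodeVersionNode list held by the method's
// ILCodeVersioningState. The code version manager lock must be held (asserted below).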
1001void ILCodeVersionIterator::Next()
1002{
1003 LIMITED_METHOD_DAC_CONTRACT;
1004 if (m_stage == IterationStage::Initial)
1005 {
1006 m_stage = IterationStage::ImplicitCodeVersion;
1007 m_cur = ILCodeVersion(m_pCollection->m_pModule, m_pCollection->m_methodDef);
1008 return;
1009 }
1010 if (m_stage == IterationStage::ImplicitCodeVersion)
1011 {
1012 CodeVersionManager* pCodeVersionManager = m_pCollection->m_pModule->GetCodeVersionManager();
1013 _ASSERTE(pCodeVersionManager->LockOwnedByCurrentThread());
1014 PTR_ILCodeVersioningState pILCodeVersioningState = pCodeVersionManager->GetILCodeVersioningState(m_pCollection->m_pModule, m_pCollection->m_methodDef);
1015 if (pILCodeVersioningState != NULL)
1016 {
1017 m_pLinkedListCur = pILCodeVersioningState->GetFirstVersionNode();
1018 }
1019 m_stage = IterationStage::LinkedList;
1020 if (m_pLinkedListCur != NULL)
1021 {
1022 m_cur = ILCodeVersion(m_pLinkedListCur);
1023 return;
1024 }
1025 }
1026 if (m_stage == IterationStage::LinkedList)
1027 {
1028 if (m_pLinkedListCur != NULL)
1029 {
1030 m_pLinkedListCur = m_pLinkedListCur->GetNextILVersionNode();
1031 }
1032 if (m_pLinkedListCur != NULL)
1033 {
1034 m_cur = ILCodeVersion(m_pLinkedListCur);
1035 return;
1036 }
1037 else
1038 {
1039 m_stage = IterationStage::End;
1040 m_cur = ILCodeVersion();
1041 return;
1042 }
1043 }
1044}
1045
1046bool ILCodeVersionIterator::Equal(const ILCodeVersionIterator &i) const
1047{
1048 LIMITED_METHOD_DAC_CONTRACT;
1049 return m_cur == i.m_cur;
1050}
1051
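// A new MethodDescVersioningState starts with the synthetic default version as the
// active child and reserves native code version id 0 for it; explicit versions are
// assigned ids starting at 1.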
1052MethodDescVersioningState::MethodDescVersioningState(PTR_MethodDesc pMethodDesc) :
1053 m_pMethodDesc(pMethodDesc),
1054 m_flags(IsDefaultVersionActiveChildFlag),
1055 m_nextId(1),
1056 m_pFirstVersionNode(dac_cast<PTR_NativeCodeVersionNode>(nullptr))
1057{
1058 LIMITED_METHOD_DAC_CONTRACT;
1059#ifdef FEATURE_JUMPSTAMP
1060 ZeroMemory(m_rgSavedCode, JumpStubSize);
1061#endif
1062}
1063
1064PTR_MethodDesc MethodDescVersioningState::GetMethodDesc() const
1065{
1066 LIMITED_METHOD_DAC_CONTRACT;
1067 return m_pMethodDesc;
1068}
1069
1070#ifndef DACCESS_COMPILE
1071NativeCodeVersionId MethodDescVersioningState::AllocateVersionId()
1072{
1073 LIMITED_METHOD_CONTRACT;
1074 return m_nextId++;
1075}
1076#endif
1077
1078PTR_NativeCodeVersionNode MethodDescVersioningState::GetFirstVersionNode() const
1079{
1080 LIMITED_METHOD_DAC_CONTRACT;
1081 return m_pFirstVersionNode;
1082}
1083
1084#ifdef FEATURE_JUMPSTAMP
1085MethodDescVersioningState::JumpStampFlags MethodDescVersioningState::GetJumpStampState()
1086{
1087 LIMITED_METHOD_DAC_CONTRACT;
1088 return (JumpStampFlags)(m_flags & JumpStampMask);
1089}
1090
1091#ifndef DACCESS_COMPILE
1092void MethodDescVersioningState::SetJumpStampState(JumpStampFlags newState)
1093{
1094 LIMITED_METHOD_CONTRACT;
1095 m_flags = (m_flags & ~JumpStampMask) | (BYTE)newState;
1096}
1097#endif // DACCESS_COMPILE
1098
1099#ifndef DACCESS_COMPILE
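// Brings the method's jump-stamp into sync with the requested active native code
// version: reverts the stamp when the default version should run, stamps a jump to
// the prestub when the active version has no native code yet, or redirects the jump
// to the rejitted code once it exists. Some of these transitions need the EE to be
// suspended and return CORPROF_E_RUNTIME_SUSPEND_REQUIRED when it isn't.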
1100HRESULT MethodDescVersioningState::SyncJumpStamp(NativeCodeVersion nativeCodeVersion, BOOL fEESuspended)
1101 {
1102 LIMITED_METHOD_CONTRACT;
1103 HRESULT hr = S_OK;
1104 PCODE pCode = nativeCodeVersion.IsNull() ? NULL : nativeCodeVersion.GetNativeCode();
1105 MethodDesc* pMethod = GetMethodDesc();
1106 _ASSERTE(pMethod->IsVersionable() && pMethod->IsVersionableWithJumpStamp());
1107
1108 if (!pMethod->HasNativeCode())
1109 {
1110 //we'll set up the jump-stamp when the default native code is created
1111 return S_OK;
1112 }
1113
1114 if (!nativeCodeVersion.IsNull() && nativeCodeVersion.IsDefaultVersion())
1115 {
1116 return UndoJumpStampNativeCode(fEESuspended);
1117 }
1118 else
1119 {
1120 // We don't have new code ready yet, jumpstamp back to the prestub to let us generate it the next time
1121 // the method is called
1122 if (pCode == NULL)
1123 {
1124 if (!fEESuspended)
1125 {
1126 return CORPROF_E_RUNTIME_SUSPEND_REQUIRED;
1127 }
1128 return JumpStampNativeCode();
1129 }
1130 // We do know the new code body, install the jump stamp now
1131 else
1132 {
1133 return UpdateJumpTarget(fEESuspended, pCode);
1134 }
1135 }
1136}
1137#endif // DACCESS_COMPILE
1138
1139//---------------------------------------------------------------------------------------
1140//
1141// Simple, thin abstraction of debugger breakpoint patching. Given an address and a
1142// previously procured DebuggerControllerPatch governing the code address, this decides
1143// whether the code address is patched. If so, it returns a pointer to the debugger's
1144// buffer (of what's "underneath" the int 3 patch); otherwise, it returns the code
1145// address itself.
1146//
1147// Arguments:
1148// * pbCode - Code address to return if unpatched
1149// * dbgpatch - DebuggerControllerPatch to test
1150//
1151// Return Value:
1152// Either pbCode or the debugger's patch buffer, as per description above.
1153//
1154// Assumptions:
1155// Caller must manually grab (and hold) the ControllerLockHolder and get the
1156// DebuggerControllerPatch before calling this helper.
1157//
1158// Notes:
1159// pbCode need not equal the code address governed by dbgpatch, but is always
1160// "related" (and sometimes really is equal). For example, this helper may be used
1161// when writing a code byte to an internal rejit buffer (e.g., in preparation for an
1162// eventual 64-bit interlocked write into the code stream), and thus pbCode would
1163// point into the internal rejit buffer whereas dbgpatch governs the corresponding
1164// code byte in the live code stream. This function would then be used to determine
1165// whether a byte should be written into the internal rejit buffer OR into the
1166// debugger controller's breakpoint buffer.
1167//
1168
1169LPBYTE FirstCodeByteAddr(LPBYTE pbCode, DebuggerControllerPatch * dbgpatch)
1170{
1171 LIMITED_METHOD_CONTRACT;
1172
1173 if (dbgpatch != NULL && dbgpatch->IsActivated())
1174 {
1175 // Debugger has patched the code, so return the address of the buffer
1176 return LPBYTE(&(dbgpatch->opcode));
1177 }
1178
1179 // no active patch, just return the direct code address
1180 return pbCode;
1181}
1182
1183
1184#ifdef _DEBUG
1185#ifndef DACCESS_COMPILE
1186BOOL MethodDescVersioningState::CodeIsSaved()
1187{
1188 LIMITED_METHOD_CONTRACT;
1189
1190 for (size_t i = 0; i < sizeof(m_rgSavedCode); i++)
1191 {
1192 if (m_rgSavedCode[i] != 0)
1193 return TRUE;
1194 }
1195 return FALSE;
1196}
1197#endif //DACCESS_COMPILE
1198#endif //_DEBUG
1199
1200//---------------------------------------------------------------------------------------
1201//
1202// Do the actual work of stamping the top of originally-jitted-code with a jmp that goes
1203// to the prestub. This can be called in one of three ways:
1204// * Case 1: By RequestReJIT against an already-jitted function, in which case the
1205// PCODE may be inferred by the MethodDesc, and our caller will have suspended
1206// the EE for us, OR
1207// * Case 2: By the prestub worker after jitting the original code of a function
1208// (i.e., the "pre-rejit" scenario). In this case, the EE is not suspended. But
1209// that's ok, because the PCODE has not yet been published to the MethodDesc, and
1210// no thread can be executing inside the originally JITted function yet.
1211// * Case 3: At type/method restore time for an NGEN'ed assembly. This is also the pre-rejit
1212// scenario because we are guaranteed to do this before the code in the module
1213// is executable. EE suspend is not required.
1214//
1215// Arguments:
1216// * pCode - Case 1 (above): will be NULL, and we can infer the PCODE from the
1217// MethodDesc; Case 2+3 (above, pre-rejit): will be non-NULL, and we'll need to use
1218// this to find the code to stamp on top of.
1219//
1220// Return Value:
1221// * S_OK: Either we successfully did the jmp-stamp, or a racing thread took care of
1222// it for us.
1223// * Else, HRESULT indicating failure.
1224//
1225// Assumptions:
1226// The caller will have suspended the EE if necessary (case 1), before this is
1227// called.
1228//
1229#ifndef DACCESS_COMPILE
1230HRESULT MethodDescVersioningState::JumpStampNativeCode(PCODE pCode /* = NULL */)
1231{
1232 CONTRACTL
1233 {
1234 NOTHROW;
1235 GC_NOTRIGGER;
1236 // It may seem dangerous to be stamping jumps over code while a GC is going on,
1237 // but we're actually safe. As we assert below, either we're holding the thread
1238 // store lock (and thus preventing a GC) OR we're stamping code that has not yet
        // been published (and will thus not be executed by managed threads or examined
1240 // by the GC).
1241 MODE_ANY;
1242 }
1243 CONTRACTL_END;
1244
1245 PCODE pCodePublished = GetMethodDesc()->GetNativeCode();
1246
1247 _ASSERTE((pCode != NULL) || (pCodePublished != NULL));
1248 _ASSERTE(GetMethodDesc()->GetCodeVersionManager()->LockOwnedByCurrentThread());
1249
1250 HRESULT hr = S_OK;
1251
1252 // We'll jump-stamp over pCode, or if pCode is NULL, jump-stamp over the published
    // code for this MethodDesc.
1254 LPBYTE pbCode = (LPBYTE)pCode;
1255 if (pbCode == NULL)
1256 {
1257 // If caller didn't specify a pCode, just use the one that was published after
1258 // the original JIT. (A specific pCode would be passed in the pre-rejit case,
1259 // to jump-stamp the original code BEFORE the PCODE gets published.)
1260 pbCode = (LPBYTE)pCodePublished;
1261 }
1262 _ASSERTE(pbCode != NULL);
1263
1264 // The debugging API may also try to write to the very top of this function (though
1265 // with an int 3 for breakpoint purposes). Coordinate with the debugger so we know
1266 // whether we can safely patch the actual code, or instead write to the debugger's
1267 // buffer.
1268 DebuggerController::ControllerLockHolder lockController;
1269
1270 if (GetJumpStampState() == JumpStampToPrestub)
1271 {
1272 // The method has already been jump stamped so nothing left to do
1273 _ASSERTE(CodeIsSaved());
1274 return S_OK;
1275 }
1276
1277 // Remember what we're stamping our jump on top of, so we can replace it during a
1278 // revert.
1279 if (GetJumpStampState() == JumpStampNone)
1280 {
        for (size_t i = 0; i < sizeof(m_rgSavedCode); i++)
1282 {
1283 m_rgSavedCode[i] = *FirstCodeByteAddr(pbCode + i, DebuggerController::GetPatchTable()->GetPatch((CORDB_ADDRESS_TYPE *)(pbCode + i)));
1284 }
1285 }
1286
1287 EX_TRY
1288 {
1289 AllocMemTracker amt;
1290
        // This might throw on out-of-memory, so rely on the tracker to clean up
1292 Precode * pPrecode = Precode::Allocate(PRECODE_STUB, GetMethodDesc(), GetMethodDesc()->GetLoaderAllocator(), &amt);
1293 PCODE target = pPrecode->GetEntryPoint();
1294
1295#if defined(_X86_) || defined(_AMD64_)
1296
1297 // Normal unpatched code never starts with a jump
1298 _ASSERTE(GetJumpStampState() == JumpStampToActiveVersion ||
1299 *FirstCodeByteAddr(pbCode, DebuggerController::GetPatchTable()->GetPatch((CORDB_ADDRESS_TYPE *)pbCode)) != X86_INSTR_JMP_REL32);
1300
1301 INT64 i64OldCode = *(INT64*)pbCode;
1302 INT64 i64NewCode = i64OldCode;
1303 LPBYTE pbNewValue = (LPBYTE)&i64NewCode;
1304 *pbNewValue = X86_INSTR_JMP_REL32;
1305 INT32 UNALIGNED * pOffset = reinterpret_cast<INT32 UNALIGNED *>(&pbNewValue[1]);
        // This will throw for out-of-memory, so don't write anything until
        // after it succeeds.
        // The call below will leak/cache/reuse the jump stub.
1309 *pOffset = rel32UsingJumpStub(reinterpret_cast<INT32 UNALIGNED *>(pbCode + 1), target, GetMethodDesc(), GetMethodDesc()->GetLoaderAllocator());
1310
1311 // If we have the EE suspended or the code is unpublished there won't be contention on this code
1312 hr = UpdateJumpStampHelper(pbCode, i64OldCode, i64NewCode, FALSE);
1313 if (FAILED(hr))
1314 {
1315 ThrowHR(hr);
1316 }
1317
1318 //
1319 // No failure point after this!
1320 //
1321 amt.SuppressRelease();
1322
1323#else // _X86_ || _AMD64_
1324#error "Need to define a way to jump-stamp the prolog in a safe way for this platform"
1325#endif // _X86_ || _AMD64_
1326
1327 SetJumpStampState(JumpStampToPrestub);
1328 }
1329 EX_CATCH_HRESULT(hr);
1330 _ASSERT(hr == S_OK || hr == E_OUTOFMEMORY);
1331
1332 if (SUCCEEDED(hr))
1333 {
1334 _ASSERTE(GetJumpStampState() == JumpStampToPrestub);
1335 _ASSERTE(m_rgSavedCode[0] != 0); // saved code should not start with 0
1336 }
1337
1338 return hr;
1339}
1340
1341
1342//---------------------------------------------------------------------------------------
1343//
1344// After code has been rejitted, this is called to update the jump-stamp to go from
1345// pointing to the prestub, to pointing to the newly rejitted code.
1346//
1347// Arguments:
1348// fEESuspended - TRUE if the caller keeps the EE suspended during this call
1349// pRejittedCode - jitted code for the updated IL this method should execute
1350//
1351// Assumptions:
1352// This rejit manager's table crst should be held by the caller
1353//
1354// Returns - S_OK if the jump target is updated
1355// CORPROF_E_RUNTIME_SUSPEND_REQUIRED if the ee isn't suspended and it
1356// will need to be in order to do the update safely
1357HRESULT MethodDescVersioningState::UpdateJumpTarget(BOOL fEESuspended, PCODE pRejittedCode)
1358{
1359 CONTRACTL
1360 {
1361 NOTHROW;
1362 GC_NOTRIGGER;
1363 MODE_PREEMPTIVE;
1364 }
1365 CONTRACTL_END;
1366
1367 MethodDesc * pMD = GetMethodDesc();
1368 _ASSERTE(pMD->GetCodeVersionManager()->LockOwnedByCurrentThread());
1369
1370 // It isn't safe to overwrite the original method prolog with a jmp because threads might
1371 // be at an IP in the middle of the jump stamp already. However converting between different
1372 // jump stamps is OK (when done atomically) because this only changes the jmp target, not
1373 // instruction boundaries.
1374 if (GetJumpStampState() == JumpStampNone && !fEESuspended)
1375 {
1376 return CORPROF_E_RUNTIME_SUSPEND_REQUIRED;
1377 }
1378
1379 // Beginning of originally JITted code containing the jmp that we will redirect.
1380 BYTE * pbCode = (BYTE*)pMD->GetNativeCode();
1381
1382 // Remember what we're stamping our jump on top of, so we can replace it during a
1383 // revert.
1384 if (GetJumpStampState() == JumpStampNone)
1385 {
        for (size_t i = 0; i < sizeof(m_rgSavedCode); i++)
1387 {
1388 m_rgSavedCode[i] = *FirstCodeByteAddr(pbCode + i, DebuggerController::GetPatchTable()->GetPatch((CORDB_ADDRESS_TYPE *)(pbCode + i)));
1389 }
1390 }
1391
1392#if defined(_X86_) || defined(_AMD64_)
1393
1394 HRESULT hr = S_OK;
1395 {
1396 DebuggerController::ControllerLockHolder lockController;
1397
        // This will throw for out-of-memory, so don't write anything until
        // after it succeeds.
        // The call below will leak/cache/reuse the jump stub.
1401 INT32 offset = 0;
1402 EX_TRY
1403 {
1404 offset = rel32UsingJumpStub(
1405 reinterpret_cast<INT32 UNALIGNED *>(&pbCode[1]), // base of offset
1406 pRejittedCode, // target of jump
1407 pMD,
1408 pMD->GetLoaderAllocator());
1409 }
1410 EX_CATCH_HRESULT(hr);
1411 _ASSERT(hr == S_OK || hr == E_OUTOFMEMORY);
1412 if (FAILED(hr))
1413 {
1414 return hr;
1415 }
1416 // For validation later, remember what pbCode is right now
1417 INT64 i64OldValue = *(INT64 *)pbCode;
1418
1419 // Assemble the INT64 of the new code bytes to write. Start with what's there now
1420 INT64 i64NewValue = i64OldValue;
1421 LPBYTE pbNewValue = (LPBYTE)&i64NewValue;
1422
1423 // First byte becomes a rel32 jmp instruction (if it wasn't already)
1424 *pbNewValue = X86_INSTR_JMP_REL32;
1425 // Next 4 bytes are the jmp target (offset to jmp stub)
1426 INT32 UNALIGNED * pnOffset = reinterpret_cast<INT32 UNALIGNED *>(&pbNewValue[1]);
1427 *pnOffset = offset;
1428
1429 hr = UpdateJumpStampHelper(pbCode, i64OldValue, i64NewValue, !fEESuspended);
1430 _ASSERTE(hr == S_OK || (hr == CORPROF_E_RUNTIME_SUSPEND_REQUIRED && !fEESuspended));
1431 }
1432 if (FAILED(hr))
1433 {
1434 return hr;
1435 }
1436
1437#else // _X86_ || _AMD64_
1438#error "Need to define a way to jump-stamp the prolog in a safe way for this platform"
1439#endif // _X86_ || _AMD64_
1440
1441 // State transition
1442 SetJumpStampState(JumpStampToActiveVersion);
1443 return S_OK;
1444}
1445
1446
1447//---------------------------------------------------------------------------------------
1448//
// Poke the JITted code to satisfy a revert request (or to perform an implicit revert as
1450// part of a second, third, etc. rejit request). Reinstates the originally JITted code
1451// that had been jump-stamped over to perform a prior rejit.
1452//
1453// Arguments
1454// fEESuspended - TRUE if the caller keeps the EE suspended during this call
1455//
1456//
1457// Return Value:
1458// S_OK to indicate the revert succeeded,
1459// CORPROF_E_RUNTIME_SUSPEND_REQUIRED to indicate the jumpstamp hasn't been reverted
1460// and EE suspension will be needed for success
1461// other failure HRESULT indicating what went wrong.
1462//
1463// Assumptions:
1464// Caller must be holding the owning ReJitManager's table crst.
1465//
1466HRESULT MethodDescVersioningState::UndoJumpStampNativeCode(BOOL fEESuspended)
1467{
1468 CONTRACTL
1469 {
1470 NOTHROW;
1471 GC_NOTRIGGER;
1472 MODE_ANY;
1473 }
1474 CONTRACTL_END;
1475
1476 _ASSERTE(GetMethodDesc()->GetCodeVersionManager()->LockOwnedByCurrentThread());
1477 if (GetJumpStampState() == JumpStampNone)
1478 {
1479 return S_OK;
1480 }
1481
1482 _ASSERTE(m_rgSavedCode[0] != 0); // saved code should not start with 0
1483
1484 BYTE * pbCode = (BYTE*)GetMethodDesc()->GetNativeCode();
1485 DebuggerController::ControllerLockHolder lockController;
1486
1487#if defined(_X86_) || defined(_AMD64_)
1488 _ASSERTE(m_rgSavedCode[0] != X86_INSTR_JMP_REL32);
1489 _ASSERTE(*FirstCodeByteAddr(pbCode, DebuggerController::GetPatchTable()->GetPatch((CORDB_ADDRESS_TYPE *)pbCode)) == X86_INSTR_JMP_REL32);
1490#else
1491#error "Need to define a way to jump-stamp the prolog in a safe way for this platform"
1492#endif // _X86_ || _AMD64_
1493
1494 // For the interlocked compare, remember what pbCode is right now
1495 INT64 i64OldValue = *(INT64 *)pbCode;
1496 // Assemble the INT64 of the new code bytes to write. Start with what's there now
1497 INT64 i64NewValue = i64OldValue;
1498 memcpy(LPBYTE(&i64NewValue), m_rgSavedCode, sizeof(m_rgSavedCode));
1499 HRESULT hr = UpdateJumpStampHelper(pbCode, i64OldValue, i64NewValue, !fEESuspended);
1500 _ASSERTE(hr == S_OK || (hr == CORPROF_E_RUNTIME_SUSPEND_REQUIRED && !fEESuspended));
1501 if (hr != S_OK)
1502 return hr;
1503
1504 // Transition state of this ReJitInfo to indicate the MD no longer has any jump stamp
1505 SetJumpStampState(JumpStampNone);
1506 return S_OK;
1507}
1508#endif
1509
1510//---------------------------------------------------------------------------------------
1511//
1512// This is called to modify the jump-stamp area, the first ReJitInfo::JumpStubSize bytes
1513// in the method's code.
1514//
1515// Notes:
1516// Callers use this method in a variety of circumstances:
1517// a) when the code is unpublished (fContentionPossible == FALSE)
1518// b) when the caller has taken the ThreadStoreLock and suspended the EE
1519// (fContentionPossible == FALSE)
1520// c) when the code is published, the EE isn't suspended, and the jumpstamp
1521// area consists of a single 5 byte long jump instruction
1522// (fContentionPossible == TRUE)
1523// This method will attempt to alter the jump-stamp even if the caller has not prevented
// contention, but there is no guarantee it will be successful. When the caller has prevented
1525// contention, then success is assured. Callers may opportunistically try without
1526// EE suspension, and then upgrade to EE suspension if the first attempt fails.
1527//
1528// Assumptions:
1529// This rejit manager's table crst should be held by the caller or fContentionPossible==FALSE
1530// The debugger patch table lock should be held by the caller
1531//
1532// Arguments:
1533// pbCode - pointer to the code where the jump stamp is placed
1534// i64OldValue - the bytes which should currently be at the start of the method code
1535// i64NewValue - the new bytes which should be written at the start of the method code
1536// fContentionPossible - See the Notes section above.
1537//
1538// Returns:
// S_OK => the jumpstamp has been successfully updated.
1540// CORPROF_E_RUNTIME_SUSPEND_REQUIRED => the jumpstamp remains unchanged (preventing contention will be necessary)
1541// other failing HR => VirtualProtect failed, the jumpstamp remains unchanged
1542//
1543#ifndef DACCESS_COMPILE
1544HRESULT MethodDescVersioningState::UpdateJumpStampHelper(BYTE* pbCode, INT64 i64OldValue, INT64 i64NewValue, BOOL fContentionPossible)
1545{
1546 CONTRACTL
1547 {
1548 NOTHROW;
1549 GC_NOTRIGGER;
1550 MODE_ANY;
1551 }
1552 CONTRACTL_END;
1553
1554 MethodDesc * pMD = GetMethodDesc();
1555 _ASSERTE(pMD->GetCodeVersionManager()->LockOwnedByCurrentThread() || !fContentionPossible);
1556
1557 // When ReJIT is enabled, method entrypoints are always at least 8-byte aligned (see
1558 // code:EEJitManager::allocCode), so we can do a single 64-bit interlocked operation
1559 // to update the jump target. However, some code may have gotten compiled before
1560 // the profiler had a chance to enable ReJIT (e.g., NGENd code, or code JITted
1561 // before a profiler attaches). In such cases, we cannot rely on a simple
1562 // interlocked operation, and instead must suspend the runtime to ensure we can
1563 // safely update the jmp instruction.
1564 //
    // This method doesn't verify that the method is actually safe to rejit; we expect
1566 // callers to do that. At the moment NGEN'ed code is safe to rejit even if
1567 // it is unaligned, but code generated before the profiler attaches is not.
1568 if (fContentionPossible && !(IS_ALIGNED(pbCode, sizeof(INT64))))
1569 {
1570 return CORPROF_E_RUNTIME_SUSPEND_REQUIRED;
1571 }
1572
1573 // The debugging API may also try to write to this function (though
1574 // with an int 3 for breakpoint purposes). Coordinate with the debugger so we know
1575 // whether we can safely patch the actual code, or instead write to the debugger's
1576 // buffer.
1577 if (fContentionPossible)
1578 {
1579 for (CORDB_ADDRESS_TYPE* pbProbeAddr = pbCode; pbProbeAddr < pbCode + MethodDescVersioningState::JumpStubSize; pbProbeAddr++)
1580 {
1581 if (NULL != DebuggerController::GetPatchTable()->GetPatch(pbProbeAddr))
1582 {
1583 return CORPROF_E_RUNTIME_SUSPEND_REQUIRED;
1584 }
1585 }
1586 }
1587
1588#if defined(_X86_) || defined(_AMD64_)
1589
1590 DWORD oldProt;
1591 if (!ClrVirtualProtect((LPVOID)pbCode, 8, PAGE_EXECUTE_READWRITE, &oldProt))
1592 {
1593 return HRESULT_FROM_WIN32(GetLastError());
1594 }
1595
1596 if (fContentionPossible)
1597 {
1598 INT64 i64InterlockReportedOldValue = FastInterlockCompareExchangeLong((INT64 *)pbCode, i64NewValue, i64OldValue);
1599 // Since changes to these bytes are protected by this rejitmgr's m_crstTable, we
1600 // shouldn't have two writers conflicting.
1601 _ASSERTE(i64InterlockReportedOldValue == i64OldValue);
1602 }
1603 else
1604 {
1605 // In this path the caller ensures:
1606 // a) no thread will execute through the prologue area we are modifying
1607 // b) no thread is stopped in a prologue such that it resumes in the middle of code we are modifying
1608 // c) no thread is doing a debugger patch skip operation in which an unmodified copy of the method's
1609 // code could be executed from a patch skip buffer.
1610
        // PERF: when no debugger is attached we might still want a faster path through here
        // that skips all the patch checks
1613 for (int i = 0; i < MethodDescVersioningState::JumpStubSize; i++)
1614 {
1615 *FirstCodeByteAddr(pbCode + i, DebuggerController::GetPatchTable()->GetPatch(pbCode + i)) = ((BYTE*)&i64NewValue)[i];
1616 }
1617 }
1618
1619 if (oldProt != PAGE_EXECUTE_READWRITE)
1620 {
        // The CLR codebase in many locations simply ignores failures to restore the page protections.
        // It's true that it isn't a problem functionally, but it seems a bit sketchy;
        // I am following the convention for now.
1624 ClrVirtualProtect((LPVOID)pbCode, 8, oldProt, &oldProt);
1625 }
1626
1627 FlushInstructionCache(GetCurrentProcess(), pbCode, MethodDescVersioningState::JumpStubSize);
1628 return S_OK;
1629
1630#else // _X86_ || _AMD64_
1631#error "Need to define a way to jump-stamp the prolog in a safe way for this platform"
1632#endif // _X86_ || _AMD64_
1633}
1634#endif
1635#endif // FEATURE_JUMPSTAMP
1636
1637BOOL MethodDescVersioningState::IsDefaultVersionActiveChild() const
1638{
1639 LIMITED_METHOD_DAC_CONTRACT;
1640 return (m_flags & IsDefaultVersionActiveChildFlag) != 0;
1641}
1642#ifndef DACCESS_COMPILE
1643void MethodDescVersioningState::SetDefaultVersionActiveChildFlag(BOOL isActive)
1644{
1645 LIMITED_METHOD_CONTRACT;
1646 if (isActive)
1647 {
1648 m_flags |= IsDefaultVersionActiveChildFlag;
1649 }
1650 else
1651 {
1652 m_flags &= ~IsDefaultVersionActiveChildFlag;
1653 }
1654}
1655
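// Prepends the node to the head of this method's singly-linked list of explicit
// native code versions (the list NativeCodeVersionIterator walks).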
1656void MethodDescVersioningState::LinkNativeCodeVersionNode(NativeCodeVersionNode* pNativeCodeVersionNode)
1657{
1658 LIMITED_METHOD_CONTRACT;
1659 pNativeCodeVersionNode->m_pNextMethodDescSibling = m_pFirstVersionNode;
1660 m_pFirstVersionNode = pNativeCodeVersionNode;
1661}
1662#endif
1663
1664ILCodeVersioningState::ILCodeVersioningState(PTR_Module pModule, mdMethodDef methodDef) :
1665 m_activeVersion(ILCodeVersion(pModule,methodDef)),
1666 m_pFirstVersionNode(dac_cast<PTR_ILCodeVersionNode>(nullptr)),
1667 m_pModule(pModule),
1668 m_methodDef(methodDef)
1669{}
1670
1671
1672ILCodeVersioningState::Key::Key() :
1673 m_pModule(dac_cast<PTR_Module>(nullptr)),
1674 m_methodDef(0)
1675{}
1676
1677ILCodeVersioningState::Key::Key(PTR_Module pModule, mdMethodDef methodDef) :
1678 m_pModule(pModule),
1679 m_methodDef(methodDef)
1680{}
1681
1682size_t ILCodeVersioningState::Key::Hash() const
1683{
1684 LIMITED_METHOD_DAC_CONTRACT;
1685 return (size_t)(dac_cast<TADDR>(m_pModule) ^ m_methodDef);
1686}
1687
1688bool ILCodeVersioningState::Key::operator==(const Key & rhs) const
1689{
1690 LIMITED_METHOD_DAC_CONTRACT;
1691 return (m_pModule == rhs.m_pModule) && (m_methodDef == rhs.m_methodDef);
1692}
1693
1694ILCodeVersioningState::Key ILCodeVersioningState::GetKey() const
1695{
1696 LIMITED_METHOD_DAC_CONTRACT;
1697 return Key(m_pModule, m_methodDef);
1698}
1699
1700ILCodeVersion ILCodeVersioningState::GetActiveVersion() const
1701{
1702 LIMITED_METHOD_DAC_CONTRACT;
1703 return m_activeVersion;
1704}
1705
1706PTR_ILCodeVersionNode ILCodeVersioningState::GetFirstVersionNode() const
1707{
1708 LIMITED_METHOD_DAC_CONTRACT;
1709 return m_pFirstVersionNode;
1710}
1711
1712#ifndef DACCESS_COMPILE
1713void ILCodeVersioningState::SetActiveVersion(ILCodeVersion ilActiveCodeVersion)
1714{
1715 LIMITED_METHOD_CONTRACT;
1716 m_activeVersion = ilActiveCodeVersion;
1717}
1718
1719void ILCodeVersioningState::LinkILCodeVersionNode(ILCodeVersionNode* pILCodeVersionNode)
1720{
1721 LIMITED_METHOD_CONTRACT;
1722 pILCodeVersionNode->SetNextILVersionNode(m_pFirstVersionNode);
1723 m_pFirstVersionNode = pILCodeVersionNode;
1724}
1725#endif
1726
1727CodeVersionManager::CodeVersionManager()
1728{}
1729
1730//---------------------------------------------------------------------------------------
1731//
1732// Called from BaseDomain::BaseDomain to do any constructor-time initialization.
1733// Presently, this takes care of initializing the Crst.
1734//
1735
1736void CodeVersionManager::PreInit()
1737{
1738 CONTRACTL
1739 {
1740 THROWS;
1741 GC_TRIGGERS;
1742 CAN_TAKE_LOCK;
1743 MODE_ANY;
1744 }
1745 CONTRACTL_END;
1746
1747#ifndef DACCESS_COMPILE
1748 m_crstTable.Init(
1749 CrstReJITDomainTable,
1750 CrstFlags(CRST_UNSAFE_ANYMODE | CRST_DEBUGGER_THREAD | CRST_REENTRANCY | CRST_TAKEN_DURING_SHUTDOWN));
1751#endif // DACCESS_COMPILE
1752}
1753
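// RAII holder that acquires the code version manager's table lock for the duration of a
// scope. Illustrative usage only (a sketch, not a prescribed pattern - callers may hold
// the lock at a higher level instead):
//
//     {
//         CodeVersionManager::TableLockHolder lock(pCodeVersionManager);
//         ILCodeVersion active = pCodeVersionManager->GetActiveILCodeVersion(pMethod);
//         // ... inspect or update versioning state while the lock is held ...
//     }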
1754CodeVersionManager::TableLockHolder::TableLockHolder(CodeVersionManager* pCodeVersionManager) :
1755 CrstHolder(&pCodeVersionManager->m_crstTable)
1756{
1757}
1758#ifndef DACCESS_COMPILE
1759void CodeVersionManager::EnterLock()
1760{
1761 m_crstTable.Enter();
1762}
1763void CodeVersionManager::LeaveLock()
1764{
1765 m_crstTable.Leave();
1766}
1767#endif
1768
1769#ifdef DEBUG
1770BOOL CodeVersionManager::LockOwnedByCurrentThread() const
1771{
1772 LIMITED_METHOD_DAC_CONTRACT;
1773#ifdef DACCESS_COMPILE
1774 return TRUE;
1775#else
1776 return const_cast<CrstExplicitInit &>(m_crstTable).OwnedByCurrentThread();
1777#endif
1778}
1779#endif
1780
1781PTR_ILCodeVersioningState CodeVersionManager::GetILCodeVersioningState(PTR_Module pModule, mdMethodDef methodDef) const
1782{
1783 LIMITED_METHOD_DAC_CONTRACT;
1784 ILCodeVersioningState::Key key = ILCodeVersioningState::Key(pModule, methodDef);
1785 return m_ilCodeVersioningStateMap.Lookup(key);
1786}
1787
1788PTR_MethodDescVersioningState CodeVersionManager::GetMethodDescVersioningState(PTR_MethodDesc pClosedMethodDesc) const
1789{
1790 LIMITED_METHOD_DAC_CONTRACT;
1791 return m_methodDescVersioningStateMap.Lookup(pClosedMethodDesc);
1792}
1793
1794#ifndef DACCESS_COMPILE
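// Looks up the ILCodeVersioningState for (pModule, methodDef), lazily allocating one and adding
// it to the lookup map on first use. Returns E_OUTOFMEMORY if allocation or insertion fails;
// on success *ppILCodeVersioningState points at the (possibly pre-existing) state.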
1795HRESULT CodeVersionManager::GetOrCreateILCodeVersioningState(Module* pModule, mdMethodDef methodDef, ILCodeVersioningState** ppILCodeVersioningState)
1796{
1797 LIMITED_METHOD_CONTRACT;
1798 HRESULT hr = S_OK;
1799 ILCodeVersioningState* pILCodeVersioningState = GetILCodeVersioningState(pModule, methodDef);
1800 if (pILCodeVersioningState == NULL)
1801 {
1802 pILCodeVersioningState = new (nothrow) ILCodeVersioningState(pModule, methodDef);
1803 if (pILCodeVersioningState == NULL)
1804 {
1805 return E_OUTOFMEMORY;
1806 }
1807 EX_TRY
1808 {
1809 // This throws when out of memory, but remains internally
1810 // consistent (without adding the new element)
1811 m_ilCodeVersioningStateMap.Add(pILCodeVersioningState);
1812 }
1813 EX_CATCH_HRESULT(hr);
1814 if (FAILED(hr))
1815 {
1816 delete pILCodeVersioningState;
1817 return hr;
1818 }
1819 }
1820 *ppILCodeVersioningState = pILCodeVersioningState;
1821 return S_OK;
1822}
1823
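// Same lazy get-or-create pattern as above, but for the MethodDescVersioningState that tracks
// the native code versions of a particular method instantiation.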
1824HRESULT CodeVersionManager::GetOrCreateMethodDescVersioningState(MethodDesc* pMethod, MethodDescVersioningState** ppMethodVersioningState)
1825{
1826 LIMITED_METHOD_CONTRACT;
1827 HRESULT hr = S_OK;
1828 MethodDescVersioningState* pMethodVersioningState = m_methodDescVersioningStateMap.Lookup(pMethod);
1829 if (pMethodVersioningState == NULL)
1830 {
1831 pMethodVersioningState = new (nothrow) MethodDescVersioningState(pMethod);
1832 if (pMethodVersioningState == NULL)
1833 {
1834 return E_OUTOFMEMORY;
1835 }
1836 EX_TRY
1837 {
1838 // This throws when out of memory, but remains internally
1839 // consistent (without adding the new element)
1840 m_methodDescVersioningStateMap.Add(pMethodVersioningState);
1841 }
1842 EX_CATCH_HRESULT(hr);
1843 if (FAILED(hr))
1844 {
1845 delete pMethodVersioningState;
1846 return hr;
1847 }
1848 }
1849 *ppMethodVersioningState = pMethodVersioningState;
1850 return S_OK;
1851}
1852#endif // DACCESS_COMPILE
1853
1854DWORD CodeVersionManager::GetNonDefaultILVersionCount()
1855{
1856 LIMITED_METHOD_DAC_CONTRACT;
1857
1858    // This function is legal to call WITHOUT taking the lock.
1859    // It is used as a quick check for whether work might be needed, without paying the
1860    // overhead of acquiring the lock and doing dictionary lookups.
1861 return m_ilCodeVersioningStateMap.GetCount();
1862}
1863
1864ILCodeVersionCollection CodeVersionManager::GetILCodeVersions(PTR_MethodDesc pMethod)
1865{
1866 LIMITED_METHOD_DAC_CONTRACT;
1867 _ASSERTE(LockOwnedByCurrentThread());
1868 return GetILCodeVersions(dac_cast<PTR_Module>(pMethod->GetModule()), pMethod->GetMemberDef());
1869}
1870
1871ILCodeVersionCollection CodeVersionManager::GetILCodeVersions(PTR_Module pModule, mdMethodDef methodDef)
1872{
1873 LIMITED_METHOD_DAC_CONTRACT;
1874 _ASSERTE(LockOwnedByCurrentThread());
1875 return ILCodeVersionCollection(pModule, methodDef);
1876}
1877
1878ILCodeVersion CodeVersionManager::GetActiveILCodeVersion(PTR_MethodDesc pMethod)
1879{
1880 LIMITED_METHOD_DAC_CONTRACT;
1881 _ASSERTE(LockOwnedByCurrentThread());
1882 return GetActiveILCodeVersion(dac_cast<PTR_Module>(pMethod->GetModule()), pMethod->GetMemberDef());
1883}
1884
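// Returns the active IL code version for (pModule, methodDef). If no ILCodeVersioningState has
// been created for the method yet, the synthesized default version is returned.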
1885ILCodeVersion CodeVersionManager::GetActiveILCodeVersion(PTR_Module pModule, mdMethodDef methodDef)
1886{
1887 LIMITED_METHOD_DAC_CONTRACT;
1888 _ASSERTE(LockOwnedByCurrentThread());
1889 ILCodeVersioningState* pILCodeVersioningState = GetILCodeVersioningState(pModule, methodDef);
1890 if (pILCodeVersioningState == NULL)
1891 {
1892 return ILCodeVersion(pModule, methodDef);
1893 }
1894 else
1895 {
1896 return pILCodeVersioningState->GetActiveVersion();
1897 }
1898}
1899
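// Finds the IL code version with the given ReJITID by walking the method's IL code version
// collection. Returns a NULL ILCodeVersion if no version with that id exists. When
// FEATURE_REJIT is not defined, only id 0 (the default version) is expected.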
1900ILCodeVersion CodeVersionManager::GetILCodeVersion(PTR_MethodDesc pMethod, ReJITID rejitId)
1901{
1902 LIMITED_METHOD_DAC_CONTRACT;
1903 _ASSERTE(LockOwnedByCurrentThread());
1904
1905#ifdef FEATURE_REJIT
1906 ILCodeVersionCollection collection = GetILCodeVersions(pMethod);
1907 for (ILCodeVersionIterator cur = collection.Begin(), end = collection.End(); cur != end; cur++)
1908 {
1909 if (cur->GetVersionId() == rejitId)
1910 {
1911 return *cur;
1912 }
1913 }
1914 return ILCodeVersion();
1915#else // FEATURE_REJIT
1916 _ASSERTE(rejitId == 0);
1917 return ILCodeVersion(dac_cast<PTR_Module>(pMethod->GetModule()), pMethod->GetMemberDef());
1918#endif // FEATURE_REJIT
1919}
1920
1921NativeCodeVersionCollection CodeVersionManager::GetNativeCodeVersions(PTR_MethodDesc pMethod) const
1922{
1923 LIMITED_METHOD_DAC_CONTRACT;
1924 _ASSERTE(LockOwnedByCurrentThread());
1925 return NativeCodeVersionCollection(pMethod, ILCodeVersion());
1926}
1927
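// Finds the native code version whose code begins at codeStartAddress by a linear walk over all
// native code versions of the method; returns a NULL NativeCodeVersion if none matches.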
1928NativeCodeVersion CodeVersionManager::GetNativeCodeVersion(PTR_MethodDesc pMethod, PCODE codeStartAddress) const
1929{
1930 LIMITED_METHOD_DAC_CONTRACT;
1931 _ASSERTE(LockOwnedByCurrentThread());
1932
1933 NativeCodeVersionCollection nativeCodeVersions = GetNativeCodeVersions(pMethod);
1934 for (NativeCodeVersionIterator cur = nativeCodeVersions.Begin(), end = nativeCodeVersions.End(); cur != end; cur++)
1935 {
1936 if (cur->GetNativeCode() == codeStartAddress)
1937 {
1938 return *cur;
1939 }
1940 }
1941 return NativeCodeVersion();
1942}
1943
1944#ifndef DACCESS_COMPILE
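// Creates a new explicit IL code version (an ILCodeVersionNode) with the given rejitId and links
// it into the method's ILCodeVersioningState. The new version is not made active here; that is
// done separately via SetActiveILCodeVersions.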
1945HRESULT CodeVersionManager::AddILCodeVersion(Module* pModule, mdMethodDef methodDef, ReJITID rejitId, ILCodeVersion* pILCodeVersion)
1946{
1947 LIMITED_METHOD_CONTRACT;
1948 _ASSERTE(LockOwnedByCurrentThread());
1949
1950 ILCodeVersioningState* pILCodeVersioningState;
1951 HRESULT hr = GetOrCreateILCodeVersioningState(pModule, methodDef, &pILCodeVersioningState);
1952 if (FAILED(hr))
1953 {
1954 _ASSERTE(hr == E_OUTOFMEMORY);
1955 return hr;
1956 }
1957
1958 ILCodeVersionNode* pILCodeVersionNode = new (nothrow) ILCodeVersionNode(pModule, methodDef, rejitId);
1959 if (pILCodeVersionNode == NULL)
1960 {
1961 return E_OUTOFMEMORY;
1962 }
1963 pILCodeVersioningState->LinkILCodeVersionNode(pILCodeVersionNode);
1964 *pILCodeVersion = ILCodeVersion(pILCodeVersionNode);
1965 return S_OK;
1966}
1967
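// Makes the given IL code versions the active ones and publishes the corresponding native code
// for every method instantiation that is already loaded. The work happens in three steps:
// (1) under the lock, record the new active IL versions; (2) outside the lock, enumerate the
// closed MethodDescs that need updating; (3) under the lock again, publish the active native
// code version for each of those MethodDescs, recording any publish errors for the caller.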
1968HRESULT CodeVersionManager::SetActiveILCodeVersions(ILCodeVersion* pActiveVersions, DWORD cActiveVersions, BOOL fEESuspended, CDynArray<CodePublishError> * pErrors)
1969{
1970 // If the IL version is in the shared domain we need to iterate all domains
1971 // looking for instantiations. The domain iterator lock is bigger than
1972    // the code version manager lock so we can't do this atomically. In one atomic
1973    // update the bookkeeping for IL versioning happens, and then in a second
1974    // update the active native code versions change (the code jump stamps and
1975    // precodes are updated).
1976 //
1977 // Note: For all domains other than the shared AppDomain we could do this
1978 // atomically, but for now we use the lowest common denominator for all
1979 // domains.
1980 CONTRACTL
1981 {
1982 NOTHROW;
1983 GC_TRIGGERS;
1984 MODE_PREEMPTIVE;
1985 CAN_TAKE_LOCK;
1986 PRECONDITION(CheckPointer(pActiveVersions));
1987 PRECONDITION(CheckPointer(pErrors, NULL_OK));
1988 }
1989 CONTRACTL_END;
1990 _ASSERTE(!LockOwnedByCurrentThread());
1991 HRESULT hr = S_OK;
1992
1993#ifdef DEBUG
1994 for (DWORD i = 0; i < cActiveVersions; i++)
1995 {
1996 ILCodeVersion activeVersion = pActiveVersions[i];
1997 if (activeVersion.IsNull())
1998 {
1999 _ASSERTE(!"The active IL version can't be NULL");
2000 }
2001 }
2002#endif
2003
2004    // step 1 - mark the IL versions as being active. This ensures that
2005    // any new method instantiations added after this point will bind to
2006    // the correct version.
2007 {
2008        TableLockHolder lock(this);
2009 for (DWORD i = 0; i < cActiveVersions; i++)
2010 {
2011 ILCodeVersion activeVersion = pActiveVersions[i];
2012 ILCodeVersioningState* pILCodeVersioningState = NULL;
2013 if (FAILED(hr = GetOrCreateILCodeVersioningState(activeVersion.GetModule(), activeVersion.GetMethodDef(), &pILCodeVersioningState)))
2014 {
2015 _ASSERTE(hr == E_OUTOFMEMORY);
2016 return hr;
2017 }
2018 pILCodeVersioningState->SetActiveVersion(activeVersion);
2019 }
2020 }
2021
2022 // step 2 - determine the set of pre-existing method instantiations
2023
2024 // a parallel array to activeVersions
2025 // for each ILCodeVersion in activeVersions, this lists the set
2026    // of MethodDescs that will need to be updated
2027 CDynArray<CDynArray<MethodDesc*>> methodDescsToUpdate;
2028 CDynArray<CodePublishError> errorRecords;
2029 for (DWORD i = 0; i < cActiveVersions; i++)
2030 {
2031 CDynArray<MethodDesc*>* pMethodDescs = methodDescsToUpdate.Append();
2032 if (pMethodDescs == NULL)
2033 {
2034 return E_OUTOFMEMORY;
2035 }
2036 *pMethodDescs = CDynArray<MethodDesc*>();
2037
2038 MethodDesc* pLoadedMethodDesc = pActiveVersions[i].GetModule()->LookupMethodDef(pActiveVersions[i].GetMethodDef());
2039 if (FAILED(hr = CodeVersionManager::EnumerateClosedMethodDescs(pLoadedMethodDesc, pMethodDescs, &errorRecords)))
2040 {
2041 _ASSERTE(hr == E_OUTOFMEMORY);
2042 return hr;
2043 }
2044 }
2045
2046 // step 3 - update each pre-existing method instantiation
2047 {
2048 TableLockHolder lock(this);
2049 for (DWORD i = 0; i < cActiveVersions; i++)
2050 {
2051            // It's possible the active IL version has changed if
2052 // another caller made an update while this method wasn't
2053 // holding the lock. We will ensure that we synchronize
2054 // publishing to whatever version is currently active, even
2055 // if that isn't the IL version we set above.
2056 //
2057 // Note: Although we attempt to handle this case gracefully
2058 // it isn't recommended for callers to do this. Racing two calls
2059 // that set the IL version to different results means it will be
2060 // completely arbitrary which version wins.
2061 ILCodeVersion requestedActiveILVersion = pActiveVersions[i];
2062 ILCodeVersion activeILVersion = GetActiveILCodeVersion(requestedActiveILVersion.GetModule(), requestedActiveILVersion.GetMethodDef());
2063
2064 CDynArray<MethodDesc*> methodDescs = methodDescsToUpdate[i];
2065 for (int j = 0; j < methodDescs.Count(); j++)
2066 {
2067                // Get the active child code version for this method instantiation (it might be NULL, that is OK)
2068 NativeCodeVersion activeNativeChild = activeILVersion.GetActiveNativeCodeVersion(methodDescs[j]);
2069
2070 // Publish that child version, because it is the active native child of the active IL version
2071 // Failing to publish is non-fatal, but we do record it so the caller is aware
2072 if (FAILED(hr = PublishNativeCodeVersion(methodDescs[j], activeNativeChild, fEESuspended)))
2073 {
2074 if (FAILED(hr = AddCodePublishError(activeILVersion.GetModule(), activeILVersion.GetMethodDef(), methodDescs[j], hr, &errorRecords)))
2075 {
2076 _ASSERTE(hr == E_OUTOFMEMORY);
2077 return hr;
2078 }
2079 }
2080 }
2081 }
2082 }
2083
2084 return S_OK;
2085}
2086
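// Allocates a new NativeCodeVersionNode under the given IL code version and links it into the
// method's versioning state. The first native code version added for an IL version automatically
// becomes its active child.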
2087HRESULT CodeVersionManager::AddNativeCodeVersion(
2088 ILCodeVersion ilCodeVersion,
2089 MethodDesc* pClosedMethodDesc,
2090 NativeCodeVersion::OptimizationTier optimizationTier,
2091 NativeCodeVersion* pNativeCodeVersion)
2092{
2093 LIMITED_METHOD_CONTRACT;
2094 _ASSERTE(LockOwnedByCurrentThread());
2095
2096 MethodDescVersioningState* pMethodVersioningState;
2097 HRESULT hr = GetOrCreateMethodDescVersioningState(pClosedMethodDesc, &pMethodVersioningState);
2098 if (FAILED(hr))
2099 {
2100 _ASSERTE(hr == E_OUTOFMEMORY);
2101 return hr;
2102 }
2103
2104 NativeCodeVersionId newId = pMethodVersioningState->AllocateVersionId();
2105 NativeCodeVersionNode* pNativeCodeVersionNode = new (nothrow) NativeCodeVersionNode(newId, pClosedMethodDesc, ilCodeVersion.GetVersionId(), optimizationTier);
2106 if (pNativeCodeVersionNode == NULL)
2107 {
2108 return E_OUTOFMEMORY;
2109 }
2110
2111 pMethodVersioningState->LinkNativeCodeVersionNode(pNativeCodeVersionNode);
2112
2113 // the first child added is automatically considered the active one.
2114 if (ilCodeVersion.GetActiveNativeCodeVersion(pClosedMethodDesc).IsNull())
2115 {
2116 pNativeCodeVersionNode->SetActiveChildFlag(TRUE);
2117 _ASSERTE(!ilCodeVersion.GetActiveNativeCodeVersion(pClosedMethodDesc).IsNull());
2118
2119 // the new child shouldn't have any native code. If it did we might need to
2120 // publish that code as part of adding the node which would require callers
2121 // to pay attention to GC suspension and we'd need to report publishing errors
2122 // back to them.
2123 _ASSERTE(pNativeCodeVersionNode->GetNativeCode() == NULL);
2124 }
2125 *pNativeCodeVersion = NativeCodeVersion(pNativeCodeVersionNode);
2126 return S_OK;
2127}
2128
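// Ensures the active native code version of a versionable method is compiled and published, and
// returns its entry point (NULL if a failure was reported). The loop below handles two
// complications: the active version can change whenever the lock is dropped (in which case we
// start over with the new version), and publishing may require the EE to be suspended (in which
// case we suspend and retry).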
2129PCODE CodeVersionManager::PublishVersionableCodeIfNecessary(MethodDesc* pMethodDesc, BOOL fCanBackpatchPrestub)
2130{
2131 STANDARD_VM_CONTRACT;
2132 _ASSERTE(!LockOwnedByCurrentThread());
2133 _ASSERTE(pMethodDesc->IsVersionable());
2134 _ASSERTE(!pMethodDesc->IsPointingToPrestub() || !pMethodDesc->IsVersionableWithJumpStamp());
2135
2136 HRESULT hr = S_OK;
2137 PCODE pCode = NULL;
2138 BOOL fIsJumpStampMethod = pMethodDesc->IsVersionableWithJumpStamp();
2139
2140 NativeCodeVersion activeVersion;
2141 {
2142 TableLockHolder lock(this);
2143 if (FAILED(hr = GetActiveILCodeVersion(pMethodDesc).GetOrCreateActiveNativeCodeVersion(pMethodDesc, &activeVersion)))
2144 {
2145 _ASSERTE(hr == E_OUTOFMEMORY);
2146 ReportCodePublishError(pMethodDesc->GetModule(), pMethodDesc->GetMemberDef(), pMethodDesc, hr);
2147 return NULL;
2148 }
2149 }
2150
2151 BOOL fEESuspend = FALSE;
2152 while (true)
2153 {
2154 // compile the code if needed
2155 pCode = activeVersion.GetNativeCode();
2156 if (pCode == NULL)
2157 {
2158 pCode = pMethodDesc->PrepareCode(activeVersion);
2159 }
2160
2161 // suspend in preparation for publishing if needed
2162 if (fEESuspend)
2163 {
2164 ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_FOR_REJIT);
2165 }
2166
2167 {
2168 TableLockHolder lock(this);
2169            // The common case is that newActiveVersion == activeVersion, however we did leave the lock so there is
2170            // a possibility that the active version has changed. If it has, we need to restart the compilation
2171 // and publishing process with the new active version instead.
2172 //
2173 // In theory it should be legitimate to break out of this loop and run the less recent active version,
2174 // because ultimately this is a race between one thread that is updating the version and another thread
2175 // trying to run the current version. However for back-compat with ReJIT we need to guarantee that
2176 // a versioning update at least as late as the profiler JitCompilationFinished callback wins the race.
2177 NativeCodeVersion newActiveVersion;
2178 if (FAILED(hr = GetActiveILCodeVersion(pMethodDesc).GetOrCreateActiveNativeCodeVersion(pMethodDesc, &newActiveVersion)))
2179 {
2180 _ASSERTE(hr == E_OUTOFMEMORY);
2181 ReportCodePublishError(pMethodDesc->GetModule(), pMethodDesc->GetMemberDef(), pMethodDesc, hr);
2182 pCode = NULL;
2183 break;
2184 }
2185 if (newActiveVersion != activeVersion)
2186 {
2187 activeVersion = newActiveVersion;
2188 }
2189 else
2190 {
2191 // if we aren't allowed to backpatch we are done
2192 if (!fCanBackpatchPrestub)
2193 {
2194 break;
2195 }
2196
2197 // attempt to publish the active version still under the lock
2198 if (FAILED(hr = PublishNativeCodeVersion(pMethodDesc, activeVersion, fEESuspend)))
2199 {
2200 // If we need an EESuspend to publish then start over. We have to leave the lock in order to suspend,
2201 // and when we leave the lock the active version might change again. However now we know that suspend is
2202 // necessary.
2203 if (hr == CORPROF_E_RUNTIME_SUSPEND_REQUIRED)
2204 {
2205 _ASSERTE(!fEESuspend);
2206 fEESuspend = true;
2207 continue; // skip RestartEE() below since SuspendEE() has not been called yet
2208 }
2209 else
2210 {
2211 ReportCodePublishError(pMethodDesc->GetModule(), pMethodDesc->GetMemberDef(), pMethodDesc, hr);
2212 pCode = NULL;
2213 break;
2214 }
2215 }
2216 else
2217 {
2218                    // success
2219 break;
2220 }
2221 }
2222 } // exit lock
2223
2224 if (fEESuspend)
2225 {
2226 ThreadSuspend::RestartEE(FALSE, TRUE);
2227 }
2228 }
2229
2230 // if the EE is still suspended from breaking in the middle of the loop, resume it
2231 if (fEESuspend)
2232 {
2233 ThreadSuspend::RestartEE(FALSE, TRUE);
2234 }
2235 return pCode;
2236}
2237
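// Makes nativeCodeVersion the version that executes when the method is invoked. For methods
// versionable with a Precode this retargets the precode (or resets it to the prestub when the
// version has no native code yet); otherwise it falls back to synchronizing the jump stamp,
// which may require the EE to be suspended.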
2238HRESULT CodeVersionManager::PublishNativeCodeVersion(MethodDesc* pMethod, NativeCodeVersion nativeCodeVersion, BOOL fEESuspended)
2239{
2240 // TODO: This function needs to make sure it does not change the precode's target if call counting is in progress. Track
2241 // whether call counting is currently being done for the method, and use a lock to ensure the expected precode target.
2242 LIMITED_METHOD_CONTRACT;
2243 _ASSERTE(LockOwnedByCurrentThread());
2244 _ASSERTE(pMethod->IsVersionable());
2245 HRESULT hr = S_OK;
2246 PCODE pCode = nativeCodeVersion.IsNull() ? NULL : nativeCodeVersion.GetNativeCode();
2247 if (pMethod->IsVersionableWithPrecode())
2248 {
2249 Precode* pPrecode = pMethod->GetOrCreatePrecode();
2250 if (pCode == NULL)
2251 {
2252 EX_TRY
2253 {
2254 pPrecode->Reset();
2255 }
2256 EX_CATCH_HRESULT(hr);
2257 return hr;
2258 }
2259 else
2260 {
2261 EX_TRY
2262 {
2263 pPrecode->SetTargetInterlocked(pCode, FALSE);
2264
2265 // SetTargetInterlocked() would return false if it lost the race with another thread. That is fine, this thread
2266 // can continue assuming it was successful, similarly to it successfully updating the target and another thread
2267 // updating the target again shortly afterwards.
2268 hr = S_OK;
2269 }
2270 EX_CATCH_HRESULT(hr);
2271 return hr;
2272 }
2273 }
2274 else
2275 {
2276#ifndef FEATURE_JUMPSTAMP
2277 _ASSERTE(!"This platform doesn't support JumpStamp but this method doesn't version with Precode,"
2278 " this method can't be updated");
2279 return E_FAIL;
2280#else
2281 MethodDescVersioningState* pVersioningState;
2282 if (FAILED(hr = GetOrCreateMethodDescVersioningState(pMethod, &pVersioningState)))
2283 {
2284 _ASSERTE(hr == E_OUTOFMEMORY);
2285 return hr;
2286 }
2287 return pVersioningState->SyncJumpStamp(nativeCodeVersion, fEESuspended);
2288#endif
2289 }
2290}
2291
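// Collects every loaded closed (fully instantiated) MethodDesc corresponding to the given
// method, searching all app domains when the defining module is shared, and appends them to
// pClosedMethodDescs. Instantiations that can't be versioned are skipped and recorded in
// pUnsupportedMethodErrors instead.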
2292// static
2293HRESULT CodeVersionManager::EnumerateClosedMethodDescs(
2294 MethodDesc* pMD,
2295 CDynArray<MethodDesc*> * pClosedMethodDescs,
2296 CDynArray<CodePublishError> * pUnsupportedMethodErrors)
2297{
2298 CONTRACTL
2299 {
2300 NOTHROW;
2301 GC_TRIGGERS;
2302 MODE_PREEMPTIVE;
2303 CAN_TAKE_LOCK;
2304 PRECONDITION(CheckPointer(pMD, NULL_OK));
2305 PRECONDITION(CheckPointer(pClosedMethodDescs));
2306 PRECONDITION(CheckPointer(pUnsupportedMethodErrors));
2307 }
2308 CONTRACTL_END;
2309 HRESULT hr = S_OK;
2310 if (pMD == NULL)
2311 {
2312 // nothing is loaded yet so we're done for this method.
2313 return S_OK;
2314 }
2315
2316    if (!pMD->HasClassOrMethodInstantiation())
2317    {
2318        // We have a JITted non-generic; record it and we're done for this method.
2319        MethodDesc ** ppMD = pClosedMethodDescs->Append();
2320        if (ppMD == NULL)
2321        {
2322            return E_OUTOFMEMORY;
2323        }
2324        *ppMD = pMD;
2325        return S_OK;
2326    }
2332
2333 // Ok, now the case of a generic function (or function on generic class), which
2334 // is loaded, and may thus have compiled instantiations.
2335 // It's impossible to get to any other kind of domain from the profiling API
2336 Module* pModule = pMD->GetModule();
2337 mdMethodDef methodDef = pMD->GetMemberDef();
2338 BaseDomain * pBaseDomainFromModule = pModule->GetDomain();
2339 _ASSERTE(pBaseDomainFromModule->IsAppDomain() ||
2340 pBaseDomainFromModule->IsSharedDomain());
2341
2342 if (pBaseDomainFromModule->IsSharedDomain())
2343 {
2344 // Iterate through all modules loaded into the shared domain, to
2345 // find all instantiations living in the shared domain. This will
2346 // include orphaned code (i.e., shared code used by ADs that have
2347 // all unloaded), which is good, because orphaned code could get
2348 // re-adopted if a new AD is created that can use that shared code
2349 hr = EnumerateDomainClosedMethodDescs(
2350 NULL, // NULL means to search SharedDomain instead of an AD
2351 pModule,
2352 methodDef,
2353 pClosedMethodDescs,
2354 pUnsupportedMethodErrors);
2355 }
2356 else
2357 {
2358 // Module is unshared, so just use the module's domain to find instantiations.
2359 hr = EnumerateDomainClosedMethodDescs(
2360 pBaseDomainFromModule->AsAppDomain(),
2361 pModule,
2362 methodDef,
2363 pClosedMethodDescs,
2364 pUnsupportedMethodErrors);
2365 }
2366 if (FAILED(hr))
2367 {
2368 _ASSERTE(hr == E_OUTOFMEMORY);
2369 return hr;
2370 }
2371
2372 // We want to iterate through all compilations of existing instantiations to
2373 // ensure they get marked for rejit. Note: There may be zero instantiations,
2374 // but we won't know until we try.
2375 if (pBaseDomainFromModule->IsSharedDomain())
2376 {
2377 // Iterate through all real domains, to find shared instantiations.
2378 AppDomainIterator appDomainIterator(TRUE);
2379 while (appDomainIterator.Next())
2380 {
2381 AppDomain * pAppDomain = appDomainIterator.GetDomain();
2382 hr = EnumerateDomainClosedMethodDescs(
2383 pAppDomain,
2384 pModule,
2385 methodDef,
2386 pClosedMethodDescs,
2387 pUnsupportedMethodErrors);
2388 if (FAILED(hr))
2389 {
2390 _ASSERTE(hr == E_OUTOFMEMORY);
2391 return hr;
2392 }
2393 }
2394 }
2395 return S_OK;
2396}
2397
2398// static
2399HRESULT CodeVersionManager::EnumerateDomainClosedMethodDescs(
2400 AppDomain * pAppDomainToSearch,
2401 Module* pModuleContainingMethodDef,
2402 mdMethodDef methodDef,
2403 CDynArray<MethodDesc*> * pClosedMethodDescs,
2404 CDynArray<CodePublishError> * pUnsupportedMethodErrors)
2405{
2406 CONTRACTL
2407 {
2408 NOTHROW;
2409 GC_NOTRIGGER;
2410 MODE_PREEMPTIVE;
2411 CAN_TAKE_LOCK;
2412 PRECONDITION(CheckPointer(pAppDomainToSearch, NULL_OK));
2413 PRECONDITION(CheckPointer(pModuleContainingMethodDef));
2414 PRECONDITION(CheckPointer(pClosedMethodDescs));
2415 PRECONDITION(CheckPointer(pUnsupportedMethodErrors));
2416 }
2417 CONTRACTL_END;
2418
2419 _ASSERTE(methodDef != mdTokenNil);
2420
2421 HRESULT hr;
2422
2423 BaseDomain * pDomainContainingGenericDefinition = pModuleContainingMethodDef->GetDomain();
2424
2425#ifdef _DEBUG
2426 // If the generic definition is not loaded domain-neutral, then all its
2427 // instantiations will also be non-domain-neutral and loaded into the same
2428 // domain as the generic definition. So the caller may only pass the
2429 // domain containing the generic definition as pAppDomainToSearch
2430 if (!pDomainContainingGenericDefinition->IsSharedDomain())
2431 {
2432 _ASSERTE(pDomainContainingGenericDefinition == pAppDomainToSearch);
2433 }
2434#endif //_DEBUG
2435
2436 // these are the default flags which won't actually be used in shared mode other than
2437 // asserting they were specified with their default values
2438 AssemblyIterationFlags assemFlags = (AssemblyIterationFlags)(kIncludeLoaded | kIncludeExecution);
2439 ModuleIterationOption moduleFlags = (ModuleIterationOption)kModIterIncludeLoaded;
2440 if (pAppDomainToSearch != NULL)
2441 {
2442 assemFlags = (AssemblyIterationFlags)(kIncludeAvailableToProfilers | kIncludeExecution);
2443 moduleFlags = (ModuleIterationOption)kModIterIncludeAvailableToProfilers;
2444 }
2445 LoadedMethodDescIterator it(
2446 pAppDomainToSearch,
2447 pModuleContainingMethodDef,
2448 methodDef,
2449 assemFlags,
2450 moduleFlags);
2451 CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
2452 while (it.Next(pDomainAssembly.This()))
2453 {
2454 MethodDesc * pLoadedMD = it.Current();
2455
2456 if (!pLoadedMD->IsVersionable())
2457 {
2458 // For compatibility with the rejit APIs we ensure certain errors are detected and reported using their
2459 // original HRESULTS
2460 HRESULT errorHR = GetNonVersionableError(pLoadedMD);
2461 if (FAILED(errorHR))
2462 {
2463                if (FAILED(hr = CodeVersionManager::AddCodePublishError(pModuleContainingMethodDef, methodDef, pLoadedMD, errorHR, pUnsupportedMethodErrors)))
2464 {
2465 _ASSERTE(hr == E_OUTOFMEMORY);
2466 return hr;
2467 }
2468 }
2469 continue;
2470 }
2471
2472#ifdef _DEBUG
2473 if (!pDomainContainingGenericDefinition->IsSharedDomain())
2474 {
2475 // Method is defined outside of the shared domain, so its instantiation must
2476 // be defined in the AD we're iterating over (pAppDomainToSearch, which, as
2477 // asserted above, must be the same domain as the generic's definition)
2478 _ASSERTE(pLoadedMD->GetDomain() == pAppDomainToSearch);
2479 }
2480#endif // _DEBUG
2481
2482 MethodDesc ** ppMD = pClosedMethodDescs->Append();
2483 if (ppMD == NULL)
2484 {
2485 return E_OUTOFMEMORY;
2486 }
2487 *ppMD = pLoadedMD;
2488 }
2489 return S_OK;
2490}
2491#endif // DACCESS_COMPILE
2492
2493
2494//---------------------------------------------------------------------------------------
2495//
2496// Given the default version code for a MethodDesc that is about to be published, add
2497// a jumpstamp pointing back to the prestub if the currently active version isn't
2498// the default one. This is called from the PublishMethodHolder.
2499//
2500// Arguments:
2501// * pMD - MethodDesc to jmp-stamp
2502// * pCode - Top of the code that was just jitted (using original IL).
2503//
2504//
2505// Return value:
2506// * S_OK: Either we successfully did the jmp-stamp, or we didn't have to
2507// * Else, HRESULT indicating failure.
2508
2509// Assumptions:
2510// The caller has not yet published pCode to the MethodDesc, so no threads can be
2511// executing inside pMD's code yet. Thus, we don't need to suspend the runtime while
2512// applying the jump-stamp like we usually do for rejit requests that are made after
2513// a function has been JITted.
2514//
2515#ifndef DACCESS_COMPILE
2516HRESULT CodeVersionManager::DoJumpStampIfNecessary(MethodDesc* pMD, PCODE pCode)
2517{
2518 CONTRACTL
2519 {
2520 NOTHROW;
2521 GC_NOTRIGGER;
2522 MODE_ANY;
2523 CAN_TAKE_LOCK;
2524 PRECONDITION(CheckPointer(pMD));
2525 PRECONDITION(pCode != NULL);
2526 }
2527 CONTRACTL_END;
2528
2529 _ASSERTE(LockOwnedByCurrentThread());
2530
2531 NativeCodeVersion activeCodeVersion = GetActiveILCodeVersion(pMD).GetActiveNativeCodeVersion(pMD);
2532 if (activeCodeVersion.IsDefaultVersion())
2533 {
2534        // Method was not requested to be rejitted, nothing to do
2535 return S_OK;
2536 }
2537
2538 if (!(pMD->IsVersionable() && pMD->IsVersionableWithJumpStamp()))
2539 {
2540 return GetNonVersionableError(pMD);
2541 }
2542
2543#ifndef FEATURE_JUMPSTAMP
2544 _ASSERTE(!"How did we get here? IsVersionableWithJumpStamp() should have been FALSE above");
2545 return S_OK;
2546#else
2547 HRESULT hr;
2548 MethodDescVersioningState* pVersioningState;
2549 if (FAILED(hr = GetOrCreateMethodDescVersioningState(pMD, &pVersioningState)))
2550 {
2551 _ASSERTE(hr == E_OUTOFMEMORY);
2552 return hr;
2553 }
2554 if (pVersioningState->GetJumpStampState() != MethodDescVersioningState::JumpStampNone)
2555 {
2556        // JumpStamp already in place
2557 return S_OK;
2558 }
2559 return pVersioningState->JumpStampNativeCode(pCode);
2560#endif // FEATURE_JUMPSTAMP
2561
2562}
2563#endif // DACCESS_COMPILE
2564
2565#ifndef DACCESS_COMPILE
2566//static
2567void CodeVersionManager::OnAppDomainExit(AppDomain * pAppDomain)
2568{
2569 LIMITED_METHOD_CONTRACT;
2570 // This would clean up all the allocations we have done and synchronize with any threads that might
2571 // still be using the data
2572    _ASSERTE(!".NET Core shouldn't be doing app domain shutdown - if we start doing so this needs to be implemented");
2573}
2574#endif
2575
2576//---------------------------------------------------------------------------------------
2577//
2578// Small helper to determine whether a given (possibly instantiated generic) MethodDesc
2579// is safe to rejit.
2580//
2581// Arguments:
2582// pMD - MethodDesc to test
2583// Return Value:
2584// S_OK iff pMD is safe to rejit
2585// CORPROF_E_FUNCTION_IS_COLLECTIBLE - function can't be rejitted because it is collectible
2586//
2587
2588// static
2589#ifndef DACCESS_COMPILE
2590HRESULT CodeVersionManager::GetNonVersionableError(MethodDesc* pMD)
2591{
2592 CONTRACTL
2593 {
2594 NOTHROW;
2595 GC_NOTRIGGER;
2596 CAN_TAKE_LOCK;
2597 MODE_ANY;
2598 }
2599 CONTRACTL_END;
2600
2601 _ASSERTE(pMD != NULL);
2602
2603 // Weird, non-user functions were already weeded out in RequestReJIT(), and will
2604 // also never be passed to us by the prestub worker (for the pre-rejit case).
2605 _ASSERTE(pMD->IsIL());
2606
2607 // Any MethodDescs that could be collected are not currently supported. Although we
2608 // rule out all Ref.Emit modules in RequestReJIT(), there can still exist types defined
2609 // in a non-reflection module and instantiated into a collectible assembly
2610 // (e.g., List<MyCollectibleStruct>). In the future we may lift this
2611 // restriction by updating the ReJitManager when the collectible assemblies
2612 // owning the instantiations get collected.
2613 if (pMD->GetLoaderAllocator()->IsCollectible())
2614 {
2615 return CORPROF_E_FUNCTION_IS_COLLECTIBLE;
2616 }
2617
2618 return S_OK;
2619}
2620#endif
2621
2622//---------------------------------------------------------------------------------------
2623//
2624// Helper that inits a new CodePublishError and adds it to the pErrors array
2625//
2626// Arguments:
2627// * pModule - The module in the module/MethodDef identifier pair for the method which
2628// had an error during rejit
2629// * methodDef - The MethodDef in the module/MethodDef identifier pair for the method which
2630// had an error during rejit
2631// * pMD - If available, the specific method instance which had an error during rejit
2632// * hrStatus - HRESULT for the rejit error that occurred
2633// * pErrors - the list of error records that this method will append to
2634//
2635// Return Value:
2636// * S_OK: error was appended
2637// * E_OUTOFMEMORY: Not enough memory to create the new error item. The array is unchanged.
2638//
2639
2640//static
2641#ifndef DACCESS_COMPILE
2642HRESULT CodeVersionManager::AddCodePublishError(Module* pModule, mdMethodDef methodDef, MethodDesc* pMD, HRESULT hrStatus, CDynArray<CodePublishError> * pErrors)
2643{
2644 CONTRACTL
2645 {
2646 NOTHROW;
2647 GC_NOTRIGGER;
2648 MODE_ANY;
2649 }
2650 CONTRACTL_END;
2651
2652 if (pErrors == NULL)
2653 {
2654 return S_OK;
2655 }
2656
2657 CodePublishError* pError = pErrors->Append();
2658 if (pError == NULL)
2659 {
2660 return E_OUTOFMEMORY;
2661 }
2662 pError->pModule = pModule;
2663 pError->methodDef = methodDef;
2664 pError->pMethodDesc = pMD;
2665 pError->hrStatus = hrStatus;
2666 return S_OK;
2667}
2668#endif
2669
2670#ifndef DACCESS_COMPILE
2671void CodeVersionManager::ReportCodePublishError(CodePublishError* pErrorRecord)
2672{
2673 CONTRACTL
2674 {
2675 NOTHROW;
2676 GC_TRIGGERS;
2677 CAN_TAKE_LOCK;
2678 MODE_ANY;
2679 }
2680 CONTRACTL_END;
2681
2682 ReportCodePublishError(pErrorRecord->pModule, pErrorRecord->methodDef, pErrorRecord->pMethodDesc, pErrorRecord->hrStatus);
2683}
2684
2685void CodeVersionManager::ReportCodePublishError(Module* pModule, mdMethodDef methodDef, MethodDesc* pMD, HRESULT hrStatus)
2686{
2687 CONTRACTL
2688 {
2689 NOTHROW;
2690 GC_TRIGGERS;
2691 CAN_TAKE_LOCK;
2692 MODE_ANY;
2693 }
2694 CONTRACTL_END;
2695
2696#ifdef FEATURE_REJIT
2697 BOOL isRejitted = FALSE;
2698 {
2699        TableLockHolder lock(this);
2700 isRejitted = !GetActiveILCodeVersion(pModule, methodDef).IsDefaultVersion();
2701 }
2702
2703    // This isn't perfect; we might be activating a tiered jitting variation of a rejitted
2704    // method, for example. If it proves to be an issue we can revisit.
2705 if (isRejitted)
2706 {
2707 ReJitManager::ReportReJITError(pModule, methodDef, pMD, hrStatus);
2708 }
2709#endif
2710}
2711#endif // DACCESS_COMPILE
2712
2713//---------------------------------------------------------------------------------------
2714//
2715// PrepareCodeConfig::SetNativeCode() calls this to determine if there's a non-default code
2716// version requested for a MethodDesc that has just been jitted for the first time.
2717// This is also called when methods are being restored in NGEN images. The sequence looks like:
2718// *Enter holder
2719// Enter code version manager lock
2720// DoJumpStampIfNecessary
2721// *Runtime code publishes/restores method
2722// *Exit holder
2723// Leave code version manager lock
2724// Send rejit error callbacks if needed
2725//
2726//
2727// #PublishCode:
2728// Note that the runtime needs to publish/restore the PCODE while this holder is
2729// on the stack, so it can happen under the code version manager's lock.
2730// This prevents a race with a profiler that calls
2731// RequestReJIT just as the method finishes compiling. In particular, the locking ensures
2732// atomicity between this set of steps (performed in DoJumpStampIfNecessary):
2733// * (1) Checking whether there is a non-default version for this MD
2734// * (2) If not, skip doing the jmp-stamp
2735// * (3) Publishing the PCODE
2736//
2737// with respect to these steps performed in RequestReJIT:
2738// * (a) Is PCODE published yet?
2739// * (b) Create non-default ILCodeVersion which the prestub will
2740// consult when it JITs the original IL
2741//
2742// Without this atomicity, we could get the ordering (1), (2), (a), (b), (3), resulting
2743// in the rejit request getting completely ignored (i.e., we file away the new ILCodeVersion
2744// AFTER the prestub checks for it).
2745//
2746// A similar race is possible for code being restored. In that case the restoring thread
2747// does:
2748// * (1) Check if there is a non-default ILCodeVersion for this MD
2749// * (2) If not, no need to jmp-stamp
2750// * (3) Restore the MD
2751
2752// And RequestRejit does:
2753// * (a) [In LoadedMethodDescIterator] Is a potential MD restored yet?
2754// * (b) [In EnumerateDomainClosedMethodDescs] If not, don't queue it for jump-stamping
2755//
2756// Same ordering (1), (2), (a), (b), (3) results in missing both opportunities to jump
2757// stamp.
2758
2759#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
2760PublishMethodHolder::PublishMethodHolder(MethodDesc* pMethodDesc, PCODE pCode) :
2761 m_pMD(NULL), m_hr(S_OK)
2762{
2763 // This method can't have a contract because entering the table lock
2764 // below increments GCNoTrigger count. Contracts always revert these changes
2765 // at the end of the method but we need the incremented count to flow out of the
2766 // method. The balancing decrement occurs in the destructor.
2767 STATIC_CONTRACT_NOTHROW;
2768 STATIC_CONTRACT_GC_NOTRIGGER;
2769 STATIC_CONTRACT_CAN_TAKE_LOCK;
2770 STATIC_CONTRACT_MODE_ANY;
2771
2772 // We come here from the PreStub and from MethodDesc::CheckRestore
2773 // The method should be effectively restored, but we haven't yet
2774 // cleared the unrestored bit so we can't assert pMethodDesc->IsRestored()
2775 // We can assert:
2776 _ASSERTE(pMethodDesc->GetMethodTable()->IsRestored());
2777
2778 if (pCode != NULL)
2779 {
2780 m_pMD = pMethodDesc;
2781 CodeVersionManager* pCodeVersionManager = pMethodDesc->GetCodeVersionManager();
2782 pCodeVersionManager->EnterLock();
2783 m_hr = pCodeVersionManager->DoJumpStampIfNecessary(pMethodDesc, pCode);
2784 }
2785}
2786
2787
2788PublishMethodHolder::~PublishMethodHolder()
2789{
2790 // This method can't have a contract because leaving the table lock
2791 // below decrements GCNoTrigger count. Contracts always revert these changes
2792 // at the end of the method but we need the decremented count to flow out of the
2793 // method. The balancing increment occurred in the constructor.
2794 STATIC_CONTRACT_NOTHROW;
2795 STATIC_CONTRACT_GC_TRIGGERS; // NOTRIGGER until we leave the lock
2796 STATIC_CONTRACT_CAN_TAKE_LOCK;
2797 STATIC_CONTRACT_MODE_ANY;
2798
2799 if (m_pMD)
2800 {
2801 CodeVersionManager* pCodeVersionManager = m_pMD->GetCodeVersionManager();
2802 pCodeVersionManager->LeaveLock();
2803 if (FAILED(m_hr))
2804 {
2805 pCodeVersionManager->ReportCodePublishError(m_pMD->GetModule(), m_pMD->GetMemberDef(), m_pMD, m_hr);
2806 }
2807 }
2808}
2809
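// PublishMethodTableHolder performs the same jump-stamp check as PublishMethodHolder, but for
// every introduced method of a MethodTable that already has native code. Publish errors are
// collected while the lock is held and reported from the destructor after the lock is released.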
2810PublishMethodTableHolder::PublishMethodTableHolder(MethodTable* pMethodTable) :
2811 m_pMethodTable(NULL)
2812{
2813 // This method can't have a contract because entering the table lock
2814 // below increments GCNoTrigger count. Contracts always revert these changes
2815 // at the end of the method but we need the incremented count to flow out of the
2816 // method. The balancing decrement occurs in the destructor.
2817 STATIC_CONTRACT_NOTHROW;
2818 STATIC_CONTRACT_GC_NOTRIGGER;
2819 STATIC_CONTRACT_CAN_TAKE_LOCK;
2820 STATIC_CONTRACT_MODE_ANY;
2821
2822 // We come here from MethodTable::SetIsRestored
2823 // The method table should be effectively restored, but we haven't yet
2824 // cleared the unrestored bit so we can't assert pMethodTable->IsRestored()
2825
2826 m_pMethodTable = pMethodTable;
2827 CodeVersionManager* pCodeVersionManager = pMethodTable->GetModule()->GetCodeVersionManager();
2828 pCodeVersionManager->EnterLock();
2829 MethodTable::IntroducedMethodIterator itMethods(pMethodTable, FALSE);
2830 for (; itMethods.IsValid(); itMethods.Next())
2831 {
2832 // Although the MethodTable is restored, the methods might not be.
2833 // We need to be careful to only query portions of the MethodDesc
2834 // that work in a partially restored state. The only methods that need
2835 // further restoration are IL stubs (which aren't rejittable) and
2836 // generic methods. The only generic methods directly accessible from
2837 // the MethodTable are definitions. GetNativeCode() on generic defs
2838        // will run successfully and return NULL, which short-circuits the
2839 // rest of the logic.
2840 MethodDesc * pMD = itMethods.GetMethodDesc();
2841 PCODE pCode = pMD->GetNativeCode();
2842 if (pCode != NULL)
2843 {
2844 HRESULT hr = pCodeVersionManager->DoJumpStampIfNecessary(pMD, pCode);
2845 if (FAILED(hr))
2846 {
2847 CodeVersionManager::AddCodePublishError(pMD->GetModule(), pMD->GetMemberDef(), pMD, hr, &m_errors);
2848 }
2849 }
2850 }
2851}
2852
2853
2854PublishMethodTableHolder::~PublishMethodTableHolder()
2855{
2856 // This method can't have a contract because leaving the table lock
2857 // below decrements GCNoTrigger count. Contracts always revert these changes
2858 // at the end of the method but we need the decremented count to flow out of the
2859 // method. The balancing increment occurred in the constructor.
2860 STATIC_CONTRACT_NOTHROW;
2861 STATIC_CONTRACT_GC_TRIGGERS; // NOTRIGGER until we leave the lock
2862 STATIC_CONTRACT_CAN_TAKE_LOCK;
2863 STATIC_CONTRACT_MODE_ANY;
2864
2865 if (m_pMethodTable)
2866 {
2867 CodeVersionManager* pCodeVersionManager = m_pMethodTable->GetModule()->GetCodeVersionManager();
2868 pCodeVersionManager->LeaveLock();
2869 for (int i = 0; i < m_errors.Count(); i++)
2870 {
2871 pCodeVersionManager->ReportCodePublishError(&(m_errors[i]));
2872 }
2873 }
2874}
2875#endif // !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
2876
2877#endif // FEATURE_CODE_VERSIONING
2878
2879