| 1 | // Licensed to the .NET Foundation under one or more agreements. |
| 2 | // The .NET Foundation licenses this file to you under the MIT license. |
| 3 | // See the LICENSE file in the project root for more information. |
| 4 | // =========================================================================== |
| 5 | // File: CallCounter.CPP |
| 6 | // |
| 7 | // =========================================================================== |
| 8 | |
| 9 | |
| 10 | |
| 11 | #include "common.h" |
| 12 | #include "excep.h" |
| 13 | #include "log.h" |
| 14 | #include "tieredcompilation.h" |
| 15 | #include "callcounter.h" |
| 16 | |
| 17 | #ifdef FEATURE_TIERED_COMPILATION |
| 18 | |
// Constructs the per-AppDomain call counter. The only state needing explicit
// setup is the spin lock that guards m_methodToCallCount; the hash itself is
// default-constructed.
CallCounter::CallCounter()
{
    LIMITED_METHOD_CONTRACT;

    // Initialize the lock used to serialize lookups/updates of the
    // per-method call-count table in OnMethodCalled.
    m_lock.Init(LOCK_TYPE_DEFAULT);
}
| 25 | |
| 26 | // This is called by the prestub each time the method is invoked in a particular |
| 27 | // AppDomain (the AppDomain for which AppDomain.GetCallCounter() == this). These |
| 28 | // calls continue until we backpatch the prestub to avoid future calls. This allows |
| 29 | // us to track the number of calls to each method and use it as a trigger for tiered |
| 30 | // compilation. |
| 31 | // |
// The outcome is reported through the out-parameters: *shouldStopCountingCallsRef is
// set to TRUE once no future invocations are needed (we reached the count we cared
// about), and FALSE otherwise; *wasPromotedToTier1Ref reports whether the method was
// promoted. It is permissible to keep calling even after counting should have stopped,
// and multi-threaded race conditions will surely cause this to occur.
| 35 | void CallCounter::OnMethodCalled( |
| 36 | MethodDesc* pMethodDesc, |
| 37 | TieredCompilationManager *pTieredCompilationManager, |
| 38 | BOOL* shouldStopCountingCallsRef, |
| 39 | BOOL* wasPromotedToTier1Ref) |
| 40 | { |
| 41 | STANDARD_VM_CONTRACT; |
| 42 | |
| 43 | _ASSERTE(pMethodDesc->IsEligibleForTieredCompilation()); |
| 44 | _ASSERTE(pTieredCompilationManager != nullptr); |
| 45 | _ASSERTE(shouldStopCountingCallsRef != nullptr); |
| 46 | _ASSERTE(wasPromotedToTier1Ref != nullptr); |
| 47 | |
| 48 | // PERF: This as a simple to implement, but not so performant, call counter |
| 49 | // Currently this is only called until we reach a fixed call count and then |
| 50 | // disabled. Its likely we'll want to improve this at some point but |
| 51 | // its not as bad as you might expect. Allocating a counter inline in the |
| 52 | // MethodDesc or at some location computable from the MethodDesc should |
| 53 | // eliminate 1 pointer per-method (the MethodDesc* key) and the CPU |
| 54 | // overhead to acquire the lock/search the dictionary. Depending on where it |
| 55 | // is we may also be able to reduce it to 1 byte counter without wasting the |
| 56 | // following bytes for alignment. Further work to inline the OnMethodCalled |
| 57 | // callback directly into the jitted code would eliminate CPU overhead of |
| 58 | // leaving the prestub unpatched, but may not be good overall as it increases |
| 59 | // the size of the jitted code. |
| 60 | |
| 61 | |
| 62 | TieredCompilationManager* pCallCounterSink = NULL; |
| 63 | int callCount; |
| 64 | { |
| 65 | //Be careful if you convert to something fully lock/interlocked-free that |
| 66 | //you correctly handle what happens when some N simultaneous calls don't |
| 67 | //all increment the counter. The slight drift is probably neglible for tuning |
| 68 | //but TieredCompilationManager::OnMethodCalled() doesn't expect multiple calls |
| 69 | //each claiming to be exactly the threshhold call count needed to trigger |
| 70 | //optimization. |
| 71 | SpinLockHolder holder(&m_lock); |
| 72 | CallCounterEntry* pEntry = const_cast<CallCounterEntry*>(m_methodToCallCount.LookupPtr(pMethodDesc)); |
| 73 | if (pEntry == NULL) |
| 74 | { |
| 75 | callCount = 1; |
| 76 | m_methodToCallCount.Add(CallCounterEntry(pMethodDesc, callCount)); |
| 77 | } |
| 78 | else |
| 79 | { |
| 80 | pEntry->callCount++; |
| 81 | callCount = pEntry->callCount; |
| 82 | } |
| 83 | } |
| 84 | |
| 85 | pTieredCompilationManager->OnMethodCalled(pMethodDesc, callCount, shouldStopCountingCallsRef, wasPromotedToTier1Ref); |
| 86 | } |
| 87 | |
| 88 | #endif // FEATURE_TIERED_COMPILATION |
| 89 | |