1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | // |
5 | // spinlock.cpp |
6 | // |
7 | |
8 | // |
9 | |
10 | |
11 | #include "common.h" |
12 | |
13 | #include "slist.h" |
14 | #include "spinlock.h" |
15 | #include "threads.h" |
16 | #include "corhost.h" |
17 | |
enum
{
    BACKOFF_LIMIT = 1000      // used in spin to acquire
                              // NOTE(review): not referenced anywhere in this
                              // file's visible code — possibly dead; confirm
                              // against spinlock.h before removing.
};

#ifdef _DEBUG

// Profile information, updated only in _DEBUG builds.
// One counter slot per LOCK_TYPE value (LOCK_TYPE_DEFAULT is the highest).
ULONG SpinLockProfiler::s_ulBackOffs = 0;                            // total thread-yield backoffs across all locks
ULONG SpinLockProfiler::s_ulCollisons [LOCK_TYPE_DEFAULT + 1] = { 0 };  // per-type count of contended acquires
ULONG SpinLockProfiler::s_ulSpins [LOCK_TYPE_DEFAULT + 1] = { 0 };      // per-type count of spin iterations

#endif
31 | |
32 | SpinLock::SpinLock() |
33 | { |
34 | // Global SpinLock variables will cause the constructor to be |
35 | // called during DllInit, which means we cannot use full contracts |
36 | // because we have not called InitUtilCode yet. |
37 | STATIC_CONTRACT_NOTHROW; |
38 | STATIC_CONTRACT_GC_NOTRIGGER; |
39 | |
40 | m_Initialized = UnInitialized; |
41 | } |
42 | |
43 | |
44 | SpinLock::~SpinLock() |
45 | { |
46 | CONTRACTL |
47 | { |
48 | NOTHROW; |
49 | GC_NOTRIGGER; |
50 | } |
51 | CONTRACTL_END; |
52 | |
53 | } |
54 | |
55 | void SpinLock::Init(LOCK_TYPE type, bool RequireCoopGC) |
56 | { |
57 | CONTRACTL |
58 | { |
59 | THROWS; |
60 | GC_NOTRIGGER; |
61 | } |
62 | CONTRACTL_END; |
63 | |
64 | if (m_Initialized == Initialized) |
65 | { |
66 | _ASSERTE (type == m_LockType); |
67 | _ASSERTE (RequireCoopGC == m_requireCoopGCMode); |
68 | |
69 | // We have initialized this spinlock. |
70 | return; |
71 | } |
72 | |
73 | while (TRUE) |
74 | { |
75 | LONG curValue = FastInterlockCompareExchange((LONG*)&m_Initialized, BeingInitialized, UnInitialized); |
76 | if (curValue == Initialized) |
77 | { |
78 | return; |
79 | } |
80 | else if (curValue == UnInitialized) |
81 | { |
82 | // We are the first to initialize the lock |
83 | break; |
84 | } |
85 | else |
86 | { |
87 | __SwitchToThread(10, CALLER_LIMITS_SPINNING); |
88 | } |
89 | } |
90 | |
91 | { |
92 | m_lock = 0; |
93 | } |
94 | |
95 | #ifdef _DEBUG |
96 | m_LockType = type; |
97 | m_requireCoopGCMode = RequireCoopGC; |
98 | #endif |
99 | |
100 | _ASSERTE (m_Initialized == BeingInitialized); |
101 | m_Initialized = Initialized; |
102 | } |
103 | |
#ifdef _DEBUG
// Returns TRUE iff the calling thread is the one recorded as holding this
// lock (debug-only; m_holdingThreadId is set in GetLock / cleared in FreeLock).
BOOL SpinLock::OwnedByCurrentThread()
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        DEBUG_ONLY;
    }
    CONTRACTL_END;

    return m_holdingThreadId.IsCurrentThread();
}
#endif
118 | |
// Static wrapper used by holder classes: opens a contract scan scope and
// acquires the lock.  Must be paired with ReleaseLock, which closes the scope.
DEBUG_NOINLINE void SpinLock::AcquireLock(SpinLock *s, Thread * pThread)
{
    SCAN_SCOPE_BEGIN;
    STATIC_CONTRACT_GC_NOTRIGGER;

    s->GetLock(pThread);
}
126 | |
// Static wrapper used by holder classes: releases the lock and closes the
// contract scan scope opened by AcquireLock.
DEBUG_NOINLINE void SpinLock::ReleaseLock(SpinLock *s, Thread * pThread)
{
    SCAN_SCOPE_END;

    s->FreeLock(pThread);
}
133 | |
134 | |
//----------------------------------------------------------------------------
// SpinLock::GetLock
//   Acquire the lock, spinning (and eventually yielding) until it is free.
//   pThread is used only for per-thread lock-count bookkeeping; the lock
//   itself is taken via interlocked operations on m_lock.
//
void SpinLock::GetLock(Thread* pThread)
{
    CONTRACTL
    {
        DISABLED(THROWS);          // need to rewrite spin locks to no-throw.
        GC_NOTRIGGER;
        CAN_TAKE_LOCK;
        SO_TOLERANT;
    }
    CONTRACTL_END;

    // Init() must have completed before the lock is used.
    _ASSERTE(m_Initialized == Initialized);

#ifdef _DEBUG
    // Debug checks (no recursion, GC-mode expectations) must run BEFORE we
    // start waiting on the lock.
    dbg_PreEnterLock();
#endif

    {
        // Not CLR Sync hosted, so we use interlocked operations on
        // m_lock to acquire the lock. This will automatically cause
        // us to call EE_LOCK_TAKEN(this);
        if (!GetLockNoWait())
        {
            // Contended: fall back to the spin/backoff loop.
            SpinToAcquire();
        }
    }

    // Bookkeeping after the lock is held: bump the thread's lock count and
    // (debug) record ourselves as the owner.
    INCTHREADLOCKCOUNTTHREAD(pThread);
#ifdef _DEBUG
    m_holdingThreadId.SetToCurrentThread();
    dbg_EnterLock();
#endif
}
168 | |
169 | //---------------------------------------------------------------------------- |
170 | // SpinLock::GetLockNoWait |
171 | // used interlocked exchange and fast lock acquire |
172 | |
173 | BOOL SpinLock::GetLockNoWait() |
174 | { |
175 | CONTRACTL |
176 | { |
177 | NOTHROW; |
178 | GC_NOTRIGGER; |
179 | CAN_TAKE_LOCK; |
180 | SO_TOLERANT; |
181 | } |
182 | CONTRACTL_END; |
183 | |
184 | { |
185 | if (VolatileLoad(&m_lock) == 0 && FastInterlockExchange (&m_lock, 1) == 0) |
186 | { |
187 | EE_LOCK_TAKEN(this); |
188 | return 1; |
189 | } |
190 | return 0; |
191 | } |
192 | } |
193 | |
194 | //---------------------------------------------------------------------------- |
195 | // SpinLock::FreeLock |
196 | // Release the spinlock |
197 | // |
198 | void SpinLock::FreeLock(Thread* pThread) |
199 | { |
200 | CONTRACTL |
201 | { |
202 | NOTHROW; |
203 | GC_NOTRIGGER; |
204 | SO_TOLERANT; |
205 | } |
206 | CONTRACTL_END; |
207 | |
208 | _ASSERTE(m_Initialized == Initialized); |
209 | |
210 | #ifdef _DEBUG |
211 | _ASSERTE(OwnedByCurrentThread()); |
212 | m_holdingThreadId.Clear(); |
213 | dbg_LeaveLock(); |
214 | #endif |
215 | |
216 | { |
217 | VolatileStore(&m_lock, (LONG)0); |
218 | } |
219 | |
220 | DECTHREADLOCKCOUNTTHREAD(pThread); |
221 | EE_LOCK_RELEASED(this); |
222 | |
223 | } // SpinLock::FreeLock () |
224 | |
225 | |
226 | //---------------------------------------------------------------------------- |
227 | // SpinLock::SpinToAcquire , non-inline function, called from inline Acquire |
228 | // |
229 | // Spin waiting for a spinlock to become free. |
230 | // |
231 | // |
232 | void |
233 | SpinLock::SpinToAcquire() |
234 | { |
235 | CONTRACTL |
236 | { |
237 | NOTHROW; |
238 | GC_NOTRIGGER; |
239 | CAN_TAKE_LOCK; |
240 | SO_TOLERANT; |
241 | } |
242 | CONTRACTL_END; |
243 | |
244 | DWORD backoffs = 0; |
245 | ULONG ulSpins = 0; |
246 | |
247 | while (true) |
248 | { |
249 | for (unsigned i = ulSpins+10000; |
250 | ulSpins < i; |
251 | ulSpins++) |
252 | { |
253 | // Note: Must use Volatile to ensure the lock is |
254 | // refetched from memory. |
255 | // |
256 | if (VolatileLoad(&m_lock) == 0) |
257 | { |
258 | break; |
259 | } |
260 | YieldProcessor(); // indicate to the processor that we are spining |
261 | } |
262 | |
263 | // Try the inline atomic test again. |
264 | // |
265 | if (GetLockNoWait()) |
266 | { |
267 | // EE_LOCK_TAKEN(this) has already been called by GetLockNoWait |
268 | break; |
269 | } |
270 | |
271 | //backoff |
272 | __SwitchToThread(0, backoffs++); |
273 | } |
274 | |
275 | #ifdef _DEBUG |
276 | //profile info |
277 | SpinLockProfiler::IncrementCollisions (m_LockType); |
278 | SpinLockProfiler::IncrementSpins (m_LockType, ulSpins); |
279 | SpinLockProfiler::IncrementBackoffs (backoffs); |
280 | #endif |
281 | |
282 | } // SpinLock::SpinToAcquire () |
283 | |
284 | #ifdef _DEBUG |
285 | // If a GC is not allowed when we enter the lock, we'd better not do anything inside |
286 | // the lock that could provoke a GC. Otherwise other threads attempting to block |
287 | // (which are presumably in the same GC mode as this one) will block. This will cause |
288 | // a deadlock if we do attempt a GC because we can't suspend blocking threads and we |
289 | // can't release the spin lock. |
290 | void SpinLock::dbg_PreEnterLock() |
291 | { |
292 | CONTRACTL |
293 | { |
294 | NOTHROW; |
295 | GC_NOTRIGGER; |
296 | DEBUG_ONLY; |
297 | } |
298 | CONTRACTL_END; |
299 | |
300 | Thread* pThread = GetThread(); |
301 | if (pThread) |
302 | { |
303 | // SpinLock can not be nested. |
304 | _ASSERTE ((pThread->m_StateNC & Thread::TSNC_OwnsSpinLock) == 0); |
305 | |
306 | pThread->SetThreadStateNC(Thread::TSNC_OwnsSpinLock); |
307 | |
308 | if (!pThread->PreemptiveGCDisabled()) |
309 | _ASSERTE(!m_requireCoopGCMode); |
310 | } |
311 | } |
312 | |
313 | void SpinLock::dbg_EnterLock() |
314 | { |
315 | CONTRACTL |
316 | { |
317 | NOTHROW; |
318 | GC_NOTRIGGER; |
319 | DEBUG_ONLY; |
320 | } |
321 | CONTRACTL_END; |
322 | |
323 | Thread* pThread = GetThread(); |
324 | if (pThread) |
325 | { |
326 | INCONTRACT(pThread->BeginNoTriggerGC(__FILE__, __LINE__)); |
327 | } |
328 | } |
329 | |
330 | void SpinLock::dbg_LeaveLock() |
331 | { |
332 | CONTRACTL |
333 | { |
334 | NOTHROW; |
335 | GC_NOTRIGGER; |
336 | DEBUG_ONLY; |
337 | } |
338 | CONTRACTL_END; |
339 | |
340 | Thread* pThread = GetThread(); |
341 | if (pThread) |
342 | { |
343 | _ASSERTE ((pThread->m_StateNC & Thread::TSNC_OwnsSpinLock) != 0); |
344 | pThread->ResetThreadStateNC(Thread::TSNC_OwnsSpinLock); |
345 | INCONTRACT(pThread->EndNoTriggerGC()); |
346 | } |
347 | } |
348 | |
349 | |
350 | void SpinLockProfiler::InitStatics () |
351 | { |
352 | CONTRACTL |
353 | { |
354 | NOTHROW; |
355 | GC_NOTRIGGER; |
356 | DEBUG_ONLY; |
357 | } |
358 | CONTRACTL_END; |
359 | |
360 | s_ulBackOffs = 0; |
361 | memset (s_ulCollisons, 0, sizeof (s_ulCollisons)); |
362 | memset (s_ulSpins, 0, sizeof (s_ulSpins)); |
363 | } |
364 | |
// Add `value` busy-poll iterations to the spin counter for lock type `type`.
// Note: the update is not atomic; counts may be approximate under contention.
void SpinLockProfiler::IncrementSpins (LOCK_TYPE type, ULONG value)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        DEBUG_ONLY;
    }
    CONTRACTL_END;

    // Guard the array index: s_ulSpins has LOCK_TYPE_DEFAULT + 1 slots.
    _ASSERTE(type <= LOCK_TYPE_DEFAULT);
    s_ulSpins [type] += value;
}
378 | |
379 | void SpinLockProfiler::IncrementCollisions (LOCK_TYPE type) |
380 | { |
381 | CONTRACTL |
382 | { |
383 | NOTHROW; |
384 | GC_NOTRIGGER; |
385 | DEBUG_ONLY; |
386 | } |
387 | CONTRACTL_END; |
388 | |
389 | ++s_ulCollisons [type]; |
390 | } |
391 | |
// Add `value` thread-yield backoffs to the global backoff counter.
// Note: the update is not atomic; counts may be approximate under contention.
void SpinLockProfiler::IncrementBackoffs (ULONG value)
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        DEBUG_ONLY;
    }
    CONTRACTL_END;

    s_ulBackOffs += value;
}
404 | |
// Intended to report the collected spin-lock statistics; currently a stub.
void SpinLockProfiler::DumpStatics()
{
    CONTRACTL
    {
        NOTHROW;
        GC_NOTRIGGER;
        DEBUG_ONLY;
    }
    CONTRACTL_END;

    //<TODO>todo </TODO>
}
417 | |
418 | #endif // _DEBUG |
419 | |
420 | // End of file: spinlock.cpp |
421 | |