1// Licensed to the .NET Foundation under one or more agreements.
2// The .NET Foundation licenses this file to you under the MIT license.
3// See the LICENSE file in the project root for more information.
4
5// =================================================================================
6// Code that works with liveness and related concepts (interference, debug scope)
7// =================================================================================
8
9#include "jitpch.h"
10#ifdef _MSC_VER
11#pragma hdrstop
12#endif
13
14#if !defined(_TARGET_64BIT_)
15#include "decomposelongs.h"
16#endif
17#include "lower.h" // for LowerRange()
18
19/*****************************************************************************
20 *
21 * Helper for Compiler::fgPerBlockLocalVarLiveness().
22 * The goal is to compute the USE and DEF sets for a basic block.
23 */
24void Compiler::fgMarkUseDef(GenTreeLclVarCommon* tree)
25{
26 assert((tree->OperIsLocal() && (tree->OperGet() != GT_PHI_ARG)) || tree->OperIsLocalAddr());
27
28 const unsigned lclNum = tree->gtLclNum;
29 assert(lclNum < lvaCount);
30
31 LclVarDsc* const varDsc = &lvaTable[lclNum];
32
33 // We should never encounter a reference to a lclVar that has a zero refCnt.
34 if (varDsc->lvRefCnt() == 0 && (!varTypeIsPromotable(varDsc) || !varDsc->lvPromoted))
35 {
36 JITDUMP("Found reference to V%02u with zero refCnt.\n", lclNum);
37 assert(!"We should never encounter a reference to a lclVar that has a zero refCnt.");
38 varDsc->setLvRefCnt(1);
39 }
40
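    // GTF_VAR_DEF marks this reference as a store to the local. GTF_VAR_USEASG additionally
    // marks a partial ("update") def that does not overwrite the whole variable, so the old
    // value remains partly observable and the reference must also be counted as a use.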
41 const bool isDef = (tree->gtFlags & GTF_VAR_DEF) != 0;
42 const bool isUse = !isDef || ((tree->gtFlags & GTF_VAR_USEASG) != 0);
43
44 if (varDsc->lvTracked)
45 {
46 assert(varDsc->lvVarIndex < lvaTrackedCount);
47
        // We don't treat stores to tracked locals as modifications of ByrefExposed memory,
        // so make sure no tracked local is addr-exposed; otherwise we could incorrectly CSE
        // byref loads that alias it across a store to it.
51 assert(!varDsc->lvAddrExposed);
52
53 if (isUse && !VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
54 {
55 // This is an exposed use; add it to the set of uses.
56 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
57 }
58
59 if (isDef)
60 {
61 // This is a def, add it to the set of defs.
62 VarSetOps::AddElemD(this, fgCurDefSet, varDsc->lvVarIndex);
63 }
64 }
65 else
66 {
67 if (varDsc->lvAddrExposed)
68 {
69 // Reflect the effect on ByrefExposed memory
70
71 if (isUse)
72 {
73 fgCurMemoryUse |= memoryKindSet(ByrefExposed);
74 }
75 if (isDef)
76 {
77 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
78
79 // We've found a store that modifies ByrefExposed
80 // memory but not GcHeap memory, so track their
81 // states separately.
82 byrefStatesMatchGcHeapStates = false;
83 }
84 }
85
86 if (varTypeIsStruct(varDsc))
87 {
88 lvaPromotionType promotionType = lvaGetPromotionType(varDsc);
89
90 if (promotionType != PROMOTION_TYPE_NONE)
91 {
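                // A def or use of a promoted struct local is modeled as a def or use of each
                // of its tracked field locals; gather those fields into a bit set first.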
92 VARSET_TP bitMask(VarSetOps::MakeEmpty(this));
93
94 for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
95 {
96 noway_assert(lvaTable[i].lvIsStructField);
97 if (lvaTable[i].lvTracked)
98 {
99 noway_assert(lvaTable[i].lvVarIndex < lvaTrackedCount);
100 VarSetOps::AddElemD(this, bitMask, lvaTable[i].lvVarIndex);
101 }
102 }
103
104 // For pure defs (i.e. not an "update" def which is also a use), add to the (all) def set.
105 if (!isUse)
106 {
107 assert(isDef);
108 VarSetOps::UnionD(this, fgCurDefSet, bitMask);
109 }
110 else if (!VarSetOps::IsSubset(this, bitMask, fgCurDefSet))
111 {
112 // Mark as used any struct fields that are not yet defined.
113 VarSetOps::UnionD(this, fgCurUseSet, bitMask);
114 }
115 }
116 }
117 }
118}
119
120/*****************************************************************************/
121void Compiler::fgLocalVarLiveness()
122{
123#ifdef DEBUG
124 if (verbose)
125 {
126 printf("*************** In fgLocalVarLiveness()\n");
127
128 if (compRationalIRForm)
129 {
130 lvaTableDump();
131 }
132 }
133#endif // DEBUG
134
135 // Init liveness data structures.
136 fgLocalVarLivenessInit();
137
138 EndPhase(PHASE_LCLVARLIVENESS_INIT);
139
140 // Make sure we haven't noted any partial last uses of promoted structs.
141 ClearPromotedStructDeathVars();
142
143 // Initialize the per-block var sets.
144 fgInitBlockVarSets();
145
146 fgLocalVarLivenessChanged = false;
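    // Removing dead stores during the inter-block phase can change the per-block
    // use/def information, so iterate until liveness stops changing.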
147 do
148 {
149 /* Figure out use/def info for all basic blocks */
150 fgPerBlockLocalVarLiveness();
151 EndPhase(PHASE_LCLVARLIVENESS_PERBLOCK);
152
153 /* Live variable analysis. */
154
155 fgStmtRemoved = false;
156 fgInterBlockLocalVarLiveness();
157 } while (fgStmtRemoved && fgLocalVarLivenessChanged);
158
159 EndPhase(PHASE_LCLVARLIVENESS_INTERBLOCK);
160}
161
162/*****************************************************************************/
163void Compiler::fgLocalVarLivenessInit()
164{
165 JITDUMP("In fgLocalVarLivenessInit\n");
166
167 // Sort locals first, if we're optimizing
168 if (opts.OptimizationEnabled())
169 {
170 lvaSortByRefCount();
171 }
172
173 // We mark a lcl as must-init in a first pass of local variable
174 // liveness (Liveness1), then assertion prop eliminates the
175 // uninit-use of a variable Vk, asserting it will be init'ed to
176 // null. Then, in a second local-var liveness (Liveness2), the
177 // variable Vk is no longer live on entry to the method, since its
178 // uses have been replaced via constant propagation.
179 //
180 // This leads to a bug: since Vk is no longer live on entry, the
181 // register allocator sees Vk and an argument Vj as having
182 // disjoint lifetimes, and allocates them to the same register.
183 // But Vk is still marked "must-init", and this initialization (of
184 // the register) trashes the value in Vj.
185 //
186 // Therefore, initialize must-init to false for all variables in
187 // each liveness phase.
188 for (unsigned lclNum = 0; lclNum < lvaCount; ++lclNum)
189 {
190 lvaTable[lclNum].lvMustInit = false;
191 }
192}
193
194//------------------------------------------------------------------------
195// fgPerNodeLocalVarLiveness:
196// Set fgCurMemoryUse and fgCurMemoryDef when memory is read or updated
197// Call fgMarkUseDef for any Local variables encountered
198//
199// Arguments:
200// tree - The current node.
201//
202void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree)
203{
204 assert(tree != nullptr);
205
206 switch (tree->gtOper)
207 {
208 case GT_QMARK:
209 case GT_COLON:
            // We should never encounter a GT_QMARK or GT_COLON node
211 noway_assert(!"unexpected GT_QMARK/GT_COLON");
212 break;
213
214 case GT_LCL_VAR:
215 case GT_LCL_FLD:
216 case GT_LCL_VAR_ADDR:
217 case GT_LCL_FLD_ADDR:
218 case GT_STORE_LCL_VAR:
219 case GT_STORE_LCL_FLD:
220 fgMarkUseDef(tree->AsLclVarCommon());
221 break;
222
223 case GT_CLS_VAR:
224 // For Volatile indirection, first mutate GcHeap/ByrefExposed.
225 // See comments in ValueNum.cpp (under case GT_CLS_VAR)
226 // This models Volatile reads as def-then-use of memory
227 // and allows for a CSE of a subsequent non-volatile read.
228 if ((tree->gtFlags & GTF_FLD_VOLATILE) != 0)
229 {
230 // For any Volatile indirection, we must handle it as a
231 // definition of GcHeap/ByrefExposed
232 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
233 }
234 // If the GT_CLS_VAR is the lhs of an assignment, we'll handle it as a GcHeap/ByrefExposed def, when we get
235 // to the assignment.
236 // Otherwise, we treat it as a use here.
237 if ((tree->gtFlags & GTF_CLS_VAR_ASG_LHS) == 0)
238 {
239 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
240 }
241 break;
242
243 case GT_IND:
            // For Volatile indirection, first mutate GcHeap/ByrefExposed;
            // see comments in ValueNum.cpp (under case GT_CLS_VAR).
            // This models Volatile reads as a def-then-use of memory
            // and allows for a CSE of a subsequent non-volatile read.
248 if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
249 {
250 // For any Volatile indirection, we must handle it as a
251 // definition of the GcHeap/ByrefExposed
252 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
253 }
254
255 // If the GT_IND is the lhs of an assignment, we'll handle it
256 // as a memory def, when we get to assignment.
257 // Otherwise, we treat it as a use here.
258 if ((tree->gtFlags & GTF_IND_ASG_LHS) == 0)
259 {
260 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
261 bool dummyIsEntire = false;
262 GenTree* addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
263 if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire))
264 {
265 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
266 }
267 else
268 {
269 // Defines a local addr
270 assert(dummyLclVarTree != nullptr);
271 fgMarkUseDef(dummyLclVarTree->AsLclVarCommon());
272 }
273 }
274 break;
275
276 // These should have been morphed away to become GT_INDs:
277 case GT_FIELD:
278 case GT_INDEX:
279 unreached();
280 break;
281
282 // We'll assume these are use-then-defs of memory.
283 case GT_LOCKADD:
284 case GT_XADD:
285 case GT_XCHG:
286 case GT_CMPXCHG:
287 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
288 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
289 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
290 break;
291
292 case GT_MEMORYBARRIER:
            // Similar to any Volatile indirection, we must handle this as a definition of GcHeap/ByrefExposed
294 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
295 break;
296
297#ifdef FEATURE_HW_INTRINSICS
298 case GT_HWIntrinsic:
299 {
300 GenTreeHWIntrinsic* hwIntrinsicNode = tree->AsHWIntrinsic();
301
302 // We can't call fgMutateGcHeap unless the block has recorded a MemoryDef
303 //
304 if (hwIntrinsicNode->OperIsMemoryStore())
305 {
306 // We currently handle this like a Volatile store, so it counts as a definition of GcHeap/ByrefExposed
307 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
308 }
309 if (hwIntrinsicNode->OperIsMemoryLoad())
310 {
311 // This instruction loads from memory and we need to record this information
312 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
313 }
314 break;
315 }
316#endif
317
        // For now, all calls read/write GcHeap/ByrefExposed, and writes are treated as
        // modifying it in its entirety. Might tighten this case later.
319 case GT_CALL:
320 {
321 GenTreeCall* call = tree->AsCall();
322 bool modHeap = true;
323 if (call->gtCallType == CT_HELPER)
324 {
325 CorInfoHelpFunc helpFunc = eeGetHelperNum(call->gtCallMethHnd);
326
327 if (!s_helperCallProperties.MutatesHeap(helpFunc) && !s_helperCallProperties.MayRunCctor(helpFunc))
328 {
329 modHeap = false;
330 }
331 }
332 if (modHeap)
333 {
334 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
335 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
336 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
337 }
338 }
339
340 // If this is a p/invoke unmanaged call or if this is a tail-call
341 // and we have an unmanaged p/invoke call in the method,
342 // then we're going to run the p/invoke epilog.
343 // So we mark the FrameRoot as used by this instruction.
            // This ensures that block->bbVarUse will contain
            // the FrameRoot local var if it is a tracked variable.
346
347 if ((tree->gtCall.IsUnmanaged() || (tree->gtCall.IsTailCall() && info.compCallUnmanaged)))
348 {
349 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
350 if (!opts.ShouldUsePInvokeHelpers())
351 {
352 /* Get the TCB local and mark it as used */
353
354 noway_assert(info.compLvFrameListRoot < lvaCount);
355
356 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
357
358 if (varDsc->lvTracked)
359 {
360 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
361 {
362 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
363 }
364 }
365 }
366 }
367
368 break;
369
370 default:
371
372 // Determine what memory locations it defines.
373 if (tree->OperIs(GT_ASG) || tree->OperIsBlkOp())
374 {
375 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
376 if (tree->DefinesLocal(this, &dummyLclVarTree))
377 {
378 if (lvaVarAddrExposed(dummyLclVarTree->gtLclNum))
379 {
380 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
381
382 // We've found a store that modifies ByrefExposed
383 // memory but not GcHeap memory, so track their
384 // states separately.
385 byrefStatesMatchGcHeapStates = false;
386 }
387 }
388 else
389 {
390 // If it doesn't define a local, then it might update GcHeap/ByrefExposed.
391 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
392 }
393 }
394 break;
395 }
396}
397
398/*****************************************************************************/
399void Compiler::fgPerBlockLocalVarLiveness()
400{
401#ifdef DEBUG
402 if (verbose)
403 {
404 printf("*************** In fgPerBlockLocalVarLiveness()\n");
405 }
406#endif // DEBUG
407
408 unsigned livenessVarEpoch = GetCurLVEpoch();
409
410 BasicBlock* block;
411
412 // If we don't require accurate local var lifetimes, things are simple.
413 if (!backendRequiresLocalVarLifetimes())
414 {
415 unsigned lclNum;
416 LclVarDsc* varDsc;
417
418 VARSET_TP liveAll(VarSetOps::MakeEmpty(this));
419
420 /* We simply make everything live everywhere */
421
422 for (lclNum = 0, varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
423 {
424 if (varDsc->lvTracked)
425 {
426 VarSetOps::AddElemD(this, liveAll, varDsc->lvVarIndex);
427 }
428 }
429
430 for (block = fgFirstBB; block; block = block->bbNext)
431 {
432 // Strictly speaking, the assignments for the "Def" cases aren't necessary here.
433 // The empty set would do as well. Use means "use-before-def", so as long as that's
434 // "all", this has the right effect.
435 VarSetOps::Assign(this, block->bbVarUse, liveAll);
436 VarSetOps::Assign(this, block->bbVarDef, liveAll);
437 VarSetOps::Assign(this, block->bbLiveIn, liveAll);
438 block->bbMemoryUse = fullMemoryKindSet;
439 block->bbMemoryDef = fullMemoryKindSet;
440 block->bbMemoryLiveIn = fullMemoryKindSet;
441 block->bbMemoryLiveOut = fullMemoryKindSet;
442
443 switch (block->bbJumpKind)
444 {
445 case BBJ_EHFINALLYRET:
446 case BBJ_THROW:
447 case BBJ_RETURN:
448 VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
449 break;
450 default:
451 VarSetOps::Assign(this, block->bbLiveOut, liveAll);
452 break;
453 }
454 }
455
456 // In minopts, we don't explicitly build SSA or value-number; GcHeap and
457 // ByrefExposed implicitly (conservatively) change state at each instr.
458 byrefStatesMatchGcHeapStates = true;
459
460 return;
461 }
462
463 // Avoid allocations in the long case.
464 VarSetOps::AssignNoCopy(this, fgCurUseSet, VarSetOps::MakeEmpty(this));
465 VarSetOps::AssignNoCopy(this, fgCurDefSet, VarSetOps::MakeEmpty(this));
466
467 // GC Heap and ByrefExposed can share states unless we see a def of byref-exposed
468 // memory that is not a GC Heap def.
469 byrefStatesMatchGcHeapStates = true;
470
471 for (block = fgFirstBB; block; block = block->bbNext)
472 {
473 VarSetOps::ClearD(this, fgCurUseSet);
474 VarSetOps::ClearD(this, fgCurDefSet);
475
476 fgCurMemoryUse = emptyMemoryKindSet;
477 fgCurMemoryDef = emptyMemoryKindSet;
478 fgCurMemoryHavoc = emptyMemoryKindSet;
479
480 compCurBB = block;
481 if (block->IsLIR())
482 {
483 for (GenTree* node : LIR::AsRange(block).NonPhiNodes())
484 {
485 fgPerNodeLocalVarLiveness(node);
486 }
487 }
488 else
489 {
490 for (GenTreeStmt* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNextStmt)
491 {
492 compCurStmt = stmt;
493 for (GenTree* node = stmt->gtStmtList; node != nullptr; node = node->gtNext)
494 {
495 fgPerNodeLocalVarLiveness(node);
496 }
497 }
498 }
499
500 /* Get the TCB local and mark it as used */
501
502 if (block->bbJumpKind == BBJ_RETURN && info.compCallUnmanaged)
503 {
504 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
505 if (!opts.ShouldUsePInvokeHelpers())
506 {
507 noway_assert(info.compLvFrameListRoot < lvaCount);
508
509 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
510
511 if (varDsc->lvTracked)
512 {
513 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
514 {
515 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
516 }
517 }
518 }
519 }
520
521#ifdef DEBUG
522 if (verbose)
523 {
524 VARSET_TP allVars(VarSetOps::Union(this, fgCurUseSet, fgCurDefSet));
525 printf(FMT_BB, block->bbNum);
526 printf(" USE(%d)=", VarSetOps::Count(this, fgCurUseSet));
527 lvaDispVarSet(fgCurUseSet, allVars);
528 for (MemoryKind memoryKind : allMemoryKinds())
529 {
530 if ((fgCurMemoryUse & memoryKindSet(memoryKind)) != 0)
531 {
532 printf(" + %s", memoryKindNames[memoryKind]);
533 }
534 }
535 printf("\n DEF(%d)=", VarSetOps::Count(this, fgCurDefSet));
536 lvaDispVarSet(fgCurDefSet, allVars);
537 for (MemoryKind memoryKind : allMemoryKinds())
538 {
539 if ((fgCurMemoryDef & memoryKindSet(memoryKind)) != 0)
540 {
541 printf(" + %s", memoryKindNames[memoryKind]);
542 }
543 if ((fgCurMemoryHavoc & memoryKindSet(memoryKind)) != 0)
544 {
545 printf("*");
546 }
547 }
548 printf("\n\n");
549 }
550#endif // DEBUG
551
552 VarSetOps::Assign(this, block->bbVarUse, fgCurUseSet);
553 VarSetOps::Assign(this, block->bbVarDef, fgCurDefSet);
554 block->bbMemoryUse = fgCurMemoryUse;
555 block->bbMemoryDef = fgCurMemoryDef;
556 block->bbMemoryHavoc = fgCurMemoryHavoc;
557
558 /* also initialize the IN set, just in case we will do multiple DFAs */
559
560 VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
561 block->bbMemoryLiveIn = emptyMemoryKindSet;
562 }
563
564 noway_assert(livenessVarEpoch == GetCurLVEpoch());
565#ifdef DEBUG
566 if (verbose)
567 {
568 printf("** Memory liveness computed, GcHeap states and ByrefExposed states %s\n",
569 (byrefStatesMatchGcHeapStates ? "match" : "diverge"));
570 }
571#endif // DEBUG
572}
573
574// Helper functions to mark variables live over their entire scope
575
576void Compiler::fgBeginScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
577{
578 assert(var);
579
580 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
581
582 if (lclVarDsc1->lvTracked)
583 {
584 VarSetOps::AddElemD(this, *inScope, lclVarDsc1->lvVarIndex);
585 }
586}
587
588void Compiler::fgEndScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
589{
590 assert(var);
591
592 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
593
594 if (lclVarDsc1->lvTracked)
595 {
596 VarSetOps::RemoveElemD(this, *inScope, lclVarDsc1->lvVarIndex);
597 }
598}
599
600/*****************************************************************************/
601
602void Compiler::fgMarkInScope(BasicBlock* block, VARSET_VALARG_TP inScope)
603{
604#ifdef DEBUG
605 if (verbose)
606 {
607 printf("Scope info: block " FMT_BB " marking in scope: ", block->bbNum);
608 dumpConvertedVarSet(this, inScope);
609 printf("\n");
610 }
611#endif // DEBUG
612
    /* Record which vars are artificially kept alive for debugging */
614
615 VarSetOps::Assign(this, block->bbScope, inScope);
616
617 /* Being in scope implies a use of the variable. Add the var to bbVarUse
618 so that redoing fgLiveVarAnalysis() will work correctly */
619
620 VarSetOps::UnionD(this, block->bbVarUse, inScope);
621
    /* Artificially mark all vars in scope as alive */
623
624 VarSetOps::UnionD(this, block->bbLiveIn, inScope);
625 VarSetOps::UnionD(this, block->bbLiveOut, inScope);
626}
627
628void Compiler::fgUnmarkInScope(BasicBlock* block, VARSET_VALARG_TP unmarkScope)
629{
630#ifdef DEBUG
631 if (verbose)
632 {
633 printf("Scope info: block " FMT_BB " UNmarking in scope: ", block->bbNum);
634 dumpConvertedVarSet(this, unmarkScope);
635 printf("\n");
636 }
637#endif // DEBUG
638
639 assert(VarSetOps::IsSubset(this, unmarkScope, block->bbScope));
640
641 VarSetOps::DiffD(this, block->bbScope, unmarkScope);
642 VarSetOps::DiffD(this, block->bbVarUse, unmarkScope);
643 VarSetOps::DiffD(this, block->bbLiveIn, unmarkScope);
644 VarSetOps::DiffD(this, block->bbLiveOut, unmarkScope);
645}
646
647#ifdef DEBUG
648
649void Compiler::fgDispDebugScopes()
650{
651 printf("\nDebug scopes:\n");
652
653 BasicBlock* block;
654 for (block = fgFirstBB; block; block = block->bbNext)
655 {
656 printf(FMT_BB ": ", block->bbNum);
657 dumpConvertedVarSet(this, block->bbScope);
658 printf("\n");
659 }
660}
661
662#endif // DEBUG
663
664/*****************************************************************************
665 *
666 * Mark variables live across their entire scope.
667 */
668
669#if FEATURE_EH_FUNCLETS
670
671void Compiler::fgExtendDbgScopes()
672{
673 compResetScopeLists();
674
#ifdef DEBUG
    if (verbose)
    {
        printf("\nMarking vars alive over their entire scope:\n\n");
        compDispScopeLists();
    }
#endif // DEBUG
686
687 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
688
689 // Mark all tracked LocalVars live over their scope - walk the blocks
690 // keeping track of the current life, and assign it to the blocks.
691
692 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
693 {
694 // If we get to a funclet, reset the scope lists and start again, since the block
695 // offsets will be out of order compared to the previous block.
696
697 if (block->bbFlags & BBF_FUNCLET_BEG)
698 {
699 compResetScopeLists();
700 VarSetOps::ClearD(this, inScope);
701 }
702
703 // Process all scopes up to the current offset
704
705 if (block->bbCodeOffs != BAD_IL_OFFSET)
706 {
707 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
708 }
709
710 // Assign the current set of variables that are in scope to the block variables tracking this.
711
712 fgMarkInScope(block, inScope);
713 }
714
715#ifdef DEBUG
716 if (verbose)
717 {
718 fgDispDebugScopes();
719 }
720#endif // DEBUG
721}
722
723#else // !FEATURE_EH_FUNCLETS
724
725void Compiler::fgExtendDbgScopes()
726{
727 compResetScopeLists();
728
729#ifdef DEBUG
730 if (verbose)
731 {
        printf("\nMarking vars alive over their entire scope:\n\n");
733 compDispScopeLists();
734 }
735#endif // DEBUG
736
737 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
738 compProcessScopesUntil(0, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
739
740 IL_OFFSET lastEndOffs = 0;
741
742 // Mark all tracked LocalVars live over their scope - walk the blocks
743 // keeping track of the current life, and assign it to the blocks.
744
745 BasicBlock* block;
746 for (block = fgFirstBB; block; block = block->bbNext)
747 {
748 // Find scopes becoming alive. If there is a gap in the instr
749 // sequence, we need to process any scopes on those missing offsets.
750
751 if (block->bbCodeOffs != BAD_IL_OFFSET)
752 {
753 if (lastEndOffs != block->bbCodeOffs)
754 {
755 noway_assert(lastEndOffs < block->bbCodeOffs);
756
757 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife,
758 &Compiler::fgEndScopeLife);
759 }
760 else
761 {
762 while (VarScopeDsc* varScope = compGetNextEnterScope(block->bbCodeOffs))
763 {
764 fgBeginScopeLife(&inScope, varScope);
765 }
766 }
767 }
768
769 // Assign the current set of variables that are in scope to the block variables tracking this.
770
771 fgMarkInScope(block, inScope);
772
773 // Find scopes going dead.
774
775 if (block->bbCodeOffsEnd != BAD_IL_OFFSET)
776 {
777 VarScopeDsc* varScope;
778 while ((varScope = compGetNextExitScope(block->bbCodeOffsEnd)) != nullptr)
779 {
780 fgEndScopeLife(&inScope, varScope);
781 }
782
783 lastEndOffs = block->bbCodeOffsEnd;
784 }
785 }
786
787 /* Everything should be out of scope by the end of the method. But if the
788 last BB got removed, then inScope may not be empty. */
789
790 noway_assert(VarSetOps::IsEmpty(this, inScope) || lastEndOffs < info.compILCodeSize);
791}
792
793#endif // !FEATURE_EH_FUNCLETS
794
795/*****************************************************************************
796 *
797 * For debuggable code, we allow redundant assignments to vars
798 * by marking them live over their entire scope.
799 */
800
801void Compiler::fgExtendDbgLifetimes()
802{
803#ifdef DEBUG
804 if (verbose)
805 {
806 printf("*************** In fgExtendDbgLifetimes()\n");
807 }
808#endif // DEBUG
809
810 noway_assert(opts.compDbgCode && (info.compVarScopesCount > 0));
811
812 /*-------------------------------------------------------------------------
813 * Extend the lifetimes over the entire reported scope of the variable.
814 */
815
816 fgExtendDbgScopes();
817
818/*-------------------------------------------------------------------------
819 * Partly update liveness info so that we handle any funky BBF_INTERNAL
820 * blocks inserted out of sequence.
821 */
822
823#ifdef DEBUG
824 if (verbose && 0)
825 {
826 fgDispBBLiveness();
827 }
828#endif
829
830 fgLiveVarAnalysis(true);
831
832 /* For compDbgCode, we prepend an empty BB which will hold the
833 initializations of variables which are in scope at IL offset 0 (but
834 not initialized by the IL code). Since they will currently be
835 marked as live on entry to fgFirstBB, unmark the liveness so that
836 the following code will know to add the initializations. */
837
838 assert(fgFirstBBisScratch());
839
840 VARSET_TP trackedArgs(VarSetOps::MakeEmpty(this));
841
842 for (unsigned argNum = 0; argNum < info.compArgsCount; argNum++)
843 {
844 LclVarDsc* argDsc = lvaTable + argNum;
845 if (argDsc->lvPromoted)
846 {
847 lvaPromotionType promotionType = lvaGetPromotionType(argDsc);
848
849 if (promotionType == PROMOTION_TYPE_INDEPENDENT)
850 {
851 noway_assert(argDsc->lvFieldCnt == 1); // We only handle one field here
852
853 unsigned fieldVarNum = argDsc->lvFieldLclStart;
854 argDsc = lvaTable + fieldVarNum;
855 }
856 }
857 noway_assert(argDsc->lvIsParam);
858 if (argDsc->lvTracked)
859 {
860 noway_assert(!VarSetOps::IsMember(this, trackedArgs, argDsc->lvVarIndex)); // Each arg should define a
861 // different bit.
862 VarSetOps::AddElemD(this, trackedArgs, argDsc->lvVarIndex);
863 }
864 }
865
866 // Don't unmark struct locals, either.
867 VARSET_TP noUnmarkVars(trackedArgs);
868
869 for (unsigned i = 0; i < lvaCount; i++)
870 {
871 LclVarDsc* varDsc = &lvaTable[i];
872 if (varTypeIsStruct(varDsc) && varDsc->lvTracked)
873 {
874 VarSetOps::AddElemD(this, noUnmarkVars, varDsc->lvVarIndex);
875 }
876 }
877 fgUnmarkInScope(fgFirstBB, VarSetOps::Diff(this, fgFirstBB->bbScope, noUnmarkVars));
878
879 /*-------------------------------------------------------------------------
     * As we keep variables artificially alive over their entire scope,
881 * we need to also artificially initialize them if the scope does
882 * not exactly match the real lifetimes, or they will contain
883 * garbage until they are initialized by the IL code.
884 */
885
886 VARSET_TP initVars(VarSetOps::MakeEmpty(this)); // Vars which are artificially made alive
887
888 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
889 {
890 VarSetOps::ClearD(this, initVars);
891
892 switch (block->bbJumpKind)
893 {
894 case BBJ_NONE:
895 PREFIX_ASSUME(block->bbNext != nullptr);
896 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
897 break;
898
899 case BBJ_ALWAYS:
900 case BBJ_EHCATCHRET:
901 case BBJ_EHFILTERRET:
902 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
903 break;
904
905 case BBJ_CALLFINALLY:
906 if (!(block->bbFlags & BBF_RETLESS_CALL))
907 {
908 assert(block->isBBCallAlwaysPair());
909 PREFIX_ASSUME(block->bbNext != nullptr);
910 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
911 }
912 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
913 break;
914
915 case BBJ_COND:
916 PREFIX_ASSUME(block->bbNext != nullptr);
917 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
918 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
919 break;
920
921 case BBJ_SWITCH:
922 {
923 BasicBlock** jmpTab;
924 unsigned jmpCnt;
925
926 jmpCnt = block->bbJumpSwt->bbsCount;
927 jmpTab = block->bbJumpSwt->bbsDstTab;
928
929 do
930 {
931 VarSetOps::UnionD(this, initVars, (*jmpTab)->bbScope);
932 } while (++jmpTab, --jmpCnt);
933 }
934 break;
935
936 case BBJ_EHFINALLYRET:
937 case BBJ_RETURN:
938 break;
939
940 case BBJ_THROW:
941 /* We don't have to do anything as we mark
942 * all vars live on entry to a catch handler as
943 * volatile anyway
944 */
945 break;
946
947 default:
948 noway_assert(!"Unexpected bbJumpKind");
949 break;
950 }
951
952 /* If the var is already live on entry to the current BB,
953 we would have already initialized it. So ignore bbLiveIn */
954
955 VarSetOps::DiffD(this, initVars, block->bbLiveIn);
956
957 /* Add statements initializing the vars, if there are any to initialize */
958 unsigned blockWeight = block->getBBWeight(this);
959
960 VarSetOps::Iter iter(this, initVars);
961 unsigned varIndex = 0;
962 while (iter.NextElem(&varIndex))
963 {
964 /* Create initialization tree */
965
966 unsigned varNum = lvaTrackedToVarNum[varIndex];
967 LclVarDsc* varDsc = &lvaTable[varNum];
968 var_types type = varDsc->TypeGet();
969
970 // Don't extend struct lifetimes -- they aren't enregistered, anyway.
971 if (type == TYP_STRUCT)
972 {
973 continue;
974 }
975
976 // If we haven't already done this ...
977 if (!fgLocalVarLivenessDone)
978 {
979 // Create a "zero" node
980 GenTree* zero = gtNewZeroConNode(genActualType(type));
981
982 // Create initialization node
983 if (!block->IsLIR())
984 {
985 GenTree* varNode = gtNewLclvNode(varNum, type);
986 GenTree* initNode = gtNewAssignNode(varNode, zero);
987
988 // Create a statement for the initializer, sequence it, and append it to the current BB.
989 GenTree* initStmt = gtNewStmt(initNode);
990 gtSetStmtInfo(initStmt);
991 fgSetStmtSeq(initStmt);
992 fgInsertStmtNearEnd(block, initStmt);
993 }
994 else
995 {
996 GenTree* store =
997 new (this, GT_STORE_LCL_VAR) GenTreeLclVar(GT_STORE_LCL_VAR, type, varNum, BAD_IL_OFFSET);
998 store->gtOp.gtOp1 = zero;
999 store->gtFlags |= (GTF_VAR_DEF | GTF_ASG);
1000
1001 LIR::Range initRange = LIR::EmptyRange();
1002 initRange.InsertBefore(nullptr, zero, store);
1003
1004#if !defined(_TARGET_64BIT_)
1005 DecomposeLongs::DecomposeRange(this, blockWeight, initRange);
1006#endif // !defined(_TARGET_64BIT_)
1007 m_pLowering->LowerRange(block, initRange);
1008
1009 // Naively inserting the initializer at the end of the block may add code after the block's
1010 // terminator, in which case the inserted code will never be executed (and the IR for the
1011 // block will be invalid). Use `LIR::InsertBeforeTerminator` to avoid this problem.
1012 LIR::InsertBeforeTerminator(block, std::move(initRange));
1013 }
1014
1015#ifdef DEBUG
1016 if (verbose)
1017 {
1018 printf("Created zero-init of V%02u in " FMT_BB "\n", varNum, block->bbNum);
1019 }
1020#endif // DEBUG
1021 block->bbFlags |= BBF_CHANGED; // indicates that the contents of the block have changed.
1022 }
1023
1024 /* Update liveness information so that redoing fgLiveVarAnalysis()
1025 will work correctly if needed */
1026
1027 VarSetOps::AddElemD(this, block->bbVarDef, varIndex);
1028 VarSetOps::AddElemD(this, block->bbLiveOut, varIndex);
1029 }
1030 }
1031
    // raMarkStkVars() reserves stack space for unused variables (which need
    // to be initialized). However, arguments don't need to be initialized.
    // So just ensure that they don't have a zero ref count.
1035
1036 unsigned lclNum = 0;
1037 for (LclVarDsc *varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
1038 {
1039 if (lclNum >= info.compArgsCount)
1040 {
1041 break; // early exit for loop
1042 }
1043
1044 if (varDsc->lvIsRegArg)
1045 {
1046 varDsc->lvImplicitlyReferenced = true;
1047 }
1048 }
1049
1050#ifdef DEBUG
1051 if (verbose)
1052 {
1053 printf("\nBB liveness after fgExtendDbgLifetimes():\n\n");
1054 fgDispBBLiveness();
1055 printf("\n");
1056 }
1057#endif // DEBUG
1058}
1059
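//------------------------------------------------------------------------
// fgGetHandlerLiveVars: compute the set of variables that must be treated as
//    live in "block" because an exception raised there may transfer control
//    to the filters/handlers of the enclosing EH regions.
//
// Arguments:
//    block - The block whose potential exception flow is being considered.
//
// Returns:
//    The union of the live-in sets of the filters and handlers of every
//    EH region whose try body includes "block".
//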
1060VARSET_VALRET_TP Compiler::fgGetHandlerLiveVars(BasicBlock* block)
1061{
1062 noway_assert(block);
1063 noway_assert(ehBlockHasExnFlowDsc(block));
1064
1065 VARSET_TP liveVars(VarSetOps::MakeEmpty(this));
1066 EHblkDsc* HBtab = ehGetBlockExnFlowDsc(block);
1067
1068 do
1069 {
1070 /* Either we enter the filter first or the catch/finally */
1071
1072 if (HBtab->HasFilter())
1073 {
1074 VarSetOps::UnionD(this, liveVars, HBtab->ebdFilter->bbLiveIn);
1075#if FEATURE_EH_FUNCLETS
            // The EH subsystem can trigger a stack walk after the filter
            // has returned, but before invoking the handler, and the only
            // IP address reported from this method will be the original
            // faulting instruction. Thus everything in the try body
            // must report as live any variables that are live-out of the filter
            // (which are the same as those live-in to the handler).
1082 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1083#endif // FEATURE_EH_FUNCLETS
1084 }
1085 else
1086 {
1087 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1088 }
1089
        /* If we have nested trys, ebdEnclosingTryIndex will provide them */
1091 noway_assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) ||
1092 (HBtab->ebdEnclosingTryIndex > ehGetIndex(HBtab)));
1093
1094 unsigned outerIndex = HBtab->ebdEnclosingTryIndex;
1095 if (outerIndex == EHblkDsc::NO_ENCLOSING_INDEX)
1096 {
1097 break;
1098 }
1099 HBtab = ehGetDsc(outerIndex);
1100
1101 } while (true);
1102
1103 return liveVars;
1104}
1105
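//------------------------------------------------------------------------
// LiveVarAnalysis: implements the inter-block backward dataflow computation of
//    bbLiveIn/bbLiveOut (and the memory liveness sets) for all basic blocks,
//    iterating to a fixed point when back edges may be present.
//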
1106class LiveVarAnalysis
1107{
1108 Compiler* m_compiler;
1109
1110 bool m_hasPossibleBackEdge;
1111
1112 unsigned m_memoryLiveIn;
1113 unsigned m_memoryLiveOut;
1114 VARSET_TP m_liveIn;
1115 VARSET_TP m_liveOut;
1116
1117 LiveVarAnalysis(Compiler* compiler)
1118 : m_compiler(compiler)
1119 , m_hasPossibleBackEdge(false)
1120 , m_memoryLiveIn(emptyMemoryKindSet)
1121 , m_memoryLiveOut(emptyMemoryKindSet)
1122 , m_liveIn(VarSetOps::MakeEmpty(compiler))
1123 , m_liveOut(VarSetOps::MakeEmpty(compiler))
1124 {
1125 }
1126
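    // PerBlockAnalysis: recompute the live-in/live-out sets for a single block from its
    // successors' live-in sets (plus EH and keep-alive requirements). Returns true if the
    // block's live-in set (or memory live-in set) changed.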
1127 bool PerBlockAnalysis(BasicBlock* block, bool updateInternalOnly, bool keepAliveThis)
1128 {
1129 /* Compute the 'liveOut' set */
1130 VarSetOps::ClearD(m_compiler, m_liveOut);
1131 m_memoryLiveOut = emptyMemoryKindSet;
1132 if (block->endsWithJmpMethod(m_compiler))
1133 {
1134 // A JMP uses all the arguments, so mark them all
1135 // as live at the JMP instruction
1136 //
1137 const LclVarDsc* varDscEndParams = m_compiler->lvaTable + m_compiler->info.compArgsCount;
1138 for (LclVarDsc* varDsc = m_compiler->lvaTable; varDsc < varDscEndParams; varDsc++)
1139 {
1140 noway_assert(!varDsc->lvPromoted);
1141 if (varDsc->lvTracked)
1142 {
1143 VarSetOps::AddElemD(m_compiler, m_liveOut, varDsc->lvVarIndex);
1144 }
1145 }
1146 }
1147
1148 // Additionally, union in all the live-in tracked vars of successors.
1149 for (BasicBlock* succ : block->GetAllSuccs(m_compiler))
1150 {
1151 VarSetOps::UnionD(m_compiler, m_liveOut, succ->bbLiveIn);
1152 m_memoryLiveOut |= succ->bbMemoryLiveIn;
1153 if (succ->bbNum <= block->bbNum)
1154 {
1155 m_hasPossibleBackEdge = true;
1156 }
1157 }
1158
        /* For lvaKeepAliveAndReportThis methods, "this" has to be kept alive everywhere.
           Note that a function may end in a throw or an infinite loop (as opposed to a return);
           "this" has to be alive everywhere even in such methods. */
1162
1163 if (keepAliveThis)
1164 {
1165 VarSetOps::AddElemD(m_compiler, m_liveOut, m_compiler->lvaTable[m_compiler->info.compThisArg].lvVarIndex);
1166 }
1167
1168 /* Compute the 'm_liveIn' set */
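        // Standard backward transfer function: liveIn = bbVarUse | (liveOut & ~bbVarDef).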
1169 VarSetOps::LivenessD(m_compiler, m_liveIn, block->bbVarDef, block->bbVarUse, m_liveOut);
1170
1171 // Even if block->bbMemoryDef is set, we must assume that it doesn't kill memory liveness from m_memoryLiveOut,
1172 // since (without proof otherwise) the use and def may touch different memory at run-time.
1173 m_memoryLiveIn = m_memoryLiveOut | block->bbMemoryUse;
1174
1175 /* Can exceptions from this block be handled (in this function)? */
1176
1177 if (m_compiler->ehBlockHasExnFlowDsc(block))
1178 {
1179 const VARSET_TP& liveVars(m_compiler->fgGetHandlerLiveVars(block));
1180
1181 VarSetOps::UnionD(m_compiler, m_liveIn, liveVars);
1182 VarSetOps::UnionD(m_compiler, m_liveOut, liveVars);
1183 }
1184
1185 /* Has there been any change in either live set? */
1186
1187 bool liveInChanged = !VarSetOps::Equal(m_compiler, block->bbLiveIn, m_liveIn);
1188 if (liveInChanged || !VarSetOps::Equal(m_compiler, block->bbLiveOut, m_liveOut))
1189 {
1190 if (updateInternalOnly)
1191 {
1192 // Only "extend" liveness over BBF_INTERNAL blocks
1193
1194 noway_assert(block->bbFlags & BBF_INTERNAL);
1195
1196 liveInChanged = !VarSetOps::IsSubset(m_compiler, m_liveIn, block->bbLiveIn);
1197 if (liveInChanged || !VarSetOps::IsSubset(m_compiler, m_liveOut, block->bbLiveOut))
1198 {
1199#ifdef DEBUG
1200 if (m_compiler->verbose)
1201 {
1202 printf("Scope info: block " FMT_BB " LiveIn+ ", block->bbNum);
1203 dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveIn, block->bbLiveIn));
1204 printf(", LiveOut+ ");
1205 dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveOut, block->bbLiveOut));
1206 printf("\n");
1207 }
1208#endif // DEBUG
1209
1210 VarSetOps::UnionD(m_compiler, block->bbLiveIn, m_liveIn);
1211 VarSetOps::UnionD(m_compiler, block->bbLiveOut, m_liveOut);
1212 }
1213 }
1214 else
1215 {
1216 VarSetOps::Assign(m_compiler, block->bbLiveIn, m_liveIn);
1217 VarSetOps::Assign(m_compiler, block->bbLiveOut, m_liveOut);
1218 }
1219 }
1220
1221 const bool memoryLiveInChanged = (block->bbMemoryLiveIn != m_memoryLiveIn);
1222 if (memoryLiveInChanged || (block->bbMemoryLiveOut != m_memoryLiveOut))
1223 {
1224 block->bbMemoryLiveIn = m_memoryLiveIn;
1225 block->bbMemoryLiveOut = m_memoryLiveOut;
1226 }
1227
1228 return liveInChanged || memoryLiveInChanged;
1229 }
1230
1231 void Run(bool updateInternalOnly)
1232 {
1233 const bool keepAliveThis =
1234 m_compiler->lvaKeepAliveAndReportThis() && m_compiler->lvaTable[m_compiler->info.compThisArg].lvTracked;
1235
1236 /* Live Variable Analysis - Backward dataflow */
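        // Repeatedly sweep the blocks in reverse order, recomputing each block's sets from
        // its successors, until a full pass produces no change (a single reverse pass
        // suffices when no back edge is possible).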
1237 bool changed;
1238 do
1239 {
1240 changed = false;
1241
1242 /* Visit all blocks and compute new data flow values */
1243
1244 VarSetOps::ClearD(m_compiler, m_liveIn);
1245 VarSetOps::ClearD(m_compiler, m_liveOut);
1246
1247 m_memoryLiveIn = emptyMemoryKindSet;
1248 m_memoryLiveOut = emptyMemoryKindSet;
1249
1250 for (BasicBlock* block = m_compiler->fgLastBB; block; block = block->bbPrev)
1251 {
                // Sometimes block numbers are not monotonically increasing, which
                // would cause us to fail to identify back edges.
1254 if (block->bbNext && block->bbNext->bbNum <= block->bbNum)
1255 {
1256 m_hasPossibleBackEdge = true;
1257 }
1258
1259 if (updateInternalOnly)
1260 {
1261 /* Only update BBF_INTERNAL blocks as they may be
1262 syntactically out of sequence. */
1263
1264 noway_assert(m_compiler->opts.compDbgCode && (m_compiler->info.compVarScopesCount > 0));
1265
1266 if (!(block->bbFlags & BBF_INTERNAL))
1267 {
1268 continue;
1269 }
1270 }
1271
1272 if (PerBlockAnalysis(block, updateInternalOnly, keepAliveThis))
1273 {
1274 changed = true;
1275 }
1276 }
            // If there is no way we could have processed a block without first seeing
            // all of its predecessors, then there is no need to iterate.
1279 if (!m_hasPossibleBackEdge)
1280 {
1281 break;
1282 }
1283 } while (changed);
1284 }
1285
1286public:
1287 static void Run(Compiler* compiler, bool updateInternalOnly)
1288 {
1289 LiveVarAnalysis analysis(compiler);
1290 analysis.Run(updateInternalOnly);
1291 }
1292};
1293
1294/*****************************************************************************
1295 *
1296 * This is the classic algorithm for Live Variable Analysis.
1297 * If updateInternalOnly==true, only update BBF_INTERNAL blocks.
1298 */
1299
1300void Compiler::fgLiveVarAnalysis(bool updateInternalOnly)
1301{
1302 if (!backendRequiresLocalVarLifetimes())
1303 {
1304 return;
1305 }
1306
1307 LiveVarAnalysis::Run(this, updateInternalOnly);
1308
1309#ifdef DEBUG
1310 if (verbose && !updateInternalOnly)
1311 {
1312 printf("\nBB liveness after fgLiveVarAnalysis():\n\n");
1313 fgDispBBLiveness();
1314 }
1315#endif // DEBUG
1316}
1317
1318/*****************************************************************************
1319 * For updating liveset during traversal AFTER fgComputeLife has completed
1320 */
1321
1322VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree)
1323{
1324 VARSET_TP newLiveSet(VarSetOps::MakeCopy(this, liveSet));
1325 assert(fgLocalVarLivenessDone == true);
1326 GenTree* lclVarTree = tree; // After the tests below, "lclVarTree" will be the local variable.
1327 if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD ||
1328 (lclVarTree = fgIsIndirOfAddrOfLocal(tree)) != nullptr)
1329 {
1330 const VARSET_TP& varBits(fgGetVarBits(lclVarTree));
1331
1332 if (!VarSetOps::IsEmpty(this, varBits))
1333 {
1334 if (tree->gtFlags & GTF_VAR_DEATH)
1335 {
1336 // We'd like to be able to assert the following, however if we are walking
1337 // through a qmark/colon tree, we may encounter multiple last-use nodes.
1338 // assert (VarSetOps::IsSubset(this, varBits, newLiveSet));
1339
                // We maintain the invariant that if the lclVarTree is a promoted struct, but
                // the lookup fails, then all the field vars (i.e., "varBits") are dying.
1342 VARSET_TP* deadVarBits = nullptr;
1343 if (varTypeIsStruct(lclVarTree) && LookupPromotedStructDeathVars(lclVarTree, &deadVarBits))
1344 {
1345 VarSetOps::DiffD(this, newLiveSet, *deadVarBits);
1346 }
1347 else
1348 {
1349 VarSetOps::DiffD(this, newLiveSet, varBits);
1350 }
1351 }
1352 else if ((tree->gtFlags & GTF_VAR_DEF) != 0 && (tree->gtFlags & GTF_VAR_USEASG) == 0)
1353 {
1354 assert(tree == lclVarTree); // LDOBJ case should only be a use.
1355
                // This shouldn't be in newLiveSet, unless this is debug code, in which
                // case we keep vars live everywhere, OR it is address-exposed, OR this block
                // is part of a try block, in which case it may be live at the handler.
                // Could add a check that, if it's in newLiveSet, it's also in
                // fgGetHandlerLiveVars(compCurBB), but that seems excessive.
1361 //
1362 assert(VarSetOps::IsEmptyIntersection(this, newLiveSet, varBits) || opts.compDbgCode ||
1363 lvaTable[tree->gtLclVarCommon.gtLclNum].lvAddrExposed ||
1364 (compCurBB != nullptr && ehBlockHasExnFlowDsc(compCurBB)));
1365 VarSetOps::UnionD(this, newLiveSet, varBits);
1366 }
1367 }
1368 }
1369 return newLiveSet;
1370}
1371
1372//------------------------------------------------------------------------
1373// Compiler::fgComputeLifeCall: compute the changes to local var liveness
1374// due to a GT_CALL node.
1375//
1376// Arguments:
1377// life - The live set that is being computed.
1378// call - The call node in question.
1379//
1380void Compiler::fgComputeLifeCall(VARSET_TP& life, GenTreeCall* call)
1381{
1382 assert(call != nullptr);
1383
    // If this is a tail-call and we have any unmanaged p/invoke calls in
    // the method, then we're going to run the p/invoke epilog.
    // So we mark the FrameRoot as used by this instruction.
    // This ensures that this variable is kept alive at the tail-call.
1388 if (call->IsTailCall() && info.compCallUnmanaged)
1389 {
1390 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
1391 if (!opts.ShouldUsePInvokeHelpers())
1392 {
1393 /* Get the TCB local and make it live */
1394
1395 noway_assert(info.compLvFrameListRoot < lvaCount);
1396
1397 LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
1398
1399 if (frameVarDsc->lvTracked)
1400 {
1401 VarSetOps::AddElemD(this, life, frameVarDsc->lvVarIndex);
1402 }
1403 }
1404 }
1405
1406 // TODO: we should generate the code for saving to/restoring
1407 // from the inlined N/Direct frame instead.
1408
1409 /* Is this call to unmanaged code? */
1410 if (call->IsUnmanaged())
1411 {
1412 /* Get the TCB local and make it live */
1413 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
1414 if (!opts.ShouldUsePInvokeHelpers())
1415 {
1416 noway_assert(info.compLvFrameListRoot < lvaCount);
1417
1418 LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
1419
1420 if (frameVarDsc->lvTracked)
1421 {
1422 unsigned varIndex = frameVarDsc->lvVarIndex;
1423 noway_assert(varIndex < lvaTrackedCount);
1424
1425 // Is the variable already known to be alive?
1426 //
1427 if (VarSetOps::IsMember(this, life, varIndex))
1428 {
1429 // Since we may call this multiple times, clear the GTF_CALL_M_FRAME_VAR_DEATH if set.
1430 //
1431 call->gtCallMoreFlags &= ~GTF_CALL_M_FRAME_VAR_DEATH;
1432 }
1433 else
1434 {
                    // The variable is just coming to life. Since this is a backwards
                    // walk of the trees, this change in liveness marks a 'last-use'.
1438 //
1439 VarSetOps::AddElemD(this, life, varIndex);
1440 call->gtCallMoreFlags |= GTF_CALL_M_FRAME_VAR_DEATH;
1441 }
1442 }
1443 }
1444 }
1445}
1446
1447//------------------------------------------------------------------------
1448// Compiler::fgComputeLifeTrackedLocalUse:
1449// Compute the changes to local var liveness due to a use of a tracked local var.
1450//
1451// Arguments:
1452// life - The live set that is being computed.
1453// varDsc - The LclVar descriptor for the variable being used or defined.
1454// node - The node that is defining the lclVar.
1455void Compiler::fgComputeLifeTrackedLocalUse(VARSET_TP& life, LclVarDsc& varDsc, GenTreeLclVarCommon* node)
1456{
1457 assert(node != nullptr);
1458 assert((node->gtFlags & GTF_VAR_DEF) == 0);
1459 assert(varDsc.lvTracked);
1460
1461 const unsigned varIndex = varDsc.lvVarIndex;
1462
1463 // Is the variable already known to be alive?
1464 if (VarSetOps::IsMember(this, life, varIndex))
1465 {
1466 // Since we may do liveness analysis multiple times, clear the GTF_VAR_DEATH if set.
1467 node->gtFlags &= ~GTF_VAR_DEATH;
1468 return;
1469 }
1470
1471#ifdef DEBUG
1472 if (verbose && 0)
1473 {
        printf("Ref V%02u,T%02u at ", node->gtLclNum, varIndex);
1475 printTreeID(node);
1476 printf(" life %s -> %s\n", VarSetOps::ToString(this, life),
1477 VarSetOps::ToString(this, VarSetOps::AddElem(this, life, varIndex)));
1478 }
1479#endif // DEBUG
1480
1481 // The variable is being used, and it is not currently live.
1482 // So the variable is just coming to life
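    // Since liveness is computed in a backward walk, a variable that first becomes live here
    // is being seen at its last use in forward execution order; record that with GTF_VAR_DEATH.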
1483 node->gtFlags |= GTF_VAR_DEATH;
1484 VarSetOps::AddElemD(this, life, varIndex);
1485}
1486
1487//------------------------------------------------------------------------
1488// Compiler::fgComputeLifeTrackedLocalDef:
1489// Compute the changes to local var liveness due to a def of a tracked local var and return `true` if the def is a
1490// dead store.
1491//
1492// Arguments:
1493// life - The live set that is being computed.
1494// keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1495// varDsc - The LclVar descriptor for the variable being used or defined.
1496// node - The node that is defining the lclVar.
1497//
1498// Returns:
1499// `true` if the def is a dead store; `false` otherwise.
1500bool Compiler::fgComputeLifeTrackedLocalDef(VARSET_TP& life,
1501 VARSET_VALARG_TP keepAliveVars,
1502 LclVarDsc& varDsc,
1503 GenTreeLclVarCommon* node)
1504{
1505 assert(node != nullptr);
1506 assert((node->gtFlags & GTF_VAR_DEF) != 0);
1507 assert(varDsc.lvTracked);
1508
1509 const unsigned varIndex = varDsc.lvVarIndex;
1510 if (VarSetOps::IsMember(this, life, varIndex))
1511 {
1512 // The variable is live
1513 if ((node->gtFlags & GTF_VAR_USEASG) == 0)
1514 {
1515 // Remove the variable from the live set if it is not in the keepalive set.
1516 if (!VarSetOps::IsMember(this, keepAliveVars, varIndex))
1517 {
1518 VarSetOps::RemoveElemD(this, life, varIndex);
1519 }
1520#ifdef DEBUG
1521 if (verbose && 0)
1522 {
1523 printf("Def V%02u,T%02u at ", node->gtLclNum, varIndex);
1524 printTreeID(node);
1525 printf(" life %s -> %s\n",
1526 VarSetOps::ToString(this,
1527 VarSetOps::Union(this, life, VarSetOps::MakeSingleton(this, varIndex))),
1528 VarSetOps::ToString(this, life));
1529 }
1530#endif // DEBUG
1531 }
1532 }
1533 else
1534 {
1535 // Dead store
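        // The variable is not live after this def (it is absent from "life" in this backward
        // walk), so the value being stored is never read.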
1536 node->gtFlags |= GTF_VAR_DEATH;
1537
1538 if (!opts.MinOpts())
1539 {
1540 // keepAliveVars always stay alive
1541 noway_assert(!VarSetOps::IsMember(this, keepAliveVars, varIndex));
1542
1543 // Do not consider this store dead if the target local variable represents
1544 // a promoted struct field of an address exposed local or if the address
1545 // of the variable has been exposed. Improved alias analysis could allow
1546 // stores to these sorts of variables to be removed at the cost of compile
1547 // time.
1548 return !varDsc.lvAddrExposed && !(varDsc.lvIsStructField && lvaTable[varDsc.lvParentLcl].lvAddrExposed);
1549 }
1550 }
1551
1552 return false;
1553}
1554
1555//------------------------------------------------------------------------
1556// Compiler::fgComputeLifeUntrackedLocal:
1557// Compute the changes to local var liveness due to a use or a def of an untracked local var.
1558//
1559// Note:
1560// It may seem a bit counter-intuitive that a change to an untracked lclVar could affect the liveness of tracked
1561// lclVars. In theory, this could happen with promoted (especially dependently-promoted) structs: in these cases,
1562// a use or def of the untracked struct var is treated as a use or def of any of its component fields that are
1563// tracked.
1564//
1565// Arguments:
1566// life - The live set that is being computed.
1567// keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1568// varDsc - The LclVar descriptor for the variable being used or defined.
1569// lclVarNode - The node that corresponds to the local var def or use.
1570void Compiler::fgComputeLifeUntrackedLocal(VARSET_TP& life,
1571 VARSET_VALARG_TP keepAliveVars,
1572 LclVarDsc& varDsc,
1573 GenTreeLclVarCommon* lclVarNode)
1574{
1575 assert(lclVarNode != nullptr);
1576
1577 if (!varTypeIsStruct(varDsc.lvType) || (lvaGetPromotionType(&varDsc) == PROMOTION_TYPE_NONE))
1578 {
1579 return;
1580 }
1581
1582 VARSET_TP varBit(VarSetOps::MakeEmpty(this));
1583
1584 for (unsigned i = varDsc.lvFieldLclStart; i < varDsc.lvFieldLclStart + varDsc.lvFieldCnt; ++i)
1585 {
1586#if !defined(_TARGET_64BIT_)
1587 if (!varTypeIsLong(lvaTable[i].lvType) || !lvaTable[i].lvPromoted)
1588#endif // !defined(_TARGET_64BIT_)
1589 {
1590 noway_assert(lvaTable[i].lvIsStructField);
1591 }
1592 if (lvaTable[i].lvTracked)
1593 {
1594 const unsigned varIndex = lvaTable[i].lvVarIndex;
1595 noway_assert(varIndex < lvaTrackedCount);
1596 VarSetOps::AddElemD(this, varBit, varIndex);
1597 }
1598 }
1599 if (lclVarNode->gtFlags & GTF_VAR_DEF)
1600 {
1601 VarSetOps::DiffD(this, varBit, keepAliveVars);
1602 VarSetOps::DiffD(this, life, varBit);
1603 return;
1604 }
1605 // This is a use.
1606
1607 // Are the variables already known to be alive?
1608 if (VarSetOps::IsSubset(this, varBit, life))
1609 {
1610 lclVarNode->gtFlags &= ~GTF_VAR_DEATH; // Since we may now call this multiple times, reset if live.
1611 return;
1612 }
1613
1614 // Some variables are being used, and they are not currently live.
1615 // So they are just coming to life, in the backwards traversal; in a forwards
1616 // traversal, one or more are dying. Mark this.
1617
1618 lclVarNode->gtFlags |= GTF_VAR_DEATH;
1619
1620 // Are all the variables becoming alive (in the backwards traversal), or just a subset?
1621 if (!VarSetOps::IsEmptyIntersection(this, varBit, life))
1622 {
        // Only a subset of the variables become live; we must record that subset.
1624 // (Lack of an entry for "lclVarNode" will be considered to imply all become dead in the
1625 // forward traversal.)
1626 VARSET_TP* deadVarSet = new (this, CMK_bitset) VARSET_TP;
1627 VarSetOps::AssignNoCopy(this, *deadVarSet, VarSetOps::Diff(this, varBit, life));
1628 GetPromotedStructDeathVars()->Set(lclVarNode, deadVarSet);
1629 }
1630
1631 // In any case, all the field vars are now live (in the backwards traversal).
1632 VarSetOps::UnionD(this, life, varBit);
1633}
1634
1635//------------------------------------------------------------------------
1636// Compiler::fgComputeLifeLocal:
1637// Compute the changes to local var liveness due to a use or a def of a local var and indicates whether the use/def
1638// is a dead store.
1639//
1640// Arguments:
1641// life - The live set that is being computed.
1642// keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1643// lclVarNode - The node that corresponds to the local var def or use.
1644//
1645// Returns:
1646// `true` if the local var node corresponds to a dead store; `false` otherwise.
1647bool Compiler::fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, GenTree* lclVarNode)
1648{
1649 unsigned lclNum = lclVarNode->gtLclVarCommon.gtLclNum;
1650
1651 assert(lclNum < lvaCount);
1652 LclVarDsc& varDsc = lvaTable[lclNum];
1653
1654 // Is this a tracked variable?
1655 if (varDsc.lvTracked)
1656 {
1657 /* Is this a definition or use? */
1658 if (lclVarNode->gtFlags & GTF_VAR_DEF)
1659 {
1660 return fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon());
1661 }
1662 else
1663 {
1664 fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode->AsLclVarCommon());
1665 }
1666 }
1667 else
1668 {
1669 fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon());
1670 }
1671 return false;
1672}
1673
1674/*****************************************************************************
1675 *
1676 * Compute the set of live variables at each node in a given statement
1677 * or subtree of a statement moving backward from startNode to endNode
1678 */
1679
1680void Compiler::fgComputeLife(VARSET_TP& life,
1681 GenTree* startNode,
1682 GenTree* endNode,
1683 VARSET_VALARG_TP volatileVars,
1684 bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
1685{
1686 GenTree* tree;
1687
1688 // Don't kill vars in scope
1689 VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope));
1690
1691 noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
1692 noway_assert(compCurStmt->gtOper == GT_STMT);
1693 noway_assert(endNode || (startNode == compCurStmt->gtStmt.gtStmtExpr));
1694
1695 // NOTE: Live variable analysis will not work if you try
1696 // to use the result of an assignment node directly!
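    // We walk the statement's nodes backwards (via gtPrev), from the last node toward the
    // first; this is what makes "coming to life" in this walk equivalent to a last use.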
1697 for (tree = startNode; tree != endNode; tree = tree->gtPrev)
1698 {
1699 AGAIN:
1700 assert(tree->OperGet() != GT_QMARK);
1701
1702 if (tree->gtOper == GT_CALL)
1703 {
1704 fgComputeLifeCall(life, tree->AsCall());
1705 }
1706 else if (tree->OperIsNonPhiLocal() || tree->OperIsLocalAddr())
1707 {
1708 bool isDeadStore = fgComputeLifeLocal(life, keepAliveVars, tree);
1709 if (isDeadStore)
1710 {
1711 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
1712
1713 bool doAgain = false;
1714 if (fgRemoveDeadStore(&tree, varDsc, life, &doAgain, pStmtInfoDirty DEBUGARG(treeModf)))
1715 {
1716 assert(!doAgain);
1717 break;
1718 }
1719
1720 if (doAgain)
1721 {
1722 goto AGAIN;
1723 }
1724 }
1725 }
1726 }
1727}
1728
1729void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALARG_TP volatileVars)
1730{
1731 // Don't kill volatile vars and vars in scope.
1732 VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, block->bbScope));
1733
1734 noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
1735
1736 LIR::Range& blockRange = LIR::AsRange(block);
1737 GenTree* firstNonPhiNode = blockRange.FirstNonPhiNode();
1738 if (firstNonPhiNode == nullptr)
1739 {
1740 return;
1741 }
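    // Walk the block's LIR backwards, stopping before the first non-phi node. Capture "next"
    // up front because the current node may be removed from the range below.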
1742 for (GenTree *node = blockRange.LastNode(), *next = nullptr, *end = firstNonPhiNode->gtPrev; node != end;
1743 node = next)
1744 {
1745 next = node->gtPrev;
1746
1747 bool isDeadStore;
1748 switch (node->OperGet())
1749 {
1750 case GT_CALL:
1751 {
1752 GenTreeCall* const call = node->AsCall();
1753 if (((call->TypeGet() == TYP_VOID) || call->IsUnusedValue()) && !call->HasSideEffects(this))
1754 {
1755 JITDUMP("Removing dead call:\n");
1756 DISPNODE(call);
1757
1758 node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
1759 if (operand->IsValue())
1760 {
1761 operand->SetUnusedValue();
1762 }
1763
1764 // Special-case PUTARG_STK: since this operator is not considered a value, DCE will not remove
1765 // these nodes.
1766 if (operand->OperIs(GT_PUTARG_STK))
1767 {
1768 operand->AsPutArgStk()->gtOp1->SetUnusedValue();
1769 operand->gtBashToNOP();
1770 }
1771
1772 return GenTree::VisitResult::Continue;
1773 });
1774
1775 blockRange.Remove(node);
1776
                    // Removing a call does not affect liveness unless it is a tail call in a
                    // method with P/Invokes or is itself a P/Invoke, in which case it may affect
                    // the liveness of the frame root variable.
1779 if (!opts.MinOpts() && !opts.ShouldUsePInvokeHelpers() &&
1780 ((call->IsTailCall() && info.compCallUnmanaged) || call->IsUnmanaged()) &&
1781 lvaTable[info.compLvFrameListRoot].lvTracked)
1782 {
1783 fgStmtRemoved = true;
1784 }
1785 }
1786 else
1787 {
1788 fgComputeLifeCall(life, call);
1789 }
1790 break;
1791 }
1792
1793 case GT_LCL_VAR:
1794 case GT_LCL_FLD:
1795 {
1796 GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
1797 LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum];
1798
1799 if (node->IsUnusedValue())
1800 {
1801 JITDUMP("Removing dead LclVar use:\n");
1802 DISPNODE(lclVarNode);
1803
1804 blockRange.Delete(this, block, node);
1805 if (varDsc.lvTracked && !opts.MinOpts())
1806 {
1807 fgStmtRemoved = true;
1808 }
1809 }
1810 else if (varDsc.lvTracked)
1811 {
1812 fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode);
1813 }
1814 else
1815 {
1816 fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode);
1817 }
1818 break;
1819 }
1820
1821 case GT_LCL_VAR_ADDR:
1822 case GT_LCL_FLD_ADDR:
1823 if (node->IsUnusedValue())
1824 {
1825 JITDUMP("Removing dead LclVar address:\n");
1826 DISPNODE(node);
1827
1828 const bool isTracked = lvaTable[node->AsLclVarCommon()->gtLclNum].lvTracked;
1829 blockRange.Delete(this, block, node);
1830 if (isTracked && !opts.MinOpts())
1831 {
1832 fgStmtRemoved = true;
1833 }
1834 }
1835 else
1836 {
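                    // The address may feed a STOREIND that defines this local; if that store
                    // turns out to be dead, remove both the store and this address node.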
1837 isDeadStore = fgComputeLifeLocal(life, keepAliveVars, node);
1838 if (isDeadStore)
1839 {
1840 LIR::Use addrUse;
1841 if (blockRange.TryGetUse(node, &addrUse) && (addrUse.User()->OperGet() == GT_STOREIND))
1842 {
                            // Remove the store. DCE will iteratively clean up any unused operands.
1844 GenTreeStoreInd* const store = addrUse.User()->AsStoreInd();
1845
1846 JITDUMP("Removing dead indirect store:\n");
1847 DISPNODE(store);
1848
1849 assert(store->Addr() == node);
1850 blockRange.Delete(this, block, node);
1851
1852 store->Data()->SetUnusedValue();
1853
1854 blockRange.Remove(store);
1855
1856 assert(!opts.MinOpts());
1857 fgStmtRemoved = true;
1858 }
1859 }
1860 }
1861 break;
1862
1863 case GT_STORE_LCL_VAR:
1864 case GT_STORE_LCL_FLD:
1865 {
1866 GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
1867
1868 LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum];
1869 if (varDsc.lvTracked)
1870 {
1871 isDeadStore = fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode);
1872 if (isDeadStore)
1873 {
1874 JITDUMP("Removing dead store:\n");
1875 DISPNODE(lclVarNode);
1876
                        // Remove the store. DCE will iteratively clean up any unused operands.
1878 lclVarNode->gtOp1->SetUnusedValue();
1879
1880 // If the store is marked as a late argument, it is referenced by a call. Instead of removing
1881 // it, bash it to a NOP.
1882 if ((node->gtFlags & GTF_LATE_ARG) != 0)
1883 {
1884 JITDUMP("node is a late arg; replacing with NOP\n");
1885 node->gtBashToNOP();
1886
1887 // NOTE: this is a bit of a hack. We need to keep these nodes around as they are
1888 // referenced by the call, but they're considered side-effect-free non-value-producing
1889 // nodes, so they will be removed if we don't do this.
1890 node->gtFlags |= GTF_ORDER_SIDEEFF;
1891 }
1892 else
1893 {
1894 blockRange.Remove(node);
1895 }
1896
1897 assert(!opts.MinOpts());
1898 fgStmtRemoved = true;
1899 }
1900 }
1901 else
1902 {
1903 fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode);
1904 }
1905 break;
1906 }
1907
1908 case GT_LABEL:
1909 case GT_FTN_ADDR:
1910 case GT_CNS_INT:
1911 case GT_CNS_LNG:
1912 case GT_CNS_DBL:
1913 case GT_CNS_STR:
1914 case GT_CLS_VAR_ADDR:
1915 case GT_PHYSREG:
1916 // These are all side-effect-free leaf nodes.
1917 if (node->IsUnusedValue())
1918 {
1919 JITDUMP("Removing dead node:\n");
1920 DISPNODE(node);
1921
1922 blockRange.Remove(node);
1923 }
1924 break;
1925
1926 case GT_LOCKADD:
1927 case GT_XADD:
1928 case GT_XCHG:
1929 case GT_CMPXCHG:
1930 case GT_MEMORYBARRIER:
1931 case GT_JMP:
1932 case GT_STOREIND:
1933 case GT_ARR_BOUNDS_CHECK:
1934 case GT_STORE_OBJ:
1935 case GT_STORE_BLK:
1936 case GT_STORE_DYN_BLK:
1937#if defined(FEATURE_SIMD)
1938 case GT_SIMD_CHK:
1939#endif // FEATURE_SIMD
1940#ifdef FEATURE_HW_INTRINSICS
1941 case GT_HW_INTRINSIC_CHK:
1942#endif // FEATURE_HW_INTRINSICS
1943 case GT_JCMP:
1944 case GT_CMP:
1945 case GT_JCC:
1946 case GT_JTRUE:
1947 case GT_RETURN:
1948 case GT_SWITCH:
1949 case GT_RETFILT:
1950 case GT_START_NONGC:
1951 case GT_PROF_HOOK:
1952#if !FEATURE_EH_FUNCLETS
1953 case GT_END_LFIN:
1954#endif // !FEATURE_EH_FUNCLETS
1955 case GT_SWITCH_TABLE:
1956 case GT_PINVOKE_PROLOG:
1957 case GT_PINVOKE_EPILOG:
1958 case GT_RETURNTRAP:
1959 case GT_PUTARG_STK:
1960 case GT_IL_OFFSET:
1961#ifdef FEATURE_HW_INTRINSICS
1962 case GT_HWIntrinsic:
1963#endif // FEATURE_HW_INTRINSICS
1964 // Never remove these nodes, as they are always side-effecting.
1965 //
1966 // NOTE: the only side-effect of some of these nodes (GT_CMP, GT_SUB_HI) is a write to the flags
1967 // register.
1968 // Properly modeling this would allow these nodes to be removed.
1969 break;
1970
1971 case GT_NOP:
1972 // NOTE: we need to keep some NOPs around because they are referenced by calls. See the dead store
1973 // removal code above (case GT_STORE_LCL_VAR) for more explanation.
1974 if ((node->gtFlags & GTF_ORDER_SIDEEFF) != 0)
1975 {
1976 break;
1977 }
1978 __fallthrough;
1979
1980 default:
1981 assert(!node->OperIsLocal());
1982 if (!node->IsValue() || node->IsUnusedValue())
1983 {
1984 // We are only interested in avoiding the removal of nodes with direct side-effects
1985 // (as opposed to side effects of their children).
1986 // This default case should never include calls or assignments.
1987 assert(!node->OperRequiresAsgFlag() && !node->OperIs(GT_CALL));
1988 if (!node->gtSetFlags() && !node->OperMayThrow(this))
1989 {
1990 JITDUMP("Removing dead node:\n");
1991 DISPNODE(node);
1992
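                        // Transfer the unused-value status to this node's operands so that they
                        // can be considered for removal on later iterations of the backward walk.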
1993 node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
1994 operand->SetUnusedValue();
1995 return GenTree::VisitResult::Continue;
1996 });
1997
1998 blockRange.Remove(node);
1999 }
2000 }
2001 break;
2002 }
2003 }
2004}
2005
2006// fgRemoveDeadStore - remove a store to a local which has no exposed uses.
2007//
2008// pTree - GenTree** to local, including store-form local or local addr (post-rationalize)
2009// varDsc - var that is being stored to
2010// life - current live tracked vars (maintained as we walk backwards)
2011// doAgain - out parameter, true if we should restart the statement
//   pStmtInfoDirty - out parameter; set to true when the caller must defer recomputing the statement's
//                    cost info until after the reverse walk completes
//   treeModf       - DEBUG-only out parameter; set to true if this function modifies the tree
2013//
2014// Returns: true if we should skip the rest of the statement, false if we should continue
2015
2016bool Compiler::fgRemoveDeadStore(GenTree** pTree,
2017 LclVarDsc* varDsc,
2018 VARSET_VALARG_TP life,
2019 bool* doAgain,
2020 bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
2021{
2022 assert(!compRationalIRForm);
2023
2024 // Vars should have already been checked for address exposure by this point.
2025 assert(!varDsc->lvIsStructField || !lvaTable[varDsc->lvParentLcl].lvAddrExposed);
2026 assert(!varDsc->lvAddrExposed);
2027
2028 GenTree* asgNode = nullptr;
2029 GenTree* rhsNode = nullptr;
2030 GenTree* addrNode = nullptr;
2031 GenTree* const tree = *pTree;
2032
2033 GenTree* nextNode = tree->gtNext;
2034
2035 // First, characterize the lclVarTree and see if we are taking its address.
2036 if (tree->OperIsLocalStore())
2037 {
2038 rhsNode = tree->gtOp.gtOp1;
2039 asgNode = tree;
2040 }
2041 else if (tree->OperIsLocal())
2042 {
2043 if (nextNode == nullptr)
2044 {
2045 return false;
2046 }
2047 if (nextNode->OperGet() == GT_ADDR)
2048 {
2049 addrNode = nextNode;
2050 nextNode = nextNode->gtNext;
2051 }
2052 }
2053 else
2054 {
2055 assert(tree->OperIsLocalAddr());
2056 addrNode = tree;
2057 }
2058
2059 // Next, find the assignment.
2060 if (asgNode == nullptr)
2061 {
2062 if (addrNode == nullptr)
2063 {
2064 asgNode = nextNode;
2065 }
        else
2067 {
2068 // This may be followed by GT_IND/assign or GT_STOREIND.
2069 if (nextNode == nullptr)
2070 {
2071 return false;
2072 }
2073 if (nextNode->OperIsIndir())
2074 {
2075 // This must be a non-nullcheck form of indir, or it would not be a def.
2076 assert(nextNode->OperGet() != GT_NULLCHECK);
2077 if (nextNode->OperIsStore())
2078 {
2079 asgNode = nextNode;
2080 if (asgNode->OperIsBlk())
2081 {
2082 rhsNode = asgNode->AsBlk()->Data();
2083 }
2084 // TODO-1stClassStructs: There should be an else clause here to handle
2085 // the non-block forms of store ops (GT_STORE_LCL_VAR, etc.) for which
2086 // rhsNode is op1. (This isn't really a 1stClassStructs item, but the
2087 // above was added to catch what used to be dead block ops, and that
2088 // made this omission apparent.)
2089 }
2090 else
2091 {
2092 asgNode = nextNode->gtNext;
2093 }
2094 }
2095 }
2096 }
2097
2098 if (asgNode == nullptr)
2099 {
2100 return false;
2101 }
2102
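    // For a GT_ASG node, the value being stored is op2; for the store forms handled above,
    // rhsNode was already set. If we still have no RHS, give up.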
2103 if (asgNode->OperIs(GT_ASG))
2104 {
2105 rhsNode = asgNode->gtGetOp2();
2106 }
2107 else if (rhsNode == nullptr)
2108 {
2109 return false;
2110 }
2111
2112 if (asgNode && (asgNode->gtFlags & GTF_ASG))
2113 {
2114 noway_assert(rhsNode);
2115 noway_assert(tree->gtFlags & GTF_VAR_DEF);
2116
2117 assert(asgNode->OperIs(GT_ASG));
2118
2119 // Do not remove if this local variable represents
2120 // a promoted struct field of an address exposed local.
2121 if (varDsc->lvIsStructField && lvaTable[varDsc->lvParentLcl].lvAddrExposed)
2122 {
2123 return false;
2124 }
2125
2126 // Do not remove if the address of the variable has been exposed.
2127 if (varDsc->lvAddrExposed)
2128 {
2129 return false;
2130 }
2131
        /* Test whether the dead assignment is the root of the statement or an interior node */
2133
2134 if (asgNode->gtNext == nullptr)
2135 {
2136 /* This is a "NORMAL" statement with the
2137 * assignment node hanging from the GT_STMT node */
2138
2139 noway_assert(compCurStmt->gtStmt.gtStmtExpr == asgNode);
2140 JITDUMP("top level assign\n");
2141
2142 /* Check for side effects */
2143
2144 if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
2145 {
2146 EXTRACT_SIDE_EFFECTS:
2147 /* Extract the side effects */
2148
2149 GenTree* sideEffList = nullptr;
2150#ifdef DEBUG
2151 if (verbose)
2152 {
2153 printf(FMT_BB " - Dead assignment has side effects...\n", compCurBB->bbNum);
2154 gtDispTree(asgNode);
2155 printf("\n");
2156 }
2157#endif // DEBUG
2158 if (rhsNode->TypeGet() == TYP_STRUCT)
2159 {
2160 // This is a block assignment. An indirection of the rhs is not considered to
2161 // happen until the assignment, so we will extract the side effects from only
2162 // the address.
2163 if (rhsNode->OperIsIndir())
2164 {
2165 assert(rhsNode->OperGet() != GT_NULLCHECK);
2166 rhsNode = rhsNode->AsIndir()->Addr();
2167 }
2168 }
2169 gtExtractSideEffList(rhsNode, &sideEffList);
2170
2171 if (sideEffList)
2172 {
2173 noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
2174#ifdef DEBUG
2175 if (verbose)
2176 {
2177 printf("Extracted side effects list...\n");
2178 gtDispTree(sideEffList);
2179 printf("\n");
2180 }
2181#endif // DEBUG
2182
2183 /* Replace the assignment statement with the list of side effects */
2184 noway_assert(sideEffList->gtOper != GT_STMT);
2185
2186 *pTree = compCurStmt->gtStmt.gtStmtExpr = sideEffList;
2187#ifdef DEBUG
2188 *treeModf = true;
2189#endif // DEBUG
2190 /* Update ordering, costs, FP levels, etc. */
2191 gtSetStmtInfo(compCurStmt);
2192
2193 /* Re-link the nodes for this statement */
2194 fgSetStmtSeq(compCurStmt);
2195
2196 // Since the whole statement gets replaced it is safe to
2197 // re-thread and update order. No need to compute costs again.
2198 *pStmtInfoDirty = false;
2199
2200 /* Compute the live set for the new statement */
2201 *doAgain = true;
2202 return false;
2203 }
2204 else
2205 {
2206 /* No side effects, most likely we forgot to reset some flags */
2207 fgRemoveStmt(compCurBB, compCurStmt);
2208
2209 return true;
2210 }
2211 }
2212 else
2213 {
                JITDUMP("removing stmt with no side effects\n");

                /* If this is a GT_CATCH_ARG saved to a local var, we cannot simply drop it;
                   extract it as a side effect instead */

                if (asgNode->gtFlags & GTF_ORDER_SIDEEFF)
                {
                    if (rhsNode->gtOper == GT_CATCH_ARG)
                    {
                        goto EXTRACT_SIDE_EFFECTS;
                    }
                }
2225
2226 /* No side effects - remove the whole statement from the block->bbTreeList */
2227
2228 fgRemoveStmt(compCurBB, compCurStmt);
2229
                /* Since we removed the statement, do not process the rest of it (i.e. the RHS);
                 * variables in the RHS will not be marked as live, so we get the benefit of
                 * propagating dead variables up the chain */
2233
2234 return true;
2235 }
2236 }
2237 else
2238 {
2239 /* This is an INTERIOR STATEMENT with a dead assignment - remove it */
2240
2241 noway_assert(!VarSetOps::IsMember(this, life, varDsc->lvVarIndex));
2242
2243 if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
2244 {
2245 /* :-( we have side effects */
2246
2247 GenTree* sideEffList = nullptr;
2248#ifdef DEBUG
2249 if (verbose)
2250 {
2251 printf(FMT_BB " - INTERIOR dead assignment has side effects...\n", compCurBB->bbNum);
2252 gtDispTree(asgNode);
2253 printf("\n");
2254 }
2255#endif // DEBUG
2256 gtExtractSideEffList(rhsNode, &sideEffList);
2257
2258 if (!sideEffList)
2259 {
2260 goto NO_SIDE_EFFECTS;
2261 }
2262
2263 noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
2264#ifdef DEBUG
2265 if (verbose)
2266 {
2267 printf("Extracted side effects list from condition...\n");
2268 gtDispTree(sideEffList);
2269 printf("\n");
2270 }
2271#endif // DEBUG
2272 if (sideEffList->gtOper == asgNode->gtOper)
2273 {
2274#ifdef DEBUG
2275 *treeModf = true;
2276#endif // DEBUG
2277 asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
2278 asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
2279 asgNode->gtType = sideEffList->gtType;
2280 }
2281 else
2282 {
2283#ifdef DEBUG
2284 *treeModf = true;
2285#endif // DEBUG
2286 /* Change the node to a GT_COMMA holding the side effect list */
2287 asgNode->gtBashToNOP();
2288
2289 asgNode->ChangeOper(GT_COMMA);
2290 asgNode->gtFlags |= sideEffList->gtFlags & GTF_ALL_EFFECT;
2291
2292 if (sideEffList->gtOper == GT_COMMA)
2293 {
2294 asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
2295 asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
2296 }
2297 else
2298 {
2299 asgNode->gtOp.gtOp1 = sideEffList;
2300 asgNode->gtOp.gtOp2 = gtNewNothingNode();
2301 }
2302 }
2303 }
2304 else
2305 {
2306 NO_SIDE_EFFECTS:
2307#ifdef DEBUG
2308 if (verbose)
2309 {
2310 printf("\nRemoving tree ");
2311 printTreeID(asgNode);
2312 printf(" in " FMT_BB " as useless\n", compCurBB->bbNum);
2313 gtDispTree(asgNode);
2314 printf("\n");
2315 }
2316#endif // DEBUG
2317 /* No side effects - Change the assignment to a GT_NOP node */
2318 asgNode->gtBashToNOP();
2319
2320#ifdef DEBUG
2321 *treeModf = true;
2322#endif // DEBUG
2323 }
2324
2325 /* Re-link the nodes for this statement - Do not update ordering! */
2326
            // Do not update costs by calling gtSetStmtInfo: fgSetStmtSeq modifies
            // the tree threading based on the new costs, and removing nodes could
            // cause a subtree that used to be evaluated second to now be evaluated
            // first during the liveness walk. Instead, just set a flag noting that
            // the costs are dirty and that the caller has to call gtSetStmtInfo.
2332 *pStmtInfoDirty = true;
2333
2334 fgSetStmtSeq(compCurStmt);
2335
2336 /* Continue analysis from this node */
2337
2338 *pTree = asgNode;
2339
2340 return false;
2341 }
2342 }
2343 return false;
2344}
2345
2346/*****************************************************************************
2347 *
2348 * Iterative data flow for live variable info and availability of range
2349 * check index expressions.
2350 */
2351void Compiler::fgInterBlockLocalVarLiveness()
2352{
2353#ifdef DEBUG
2354 if (verbose)
2355 {
2356 printf("*************** In fgInterBlockLocalVarLiveness()\n");
2357 }
2358#endif
2359
2360 /* This global flag is set whenever we remove a statement */
2361
2362 fgStmtRemoved = false;
2363
2364 // keep track if a bbLiveIn changed due to dead store removal
2365 fgLocalVarLivenessChanged = false;
2366
2367 /* Compute the IN and OUT sets for tracked variables */
2368
2369 fgLiveVarAnalysis();
2370
    /* For debuggable code, we mark vars as live over their entire
     * reported scope, so that they will be visible over the entire scope
     */
2374
2375 if (opts.compDbgCode && (info.compVarScopesCount > 0))
2376 {
2377 fgExtendDbgLifetimes();
2378 }
2379
2380 // Nothing more to be done if the backend does not require accurate local var lifetimes.
2381 if (!backendRequiresLocalVarLifetimes())
2382 {
2383 fgLocalVarLivenessDone = true;
2384 return;
2385 }
2386
2387 /*-------------------------------------------------------------------------
2388 * Variables involved in exception-handlers and finally blocks need
2389 * to be specially marked
2390 */
2391 BasicBlock* block;
2392
2393 VARSET_TP exceptVars(VarSetOps::MakeEmpty(this)); // vars live on entry to a handler
2394 VARSET_TP finallyVars(VarSetOps::MakeEmpty(this)); // vars live on exit of a 'finally' block
2395 VARSET_TP filterVars(VarSetOps::MakeEmpty(this)); // vars live on exit from a 'filter'
2396
2397 for (block = fgFirstBB; block; block = block->bbNext)
2398 {
2399 if (block->bbCatchTyp != BBCT_NONE)
2400 {
2401 /* Note the set of variables live on entry to exception handler */
2402
2403 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn);
2404 }
2405
2406 if (block->bbJumpKind == BBJ_EHFILTERRET)
2407 {
2408 /* Get the set of live variables on exit from a 'filter' */
2409 VarSetOps::UnionD(this, filterVars, block->bbLiveOut);
2410 }
2411 else if (block->bbJumpKind == BBJ_EHFINALLYRET)
2412 {
2413 /* Get the set of live variables on exit from a 'finally' block */
2414
2415 VarSetOps::UnionD(this, finallyVars, block->bbLiveOut);
2416 }
2417#if FEATURE_EH_FUNCLETS
        // Funclets are called and returned from, so we can only count on the frame
        // pointer being restored; thus everything live into or out of a funclet must
        // be on the stack.
2421 if (block->bbFlags & BBF_FUNCLET_BEG)
2422 {
2423 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn);
2424 }
2425 if ((block->bbJumpKind == BBJ_EHFINALLYRET) || (block->bbJumpKind == BBJ_EHFILTERRET) ||
2426 (block->bbJumpKind == BBJ_EHCATCHRET))
2427 {
2428 VarSetOps::UnionD(this, exceptVars, block->bbLiveOut);
2429 }
2430#endif // FEATURE_EH_FUNCLETS
2431 }
2432
2433 LclVarDsc* varDsc;
2434 unsigned varNum;
2435
2436 for (varNum = 0, varDsc = lvaTable; varNum < lvaCount; varNum++, varDsc++)
2437 {
2438 /* Ignore the variable if it's not tracked */
2439
2440 if (!varDsc->lvTracked)
2441 {
2442 continue;
2443 }
2444
2445 if (lvaIsFieldOfDependentlyPromotedStruct(varDsc))
2446 {
2447 continue;
2448 }
2449
2450 /* Un-init locals may need auto-initialization. Note that the
2451 liveness of such locals will bubble to the top (fgFirstBB)
2452 in fgInterBlockLocalVarLiveness() */
2453
2454 if (!varDsc->lvIsParam && VarSetOps::IsMember(this, fgFirstBB->bbLiveIn, varDsc->lvVarIndex) &&
2455 (info.compInitMem || varTypeIsGC(varDsc->TypeGet())))
2456 {
2457 varDsc->lvMustInit = true;
2458 }
2459
        // Mark all variables that are live on entry to an exception handler
        // or on exit from a filter handler or finally as DoNotEnregister
2462
2463 if (VarSetOps::IsMember(this, exceptVars, varDsc->lvVarIndex) ||
2464 VarSetOps::IsMember(this, filterVars, varDsc->lvVarIndex))
2465 {
2466 /* Mark the variable appropriately */
2467 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
2468 }
2469
2470 /* Mark all pointer variables live on exit from a 'finally'
2471 block as either volatile for non-GC ref types or as
2472 'explicitly initialized' (volatile and must-init) for GC-ref types */
2473
2474 if (VarSetOps::IsMember(this, finallyVars, varDsc->lvVarIndex))
2475 {
2476 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
2477
2478 /* Don't set lvMustInit unless we have a non-arg, GC pointer */
2479
2480 if (varDsc->lvIsParam)
2481 {
2482 continue;
2483 }
2484
2485 if (!varTypeIsGC(varDsc->TypeGet()))
2486 {
2487 continue;
2488 }
2489
2490 /* Mark it */
2491 varDsc->lvMustInit = true;
2492 }
2493 }
2494
2495 /*-------------------------------------------------------------------------
2496 * Now fill in liveness info within each basic block - Backward DataFlow
2497 */
2498
2499 for (block = fgFirstBB; block; block = block->bbNext)
2500 {
2501 /* Tell everyone what block we're working on */
2502
2503 compCurBB = block;
2504
2505 /* Remember those vars live on entry to exception handlers */
2506 /* if we are part of a try block */
2507
2508 VARSET_TP volatileVars(VarSetOps::MakeEmpty(this));
2509
2510 if (ehBlockHasExnFlowDsc(block))
2511 {
2512 VarSetOps::Assign(this, volatileVars, fgGetHandlerLiveVars(block));
2513
2514 // volatileVars is a subset of exceptVars
2515 noway_assert(VarSetOps::IsSubset(this, volatileVars, exceptVars));
2516 }
2517
2518 /* Start with the variables live on exit from the block */
2519
2520 VARSET_TP life(VarSetOps::MakeCopy(this, block->bbLiveOut));
2521
2522 /* Mark any interference we might have at the end of the block */
2523
2524 if (block->IsLIR())
2525 {
2526 fgComputeLifeLIR(life, block, volatileVars);
2527 }
2528 else
2529 {
2530 /* Get the first statement in the block */
2531
2532 GenTree* firstStmt = block->FirstNonPhiDef();
2533
2534 if (firstStmt == nullptr)
2535 {
2536 continue;
2537 }
2538
2539 /* Walk all the statements of the block backwards - Get the LAST stmt */
2540
2541 GenTree* nextStmt = block->bbTreeList->gtPrev;
2542
2543 do
2544 {
2545#ifdef DEBUG
2546 bool treeModf = false;
2547#endif // DEBUG
2548 noway_assert(nextStmt);
2549 noway_assert(nextStmt->gtOper == GT_STMT);
2550
2551 compCurStmt = nextStmt;
2552 nextStmt = nextStmt->gtPrev;
2553
2554 /* Compute the liveness for each tree node in the statement */
2555 bool stmtInfoDirty = false;
2556
2557 fgComputeLife(life, compCurStmt->gtStmt.gtStmtExpr, nullptr, volatileVars,
2558 &stmtInfoDirty DEBUGARG(&treeModf));
2559
2560 if (stmtInfoDirty)
2561 {
2562 gtSetStmtInfo(compCurStmt);
2563 fgSetStmtSeq(compCurStmt);
2564 gtUpdateStmtSideEffects(compCurStmt);
2565 }
2566
2567#ifdef DEBUG
2568 if (verbose && treeModf)
2569 {
2570 printf("\nfgComputeLife modified tree:\n");
2571 gtDispTree(compCurStmt->gtStmt.gtStmtExpr);
2572 printf("\n");
2573 }
2574#endif // DEBUG
2575 } while (compCurStmt != firstStmt);
2576 }
2577
2578 /* Done with the current block - if we removed any statements, some
2579 * variables may have become dead at the beginning of the block
2580 * -> have to update bbLiveIn */
2581
2582 if (!VarSetOps::Equal(this, life, block->bbLiveIn))
2583 {
            /* Some variables have become dead across the entire block,
               so life should be a subset of block->bbLiveIn */
2586
2587 // We changed the liveIn of the block, which may affect liveOut of others,
2588 // which may expose more dead stores.
2589 fgLocalVarLivenessChanged = true;
2590
2591 noway_assert(VarSetOps::IsSubset(this, life, block->bbLiveIn));
2592
2593 /* set the new bbLiveIn */
2594
2595 VarSetOps::Assign(this, block->bbLiveIn, life);
2596
2597 /* compute the new bbLiveOut for all the predecessors of this block */
2598 }
2599
2600 noway_assert(compCurBB == block);
2601#ifdef DEBUG
2602 compCurBB = nullptr;
2603#endif
2604 }
2605
2606 fgLocalVarLivenessDone = true;
2607}
2608
2609#ifdef DEBUG
2610
2611/*****************************************************************************/
2612
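// fgDispBBLiveness - dump the live-in and live-out variable sets (and memory kinds) for a block.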
2613void Compiler::fgDispBBLiveness(BasicBlock* block)
2614{
2615 VARSET_TP allVars(VarSetOps::Union(this, block->bbLiveIn, block->bbLiveOut));
2616 printf(FMT_BB, block->bbNum);
2617 printf(" IN (%d)=", VarSetOps::Count(this, block->bbLiveIn));
2618 lvaDispVarSet(block->bbLiveIn, allVars);
2619 for (MemoryKind memoryKind : allMemoryKinds())
2620 {
2621 if ((block->bbMemoryLiveIn & memoryKindSet(memoryKind)) != 0)
2622 {
2623 printf(" + %s", memoryKindNames[memoryKind]);
2624 }
2625 }
2626 printf("\n OUT(%d)=", VarSetOps::Count(this, block->bbLiveOut));
2627 lvaDispVarSet(block->bbLiveOut, allVars);
2628 for (MemoryKind memoryKind : allMemoryKinds())
2629 {
2630 if ((block->bbMemoryLiveOut & memoryKindSet(memoryKind)) != 0)
2631 {
2632 printf(" + %s", memoryKindNames[memoryKind]);
2633 }
2634 }
2635 printf("\n\n");
2636}
2637
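// fgDispBBLiveness - dump liveness information for every block in the flow graph.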
2638void Compiler::fgDispBBLiveness()
2639{
2640 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
2641 {
2642 fgDispBBLiveness(block);
2643 }
2644}
2645
2646#endif // DEBUG
2647