1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | |
5 | /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
6 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
7 | XX XX |
8 | XX BasicBlock XX |
9 | XX XX |
10 | XX XX |
11 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
12 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
13 | */ |
14 | #include "jitpch.h" |
15 | #ifdef _MSC_VER |
16 | #pragma hdrstop |
17 | #endif |
18 | |
#if MEASURE_BLOCK_SIZE
// Aggregate allocation statistics for BasicBlocks (total bytes and count).
/* static */
size_t BasicBlock::s_Size;
/* static */
size_t BasicBlock::s_Count;
#endif // MEASURE_BLOCK_SIZE

#ifdef DEBUG
// The max # of tree nodes in any BB
/* static */
unsigned BasicBlock::s_nMaxTrees;
#endif // DEBUG
31 | |
32 | #ifdef DEBUG |
//------------------------------------------------------------------------
// ShuffleHelper: Pseudo-randomly reorder a flowList under SSA stress.
//
// Arguments:
//    hash - stress hash seed (from SsaStressHashHelper)
//    res  - head of the list to shuffle; the list is reordered in place
//
// Return Value:
//    The new head of the (same) list.
//
// Notes:
//    Walks the list, and for about half the nodes (chosen deterministically
//    from the seed and the node's block number) swaps that node with the
//    current head. The result is deterministic for a given seed.
//
flowList* ShuffleHelper(unsigned hash, flowList* res)
{
    flowList* head = res;
    for (flowList *prev = nullptr; res != nullptr; prev = res, res = res->flNext)
    {
        // Mix the seed with the block number; 1879 is an arbitrary prime used
        // to scatter the low bit used for the swap decision.
        unsigned blkHash = (hash ^ (res->flBlock->bbNum << 16) ^ res->flBlock->bbNum);
        if (((blkHash % 1879) & 1) && prev != nullptr)
        {
            // Swap res with head.
            prev->flNext = head;
            jitstd::swap(head->flNext, res->flNext);
            jitstd::swap(head, res);
        }
    }
    return head;
}
49 | |
50 | unsigned SsaStressHashHelper() |
51 | { |
52 | // hash = 0: turned off, hash = 1: use method hash, hash = *: use custom hash. |
53 | unsigned hash = JitConfig.JitSsaStress(); |
54 | |
55 | if (hash == 0) |
56 | { |
57 | return hash; |
58 | } |
59 | if (hash == 1) |
60 | { |
61 | return JitTls::GetCompiler()->info.compMethodHash(); |
62 | } |
63 | return ((hash >> 16) == 0) ? ((hash << 16) | hash) : hash; |
64 | } |
65 | #endif |
66 | |
//------------------------------------------------------------------------
// EHSuccessorIterPosition: Set up iteration over the EH successors of "block":
// the exception-flow blocks of the try regions whose exceptions "block" (or a
// try region it branches into) can trigger.
//
// Arguments:
//    comp  - Jit compiler instance
//    block - block whose EH successors are enumerated
//
EHSuccessorIterPosition::EHSuccessorIterPosition(Compiler* comp, BasicBlock* block)
    : m_remainingRegSuccs(block->NumSucc(comp)), m_curRegSucc(nullptr), m_curTry(comp->ehGetBlockExnFlowDsc(block))
{
    // If "block" is a "leave helper" block (the empty BBJ_ALWAYS block that pairs with a
    // preceding BBJ_CALLFINALLY block to implement a "leave" IL instruction), then no exceptions
    // can occur within it, so clear m_curTry if it's non-null.
    if (m_curTry != nullptr)
    {
        BasicBlock* beforeBlock = block->bbPrev;
        if (beforeBlock != nullptr && beforeBlock->isBBCallAlwaysPair())
        {
            m_curTry = nullptr;
        }
    }

    if (m_curTry == nullptr && m_remainingRegSuccs > 0)
    {
        // Examine the successors to see if any are the start of try blocks.
        FindNextRegSuccTry(comp, block);
    }
}
88 | |
//------------------------------------------------------------------------
// FindNextRegSuccTry: Advance to the next regular successor of "block" that
// begins a try region whose handler has not already been yielded, setting
// m_curTry to that try (or leaving it nullptr if none remain).
//
// Arguments:
//    comp  - Jit compiler instance
//    block - block whose successors are being examined
//
void EHSuccessorIterPosition::FindNextRegSuccTry(Compiler* comp, BasicBlock* block)
{
    assert(m_curTry == nullptr);

    // Must now consider the next regular successor, if any.
    // Note: successors are consumed from the highest index downward.
    while (m_remainingRegSuccs > 0)
    {
        m_remainingRegSuccs--;
        m_curRegSucc = block->GetSucc(m_remainingRegSuccs, comp);
        if (comp->bbIsTryBeg(m_curRegSucc))
        {
            assert(m_curRegSucc->hasTryIndex()); // Since it is a try begin.
            unsigned newTryIndex = m_curRegSucc->getTryIndex();

            // If the try region started by "m_curRegSucc" (represented by newTryIndex) contains m_block,
            // we've already yielded its handler, as one of the EH handler successors of m_block itself.
            if (comp->bbInExnFlowRegions(newTryIndex, block))
            {
                continue;
            }

            // Otherwise, consider this try.
            m_curTry = comp->ehGetDsc(newTryIndex);
            break;
        }
    }
}
116 | |
//------------------------------------------------------------------------
// Advance: Move this position to the next EH successor: first walk outward
// through enclosing try regions, then fall back to the remaining regular
// successors that begin try regions.
//
// Arguments:
//    comp  - Jit compiler instance
//    block - block whose EH successors are enumerated
//
void EHSuccessorIterPosition::Advance(Compiler* comp, BasicBlock* block)
{
    assert(m_curTry != nullptr);
    if (m_curTry->ebdEnclosingTryIndex != EHblkDsc::NO_ENCLOSING_INDEX)
    {
        m_curTry = comp->ehGetDsc(m_curTry->ebdEnclosingTryIndex);

        // If we've gone over into considering try's containing successors,
        // then the enclosing try must have the successor as its first block.
        if (m_curRegSucc == nullptr || m_curTry->ebdTryBeg == m_curRegSucc)
        {
            return;
        }

        // Otherwise, give up, try the next regular successor.
        m_curTry = nullptr;
    }
    else
    {
        m_curTry = nullptr;
    }

    // We've exhausted all try blocks.
    // See if there are any remaining regular successors that start try blocks.
    FindNextRegSuccTry(comp, block);
}
143 | |
//------------------------------------------------------------------------
// Current: Return the EH successor at the current position: the exception-flow
// block (handler or filter) of the try currently under consideration.
// Note: the comp/block arguments are unused here; they keep the signature
// parallel with Advance().
//
BasicBlock* EHSuccessorIterPosition::Current(Compiler* comp, BasicBlock* block)
{
    assert(m_curTry != nullptr);
    return m_curTry->ExFlowBlock();
}
149 | |
//------------------------------------------------------------------------
// BlockPredsWithEH: Return the predecessor list of "blk" including EH flow.
// For a block that is the exception-flow target of a try region, this is
// bbPreds extended with the predecessors of the try's entry block and with
// every block in the try's exception-flow regions. Results are memoized in
// the GetBlockToEHPreds() map; non-EH blocks just return bbPreds.
//
flowList* Compiler::BlockPredsWithEH(BasicBlock* blk)
{
    BlockToFlowListMap* ehPreds = GetBlockToEHPreds();
    flowList* res;
    if (ehPreds->Lookup(blk, &res))
    {
        // Already computed for this block.
        return res;
    }

    res = blk->bbPreds;
    unsigned tryIndex;
    if (bbIsExFlowBlock(blk, &tryIndex))
    {
        // Find the first block of the try.
        EHblkDsc* ehblk = ehGetDsc(tryIndex);
        BasicBlock* tryStart = ehblk->ebdTryBeg;
        // Prepend the preds of the try entry: control can reach the handler
        // from anywhere that can enter the try.
        for (flowList* tryStartPreds = tryStart->bbPreds; tryStartPreds != nullptr;
             tryStartPreds = tryStartPreds->flNext)
        {
            res = new (this, CMK_FlowList) flowList(tryStartPreds->flBlock, res);

#if MEASURE_BLOCK_SIZE
            genFlowNodeCnt += 1;
            genFlowNodeSize += sizeof(flowList);
#endif // MEASURE_BLOCK_SIZE
        }

        // Now add all blocks handled by this handler (except for second blocks of BBJ_CALLFINALLY/BBJ_ALWAYS pairs;
        // these cannot cause transfer to the handler...)
        BasicBlock* prevBB = nullptr;

        // TODO-Throughput: It would be nice if we could iterate just over the blocks in the try, via
        // something like:
        //   for (BasicBlock* bb = ehblk->ebdTryBeg; bb != ehblk->ebdTryLast->bbNext; bb = bb->bbNext)
        //     (plus adding in any filter blocks outside the try whose exceptions are handled here).
        // That doesn't work, however: funclets have caused us to sometimes split the body of a try into
        // more than one sequence of contiguous blocks.  We need to find a better way to do this.
        for (BasicBlock *bb = fgFirstBB; bb != nullptr; prevBB = bb, bb = bb->bbNext)
        {
            if (bbInExnFlowRegions(tryIndex, bb) && (prevBB == nullptr || !prevBB->isBBCallAlwaysPair()))
            {
                res = new (this, CMK_FlowList) flowList(bb, res);

#if MEASURE_BLOCK_SIZE
                genFlowNodeCnt += 1;
                genFlowNodeSize += sizeof(flowList);
#endif // MEASURE_BLOCK_SIZE
            }
        }

#ifdef DEBUG
        // Under SSA stress, shuffle the list to perturb downstream orderings.
        unsigned hash = SsaStressHashHelper();
        if (hash != 0)
        {
            res = ShuffleHelper(hash, res);
        }
#endif // DEBUG

        // Memoize only the EH case; the plain bbPreds case is cheap to recompute.
        ehPreds->Set(blk, res);
    }
    return res;
}
212 | |
213 | #ifdef DEBUG |
214 | |
215 | //------------------------------------------------------------------------ |
216 | // dspBlockILRange(): Display the block's IL range as [XXX...YYY), where XXX and YYY might be "???" for BAD_IL_OFFSET. |
217 | // |
218 | void BasicBlock::dspBlockILRange() |
219 | { |
220 | if (bbCodeOffs != BAD_IL_OFFSET) |
221 | { |
222 | printf("[%03X.." , bbCodeOffs); |
223 | } |
224 | else |
225 | { |
226 | printf("[???" |
227 | ".." ); |
228 | } |
229 | |
230 | if (bbCodeOffsEnd != BAD_IL_OFFSET) |
231 | { |
232 | // brace-matching editor workaround for following line: ( |
233 | printf("%03X)" , bbCodeOffsEnd); |
234 | } |
235 | else |
236 | { |
237 | // brace-matching editor workaround for following line: ( |
238 | printf("???" |
239 | ")" ); |
240 | } |
241 | } |
242 | |
243 | //------------------------------------------------------------------------ |
244 | // dspFlags: Print out the block's flags |
245 | // |
246 | void BasicBlock::dspFlags() |
247 | { |
248 | if (bbFlags & BBF_VISITED) |
249 | { |
250 | printf("v " ); |
251 | } |
252 | if (bbFlags & BBF_MARKED) |
253 | { |
254 | printf("m " ); |
255 | } |
256 | if (bbFlags & BBF_CHANGED) |
257 | { |
258 | printf("! " ); |
259 | } |
260 | if (bbFlags & BBF_REMOVED) |
261 | { |
262 | printf("del " ); |
263 | } |
264 | if (bbFlags & BBF_DONT_REMOVE) |
265 | { |
266 | printf("keep " ); |
267 | } |
268 | if (bbFlags & BBF_IMPORTED) |
269 | { |
270 | printf("i " ); |
271 | } |
272 | if (bbFlags & BBF_INTERNAL) |
273 | { |
274 | printf("internal " ); |
275 | } |
276 | if (bbFlags & BBF_FAILED_VERIFICATION) |
277 | { |
278 | printf("failV " ); |
279 | } |
280 | if (bbFlags & BBF_TRY_BEG) |
281 | { |
282 | printf("try " ); |
283 | } |
284 | if (bbFlags & BBF_NEEDS_GCPOLL) |
285 | { |
286 | printf("poll " ); |
287 | } |
288 | if (bbFlags & BBF_RUN_RARELY) |
289 | { |
290 | printf("rare " ); |
291 | } |
292 | if (bbFlags & BBF_LOOP_HEAD) |
293 | { |
294 | printf("Loop " ); |
295 | } |
296 | if (bbFlags & BBF_LOOP_CALL0) |
297 | { |
298 | printf("Loop0 " ); |
299 | } |
300 | if (bbFlags & BBF_LOOP_CALL1) |
301 | { |
302 | printf("Loop1 " ); |
303 | } |
304 | if (bbFlags & BBF_HAS_LABEL) |
305 | { |
306 | printf("label " ); |
307 | } |
308 | if (bbFlags & BBF_JMP_TARGET) |
309 | { |
310 | printf("target " ); |
311 | } |
312 | if (bbFlags & BBF_HAS_JMP) |
313 | { |
314 | printf("jmp " ); |
315 | } |
316 | if (bbFlags & BBF_GC_SAFE_POINT) |
317 | { |
318 | printf("gcsafe " ); |
319 | } |
320 | if (bbFlags & BBF_FUNCLET_BEG) |
321 | { |
322 | printf("flet " ); |
323 | } |
324 | if (bbFlags & BBF_HAS_IDX_LEN) |
325 | { |
326 | printf("idxlen " ); |
327 | } |
328 | if (bbFlags & BBF_HAS_NEWARRAY) |
329 | { |
330 | printf("new[] " ); |
331 | } |
332 | if (bbFlags & BBF_HAS_NEWOBJ) |
333 | { |
334 | printf("newobj " ); |
335 | } |
336 | #if FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_) |
337 | if (bbFlags & BBF_FINALLY_TARGET) |
338 | { |
339 | printf("ftarget " ); |
340 | } |
341 | #endif // FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_) |
342 | if (bbFlags & BBF_BACKWARD_JUMP) |
343 | { |
344 | printf("bwd " ); |
345 | } |
346 | if (bbFlags & BBF_RETLESS_CALL) |
347 | { |
348 | printf("retless " ); |
349 | } |
350 | if (bbFlags & BBF_LOOP_PREHEADER) |
351 | { |
352 | printf("LoopPH " ); |
353 | } |
354 | if (bbFlags & BBF_COLD) |
355 | { |
356 | printf("cold " ); |
357 | } |
358 | if (bbFlags & BBF_PROF_WEIGHT) |
359 | { |
360 | printf("IBC " ); |
361 | } |
362 | if (bbFlags & BBF_IS_LIR) |
363 | { |
364 | printf("LIR " ); |
365 | } |
366 | if (bbFlags & BBF_KEEP_BBJ_ALWAYS) |
367 | { |
368 | printf("KEEP " ); |
369 | } |
370 | if (bbFlags & BBF_CLONED_FINALLY_BEGIN) |
371 | { |
372 | printf("cfb " ); |
373 | } |
374 | if (bbFlags & BBF_CLONED_FINALLY_END) |
375 | { |
376 | printf("cfe " ); |
377 | } |
378 | } |
379 | |
380 | /***************************************************************************** |
381 | * |
382 | * Display the bbPreds basic block list (the block predecessors). |
383 | * Returns the number of characters printed. |
384 | */ |
385 | |
386 | unsigned BasicBlock::dspPreds() |
387 | { |
388 | unsigned count = 0; |
389 | for (flowList* pred = bbPreds; pred != nullptr; pred = pred->flNext) |
390 | { |
391 | if (count != 0) |
392 | { |
393 | printf("," ); |
394 | count += 1; |
395 | } |
396 | printf(FMT_BB, pred->flBlock->bbNum); |
397 | count += 4; |
398 | |
399 | // Account for %02u only handling 2 digits, but we can display more than that. |
400 | unsigned digits = CountDigits(pred->flBlock->bbNum); |
401 | if (digits > 2) |
402 | { |
403 | count += digits - 2; |
404 | } |
405 | |
406 | // Does this predecessor have an interesting dup count? If so, display it. |
407 | if (pred->flDupCount > 1) |
408 | { |
409 | printf("(%u)" , pred->flDupCount); |
410 | count += 2 + CountDigits(pred->flDupCount); |
411 | } |
412 | } |
413 | return count; |
414 | } |
415 | |
416 | /***************************************************************************** |
417 | * |
418 | * Display the bbCheapPreds basic block list (the block predecessors). |
419 | * Returns the number of characters printed. |
420 | */ |
421 | |
422 | unsigned BasicBlock::dspCheapPreds() |
423 | { |
424 | unsigned count = 0; |
425 | for (BasicBlockList* pred = bbCheapPreds; pred != nullptr; pred = pred->next) |
426 | { |
427 | if (count != 0) |
428 | { |
429 | printf("," ); |
430 | count += 1; |
431 | } |
432 | printf(FMT_BB, pred->block->bbNum); |
433 | count += 4; |
434 | |
435 | // Account for %02u only handling 2 digits, but we can display more than that. |
436 | unsigned digits = CountDigits(pred->block->bbNum); |
437 | if (digits > 2) |
438 | { |
439 | count += digits - 2; |
440 | } |
441 | } |
442 | return count; |
443 | } |
444 | |
445 | /***************************************************************************** |
446 | * |
447 | * Display the basic block successors. |
448 | * Returns the count of successors. |
449 | */ |
450 | |
451 | unsigned BasicBlock::dspSuccs(Compiler* compiler) |
452 | { |
453 | unsigned numSuccs = NumSucc(compiler); |
454 | unsigned count = 0; |
455 | for (unsigned i = 0; i < numSuccs; i++) |
456 | { |
457 | printf("%s" , (count == 0) ? "" : "," ); |
458 | printf(FMT_BB, GetSucc(i, compiler)->bbNum); |
459 | count++; |
460 | } |
461 | return count; |
462 | } |
463 | |
// Display a compact representation of the bbJumpKind, that is, where this block branches.
// This is similar to code in Compiler::fgTableDispBasicBlock(), but doesn't have that code's requirements to align
// things strictly.
void BasicBlock::dspJumpKind()
{
    switch (bbJumpKind)
    {
        case BBJ_EHFINALLYRET:
            printf(" (finret)");
            break;

        case BBJ_EHFILTERRET:
            printf(" (fltret)");
            break;

        case BBJ_EHCATCHRET:
            printf(" -> " FMT_BB " (cret)", bbJumpDest->bbNum);
            break;

        case BBJ_THROW:
            printf(" (throw)");
            break;

        case BBJ_RETURN:
            printf(" (return)");
            break;

        case BBJ_NONE:
            // For fall-through blocks, print nothing.
            break;

        case BBJ_ALWAYS:
            // Uppercase "ALWAYS" marks a block the flowgraph must not remove.
            if (bbFlags & BBF_KEEP_BBJ_ALWAYS)
            {
                printf(" -> " FMT_BB " (ALWAYS)", bbJumpDest->bbNum);
            }
            else
            {
                printf(" -> " FMT_BB " (always)", bbJumpDest->bbNum);
            }
            break;

        case BBJ_LEAVE:
            printf(" -> " FMT_BB " (leave)", bbJumpDest->bbNum);
            break;

        case BBJ_CALLFINALLY:
            printf(" -> " FMT_BB " (callf)", bbJumpDest->bbNum);
            break;

        case BBJ_COND:
            printf(" -> " FMT_BB " (cond)", bbJumpDest->bbNum);
            break;

        case BBJ_SWITCH:
            printf(" ->");

            // Print every switch target; the first is preceded by a space,
            // the rest by commas.
            unsigned jumpCnt;
            jumpCnt = bbJumpSwt->bbsCount;
            BasicBlock** jumpTab;
            jumpTab = bbJumpSwt->bbsDstTab;
            do
            {
                printf("%c" FMT_BB, (jumpTab == bbJumpSwt->bbsDstTab) ? ' ' : ',', (*jumpTab)->bbNum);
            } while (++jumpTab, --jumpCnt);

            printf(" (switch)");
            break;

        default:
            unreached();
            break;
    }
}
538 | |
539 | void BasicBlock::(Compiler* compiler, |
540 | bool showKind /*= true*/, |
541 | bool showFlags /*= false*/, |
542 | bool showPreds /*= true*/) |
543 | { |
544 | printf(FMT_BB " " , bbNum); |
545 | dspBlockILRange(); |
546 | if (showKind) |
547 | { |
548 | dspJumpKind(); |
549 | } |
550 | if (showPreds) |
551 | { |
552 | printf(", preds={" ); |
553 | if (compiler->fgCheapPredsValid) |
554 | { |
555 | dspCheapPreds(); |
556 | } |
557 | else |
558 | { |
559 | dspPreds(); |
560 | } |
561 | printf("} succs={" ); |
562 | dspSuccs(compiler); |
563 | printf("}" ); |
564 | } |
565 | if (showFlags) |
566 | { |
567 | const unsigned lowFlags = (unsigned)bbFlags; |
568 | const unsigned highFlags = (unsigned)(bbFlags >> 32); |
569 | printf(" flags=0x%08x.%08x: " , highFlags, lowFlags); |
570 | dspFlags(); |
571 | } |
572 | printf("\n" ); |
573 | } |
574 | |
//------------------------------------------------------------------------
// dspToString: Return a short printable description of this block in the form
// "BBnn [id]", using a small ring of static buffers so the result can appear
// up to three times in a single printf() call.
//
// Arguments:
//    blockNumPadding - minimum width padding after the block number (default 2)
//
// Return Value:
//    Pointer to a static buffer; valid only until three more calls are made.
//    NOTE(review): the static ring makes this non-thread-safe — acceptable for
//    DEBUG-only dumping.
//
const char* BasicBlock::dspToString(int blockNumPadding /* = 2*/)
{
    static char buffers[3][64]; // static array of 3 to allow 3 concurrent calls in one printf()
    static int nextBufferIndex = 0;

    auto& buffer = buffers[nextBufferIndex];
    nextBufferIndex = (nextBufferIndex + 1) % _countof(buffers);
    _snprintf_s(buffer, _countof(buffer), _countof(buffer), FMT_BB "%*s [%04u]", bbNum, blockNumPadding, "", bbID);
    return buffer;
}
585 | |
586 | #endif // DEBUG |
587 | |
// Allocation function for MemoryPhiArg.
// Allocates from the compiler's arena (CMK_MemoryPhiArg pool); memory is
// released with the arena, so there is no matching operator delete.
void* BasicBlock::MemoryPhiArg::operator new(size_t sz, Compiler* comp)
{
    return comp->getAllocator(CMK_MemoryPhiArg).allocate<char>(sz);
}
593 | |
//------------------------------------------------------------------------
// CloneBlockState: Try to populate `to` block with a copy of `from` block's statements, replacing
// uses of local `varNum` with IntCns `varVal`.
//
// Arguments:
//    compiler - Jit compiler instance
//    to - New/empty block to copy statements into
//    from - Block to copy statements from
//    varNum - lclVar uses with lclNum `varNum` will be replaced; can be ~0 to indicate no replacement.
//    varVal - If replacing uses of `varNum`, replace them with int constants with value `varVal`.
//
// Return Value:
//    Cloning may fail because this routine uses `gtCloneExpr` for cloning and it can't handle all
//    IR nodes.  If cloning of any statement fails, `false` will be returned and block `to` may be
//    partially populated.  If cloning of all statements succeeds, `true` will be returned and
//    block `to` will be fully populated.

bool BasicBlock::CloneBlockState(
    Compiler* compiler, BasicBlock* to, const BasicBlock* from, unsigned varNum, int varVal)
{
    // The target must start out empty.
    assert(to->bbTreeList == nullptr);

    // Copy the scalar block state (flags, weight, EH region, IL range, etc.).
    to->bbFlags = from->bbFlags;
    to->bbWeight = from->bbWeight;
    BlockSetOps::AssignAllowUninitRhs(compiler, to->bbReach, from->bbReach);
    to->copyEHRegion(from);
    to->bbCatchTyp = from->bbCatchTyp;
    to->bbRefs = from->bbRefs;
    to->bbStkTempsIn = from->bbStkTempsIn;
    to->bbStkTempsOut = from->bbStkTempsOut;
    to->bbStkDepth = from->bbStkDepth;
    to->bbCodeOffs = from->bbCodeOffs;
    to->bbCodeOffsEnd = from->bbCodeOffsEnd;
    VarSetOps::AssignAllowUninitRhs(compiler, to->bbScope, from->bbScope);
    to->bbNatLoopNum = from->bbNatLoopNum;
#ifdef DEBUG
    to->bbLoopNum = from->bbLoopNum;
    to->bbTgtStkDepth = from->bbTgtStkDepth;
#endif // DEBUG

    // Clone each statement, performing the varNum->varVal substitution.
    for (GenTree* fromStmt = from->bbTreeList; fromStmt != nullptr; fromStmt = fromStmt->gtNext)
    {
        auto newExpr = compiler->gtCloneExpr(fromStmt->gtStmt.gtStmtExpr, 0, varNum, varVal);
        if (!newExpr)
        {
            // gtCloneExpr doesn't handle all opcodes, so may fail to clone a statement.
            // When that happens, it returns nullptr; abandon the rest of this block and
            // return `false` to the caller to indicate that cloning was unsuccessful.
            return false;
        }
        compiler->fgInsertStmtAtEnd(to, compiler->fgNewStmtFromTree(newExpr));
    }
    return true;
}
648 | |
// LIR helpers

// MakeLIR: Convert this block to LIR form, recording the first and last nodes
// of its linear node list and setting BBF_IS_LIR. Both node arguments must be
// null (empty block) or non-null together, with firstNode preceding lastNode.
void BasicBlock::MakeLIR(GenTree* firstNode, GenTree* lastNode)
{
    assert(!IsLIR());
    assert((firstNode == nullptr) == (lastNode == nullptr));
    assert((firstNode == lastNode) || firstNode->Precedes(lastNode));

    m_firstNode = firstNode;
    m_lastNode = lastNode;
    bbFlags |= BBF_IS_LIR;
}
660 | |
// IsLIR: Returns true if this block holds LIR (linear IR) rather than a
// statement list. The assert cross-checks the flag against the list contents:
// a non-empty LIR block must not start with a statement node.
bool BasicBlock::IsLIR()
{
    const bool isLIR = (bbFlags & BBF_IS_LIR) != 0;
    assert((bbTreeList == nullptr) || ((isLIR) == !bbTreeList->IsStatement()));
    return isLIR;
}
667 | |
668 | //------------------------------------------------------------------------ |
669 | // firstStmt: Returns the first statement in the block |
670 | // |
671 | // Arguments: |
672 | // None. |
673 | // |
674 | // Return Value: |
675 | // The first statement in the block's bbTreeList. |
676 | // |
677 | GenTreeStmt* BasicBlock::firstStmt() const |
678 | { |
679 | if (bbTreeList == nullptr) |
680 | { |
681 | return nullptr; |
682 | } |
683 | |
684 | return bbTreeList->AsStmt(); |
685 | } |
686 | |
687 | //------------------------------------------------------------------------ |
688 | // lastStmt: Returns the last statement in the block |
689 | // |
690 | // Arguments: |
691 | // None. |
692 | // |
693 | // Return Value: |
694 | // The last statement in the block's bbTreeList. |
695 | // |
696 | GenTreeStmt* BasicBlock::lastStmt() const |
697 | { |
698 | if (bbTreeList == nullptr) |
699 | { |
700 | return nullptr; |
701 | } |
702 | |
703 | GenTree* result = bbTreeList->gtPrev; |
704 | assert(result && result->gtNext == nullptr); |
705 | return result->AsStmt(); |
706 | } |
707 | |
//------------------------------------------------------------------------
// BasicBlock::firstNode: Returns the first node in the block.
// In LIR form this is the head of the node list (bbTreeList — presumably the
// same storage as m_firstNode set by MakeLIR; confirm against block.h);
// otherwise it is the first execution-order node of the first statement.
//
GenTree* BasicBlock::firstNode()
{
    return IsLIR() ? bbTreeList : Compiler::fgGetFirstNode(firstStmt()->gtStmtExpr);
}
715 | |
//------------------------------------------------------------------------
// BasicBlock::lastNode: Returns the last node in the block.
// In LIR form this is the recorded tail node; otherwise it is the last
// statement's expression tree.
//
GenTree* BasicBlock::lastNode()
{
    return IsLIR() ? m_lastNode : lastStmt()->gtStmtExpr;
}
723 | |
724 | //------------------------------------------------------------------------ |
725 | // GetUniquePred: Returns the unique predecessor of a block, if one exists. |
726 | // The predecessor lists must be accurate. |
727 | // |
728 | // Arguments: |
729 | // None. |
730 | // |
731 | // Return Value: |
732 | // The unique predecessor of a block, or nullptr if there is no unique predecessor. |
733 | // |
734 | // Notes: |
735 | // If the first block has a predecessor (which it may have, if it is the target of |
736 | // a backedge), we never want to consider it "unique" because the prolog is an |
737 | // implicit predecessor. |
738 | |
739 | BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler) |
740 | { |
741 | if ((bbPreds == nullptr) || (bbPreds->flNext != nullptr) || (this == compiler->fgFirstBB)) |
742 | { |
743 | return nullptr; |
744 | } |
745 | else |
746 | { |
747 | return bbPreds->flBlock; |
748 | } |
749 | } |
750 | |
751 | //------------------------------------------------------------------------ |
752 | // GetUniqueSucc: Returns the unique successor of a block, if one exists. |
753 | // Only considers BBJ_ALWAYS and BBJ_NONE block types. |
754 | // |
755 | // Arguments: |
756 | // None. |
757 | // |
758 | // Return Value: |
759 | // The unique successor of a block, or nullptr if there is no unique successor. |
760 | |
761 | BasicBlock* BasicBlock::GetUniqueSucc() |
762 | { |
763 | if (bbJumpKind == BBJ_ALWAYS) |
764 | { |
765 | return bbJumpDest; |
766 | } |
767 | else if (bbJumpKind == BBJ_NONE) |
768 | { |
769 | return bbNext; |
770 | } |
771 | else |
772 | { |
773 | return nullptr; |
774 | } |
775 | } |
776 | |
// Static vars.
// Sentinel (not a dereferenceable pointer) marking an "empty" memory phi def;
// presumably only ever compared by identity — confirm against users.
BasicBlock::MemoryPhiArg* BasicBlock::EmptyMemoryPhiDef = (BasicBlock::MemoryPhiArg*)0x1;
779 | |
// Hash function for BasicBlock keys in JIT hash tables. Normally just the
// block number; under SSA stress (DEBUG), mixes in the stress seed so that
// table iteration order is perturbed between runs with different seeds.
unsigned JitPtrKeyFuncs<BasicBlock>::GetHashCode(const BasicBlock* ptr)
{
#ifdef DEBUG
    unsigned hash = SsaStressHashHelper();
    if (hash != 0)
    {
        // Same mixing formula as ShuffleHelper, keeping the two stress paths consistent.
        return (hash ^ (ptr->bbNum << 16) ^ ptr->bbNum);
    }
#endif
    return ptr->bbNum;
}
791 | |
792 | bool BasicBlock::isEmpty() |
793 | { |
794 | if (!IsLIR()) |
795 | { |
796 | return (this->FirstNonPhiDef() == nullptr); |
797 | } |
798 | |
799 | for (GenTree* node : LIR::AsRange(this).NonPhiNodes()) |
800 | { |
801 | if (node->OperGet() != GT_IL_OFFSET) |
802 | { |
803 | return false; |
804 | } |
805 | } |
806 | |
807 | return true; |
808 | } |
809 | |
// FirstNonPhiDef: Returns the first statement in the block that is not a phi
// definition, or nullptr if the block is empty or contains only phi defs.
// Phi defs appear either as GT_ASG whose RHS is GT_PHI, or (in store form) as
// GT_STORE_LCL_VAR whose operand is GT_PHI.
GenTreeStmt* BasicBlock::FirstNonPhiDef()
{
    GenTree* stmt = bbTreeList;
    if (stmt == nullptr)
    {
        return nullptr;
    }
    GenTree* tree = stmt->gtStmt.gtStmtExpr;
    // Skip over the leading run of phi definitions.
    while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI) ||
           (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
    {
        stmt = stmt->gtNext;
        if (stmt == nullptr)
        {
            return nullptr;
        }
        tree = stmt->gtStmt.gtStmtExpr;
    }
    return stmt->AsStmt();
}
830 | |
// FirstNonPhiDefOrCatchArgAsg: Like FirstNonPhiDef, but additionally skips a
// single leading assignment of GT_CATCH_ARG (the incoming exception object in
// a handler block), if present. Returns nullptr if nothing remains.
GenTree* BasicBlock::FirstNonPhiDefOrCatchArgAsg()
{
    GenTree* stmt = FirstNonPhiDef();
    if (stmt == nullptr)
    {
        return nullptr;
    }
    GenTree* tree = stmt->gtStmt.gtStmtExpr;
    if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG) ||
        (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
    {
        // Note: may be nullptr if the catch-arg assignment was the last statement.
        stmt = stmt->gtNext;
    }
    return stmt;
}
846 | |
/*****************************************************************************
 *
 *  Mark a block as rarely run: set its weight to zero and, if the weight
 *  actually became zero (setBBWeight may decline — confirm against its
 *  implementation), set the BBF_RUN_RARELY flag.
 */

void BasicBlock::bbSetRunRarely()
{
    setBBWeight(BB_ZERO_WEIGHT);
    // Only flag the block when the weight really reached zero.
    if (bbWeight == BB_ZERO_WEIGHT)
    {
        bbFlags |= BBF_RUN_RARELY; // This block is never/rarely run
    }
}
861 | |
862 | /***************************************************************************** |
863 | * |
864 | * Can a BasicBlock be inserted after this without altering the flowgraph |
865 | */ |
866 | |
867 | bool BasicBlock::bbFallsThrough() |
868 | { |
869 | switch (bbJumpKind) |
870 | { |
871 | |
872 | case BBJ_THROW: |
873 | case BBJ_EHFINALLYRET: |
874 | case BBJ_EHFILTERRET: |
875 | case BBJ_EHCATCHRET: |
876 | case BBJ_RETURN: |
877 | case BBJ_ALWAYS: |
878 | case BBJ_LEAVE: |
879 | case BBJ_SWITCH: |
880 | return false; |
881 | |
882 | case BBJ_NONE: |
883 | case BBJ_COND: |
884 | return true; |
885 | |
886 | case BBJ_CALLFINALLY: |
887 | return ((bbFlags & BBF_RETLESS_CALL) == 0); |
888 | |
889 | default: |
890 | assert(!"Unknown bbJumpKind in bbFallsThrough()" ); |
891 | return true; |
892 | } |
893 | } |
894 | |
//------------------------------------------------------------------------
// NumSucc: Returns the count of block successors. See the declaration comment for details.
//
// Return Value:
//    Count of block successors.
//
// Notes:
//    This overload does not have a Compiler and so cannot resolve
//    BBJ_EHFINALLYRET/BBJ_SWITCH beyond what the block itself records;
//    compare with NumSucc(Compiler*) below.
//
unsigned BasicBlock::NumSucc()
{
    switch (bbJumpKind)
    {
        // Terminal blocks: no successors.
        case BBJ_THROW:
        case BBJ_RETURN:
        case BBJ_EHFINALLYRET:
        case BBJ_EHFILTERRET:
            return 0;

        // Single-target blocks.
        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
        case BBJ_NONE:
            return 1;

        case BBJ_COND:
            // A conditional branch to the fall-through block has one
            // (deduplicated) successor, not two.
            if (bbJumpDest == bbNext)
            {
                return 1;
            }
            else
            {
                return 2;
            }

        case BBJ_SWITCH:
            // Raw jump-table count; duplicates are NOT collapsed here.
            return bbJumpSwt->bbsCount;

        default:
            unreached();
    }
}
938 | |
//------------------------------------------------------------------------
// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
//    i - index of successor to return. 0 <= i <= NumSucc().
//
// Return Value:
//    Requested successor block
//
// Notes:
//    The index space must match the no-Compiler NumSucc() overload above.
//
BasicBlock* BasicBlock::GetSucc(unsigned i)
{
    assert(i < NumSucc()); // Index bounds check.
    switch (bbJumpKind)
    {
        // Single-target blocks: the jump destination.
        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
            return bbJumpDest;

        case BBJ_NONE:
            return bbNext;

        case BBJ_COND:
            // Successor 0 is the fall-through; successor 1 (if distinct) is the taken branch.
            if (i == 0)
            {
                return bbNext;
            }
            else
            {
                assert(i == 1);
                return bbJumpDest;
            }

        case BBJ_SWITCH:
            return bbJumpSwt->bbsDstTab[i];

        default:
            unreached();
    }
}
980 | |
//------------------------------------------------------------------------
// NumSucc: Returns the count of block successors. See the declaration comment for details.
//
// Arguments:
//    comp - Compiler instance
//
// Return Value:
//    Count of block successors.
//
// Notes:
//    Unlike the no-argument overload, this one uses the Compiler to resolve
//    BBJ_EHFINALLYRET targets and to deduplicate BBJ_SWITCH targets.
//
unsigned BasicBlock::NumSucc(Compiler* comp)
{
    assert(comp != nullptr);

    switch (bbJumpKind)
    {
        case BBJ_THROW:
        case BBJ_RETURN:
            return 0;

        case BBJ_EHFINALLYRET:
        {
            // The first block of the handler is labelled with the catch type.
            BasicBlock* hndBeg = comp->fgFirstBlockOfHandler(this);
            if (hndBeg->bbCatchTyp == BBCT_FINALLY)
            {
                return comp->fgNSuccsOfFinallyRet(this);
            }
            else
            {
                assert(hndBeg->bbCatchTyp == BBCT_FAULT); // We can only BBJ_EHFINALLYRET from FINALLY and FAULT.
                // A FAULT block has no successors.
                return 0;
            }
        }

        // Single-target blocks (note: BBJ_EHFILTERRET resolves to the handler here).
        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_EHFILTERRET:
        case BBJ_LEAVE:
        case BBJ_NONE:
            return 1;

        case BBJ_COND:
            // A branch to the fall-through block collapses to one successor.
            if (bbJumpDest == bbNext)
            {
                return 1;
            }
            else
            {
                return 2;
            }

        case BBJ_SWITCH:
        {
            // Count only the distinct switch targets.
            Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
            return sd.numDistinctSuccs;
        }

        default:
            unreached();
    }
}
1044 | |
//------------------------------------------------------------------------
// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
//    i - index of successor to return. 0 <= i <= NumSucc(comp).
//    comp - Compiler instance
//
// Return Value:
//    Requested successor block
//
// Notes:
//    The index space must match NumSucc(Compiler*) above (e.g. switch indices
//    are into the deduplicated successor set).
//
BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
{
    assert(comp != nullptr);

    assert(i < NumSucc(comp)); // Index bounds check.
    switch (bbJumpKind)
    {
        case BBJ_EHFILTERRET:
        {
            // Handler is the (sole) normal successor of the filter.
            assert(comp->fgFirstBlockOfHandler(this) == bbJumpDest);
            return bbJumpDest;
        }

        case BBJ_EHFINALLYRET:
            // Note: the following call is expensive.
            return comp->fgSuccOfFinallyRet(this, i);

        // Single-target blocks: the jump destination.
        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
            return bbJumpDest;

        case BBJ_NONE:
            return bbNext;

        case BBJ_COND:
            // Successor 0 is the fall-through; successor 1 (if distinct) is the taken branch.
            if (i == 0)
            {
                return bbNext;
            }
            else
            {
                assert(i == 1);
                return bbJumpDest;
            }

        case BBJ_SWITCH:
        {
            // Index into the deduplicated switch successor set.
            Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
            assert(i < sd.numDistinctSuccs); // Range check.
            return sd.nonDuplicates[i];
        }

        default:
            unreached();
    }
}
1104 | |
//------------------------------------------------------------------------
// InitVarSets: Give this block fresh, empty dataflow sets.
//
// Arguments:
//    comp - Compiler instance (provides the var-set allocator/traits)
//
// Notes:
//    Initializes the tracked-local-variable sets (use/def, live-in/out,
//    scope) to newly allocated empty sets, and clears the memory-kind
//    use/def/live sets.
//
void BasicBlock::InitVarSets(Compiler* comp)
{
    // Tracked local variable sets: allocate distinct empty sets (AssignNoCopy
    // takes ownership of the set produced by MakeEmpty).
    VarSetOps::AssignNoCopy(comp, bbVarUse, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbVarDef, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbLiveIn, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbLiveOut, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbScope, VarSetOps::MakeEmpty(comp));

    // Memory-kind sets start out empty as well.
    bbMemoryUse = emptyMemoryKindSet;
    bbMemoryDef = emptyMemoryKindSet;
    bbMemoryLiveIn = emptyMemoryKindSet;
    bbMemoryLiveOut = emptyMemoryKindSet;
}
1118 | |
1119 | // Returns true if the basic block ends with GT_JMP |
1120 | bool BasicBlock::endsWithJmpMethod(Compiler* comp) |
1121 | { |
1122 | if (comp->compJmpOpUsed && (bbJumpKind == BBJ_RETURN) && (bbFlags & BBF_HAS_JMP)) |
1123 | { |
1124 | GenTree* lastNode = this->lastNode(); |
1125 | assert(lastNode != nullptr); |
1126 | return lastNode->OperGet() == GT_JMP; |
1127 | } |
1128 | |
1129 | return false; |
1130 | } |
1131 | |
1132 | // Returns true if the basic block ends with either |
1133 | // i) GT_JMP or |
1134 | // ii) tail call (implicit or explicit) |
1135 | // |
1136 | // Params: |
1137 | // comp - Compiler instance |
1138 | // fastTailCallsOnly - Only consider fast tail calls excluding tail calls via helper. |
1139 | // |
1140 | bool BasicBlock::endsWithTailCallOrJmp(Compiler* comp, bool fastTailCallsOnly /*=false*/) |
1141 | { |
1142 | GenTree* tailCall = nullptr; |
1143 | bool tailCallsConvertibleToLoopOnly = false; |
1144 | return endsWithJmpMethod(comp) || |
1145 | endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, &tailCall); |
1146 | } |
1147 | |
1148 | //------------------------------------------------------------------------------ |
1149 | // endsWithTailCall : Check if the block ends with a tail call. |
1150 | // |
1151 | // Arguments: |
1152 | // comp - compiler instance |
1153 | // fastTailCallsOnly - check for fast tail calls only |
1154 | // tailCallsConvertibleToLoopOnly - check for tail calls convertible to loop only |
1155 | // tailCall - a pointer to a tree that will be set to the call tree if the block |
1156 | // ends with a tail call and will be set to nullptr otherwise. |
1157 | // |
1158 | // Return Value: |
1159 | // true if the block ends with a tail call; false otherwise. |
1160 | // |
1161 | // Notes: |
1162 | // At most one of fastTailCallsOnly and tailCallsConvertibleToLoopOnly flags can be true. |
1163 | // |
1164 | bool BasicBlock::endsWithTailCall(Compiler* comp, |
1165 | bool fastTailCallsOnly, |
1166 | bool tailCallsConvertibleToLoopOnly, |
1167 | GenTree** tailCall) |
1168 | { |
1169 | assert(!fastTailCallsOnly || !tailCallsConvertibleToLoopOnly); |
1170 | *tailCall = nullptr; |
1171 | bool result = false; |
1172 | |
1173 | // Is this a tail call? |
1174 | // The reason for keeping this under RyuJIT is so as not to impact existing Jit32 x86 and arm |
1175 | // targets. |
1176 | if (comp->compTailCallUsed) |
1177 | { |
1178 | if (fastTailCallsOnly || tailCallsConvertibleToLoopOnly) |
1179 | { |
1180 | // Only fast tail calls or only tail calls convertible to loops |
1181 | result = (bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN); |
1182 | } |
1183 | else |
1184 | { |
1185 | // Fast tail calls, tail calls convertible to loops, and tails calls dispatched via helper |
1186 | result = (bbJumpKind == BBJ_THROW) || ((bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN)); |
1187 | } |
1188 | |
1189 | if (result) |
1190 | { |
1191 | GenTree* lastNode = this->lastNode(); |
1192 | if (lastNode->OperGet() == GT_CALL) |
1193 | { |
1194 | GenTreeCall* call = lastNode->AsCall(); |
1195 | if (tailCallsConvertibleToLoopOnly) |
1196 | { |
1197 | result = call->IsTailCallConvertibleToLoop(); |
1198 | } |
1199 | else if (fastTailCallsOnly) |
1200 | { |
1201 | result = call->IsFastTailCall(); |
1202 | } |
1203 | else |
1204 | { |
1205 | result = call->IsTailCall(); |
1206 | } |
1207 | |
1208 | if (result) |
1209 | { |
1210 | *tailCall = call; |
1211 | } |
1212 | } |
1213 | else |
1214 | { |
1215 | result = false; |
1216 | } |
1217 | } |
1218 | } |
1219 | |
1220 | return result; |
1221 | } |
1222 | |
1223 | //------------------------------------------------------------------------------ |
1224 | // endsWithTailCallConvertibleToLoop : Check if the block ends with a tail call convertible to loop. |
1225 | // |
1226 | // Arguments: |
1227 | // comp - compiler instance |
1228 | // tailCall - a pointer to a tree that will be set to the call tree if the block |
1229 | // ends with a tail call convertible to loop and will be set to nullptr otherwise. |
1230 | // |
1231 | // Return Value: |
1232 | // true if the block ends with a tail call convertible to loop. |
1233 | // |
1234 | bool BasicBlock::endsWithTailCallConvertibleToLoop(Compiler* comp, GenTree** tailCall) |
1235 | { |
1236 | bool fastTailCallsOnly = false; |
1237 | bool tailCallsConvertibleToLoopOnly = true; |
1238 | return endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, tailCall); |
1239 | } |
1240 | |
1241 | /***************************************************************************** |
1242 | * |
1243 | * Allocate a basic block but don't append it to the current BB list. |
1244 | */ |
1245 | |
1246 | BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind) |
1247 | { |
1248 | BasicBlock* block; |
1249 | |
1250 | /* Allocate the block descriptor and zero it out */ |
1251 | assert(fgSafeBasicBlockCreation); |
1252 | |
1253 | block = new (this, CMK_BasicBlock) BasicBlock; |
1254 | |
1255 | #if MEASURE_BLOCK_SIZE |
1256 | BasicBlock::s_Count += 1; |
1257 | BasicBlock::s_Size += sizeof(*block); |
1258 | #endif |
1259 | |
1260 | #ifdef DEBUG |
1261 | // fgLookupBB() is invalid until fgInitBBLookup() is called again. |
1262 | fgBBs = (BasicBlock**)0xCDCD; |
1263 | #endif |
1264 | |
1265 | // TODO-Throughput: The following memset is pretty expensive - do something else? |
1266 | // Note that some fields have to be initialized to 0 (like bbFPStateX87) |
1267 | memset(block, 0, sizeof(*block)); |
1268 | |
1269 | // scopeInfo needs to be able to differentiate between blocks which |
1270 | // correspond to some instrs (and so may have some LocalVarInfo |
1271 | // boundaries), or have been inserted by the JIT |
1272 | block->bbCodeOffs = BAD_IL_OFFSET; |
1273 | block->bbCodeOffsEnd = BAD_IL_OFFSET; |
1274 | |
1275 | #ifdef DEBUG |
1276 | block->bbID = compBasicBlockID++; |
1277 | #endif |
1278 | |
1279 | /* Give the block a number, set the ancestor count and weight */ |
1280 | |
1281 | ++fgBBcount; |
1282 | |
1283 | if (compIsForInlining()) |
1284 | { |
1285 | block->bbNum = ++impInlineInfo->InlinerCompiler->fgBBNumMax; |
1286 | } |
1287 | else |
1288 | { |
1289 | block->bbNum = ++fgBBNumMax; |
1290 | } |
1291 | |
1292 | if (compRationalIRForm) |
1293 | { |
1294 | block->bbFlags |= BBF_IS_LIR; |
1295 | } |
1296 | |
1297 | block->bbRefs = 1; |
1298 | block->bbWeight = BB_UNITY_WEIGHT; |
1299 | |
1300 | block->bbStkTempsIn = NO_BASE_TMP; |
1301 | block->bbStkTempsOut = NO_BASE_TMP; |
1302 | |
1303 | block->bbEntryState = nullptr; |
1304 | |
1305 | /* Record the jump kind in the block */ |
1306 | |
1307 | block->bbJumpKind = jumpKind; |
1308 | |
1309 | if (jumpKind == BBJ_THROW) |
1310 | { |
1311 | block->bbSetRunRarely(); |
1312 | } |
1313 | |
1314 | #ifdef DEBUG |
1315 | if (verbose) |
1316 | { |
1317 | printf("New Basic Block %s created.\n" , block->dspToString()); |
1318 | } |
1319 | #endif |
1320 | |
1321 | // We will give all the blocks var sets after the number of tracked variables |
1322 | // is determined and frozen. After that, if we dynamically create a basic block, |
1323 | // we will initialize its var sets. |
1324 | if (fgBBVarSetsInited) |
1325 | { |
1326 | VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::MakeEmpty(this)); |
1327 | VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::MakeEmpty(this)); |
1328 | VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this)); |
1329 | VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this)); |
1330 | VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::MakeEmpty(this)); |
1331 | } |
1332 | else |
1333 | { |
1334 | VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::UninitVal()); |
1335 | VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::UninitVal()); |
1336 | VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::UninitVal()); |
1337 | VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::UninitVal()); |
1338 | VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::UninitVal()); |
1339 | } |
1340 | |
1341 | block->bbMemoryUse = emptyMemoryKindSet; |
1342 | block->bbMemoryDef = emptyMemoryKindSet; |
1343 | block->bbMemoryLiveIn = emptyMemoryKindSet; |
1344 | block->bbMemoryLiveOut = emptyMemoryKindSet; |
1345 | |
1346 | for (MemoryKind memoryKind : allMemoryKinds()) |
1347 | { |
1348 | block->bbMemorySsaPhiFunc[memoryKind] = nullptr; |
1349 | block->bbMemorySsaNumIn[memoryKind] = 0; |
1350 | block->bbMemorySsaNumOut[memoryKind] = 0; |
1351 | } |
1352 | |
1353 | // Make sure we reserve a NOT_IN_LOOP value that isn't a legal table index. |
1354 | static_assert_no_msg(MAX_LOOP_NUM < BasicBlock::NOT_IN_LOOP); |
1355 | |
1356 | block->bbNatLoopNum = BasicBlock::NOT_IN_LOOP; |
1357 | |
1358 | return block; |
1359 | } |
1360 | |