1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | |
5 | /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
6 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
7 | XX XX |
8 | XX ObjectAllocator XX |
9 | XX XX |
10 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
11 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
12 | */ |
13 | |
14 | #include "jitpch.h" |
15 | #ifdef _MSC_VER |
16 | #pragma hdrstop |
17 | #endif |
18 | |
19 | #include "gentree.h" |
20 | |
21 | //------------------------------------------------------------------------ |
22 | // DoPhase: Run analysis (if object stack allocation is enabled) and then |
23 | // morph each GT_ALLOCOBJ node either into an allocation helper |
24 | // call or stack allocation. |
25 | // |
26 | // Notes: |
27 | // Runs only if Compiler::optMethodFlags has flag OMF_HAS_NEWOBJ set. |
28 | |
29 | void ObjectAllocator::DoPhase() |
30 | { |
31 | JITDUMP("\n*** ObjectAllocationPhase: " ); |
32 | if ((comp->optMethodFlags & OMF_HAS_NEWOBJ) == 0) |
33 | { |
34 | JITDUMP("no newobjs in this method; punting\n" ); |
35 | return; |
36 | } |
37 | |
38 | if (IsObjectStackAllocationEnabled()) |
39 | { |
40 | JITDUMP("enabled, analyzing...\n" ); |
41 | DoAnalysis(); |
42 | } |
43 | else |
44 | { |
45 | JITDUMP("disabled, punting\n" ); |
46 | } |
47 | |
48 | const bool didStackAllocate = MorphAllocObjNodes(); |
49 | |
50 | if (didStackAllocate) |
51 | { |
52 | RewriteUses(); |
53 | } |
54 | } |
55 | |
56 | //------------------------------------------------------------------------------ |
// MarkLclVarAsEscaping : Mark local variable as escaping.
//
// Arguments:
//    lclNum - Number of the local variable to mark as escaping
62 | |
63 | void ObjectAllocator::MarkLclVarAsEscaping(unsigned int lclNum) |
64 | { |
65 | BitVecOps::AddElemD(&m_bitVecTraits, m_EscapingPointers, lclNum); |
66 | } |
67 | |
68 | //------------------------------------------------------------------------------ |
69 | // IsLclVarEscaping : Check if the local variable has been marked as escaping. |
70 | // |
71 | // |
72 | // Arguments: |
73 | // lclNum - Local variable number |
74 | // |
75 | // Return Value: |
76 | // True if the local variable has been marked as escaping; false otherwise |
77 | |
78 | bool ObjectAllocator::IsLclVarEscaping(unsigned int lclNum) |
79 | { |
80 | return BitVecOps::IsMember(&m_bitVecTraits, m_EscapingPointers, lclNum); |
81 | } |
82 | |
83 | //------------------------------------------------------------------------------ |
84 | // AddConnGraphEdge : Record that the source local variable may point to the same set of objects |
85 | // as the set pointed to by target local variable. |
86 | // |
87 | // Arguments: |
88 | // sourceLclNum - Local variable number of the edge source |
89 | // targetLclNum - Local variable number of the edge target |
90 | |
91 | void ObjectAllocator::AddConnGraphEdge(unsigned int sourceLclNum, unsigned int targetLclNum) |
92 | { |
93 | BitVecOps::AddElemD(&m_bitVecTraits, m_ConnGraphAdjacencyMatrix[sourceLclNum], targetLclNum); |
94 | } |
95 | |
96 | //------------------------------------------------------------------------ |
97 | // DoAnalysis: Walk over basic blocks of the method and detect all local |
98 | // variables that can be allocated on the stack. |
99 | |
100 | void ObjectAllocator::DoAnalysis() |
101 | { |
102 | assert(m_IsObjectStackAllocationEnabled); |
103 | assert(!m_AnalysisDone); |
104 | |
105 | if (comp->lvaCount > 0) |
106 | { |
107 | m_EscapingPointers = BitVecOps::MakeEmpty(&m_bitVecTraits); |
108 | m_ConnGraphAdjacencyMatrix = new (comp->getAllocator(CMK_ObjectAllocator)) BitSetShortLongRep[comp->lvaCount]; |
109 | |
110 | MarkEscapingVarsAndBuildConnGraph(); |
111 | ComputeEscapingNodes(&m_bitVecTraits, m_EscapingPointers); |
112 | } |
113 | |
114 | m_AnalysisDone = true; |
115 | } |
116 | |
117 | //------------------------------------------------------------------------------ |
// MarkEscapingVarsAndBuildConnGraph : Walk the trees of the method and mark any ref/byref/i_impl
//                                     local variables that may escape. Build a connection graph
//                                     for ref/byref/i_impl local variables.
125 | // |
126 | // Notes: |
127 | // The connection graph has an edge from local variable s to local variable t if s may point |
128 | // to the objects t points to at some point in the method. It's a simplified version |
129 | // of the graph described in this paper: |
130 | // https://www.cc.gatech.edu/~harrold/6340/cs6340_fall2009/Readings/choi99escape.pdf |
131 | // We currently don't have field edges and the edges we do have are called "deferred" in the paper. |
132 | |
133 | void ObjectAllocator::MarkEscapingVarsAndBuildConnGraph() |
134 | { |
135 | class BuildConnGraphVisitor final : public GenTreeVisitor<BuildConnGraphVisitor> |
136 | { |
137 | ObjectAllocator* m_allocator; |
138 | |
139 | public: |
140 | enum |
141 | { |
142 | DoPreOrder = true, |
143 | DoLclVarsOnly = true, |
144 | ComputeStack = true, |
145 | }; |
146 | |
147 | BuildConnGraphVisitor(ObjectAllocator* allocator) |
148 | : GenTreeVisitor<BuildConnGraphVisitor>(allocator->comp), m_allocator(allocator) |
149 | { |
150 | } |
151 | |
152 | Compiler::fgWalkResult PreOrderVisit(GenTree** use, GenTree* user) |
153 | { |
154 | GenTree* tree = *use; |
155 | assert(tree != nullptr); |
156 | assert(tree->IsLocal()); |
157 | |
158 | var_types type = tree->TypeGet(); |
159 | if ((tree->OperGet() == GT_LCL_VAR) && (type == TYP_REF || type == TYP_BYREF || type == TYP_I_IMPL)) |
160 | { |
161 | unsigned int lclNum = tree->AsLclVar()->GetLclNum(); |
162 | assert(tree == m_ancestors.Index(0)); |
163 | |
164 | if (m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum)) |
165 | { |
166 | if (!m_allocator->IsLclVarEscaping(lclNum)) |
167 | { |
168 | JITDUMP("V%02u first escapes via [%06u]\n" , lclNum, m_compiler->dspTreeID(tree)); |
169 | } |
170 | m_allocator->MarkLclVarAsEscaping(lclNum); |
171 | } |
172 | } |
173 | return Compiler::fgWalkResult::WALK_CONTINUE; |
174 | } |
175 | }; |
176 | |
177 | for (unsigned int lclNum = 0; lclNum < comp->lvaCount; ++lclNum) |
178 | { |
179 | var_types type = comp->lvaTable[lclNum].TypeGet(); |
180 | |
181 | if (type == TYP_REF || type == TYP_I_IMPL || type == TYP_BYREF) |
182 | { |
183 | m_ConnGraphAdjacencyMatrix[lclNum] = BitVecOps::MakeEmpty(&m_bitVecTraits); |
184 | |
185 | if (comp->lvaTable[lclNum].lvAddrExposed) |
186 | { |
187 | JITDUMP(" V%02u is address exposed\n" , lclNum); |
188 | MarkLclVarAsEscaping(lclNum); |
189 | } |
190 | } |
191 | else |
192 | { |
            // Variables that cannot point to objects do not participate in the analysis.
194 | m_ConnGraphAdjacencyMatrix[lclNum] = BitVecOps::UninitVal(); |
195 | } |
196 | } |
197 | |
198 | BasicBlock* block; |
199 | |
200 | foreach_block(comp, block) |
201 | { |
202 | for (GenTreeStmt* stmt = block->firstStmt(); stmt; stmt = stmt->gtNextStmt) |
203 | { |
204 | BuildConnGraphVisitor buildConnGraphVisitor(this); |
205 | buildConnGraphVisitor.WalkTree(&stmt->gtStmtExpr, nullptr); |
206 | } |
207 | } |
208 | } |
209 | |
210 | //------------------------------------------------------------------------------ |
211 | // ComputeEscapingNodes : Given an initial set of escaping nodes, update it to contain the full set |
212 | // of escaping nodes by computing nodes reachable from the given set. |
213 | // |
214 | // Arguments: |
215 | // bitVecTraits - Bit vector traits |
216 | // escapingNodes [in/out] - Initial set of escaping nodes |
217 | |
218 | void ObjectAllocator::ComputeEscapingNodes(BitVecTraits* bitVecTraits, BitVec& escapingNodes) |
219 | { |
220 | BitSetShortLongRep escapingNodesToProcess = BitVecOps::MakeCopy(bitVecTraits, escapingNodes); |
221 | BitSetShortLongRep newEscapingNodes = BitVecOps::UninitVal(); |
222 | |
223 | unsigned int lclNum; |
224 | |
225 | bool doOneMoreIteration = true; |
226 | while (doOneMoreIteration) |
227 | { |
228 | BitVecOps::Iter iterator(bitVecTraits, escapingNodesToProcess); |
229 | doOneMoreIteration = false; |
230 | |
231 | while (iterator.NextElem(&lclNum)) |
232 | { |
233 | doOneMoreIteration = true; |
234 | |
235 | // newEscapingNodes = adjacentNodes[lclNum] |
236 | BitVecOps::Assign(bitVecTraits, newEscapingNodes, m_ConnGraphAdjacencyMatrix[lclNum]); |
237 | // newEscapingNodes = newEscapingNodes \ escapingNodes |
238 | BitVecOps::DiffD(bitVecTraits, newEscapingNodes, escapingNodes); |
239 | // escapingNodesToProcess = escapingNodesToProcess U newEscapingNodes |
240 | BitVecOps::UnionD(bitVecTraits, escapingNodesToProcess, newEscapingNodes); |
241 | // escapingNodes = escapingNodes U newEscapingNodes |
242 | BitVecOps::UnionD(bitVecTraits, escapingNodes, newEscapingNodes); |
243 | // escapingNodesToProcess = escapingNodesToProcess \ { lclNum } |
244 | BitVecOps::RemoveElemD(bitVecTraits, escapingNodesToProcess, lclNum); |
245 | } |
246 | } |
247 | } |
248 | |
249 | //------------------------------------------------------------------------ |
250 | // MorphAllocObjNodes: Morph each GT_ALLOCOBJ node either into an |
251 | // allocation helper call or stack allocation. |
252 | // |
// Return Value:
254 | // true if any allocation was done as a stack allocation. |
255 | // |
256 | // Notes: |
257 | // Runs only over the blocks having bbFlags BBF_HAS_NEWOBJ set. |
258 | |
259 | bool ObjectAllocator::MorphAllocObjNodes() |
260 | { |
261 | bool didStackAllocate = false; |
262 | |
263 | BasicBlock* block; |
264 | |
265 | foreach_block(comp, block) |
266 | { |
267 | const bool basicBlockHasNewObj = (block->bbFlags & BBF_HAS_NEWOBJ) == BBF_HAS_NEWOBJ; |
268 | const bool basicBlockHasBackwardJump = (block->bbFlags & BBF_BACKWARD_JUMP) == BBF_BACKWARD_JUMP; |
269 | #ifndef DEBUG |
270 | if (!basicBlockHasNewObj) |
271 | { |
272 | continue; |
273 | } |
#endif // !DEBUG
275 | |
276 | for (GenTreeStmt* stmt = block->firstStmt(); stmt; stmt = stmt->gtNextStmt) |
277 | { |
278 | GenTree* stmtExpr = stmt->gtStmtExpr; |
279 | GenTree* op2 = nullptr; |
280 | |
281 | bool canonicalAllocObjFound = false; |
282 | |
283 | if (stmtExpr->OperGet() == GT_ASG && stmtExpr->TypeGet() == TYP_REF) |
284 | { |
285 | op2 = stmtExpr->gtGetOp2(); |
286 | |
287 | if (op2->OperGet() == GT_ALLOCOBJ) |
288 | { |
289 | canonicalAllocObjFound = true; |
290 | } |
291 | } |
292 | |
293 | if (canonicalAllocObjFound) |
294 | { |
295 | assert(basicBlockHasNewObj); |
296 | //------------------------------------------------------------------------ |
297 | // We expect the following expression tree at this point |
298 | // * STMT void |
299 | // | /--* ALLOCOBJ ref |
300 | // | | \--* CNS_INT(h) long |
301 | // \--* ASG ref |
302 | // \--* LCL_VAR ref |
303 | //------------------------------------------------------------------------ |
304 | |
305 | GenTree* op1 = stmtExpr->gtGetOp1(); |
306 | |
307 | assert(op1->OperGet() == GT_LCL_VAR); |
308 | assert(op1->TypeGet() == TYP_REF); |
309 | assert(op2 != nullptr); |
310 | assert(op2->OperGet() == GT_ALLOCOBJ); |
311 | |
312 | GenTreeAllocObj* asAllocObj = op2->AsAllocObj(); |
313 | unsigned int lclNum = op1->AsLclVar()->GetLclNum(); |
314 | CORINFO_CLASS_HANDLE clsHnd = op2->AsAllocObj()->gtAllocObjClsHnd; |
315 | |
316 | // Don't attempt to do stack allocations inside basic blocks that may be in a loop. |
317 | if (IsObjectStackAllocationEnabled() && !basicBlockHasBackwardJump && |
318 | CanAllocateLclVarOnStack(lclNum, clsHnd)) |
319 | { |
320 | JITDUMP("Allocating local variable V%02u on the stack\n" , lclNum); |
321 | |
322 | const unsigned int stackLclNum = MorphAllocObjNodeIntoStackAlloc(asAllocObj, block, stmt); |
323 | m_HeapLocalToStackLocalMap.AddOrUpdate(lclNum, stackLclNum); |
324 | stmt->gtStmtExpr->gtBashToNOP(); |
325 | comp->optMethodFlags |= OMF_HAS_OBJSTACKALLOC; |
326 | didStackAllocate = true; |
327 | } |
328 | else |
329 | { |
330 | if (IsObjectStackAllocationEnabled()) |
331 | { |
332 | JITDUMP("Allocating local variable V%02u on the heap\n" , lclNum); |
333 | } |
334 | |
335 | op2 = MorphAllocObjNodeIntoHelperCall(asAllocObj); |
336 | } |
337 | |
338 | // Propagate flags of op2 to its parent. |
339 | stmtExpr->gtOp.gtOp2 = op2; |
340 | stmtExpr->gtFlags |= op2->gtFlags & GTF_ALL_EFFECT; |
341 | } |
342 | |
343 | #ifdef DEBUG |
344 | else |
345 | { |
346 | // We assume that GT_ALLOCOBJ nodes are always present in the |
347 | // canonical form. |
348 | comp->fgWalkTreePre(&stmt->gtStmtExpr, AssertWhenAllocObjFoundVisitor); |
349 | } |
350 | #endif // DEBUG |
351 | } |
352 | } |
353 | |
354 | return didStackAllocate; |
355 | } |
356 | |
357 | //------------------------------------------------------------------------ |
358 | // MorphAllocObjNodeIntoHelperCall: Morph a GT_ALLOCOBJ node into an |
359 | // allocation helper call. |
360 | // |
361 | // Arguments: |
362 | // allocObj - GT_ALLOCOBJ that will be replaced by helper call. |
363 | // |
364 | // Return Value: |
365 | // Address of helper call node (can be the same as allocObj). |
366 | // |
367 | // Notes: |
368 | // Must update parents flags after this. |
369 | |
370 | GenTree* ObjectAllocator::MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* allocObj) |
371 | { |
372 | assert(allocObj != nullptr); |
373 | |
374 | GenTree* op1 = allocObj->gtGetOp1(); |
375 | unsigned int helper = allocObj->gtNewHelper; |
376 | bool helperHasSideEffects = allocObj->gtHelperHasSideEffects; |
377 | |
378 | GenTreeArgList* args; |
379 | #ifdef FEATURE_READYTORUN_COMPILER |
380 | CORINFO_CONST_LOOKUP entryPoint = allocObj->gtEntryPoint; |
381 | if (helper == CORINFO_HELP_READYTORUN_NEW) |
382 | { |
383 | args = nullptr; |
384 | } |
385 | else |
386 | #endif |
387 | { |
388 | args = comp->gtNewArgList(op1); |
389 | } |
390 | |
391 | const bool morphArgs = false; |
392 | GenTree* helperCall = comp->fgMorphIntoHelperCall(allocObj, allocObj->gtNewHelper, args, morphArgs); |
393 | if (helperHasSideEffects) |
394 | { |
395 | helperCall->gtCall.gtCallMoreFlags |= GTF_CALL_M_ALLOC_SIDE_EFFECTS; |
396 | } |
397 | |
398 | #ifdef FEATURE_READYTORUN_COMPILER |
399 | if (entryPoint.addr != nullptr) |
400 | { |
401 | assert(comp->opts.IsReadyToRun()); |
402 | helperCall->gtCall.setEntryPoint(entryPoint); |
403 | } |
404 | #endif |
405 | |
406 | return helperCall; |
407 | } |
408 | |
409 | //------------------------------------------------------------------------ |
410 | // MorphAllocObjNodeIntoStackAlloc: Morph a GT_ALLOCOBJ node into stack |
411 | // allocation. |
412 | // Arguments: |
413 | // allocObj - GT_ALLOCOBJ that will be replaced by a stack allocation |
414 | // block - a basic block where allocObj is |
415 | // stmt - a statement where allocObj is |
416 | // |
417 | // Return Value: |
418 | // local num for the new stack allocated local |
419 | // |
420 | // Notes: |
421 | // This function can insert additional statements before stmt. |
422 | |
423 | unsigned int ObjectAllocator::MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* allocObj, |
424 | BasicBlock* block, |
425 | GenTreeStmt* stmt) |
426 | { |
427 | assert(allocObj != nullptr); |
428 | assert(m_AnalysisDone); |
429 | |
430 | const bool shortLifetime = false; |
    const unsigned int lclNum = comp->lvaGrabTemp(shortLifetime DEBUGARG("MorphAllocObjNodeIntoStackAlloc temp"));
    const bool unsafeValueClsCheck = true;
433 | comp->lvaSetStruct(lclNum, allocObj->gtAllocObjClsHnd, unsafeValueClsCheck); |
434 | |
435 | // Initialize the object memory if necessary |
436 | if (comp->fgStructTempNeedsExplicitZeroInit(comp->lvaTable + lclNum, block)) |
437 | { |
438 | unsigned int structSize = comp->lvaTable[lclNum].lvSize(); |
439 | |
440 | //------------------------------------------------------------------------ |
441 | // * STMT void |
442 | // | /--* CNS_INT int 0 |
443 | // \--* ASG struct (init) |
444 | // \--* LCL_VAR struct |
445 | //------------------------------------------------------------------------ |
446 | |
447 | GenTree* tree = comp->gtNewLclvNode(lclNum, TYP_STRUCT); |
448 | const bool isVolatile = false; |
449 | const bool isCopyBlock = false; |
450 | tree = comp->gtNewBlkOpNode(tree, comp->gtNewIconNode(0), structSize, isVolatile, isCopyBlock); |
451 | |
452 | GenTreeStmt* newStmt = comp->gtNewStmt(tree); |
453 | |
454 | comp->fgInsertStmtBefore(block, stmt, newStmt); |
455 | } |
456 | |
457 | //------------------------------------------------------------------------ |
458 | // * STMT void |
459 | // | /--* CNS_INT(h) long |
460 | // \--* ASG long |
461 | // \--* FIELD long #PseudoField:0x0 |
462 | // \--* ADDR byref |
463 | // \--* LCL_VAR struct |
464 | //------------------------------------------------------------------------ |
465 | |
466 | // Create a local representing the object |
467 | GenTree* tree = comp->gtNewLclvNode(lclNum, TYP_STRUCT); |
468 | |
469 | // Add a pseudo-field for the method table pointer and initialize it |
470 | tree = comp->gtNewOperNode(GT_ADDR, TYP_BYREF, tree); |
471 | tree = comp->gtNewFieldRef(TYP_I_IMPL, FieldSeqStore::FirstElemPseudoField, tree, 0); |
472 | tree = comp->gtNewAssignNode(tree, allocObj->gtGetOp1()); |
473 | |
474 | GenTreeStmt* newStmt = comp->gtNewStmt(tree); |
475 | |
476 | comp->fgInsertStmtBefore(block, stmt, newStmt); |
477 | |
478 | return lclNum; |
479 | } |
480 | |
481 | //------------------------------------------------------------------------ |
482 | // CanLclVarEscapeViaParentStack: Check if the local variable escapes via the given parent stack. |
483 | // Update the connection graph as necessary. |
484 | // |
485 | // Arguments: |
486 | // parentStack - Parent stack of the current visit |
487 | // lclNum - Local variable number |
488 | // |
489 | // Return Value: |
490 | // true if the local can escape via the parent stack; false otherwise |
491 | // |
492 | // Notes: |
493 | // The method currently treats all locals assigned to a field as escaping. |
494 | // The can potentially be tracked by special field edges in the connection graph. |
495 | |
496 | bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parentStack, unsigned int lclNum) |
497 | { |
498 | assert(parentStack != nullptr); |
499 | int parentIndex = 1; |
500 | |
501 | bool keepChecking = true; |
502 | bool canLclVarEscapeViaParentStack = true; |
503 | |
504 | while (keepChecking) |
505 | { |
506 | if (parentStack->Height() <= parentIndex) |
507 | { |
508 | canLclVarEscapeViaParentStack = false; |
509 | break; |
510 | } |
511 | |
512 | canLclVarEscapeViaParentStack = true; |
513 | GenTree* tree = parentStack->Index(parentIndex - 1); |
514 | GenTree* parent = parentStack->Index(parentIndex); |
515 | keepChecking = false; |
516 | |
517 | switch (parent->OperGet()) |
518 | { |
519 | case GT_ASG: |
520 | { |
521 | // Use the following conservative behavior for GT_ASG parent node: |
522 | // Consider local variable to be escaping if |
523 | // 1. lclVar appears on the rhs of a GT_ASG node |
524 | // AND |
525 | // 2. The lhs of the GT_ASG is not another lclVar |
526 | |
527 | GenTree* op1 = parent->AsOp()->gtGetOp1(); |
528 | |
529 | if (op1 == tree) |
530 | { |
531 | // Assigning to a local doesn't make it escaping. |
532 | // If there is another local variable on the rhs, |
533 | // we will update the connection graph when we visit it. |
534 | canLclVarEscapeViaParentStack = false; |
535 | } |
536 | else |
537 | { |
538 | // lclVar is on the rhs of GT_ASG node |
539 | assert(parent->AsOp()->gtGetOp2() == tree); |
540 | |
541 | // Update the connection graph if we are assigning to a local. |
542 | // For all other assignments we mark the rhs local as escaping. |
543 | // TODO-ObjectStackAllocation: track assignments to fields. |
544 | if (op1->OperGet() == GT_LCL_VAR) |
545 | { |
546 | // We expect the following tree at this point |
547 | // /--* GT_LCL_VAR ref rhsLclVar |
548 | // --* = ref |
549 | // \--* GT_LCL_VAR ref lhsLclVar |
550 | |
551 | // Add an edge to the connection graph. |
552 | const unsigned int lhsLclNum = op1->AsLclVar()->GetLclNum(); |
553 | const unsigned int rhsLclNum = lclNum; |
554 | |
555 | AddConnGraphEdge(lhsLclNum, rhsLclNum); |
556 | canLclVarEscapeViaParentStack = false; |
557 | } |
558 | } |
559 | break; |
560 | } |
561 | |
562 | case GT_EQ: |
563 | case GT_NE: |
564 | canLclVarEscapeViaParentStack = false; |
565 | break; |
566 | |
567 | case GT_COMMA: |
568 | if (parent->AsOp()->gtGetOp1() == parentStack->Index(parentIndex - 1)) |
569 | { |
570 | // Left child of GT_COMMA, it will be discarded |
571 | canLclVarEscapeViaParentStack = false; |
572 | break; |
573 | } |
574 | __fallthrough; |
575 | case GT_COLON: |
576 | case GT_QMARK: |
577 | case GT_ADD: |
578 | // Check whether the local escapes via its grandparent. |
579 | ++parentIndex; |
580 | keepChecking = true; |
581 | break; |
582 | |
583 | case GT_FIELD: |
584 | case GT_IND: |
585 | { |
586 | int grandParentIndex = parentIndex + 1; |
587 | if ((parentStack->Height() > grandParentIndex) && |
588 | (parentStack->Index(grandParentIndex)->OperGet() == GT_ADDR)) |
589 | { |
590 | // Check if the address of the field/ind escapes. |
591 | parentIndex += 2; |
592 | keepChecking = true; |
593 | } |
594 | else |
595 | { |
596 | // Address of the field/ind is not taken so the local doesn't escape. |
597 | canLclVarEscapeViaParentStack = false; |
598 | } |
599 | break; |
600 | } |
601 | |
602 | case GT_CALL: |
603 | { |
604 | GenTreeCall* asCall = parent->AsCall(); |
605 | |
606 | if (asCall->gtCallType == CT_HELPER) |
607 | { |
608 | // TODO-ObjectStackAllocation: Special-case helpers here that |
609 | // 1. Don't make objects escape. |
610 | // 2. Protect objects as interior (GCPROTECT_BEGININTERIOR() instead of GCPROTECT_BEGIN()). |
611 | // 3. Don't check that the object is in the heap in ValidateInner. |
612 | |
613 | canLclVarEscapeViaParentStack = true; |
614 | } |
615 | break; |
616 | } |
617 | |
618 | default: |
619 | break; |
620 | } |
621 | } |
622 | |
623 | return canLclVarEscapeViaParentStack; |
624 | } |
625 | |
626 | #ifdef DEBUG |
627 | //------------------------------------------------------------------------ |
628 | // AssertWhenAllocObjFoundVisitor: Look for a GT_ALLOCOBJ node and assert |
629 | // when found one. |
630 | // |
631 | // Arguments: |
632 | // pTree - Tree to examine |
633 | // data - Walker data |
634 | // |
635 | // Return Value: |
636 | // Always returns fgWalkResult::WALK_CONTINUE |
637 | |
638 | Compiler::fgWalkResult ObjectAllocator::AssertWhenAllocObjFoundVisitor(GenTree** pTree, Compiler::fgWalkData* data) |
639 | { |
640 | GenTree* tree = *pTree; |
641 | |
642 | assert(tree != nullptr); |
643 | assert(tree->OperGet() != GT_ALLOCOBJ); |
644 | |
645 | return Compiler::fgWalkResult::WALK_CONTINUE; |
646 | } |
647 | |
648 | #endif // DEBUG |
649 | |
650 | //------------------------------------------------------------------------ |
651 | // RewriteUses: Find uses of the newobj temp for stack-allocated |
652 | // objects and replace with address of the stack local. |
653 | |
654 | void ObjectAllocator::RewriteUses() |
655 | { |
656 | class RewriteUsesVisitor final : public GenTreeVisitor<RewriteUsesVisitor> |
657 | { |
658 | ObjectAllocator* m_allocator; |
659 | |
660 | public: |
661 | enum |
662 | { |
663 | DoPreOrder = true, |
664 | DoLclVarsOnly = true, |
665 | }; |
666 | |
667 | RewriteUsesVisitor(ObjectAllocator* allocator) |
668 | : GenTreeVisitor<RewriteUsesVisitor>(allocator->comp), m_allocator(allocator) |
669 | { |
670 | } |
671 | |
672 | Compiler::fgWalkResult PreOrderVisit(GenTree** use, GenTree* user) |
673 | { |
674 | GenTree* tree = *use; |
675 | assert(tree != nullptr); |
676 | assert(tree->IsLocal()); |
677 | |
678 | const unsigned int lclNum = tree->AsLclVarCommon()->gtLclNum; |
679 | unsigned int newLclNum = BAD_VAR_NUM; |
680 | |
681 | if (m_allocator->m_HeapLocalToStackLocalMap.TryGetValue(lclNum, &newLclNum)) |
682 | { |
683 | GenTree* newTree = |
684 | m_compiler->gtNewOperNode(GT_ADDR, TYP_I_IMPL, m_compiler->gtNewLclvNode(newLclNum, TYP_STRUCT)); |
685 | *use = newTree; |
686 | } |
687 | |
688 | return Compiler::fgWalkResult::WALK_CONTINUE; |
689 | } |
690 | }; |
691 | |
692 | BasicBlock* block; |
693 | |
694 | foreach_block(comp, block) |
695 | { |
696 | for (GenTreeStmt* stmt = block->firstStmt(); stmt; stmt = stmt->gtNextStmt) |
697 | { |
698 | RewriteUsesVisitor rewriteUsesVisitor(this); |
699 | rewriteUsesVisitor.WalkTree(&stmt->gtStmtExpr, nullptr); |
700 | } |
701 | } |
702 | } |
703 | |