1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | |
5 | /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
6 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
7 | XX XX |
8 | XX GenTree XX |
9 | XX XX |
XX This is the node in the semantic tree graph. It represents the operation XX
XX corresponding to the node, and carries other information used during code-gen. XX
12 | XX XX |
13 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
14 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
15 | */ |
16 | |
17 | /*****************************************************************************/ |
18 | #ifndef _GENTREE_H_ |
19 | #define _GENTREE_H_ |
20 | /*****************************************************************************/ |
21 | |
22 | #include "vartype.h" // For "var_types" |
23 | #include "target.h" // For "regNumber" |
24 | #include "ssaconfig.h" // For "SsaConfig::RESERVED_SSA_NUM" |
25 | #include "reglist.h" |
26 | #include "valuenumtype.h" |
27 | #include "jitstd.h" |
28 | #include "jithashtable.h" |
29 | #include "simd.h" |
30 | #include "namedintrinsiclist.h" |
31 | |
32 | // Debugging GenTree is much easier if we add a magic virtual function to make the debugger able to figure out what type |
// it's got. This is enabled by default in DEBUG. To enable it in retail (RET) builds (temporarily!), you need to change the
34 | // build to define DEBUGGABLE_GENTREE=1, as well as pass /OPT:NOICF to the linker (or else all the vtables get merged, |
35 | // making the debugging value supplied by them useless). See protojit.nativeproj for a commented example of setting the |
36 | // build flags correctly. |
37 | #ifndef DEBUGGABLE_GENTREE |
38 | #ifdef DEBUG |
39 | #define DEBUGGABLE_GENTREE 1 |
40 | #else // !DEBUG |
41 | #define DEBUGGABLE_GENTREE 0 |
42 | #endif // !DEBUG |
43 | #endif // !DEBUGGABLE_GENTREE |
44 | |
45 | // The SpecialCodeKind enum is used to indicate the type of special (unique) |
46 | // target block that will be targeted by an instruction. |
47 | // These are used by: |
48 | // GenTreeBoundsChk nodes (SCK_RNGCHK_FAIL, SCK_ARG_EXCPN, SCK_ARG_RNG_EXCPN) |
49 | // - these nodes have a field (gtThrowKind) to indicate which kind |
50 | // GenTreeOps nodes, for which codegen will generate the branch |
51 | // - it will use the appropriate kind based on the opcode, though it's not |
52 | // clear why SCK_OVERFLOW == SCK_ARITH_EXCPN |
53 | // SCK_PAUSE_EXEC is not currently used. |
54 | // |
55 | enum SpecialCodeKind |
56 | { |
57 | SCK_NONE, |
58 | SCK_RNGCHK_FAIL, // target when range check fails |
59 | SCK_PAUSE_EXEC, // target to stop (e.g. to allow GC) |
60 | SCK_DIV_BY_ZERO, // target for divide by zero (Not used on X86/X64) |
61 | SCK_ARITH_EXCPN, // target on arithmetic exception |
62 | SCK_OVERFLOW = SCK_ARITH_EXCPN, // target on overflow |
63 | SCK_ARG_EXCPN, // target on ArgumentException (currently used only for SIMD intrinsics) |
64 | SCK_ARG_RNG_EXCPN, // target on ArgumentOutOfRangeException (currently used only for SIMD intrinsics) |
65 | SCK_COUNT |
66 | }; |
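
// Illustrative sketch (not part of the original comments): a bounds check
// node records the kind of its throw target in its gtThrowKind field, e.g.
//
//     boundsChk->gtThrowKind = SCK_RNGCHK_FAIL; // branch here if the range check fails
//
// while codegen for an overflowing arithmetic node branches to the
// SCK_OVERFLOW (== SCK_ARITH_EXCPN) target.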
67 | |
68 | /*****************************************************************************/ |
69 | |
70 | enum genTreeOps : BYTE |
71 | { |
72 | #define GTNODE(en, st, cm, ok) GT_##en, |
73 | #include "gtlist.h" |
74 | |
75 | GT_COUNT, |
76 | |
77 | #ifdef _TARGET_64BIT_ |
78 | // GT_CNS_NATIVELONG is the gtOper symbol for GT_CNS_LNG or GT_CNS_INT, depending on the target. |
79 | // For the 64-bit targets we will only use GT_CNS_INT as it used to represent all the possible sizes |
80 | GT_CNS_NATIVELONG = GT_CNS_INT, |
81 | #else |
82 | // For the 32-bit targets we use a GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others. |
83 | // In the future when we retarget the JIT for x86 we should consider eliminating GT_CNS_LNG |
84 | GT_CNS_NATIVELONG = GT_CNS_LNG, |
85 | #endif |
86 | }; |
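
// Illustrative sketch: GT_CNS_NATIVELONG lets target-neutral code test for a
// pointer-sized integer constant without #ifdef'ing on the target:
//
//     if (tree->OperGet() == GT_CNS_NATIVELONG)
//     {
//         // 64-bit targets: this is GT_CNS_INT; 32-bit targets: GT_CNS_LNG
//     }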
87 | |
88 | /***************************************************************************** |
89 | * |
90 | * The following enum defines a set of bit flags that can be used |
91 | * to classify expression tree nodes. Note that some operators will |
92 | * have more than one bit set, as follows: |
93 | * |
94 | * GTK_CONST implies GTK_LEAF |
95 | * GTK_RELOP implies GTK_BINOP |
96 | * GTK_LOGOP implies GTK_BINOP |
97 | */ |
98 | |
99 | enum genTreeKinds |
100 | { |
101 | GTK_SPECIAL = 0x0000, // unclassified operator (special handling reqd) |
102 | |
103 | GTK_CONST = 0x0001, // constant operator |
104 | GTK_LEAF = 0x0002, // leaf operator |
105 | GTK_UNOP = 0x0004, // unary operator |
106 | GTK_BINOP = 0x0008, // binary operator |
107 | GTK_RELOP = 0x0010, // comparison operator |
108 | GTK_LOGOP = 0x0020, // logical operator |
109 | |
110 | GTK_KINDMASK = 0x007F, // operator kind mask |
111 | |
112 | GTK_COMMUTE = 0x0080, // commutative operator |
113 | |
GTK_EXOP = 0x0100, // Indicates an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
// by adding non-node fields to the unary or binary operator.
116 | |
117 | GTK_LOCAL = 0x0200, // is a local access (load, store, phi) |
118 | |
119 | GTK_NOVALUE = 0x0400, // node does not produce a value |
120 | GTK_NOTLIR = 0x0800, // node is not allowed in LIR |
121 | |
122 | GTK_NOCONTAIN = 0x1000, // this node is a value, but may not be contained |
123 | |
124 | /* Define composite value(s) */ |
125 | |
126 | GTK_SMPOP = (GTK_UNOP | GTK_BINOP | GTK_RELOP | GTK_LOGOP) |
127 | }; |
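
// Illustrative sketch: because GTK_CONST implies GTK_LEAF, a single mask test
// classifies constants together with the other leaves:
//
//     bool isLeaf = (GenTree::OperKind(oper) & GTK_LEAF) != 0; // true for GT_CNS_INT too
//
// Similarly, GTK_SMPOP matches any unary, binary, relational, or logical oper.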
128 | |
129 | /*****************************************************************************/ |
130 | |
131 | #define SMALL_TREE_NODES 1 |
132 | |
133 | /*****************************************************************************/ |
134 | |
135 | enum gtCallTypes : BYTE |
136 | { |
137 | CT_USER_FUNC, // User function |
138 | CT_HELPER, // Jit-helper |
139 | CT_INDIRECT, // Indirect call |
140 | |
141 | CT_COUNT // fake entry (must be last) |
142 | }; |
143 | |
144 | /*****************************************************************************/ |
145 | |
146 | struct BasicBlock; |
147 | struct InlineCandidateInfo; |
148 | struct GuardedDevirtualizationCandidateInfo; |
149 | |
150 | typedef unsigned short AssertionIndex; |
151 | |
152 | static const AssertionIndex NO_ASSERTION_INDEX = 0; |
153 | |
154 | //------------------------------------------------------------------------ |
155 | // GetAssertionIndex: return 1-based AssertionIndex from 0-based int index. |
156 | // |
157 | // Arguments: |
158 | // index - 0-based index |
159 | // Return Value: |
160 | // 1-based AssertionIndex. |
161 | inline AssertionIndex GetAssertionIndex(unsigned index) |
162 | { |
163 | return (AssertionIndex)(index + 1); |
164 | } |
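
// Illustrative usage (hypothetical 'assertionCount'): walking a 0-based
// assertion table and producing the 1-based indices used by AssertionInfo:
//
//     for (unsigned i = 0; i < assertionCount; i++)
//     {
//         AssertionIndex idx = GetAssertionIndex(i); // == i + 1, never NO_ASSERTION_INDEX
//     }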
165 | |
166 | class AssertionInfo |
167 | { |
168 | // true if the assertion holds on the bbNext edge instead of the bbJumpDest edge (for GT_JTRUE nodes) |
169 | unsigned short m_isNextEdgeAssertion : 1; |
170 | // 1-based index of the assertion |
171 | unsigned short m_assertionIndex : 15; |
172 | |
173 | AssertionInfo(bool isNextEdgeAssertion, AssertionIndex assertionIndex) |
174 | : m_isNextEdgeAssertion(isNextEdgeAssertion), m_assertionIndex(assertionIndex) |
175 | { |
176 | assert(m_assertionIndex == assertionIndex); |
177 | } |
178 | |
179 | public: |
180 | AssertionInfo() : AssertionInfo(false, 0) |
181 | { |
182 | } |
183 | |
184 | AssertionInfo(AssertionIndex assertionIndex) : AssertionInfo(false, assertionIndex) |
185 | { |
186 | } |
187 | |
188 | static AssertionInfo ForNextEdge(AssertionIndex assertionIndex) |
189 | { |
190 | // Ignore the edge information if there's no assertion |
191 | bool isNextEdge = (assertionIndex != NO_ASSERTION_INDEX); |
192 | return AssertionInfo(isNextEdge, assertionIndex); |
193 | } |
194 | |
195 | void Clear() |
196 | { |
197 | m_isNextEdgeAssertion = 0; |
198 | m_assertionIndex = NO_ASSERTION_INDEX; |
199 | } |
200 | |
201 | bool HasAssertion() const |
202 | { |
203 | return m_assertionIndex != NO_ASSERTION_INDEX; |
204 | } |
205 | |
206 | AssertionIndex GetAssertionIndex() const |
207 | { |
208 | return m_assertionIndex; |
209 | } |
210 | |
211 | bool IsNextEdgeAssertion() const |
212 | { |
213 | return m_isNextEdgeAssertion; |
214 | } |
215 | }; |
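
// Illustrative usage (hypothetical 'jtrue' and 'idx'): to record that an
// assertion holds along the fall-through (bbNext) edge of a GT_JTRUE node:
//
//     jtrue->SetAssertionInfo(AssertionInfo::ForNextEdge(idx));
//
// If 'idx' is NO_ASSERTION_INDEX, ForNextEdge ignores the edge information,
// so IsNextEdgeAssertion() returns true only for a real assertion.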
216 | |
217 | /*****************************************************************************/ |
218 | |
219 | // GT_FIELD nodes will be lowered into more "code-gen-able" representations, like |
220 | // GT_IND's of addresses, or GT_LCL_FLD nodes. We'd like to preserve the more abstract |
221 | // information, and will therefore annotate such lowered nodes with FieldSeq's. A FieldSeq |
222 | // represents a (possibly) empty sequence of fields. The fields are in the order |
223 | // in which they are dereferenced. The first field may be an object field or a struct field; |
224 | // all subsequent fields must be struct fields. |
225 | struct FieldSeqNode |
226 | { |
227 | CORINFO_FIELD_HANDLE m_fieldHnd; |
228 | FieldSeqNode* m_next; |
229 | |
230 | FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next) : m_fieldHnd(fieldHnd), m_next(next) |
231 | { |
232 | } |
233 | |
234 | // returns true when this is the pseudo #FirstElem field sequence |
235 | bool IsFirstElemFieldSeq(); |
236 | |
237 | // returns true when this is the pseudo #ConstantIndex field sequence |
238 | bool IsConstantIndexFieldSeq(); |
239 | |
// returns true when this is the pseudo #FirstElem field sequence or the pseudo #ConstantIndex field sequence
241 | bool IsPseudoField(); |
242 | |
// Make sure this provides methods that allow it to be used as a KeyFuncs type in JitHashTable.
244 | static int GetHashCode(FieldSeqNode fsn) |
245 | { |
246 | return static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_fieldHnd)) ^ |
247 | static_cast<int>(reinterpret_cast<intptr_t>(fsn.m_next)); |
248 | } |
249 | |
250 | static bool Equals(FieldSeqNode fsn1, FieldSeqNode fsn2) |
251 | { |
252 | return fsn1.m_fieldHnd == fsn2.m_fieldHnd && fsn1.m_next == fsn2.m_next; |
253 | } |
254 | }; |
255 | |
256 | // This class canonicalizes field sequences. |
257 | class FieldSeqStore |
258 | { |
259 | typedef JitHashTable<FieldSeqNode, /*KeyFuncs*/ FieldSeqNode, FieldSeqNode*> FieldSeqNodeCanonMap; |
260 | |
261 | CompAllocator m_alloc; |
262 | FieldSeqNodeCanonMap* m_canonMap; |
263 | |
264 | static FieldSeqNode s_notAField; // No value, just exists to provide an address. |
265 | |
266 | // Dummy variables to provide the addresses for the "pseudo field handle" statics below. |
267 | static int FirstElemPseudoFieldStruct; |
268 | static int ConstantIndexPseudoFieldStruct; |
269 | |
270 | public: |
271 | FieldSeqStore(CompAllocator alloc); |
272 | |
273 | // Returns the (canonical in the store) singleton field sequence for the given handle. |
274 | FieldSeqNode* CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd); |
275 | |
276 | // This is a special distinguished FieldSeqNode indicating that a constant does *not* |
277 | // represent a valid field sequence. This is "infectious", in the sense that appending it |
278 | // (on either side) to any field sequence yields the "NotAField()" sequence. |
279 | static FieldSeqNode* NotAField() |
280 | { |
281 | return &s_notAField; |
282 | } |
283 | |
284 | // Returns the (canonical in the store) field sequence representing the concatenation of |
285 | // the sequences represented by "a" and "b". Assumes that "a" and "b" are canonical; that is, |
286 | // they are the results of CreateSingleton, NotAField, or Append calls. If either of the arguments |
287 | // are the "NotAField" value, so is the result. |
288 | FieldSeqNode* Append(FieldSeqNode* a, FieldSeqNode* b); |
289 | |
290 | // We have a few "pseudo" field handles: |
291 | |
292 | // This treats the constant offset of the first element of something as if it were a field. |
293 | // Works for method table offsets of boxed structs, or first elem offset of arrays/strings. |
294 | static CORINFO_FIELD_HANDLE FirstElemPseudoField; |
295 | |
// If there is a constant index, we make a pseudo field to correspond to the constant added to
297 | // offset of the indexed field. This keeps the field sequence structure "normalized", especially in the |
298 | // case where the element type is a struct, so we might add a further struct field offset. |
299 | static CORINFO_FIELD_HANDLE ConstantIndexPseudoField; |
300 | |
301 | static bool IsPseudoField(CORINFO_FIELD_HANDLE hnd) |
302 | { |
303 | return hnd == FirstElemPseudoField || hnd == ConstantIndexPseudoField; |
304 | } |
305 | }; |
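
// Illustrative sketch (hypothetical handles 'bHnd' and 'cHnd'): building the
// canonical sequence for a nested struct access "obj.b.c":
//
//     FieldSeqNode* b  = store->CreateSingleton(bHnd);
//     FieldSeqNode* bc = store->Append(b, store->CreateSingleton(cHnd));
//
// Appending FieldSeqStore::NotAField() on either side of any sequence yields
// NotAField() again, per the "infectious" behavior described above.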
306 | |
307 | class GenTreeUseEdgeIterator; |
308 | class GenTreeOperandIterator; |
309 | |
310 | /*****************************************************************************/ |
311 | |
312 | // Forward declarations of the subtypes |
313 | #define GTSTRUCT_0(fn, en) struct GenTree##fn; |
314 | #define GTSTRUCT_1(fn, en) struct GenTree##fn; |
315 | #define GTSTRUCT_2(fn, en, en2) struct GenTree##fn; |
316 | #define GTSTRUCT_3(fn, en, en2, en3) struct GenTree##fn; |
317 | #define GTSTRUCT_4(fn, en, en2, en3, en4) struct GenTree##fn; |
318 | #define GTSTRUCT_N(fn, ...) struct GenTree##fn; |
319 | #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2) |
320 | #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3) |
321 | #include "gtstructs.h" |
322 | |
323 | /*****************************************************************************/ |
324 | |
325 | #ifndef _HOST_64BIT_ |
326 | #include <pshpack4.h> |
327 | #endif |
328 | |
329 | struct GenTree |
330 | { |
// We use GTSTRUCT_0 only for the category of simple ops.
332 | #define GTSTRUCT_0(fn, en) \ |
333 | GenTree##fn* As##fn() \ |
334 | { \ |
335 | assert(OperIsSimple()); \ |
336 | return reinterpret_cast<GenTree##fn*>(this); \ |
337 | } \ |
338 | const GenTree##fn* As##fn() const \ |
339 | { \ |
340 | assert(OperIsSimple()); \ |
341 | return reinterpret_cast<const GenTree##fn*>(this); \ |
342 | } \ |
343 | GenTree##fn& As##fn##Ref() \ |
344 | { \ |
345 | return *As##fn(); \ |
346 | } \ |
347 | __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn; |
348 | |
349 | #define GTSTRUCT_N(fn, ...) \ |
350 | GenTree##fn* As##fn() \ |
351 | { \ |
352 | assert(OperIs(__VA_ARGS__)); \ |
353 | return reinterpret_cast<GenTree##fn*>(this); \ |
354 | } \ |
355 | const GenTree##fn* As##fn() const \ |
356 | { \ |
357 | assert(OperIs(__VA_ARGS__)); \ |
358 | return reinterpret_cast<const GenTree##fn*>(this); \ |
359 | } \ |
360 | GenTree##fn& As##fn##Ref() \ |
361 | { \ |
362 | return *As##fn(); \ |
363 | } \ |
364 | __declspec(property(get = As##fn##Ref)) GenTree##fn& gt##fn; |
365 | |
366 | #define GTSTRUCT_1(fn, en) GTSTRUCT_N(fn, en) |
367 | #define GTSTRUCT_2(fn, en, en2) GTSTRUCT_N(fn, en, en2) |
368 | #define GTSTRUCT_3(fn, en, en2, en3) GTSTRUCT_N(fn, en, en2, en3) |
369 | #define GTSTRUCT_4(fn, en, en2, en3, en4) GTSTRUCT_N(fn, en, en2, en3, en4) |
370 | #define GTSTRUCT_2_SPECIAL(fn, en, en2) GTSTRUCT_2(fn, en, en2) |
371 | #define GTSTRUCT_3_SPECIAL(fn, en, en2, en3) GTSTRUCT_3(fn, en, en2, en3) |
372 | |
373 | #include "gtstructs.h" |
374 | |
375 | genTreeOps gtOper; // enum subtype BYTE |
376 | var_types gtType; // enum subtype BYTE |
377 | |
378 | genTreeOps OperGet() const |
379 | { |
380 | return gtOper; |
381 | } |
382 | var_types TypeGet() const |
383 | { |
384 | return gtType; |
385 | } |
386 | |
387 | #ifdef DEBUG |
388 | genTreeOps gtOperSave; // Only used to save gtOper when we destroy a node, to aid debugging. |
389 | #endif |
390 | |
391 | #if FEATURE_ANYCSE |
392 | |
393 | #define NO_CSE (0) |
394 | |
395 | #define IS_CSE_INDEX(x) (x != 0) |
396 | #define IS_CSE_USE(x) (x > 0) |
397 | #define IS_CSE_DEF(x) (x < 0) |
398 | #define GET_CSE_INDEX(x) ((x > 0) ? x : -x) |
399 | #define TO_CSE_DEF(x) (-x) |
400 | |
401 | signed char gtCSEnum; // 0 or the CSE index (negated if def) |
402 | // valid only for CSE expressions |
403 | |
404 | #endif // FEATURE_ANYCSE |
405 | |
406 | unsigned char gtLIRFlags; // Used for nodes that are in LIR. See LIR::Flags in lir.h for the various flags. |
407 | |
408 | #if ASSERTION_PROP |
409 | AssertionInfo gtAssertionInfo; // valid only for non-GT_STMT nodes |
410 | |
411 | bool GeneratesAssertion() const |
412 | { |
413 | return gtAssertionInfo.HasAssertion(); |
414 | } |
415 | |
416 | void ClearAssertion() |
417 | { |
418 | gtAssertionInfo.Clear(); |
419 | } |
420 | |
421 | AssertionInfo GetAssertionInfo() const |
422 | { |
423 | return gtAssertionInfo; |
424 | } |
425 | |
426 | void SetAssertionInfo(AssertionInfo info) |
427 | { |
428 | gtAssertionInfo = info; |
429 | } |
430 | #endif |
431 | |
432 | // |
433 | // Cost metrics on the node. Don't allow direct access to the variable for setting. |
434 | // |
435 | |
436 | public: |
437 | #ifdef DEBUG |
438 | // You are not allowed to read the cost values before they have been set in gtSetEvalOrder(). |
439 | // Keep track of whether the costs have been initialized, and assert if they are read before being initialized. |
440 | // Obviously, this information does need to be initialized when a node is created. |
441 | // This is public so the dumpers can see it. |
442 | |
443 | bool gtCostsInitialized; |
444 | #endif // DEBUG |
445 | |
446 | #define MAX_COST UCHAR_MAX |
447 | #define IND_COST_EX 3 // execution cost for an indirection |
448 | |
449 | __declspec(property(get = GetCostEx)) unsigned char gtCostEx; // estimate of expression execution cost |
450 | |
451 | __declspec(property(get = GetCostSz)) unsigned char gtCostSz; // estimate of expression code size cost |
452 | |
453 | unsigned char GetCostEx() const |
454 | { |
455 | assert(gtCostsInitialized); |
456 | return _gtCostEx; |
457 | } |
458 | unsigned char GetCostSz() const |
459 | { |
460 | assert(gtCostsInitialized); |
461 | return _gtCostSz; |
462 | } |
463 | |
464 | // Set the costs. They are always both set at the same time. |
465 | // Don't use the "put" property: force calling this function, to make it more obvious in the few places |
466 | // that set the values. |
467 | // Note that costs are only set in gtSetEvalOrder() and its callees. |
468 | void SetCosts(unsigned costEx, unsigned costSz) |
469 | { |
470 | assert(costEx != (unsigned)-1); // looks bogus |
471 | assert(costSz != (unsigned)-1); // looks bogus |
472 | INDEBUG(gtCostsInitialized = true;) |
473 | |
474 | _gtCostEx = (costEx > MAX_COST) ? MAX_COST : (unsigned char)costEx; |
475 | _gtCostSz = (costSz > MAX_COST) ? MAX_COST : (unsigned char)costSz; |
476 | } |
477 | |
// Optimized copy function, to avoid the SetCosts() function comparisons, and to make it clearer that a node copy is
// happening.
480 | void CopyCosts(const GenTree* const tree) |
481 | { |
482 | // If the 'tree' costs aren't initialized, we'll hit an assert below. |
483 | INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;) |
484 | _gtCostEx = tree->gtCostEx; |
485 | _gtCostSz = tree->gtCostSz; |
486 | } |
487 | |
488 | // Same as CopyCosts, but avoids asserts if the costs we are copying have not been initialized. |
489 | // This is because the importer, for example, clones nodes, before these costs have been initialized. |
490 | // Note that we directly access the 'tree' costs, not going through the accessor functions (either |
491 | // directly or through the properties). |
492 | void CopyRawCosts(const GenTree* const tree) |
493 | { |
494 | INDEBUG(gtCostsInitialized = tree->gtCostsInitialized;) |
495 | _gtCostEx = tree->_gtCostEx; |
496 | _gtCostSz = tree->_gtCostSz; |
497 | } |
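
// Illustrative contrast between the two copy functions (hypothetical nodes):
//
//     clone->CopyRawCosts(original); // OK before gtSetEvalOrder(), e.g. for importer clones
//     copy->CopyCosts(original);     // asserts that 'original' costs are initialized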
498 | |
499 | private: |
500 | unsigned char _gtCostEx; // estimate of expression execution cost |
501 | unsigned char _gtCostSz; // estimate of expression code size cost |
502 | |
503 | // |
504 | // Register or register pair number of the node. |
505 | // |
506 | CLANG_FORMAT_COMMENT_ANCHOR; |
507 | |
508 | #ifdef DEBUG |
509 | |
510 | public: |
511 | enum genRegTag |
512 | { |
513 | GT_REGTAG_NONE, // Nothing has been assigned to _gtRegNum |
514 | GT_REGTAG_REG // _gtRegNum has been assigned |
515 | }; |
516 | genRegTag GetRegTag() const |
517 | { |
518 | assert(gtRegTag == GT_REGTAG_NONE || gtRegTag == GT_REGTAG_REG); |
519 | return gtRegTag; |
520 | } |
521 | |
522 | private: |
523 | genRegTag gtRegTag; // What is in _gtRegNum? |
524 | |
525 | #endif // DEBUG |
526 | |
527 | private: |
528 | // This stores the register assigned to the node. If a register is not assigned, _gtRegNum is set to REG_NA. |
529 | regNumberSmall _gtRegNum; |
530 | |
531 | public: |
532 | // The register number is stored in a small format (8 bits), but the getters return and the setters take |
533 | // a full-size (unsigned) format, to localize the casts here. |
534 | |
535 | __declspec(property(get = GetRegNum, put = SetRegNum)) regNumber gtRegNum; |
536 | |
537 | bool canBeContained() const; |
538 | |
539 | // for codegen purposes, is this node a subnode of its parent |
540 | bool isContained() const; |
541 | |
542 | bool isContainedIndir() const; |
543 | |
544 | bool isIndirAddrMode(); |
545 | |
546 | bool isIndir() const; |
547 | |
548 | bool isContainedIntOrIImmed() const |
549 | { |
550 | return isContained() && IsCnsIntOrI() && !isUsedFromSpillTemp(); |
551 | } |
552 | |
553 | bool isContainedFltOrDblImmed() const |
554 | { |
555 | return isContained() && (OperGet() == GT_CNS_DBL); |
556 | } |
557 | |
558 | bool isLclField() const |
559 | { |
560 | return OperGet() == GT_LCL_FLD || OperGet() == GT_STORE_LCL_FLD; |
561 | } |
562 | |
563 | bool isUsedFromSpillTemp() const; |
564 | |
565 | // Indicates whether it is a memory op. |
566 | // Right now it includes Indir and LclField ops. |
567 | bool isMemoryOp() const |
568 | { |
569 | return isIndir() || isLclField(); |
570 | } |
571 | |
572 | bool isUsedFromMemory() const |
573 | { |
574 | return ((isContained() && (isMemoryOp() || (OperGet() == GT_LCL_VAR) || (OperGet() == GT_CNS_DBL))) || |
575 | isUsedFromSpillTemp()); |
576 | } |
577 | |
578 | bool isLclVarUsedFromMemory() const |
579 | { |
580 | return (OperGet() == GT_LCL_VAR) && (isContained() || isUsedFromSpillTemp()); |
581 | } |
582 | |
583 | bool isLclFldUsedFromMemory() const |
584 | { |
585 | return isLclField() && (isContained() || isUsedFromSpillTemp()); |
586 | } |
587 | |
588 | bool isUsedFromReg() const |
589 | { |
590 | return !isContained() && !isUsedFromSpillTemp(); |
591 | } |
592 | |
593 | regNumber GetRegNum() const |
594 | { |
595 | assert((gtRegTag == GT_REGTAG_REG) || (gtRegTag == GT_REGTAG_NONE)); // TODO-Cleanup: get rid of the NONE case, |
596 | // and fix everyplace that reads undefined |
597 | // values |
598 | regNumber reg = (regNumber)_gtRegNum; |
599 | assert((gtRegTag == GT_REGTAG_NONE) || // TODO-Cleanup: get rid of the NONE case, and fix everyplace that reads |
600 | // undefined values |
601 | (reg >= REG_FIRST && reg <= REG_COUNT)); |
602 | return reg; |
603 | } |
604 | |
605 | void SetRegNum(regNumber reg) |
606 | { |
607 | assert(reg >= REG_FIRST && reg <= REG_COUNT); |
608 | _gtRegNum = (regNumberSmall)reg; |
609 | INDEBUG(gtRegTag = GT_REGTAG_REG;) |
610 | assert(_gtRegNum == reg); |
611 | } |
612 | |
613 | // Copy the _gtRegNum/gtRegTag fields |
614 | void CopyReg(GenTree* from); |
615 | bool gtHasReg() const; |
616 | |
617 | int GetRegisterDstCount() const; |
618 | |
619 | regMaskTP gtGetRegMask() const; |
620 | |
621 | unsigned gtFlags; // see GTF_xxxx below |
622 | |
623 | #if defined(DEBUG) |
624 | unsigned gtDebugFlags; // see GTF_DEBUG_xxx below |
625 | #endif // defined(DEBUG) |
626 | |
627 | ValueNumPair gtVNPair; |
628 | |
629 | regMaskSmall gtRsvdRegs; // set of fixed trashed registers |
630 | |
631 | unsigned AvailableTempRegCount(regMaskTP mask = (regMaskTP)-1) const; |
632 | regNumber GetSingleTempReg(regMaskTP mask = (regMaskTP)-1); |
regNumber ExtractTempReg(regMaskTP mask = (regMaskTP)-1);
634 | |
635 | void SetVNsFromNode(GenTree* tree) |
636 | { |
637 | gtVNPair = tree->gtVNPair; |
638 | } |
639 | |
640 | ValueNum GetVN(ValueNumKind vnk) const |
641 | { |
642 | if (vnk == VNK_Liberal) |
643 | { |
644 | return gtVNPair.GetLiberal(); |
645 | } |
646 | else |
647 | { |
648 | assert(vnk == VNK_Conservative); |
649 | return gtVNPair.GetConservative(); |
650 | } |
651 | } |
652 | void SetVN(ValueNumKind vnk, ValueNum vn) |
653 | { |
654 | if (vnk == VNK_Liberal) |
655 | { |
656 | return gtVNPair.SetLiberal(vn); |
657 | } |
658 | else |
659 | { |
660 | assert(vnk == VNK_Conservative); |
661 | return gtVNPair.SetConservative(vn); |
662 | } |
663 | } |
664 | void SetVNs(ValueNumPair vnp) |
665 | { |
666 | gtVNPair = vnp; |
667 | } |
668 | void ClearVN() |
669 | { |
670 | gtVNPair = ValueNumPair(); // Initializes both elements to "NoVN". |
671 | } |
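
// Illustrative usage (hypothetical 'src' and 'dst' nodes): propagating value
// numbers from one node to another:
//
//     dst->SetVN(VNK_Conservative, src->GetVN(VNK_Conservative)); // one side only
//     dst->SetVNs(src->gtVNPair);                                 // both sides at once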
672 | |
673 | // clang-format off |
674 | |
675 | //--------------------------------------------------------------------- |
676 | // |
677 | // GenTree flags stored in gtFlags. |
678 | // |
679 | //--------------------------------------------------------------------- |
680 | |
681 | //--------------------------------------------------------------------- |
682 | // The first set of flags can be used with a large set of nodes, and |
683 | // thus they must all have distinct values. That is, one can test any |
684 | // expression node for one of these flags. |
685 | //--------------------------------------------------------------------- |
686 | |
687 | #define GTF_ASG 0x00000001 // sub-expression contains an assignment |
688 | #define GTF_CALL 0x00000002 // sub-expression contains a func. call |
689 | #define GTF_EXCEPT 0x00000004 // sub-expression might throw an exception |
690 | #define GTF_GLOB_REF 0x00000008 // sub-expression uses global variable(s) |
691 | #define GTF_ORDER_SIDEEFF 0x00000010 // sub-expression has a re-ordering side effect |
692 | |
693 | // If you set these flags, make sure that code:gtExtractSideEffList knows how to find the tree, |
694 | // otherwise the C# (run csc /o-) code: |
695 | // var v = side_eff_operation |
696 | // with no use of v will drop your tree on the floor. |
697 | #define GTF_PERSISTENT_SIDE_EFFECTS (GTF_ASG | GTF_CALL) |
698 | #define GTF_SIDE_EFFECT (GTF_PERSISTENT_SIDE_EFFECTS | GTF_EXCEPT) |
699 | #define GTF_GLOB_EFFECT (GTF_SIDE_EFFECT | GTF_GLOB_REF) |
700 | #define GTF_ALL_EFFECT (GTF_GLOB_EFFECT | GTF_ORDER_SIDEEFF) |
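
// Illustrative sketch: a transformation that wants to discard an unused
// subtree can test these composite masks directly:
//
//     if ((tree->gtFlags & GTF_SIDE_EFFECT) == 0)
//     {
//         // no assignment, call, or possible exception -- safe to drop the tree
//     }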
701 | |
702 | // The extra flag GTF_IS_IN_CSE is used to tell the consumer of these flags |
703 | // that we are calling in the context of performing a CSE, thus we |
704 | // should allow the run-once side effects of running a class constructor. |
705 | // |
706 | // The only requirement of this flag is that it not overlap any of the |
707 | // side-effect flags. The actual bit used is otherwise arbitrary. |
708 | #define GTF_IS_IN_CSE GTF_BOOLEAN |
709 | |
710 | // Can any side-effects be observed externally, say by a caller method? |
711 | // For assignments, only assignments to global memory can be observed |
// externally, whereas simple assignments to local variables cannot.
713 | // |
714 | // Be careful when using this inside a "try" protected region as the |
715 | // order of assignments to local variables would need to be preserved |
716 | // wrt side effects if the variables are alive on entry to the |
717 | // "catch/finally" region. In such cases, even assignments to locals |
718 | // will have to be restricted. |
719 | #define GTF_GLOBALLY_VISIBLE_SIDE_EFFECTS(flags) \ |
720 | (((flags) & (GTF_CALL | GTF_EXCEPT)) || (((flags) & (GTF_ASG | GTF_GLOB_REF)) == (GTF_ASG | GTF_GLOB_REF))) |
721 | |
722 | #define GTF_REVERSE_OPS 0x00000020 // operand op2 should be evaluated before op1 (normally, op1 is evaluated first and op2 is evaluated second) |
723 | #define GTF_CONTAINED 0x00000040 // This node is contained (executed as part of its parent) |
724 | #define GTF_SPILLED 0x00000080 // the value has been spilled |
725 | |
726 | #define GTF_NOREG_AT_USE 0x00000100 // tree node is in memory at the point of use |
727 | |
728 | #define GTF_SET_FLAGS 0x00000800 // Requires that codegen for this node set the flags. Use gtSetFlags() to check this flag. |
729 | #define GTF_USE_FLAGS 0x00001000 // Indicates that this node uses the flags bits. |
730 | |
731 | #define GTF_MAKE_CSE 0x00002000 // Hoisted expression: try hard to make this into CSE (see optPerformHoistExpr) |
732 | #define GTF_DONT_CSE 0x00004000 // Don't bother CSE'ing this expr |
733 | #define GTF_COLON_COND 0x00008000 // This node is conditionally executed (part of ? :) |
734 | |
735 | #define GTF_NODE_MASK (GTF_COLON_COND) |
736 | |
737 | #define GTF_BOOLEAN 0x00040000 // value is known to be 0/1 |
738 | |
739 | #define GTF_UNSIGNED 0x00100000 // With GT_CAST: the source operand is an unsigned type |
740 | // With operators: the specified node is an unsigned operator |
741 | #define GTF_LATE_ARG 0x00200000 // The specified node is evaluated to a temp in the arg list, and this temp is added to gtCallLateArgs. |
742 | #define GTF_SPILL 0x00400000 // Needs to be spilled here |
743 | |
744 | #define GTF_COMMON_MASK 0x007FFFFF // mask of all the flags above |
745 | |
746 | #define GTF_REUSE_REG_VAL 0x00800000 // This is set by the register allocator on nodes whose value already exists in the |
747 | // register assigned to this node, so the code generator does not have to generate |
748 | // code to produce the value. It is currently used only on constant nodes. |
749 | // It CANNOT be set on var (GT_LCL*) nodes, or on indir (GT_IND or GT_STOREIND) nodes, since |
750 | // it is not needed for lclVars and is highly unlikely to be useful for indir nodes. |
751 | |
752 | //--------------------------------------------------------------------- |
753 | // The following flags can be used only with a small set of nodes, and |
754 | // thus their values need not be distinct (other than within the set |
755 | // that goes with a particular node/nodes, of course). That is, one can |
756 | // only test for one of these flags if the 'gtOper' value is tested as |
757 | // well to make sure it's the right operator for the particular flag. |
758 | //--------------------------------------------------------------------- |
759 | |
760 | // NB: GTF_VAR_* and GTF_REG_* share the same namespace of flags. |
761 | // These flags are also used by GT_LCL_FLD. |
762 | #define GTF_VAR_DEF 0x80000000 // GT_LCL_VAR -- this is a definition |
763 | #define GTF_VAR_USEASG 0x40000000 // GT_LCL_VAR -- this is a partial definition, a use of the previous definition is implied |
764 | // A partial definition usually occurs when a struct field is assigned to (s.f = ...) or |
765 | // when a scalar typed variable is assigned to via a narrow store (*((byte*)&i) = ...). |
#define GTF_VAR_CAST 0x10000000 // GT_LCL_VAR -- has been explicitly cast (variable node may not be type of local)
#define GTF_VAR_ITERATOR 0x08000000 // GT_LCL_VAR -- this is an iterator reference in the loop condition
768 | #define GTF_VAR_CLONED 0x01000000 // GT_LCL_VAR -- this node has been cloned or is a clone |
769 | // Relevant for inlining optimizations (see fgInlinePrependStatements) |
770 | |
771 | // TODO-Cleanup: Currently, GTF_REG_BIRTH is used only by stackfp |
772 | // We should consider using it more generally for VAR_BIRTH, instead of |
773 | // GTF_VAR_DEF && !GTF_VAR_USEASG |
774 | #define GTF_REG_BIRTH 0x04000000 // GT_LCL_VAR, -- enregistered variable born here |
775 | #define GTF_VAR_DEATH 0x02000000 // GT_LCL_VAR, -- variable dies here (last use) |
776 | |
777 | #define GTF_VAR_ARR_INDEX 0x00000020 // The variable is part of (the index portion of) an array index expression. |
778 | // Shares a value with GTF_REVERSE_OPS, which is meaningless for local var. |
779 | |
780 | #define GTF_LIVENESS_MASK (GTF_VAR_DEF | GTF_VAR_USEASG | GTF_REG_BIRTH | GTF_VAR_DEATH) |
781 | |
782 | // For additional flags for GT_CALL node see GTF_CALL_M_* |
783 | |
784 | #define GTF_CALL_UNMANAGED 0x80000000 // GT_CALL -- direct call to unmanaged code |
785 | #define GTF_CALL_INLINE_CANDIDATE 0x40000000 // GT_CALL -- this call has been marked as an inline candidate |
786 | |
787 | #define GTF_CALL_VIRT_KIND_MASK 0x30000000 // GT_CALL -- mask of the below call kinds |
788 | #define GTF_CALL_NONVIRT 0x00000000 // GT_CALL -- a non virtual call |
789 | #define GTF_CALL_VIRT_STUB 0x10000000 // GT_CALL -- a stub-dispatch virtual call |
790 | #define GTF_CALL_VIRT_VTABLE 0x20000000 // GT_CALL -- a vtable-based virtual call |
791 | |
792 | #define GTF_CALL_NULLCHECK 0x08000000 // GT_CALL -- must check instance pointer for null |
793 | #define GTF_CALL_POP_ARGS 0x04000000 // GT_CALL -- caller pop arguments? |
794 | #define GTF_CALL_HOISTABLE 0x02000000 // GT_CALL -- call is hoistable |
795 | |
796 | #define GTF_NOP_DEATH 0x40000000 // GT_NOP -- operand dies here |
797 | |
798 | #define GTF_FLD_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE |
799 | #define GTF_FLD_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- field access requires preceding class/static init helper |
800 | |
801 | #define GTF_INX_RNGCHK 0x80000000 // GT_INDEX/GT_INDEX_ADDR -- the array reference should be range-checked. |
802 | #define GTF_INX_REFARR_LAYOUT 0x20000000 // GT_INDEX |
803 | #define GTF_INX_STRING_LAYOUT 0x40000000 // GT_INDEX -- this uses the special string array layout |
804 | |
#define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile semantics (this is a nop on X86)
806 | #define GTF_IND_NONFAULTING 0x20000000 // Operations for which OperIsIndir() is true -- An indir that cannot fault. |
807 | // Same as GTF_ARRLEN_NONFAULTING. |
808 | #define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere |
809 | #define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS |
810 | #define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an |
811 | // assignment; don't evaluate it independently. |
812 | #define GTF_IND_REQ_ADDR_IN_REG GTF_IND_ASG_LHS // GT_IND -- requires its addr operand to be evaluated |
813 | // into a register. This flag is useful in cases where it |
814 | // is required to generate register indirect addressing mode. |
815 | // One such case is virtual stub calls on xarch. This is only |
816 | // valid in the backend, where GTF_IND_ASG_LHS is not necessary |
817 | // (all such indirections will be lowered to GT_STOREIND). |
818 | #define GTF_IND_UNALIGNED 0x02000000 // GT_IND -- the load or store is unaligned (we assume worst case |
819 | // alignment of 1 byte) |
820 | #define GTF_IND_INVARIANT 0x01000000 // GT_IND -- the target is invariant (a prejit indirection) |
821 | #define GTF_IND_ARR_INDEX 0x00800000 // GT_IND -- the indirection represents an (SZ) array index |
822 | |
823 | #define GTF_IND_FLAGS \ |
824 | (GTF_IND_VOLATILE | GTF_IND_TGTANYWHERE | GTF_IND_NONFAULTING | GTF_IND_TLS_REF | \ |
825 | GTF_IND_UNALIGNED | GTF_IND_INVARIANT | GTF_IND_ARR_INDEX) |
826 | |
827 | #define GTF_CLS_VAR_VOLATILE 0x40000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_IND_VOLATILE |
828 | #define GTF_CLS_VAR_INITCLASS 0x20000000 // GT_FIELD/GT_CLS_VAR -- same as GTF_FLD_INITCLASS |
829 | #define GTF_CLS_VAR_ASG_LHS 0x04000000 // GT_CLS_VAR -- this GT_CLS_VAR node is (the effective val) of the LHS |
830 | // of an assignment; don't evaluate it independently. |
831 | |
832 | #define GTF_ADDR_ONSTACK 0x80000000 // GT_ADDR -- this expression is guaranteed to be on the stack |
833 | |
834 | #define GTF_ADDRMODE_NO_CSE 0x80000000 // GT_ADD/GT_MUL/GT_LSH -- Do not CSE this node only, forms complex |
835 | // addressing mode |
836 | |
837 | #define GTF_MUL_64RSLT 0x40000000 // GT_MUL -- produce 64-bit result |
838 | |
839 | #define GTF_RELOP_NAN_UN 0x80000000 // GT_<relop> -- Is branch taken if ops are NaN? |
840 | #define GTF_RELOP_JMP_USED 0x40000000 // GT_<relop> -- result of compare used for jump or ?: |
841 | #define GTF_RELOP_QMARK 0x20000000 // GT_<relop> -- the node is the condition for ?: |
842 | #define GTF_RELOP_ZTT 0x08000000 // GT_<relop> -- Loop test cloned for converting while-loops into do-while |
843 | // with explicit "loop test" in the header block. |
844 | |
845 | #define GTF_JCMP_EQ 0x80000000 // GTF_JCMP_EQ -- Branch on equal rather than not equal |
846 | #define GTF_JCMP_TST 0x40000000 // GTF_JCMP_TST -- Use bit test instruction rather than compare against zero instruction |
847 | |
848 | #define GTF_RET_MERGED 0x80000000 // GT_RETURN -- This is a return generated during epilog merging. |
849 | |
850 | #define GTF_QMARK_CAST_INSTOF 0x80000000 // GT_QMARK -- Is this a top (not nested) level qmark created for |
851 | // castclass or instanceof? |
852 | |
853 | #define GTF_BOX_VALUE 0x80000000 // GT_BOX -- "box" is on a value type |
854 | |
855 | #define GTF_ICON_HDL_MASK 0xF0000000 // Bits used by handle types below |
856 | #define GTF_ICON_SCOPE_HDL 0x10000000 // GT_CNS_INT -- constant is a scope handle |
857 | #define GTF_ICON_CLASS_HDL 0x20000000 // GT_CNS_INT -- constant is a class handle |
858 | #define GTF_ICON_METHOD_HDL 0x30000000 // GT_CNS_INT -- constant is a method handle |
859 | #define GTF_ICON_FIELD_HDL 0x40000000 // GT_CNS_INT -- constant is a field handle |
860 | #define GTF_ICON_STATIC_HDL 0x50000000 // GT_CNS_INT -- constant is a handle to static data |
861 | #define GTF_ICON_STR_HDL 0x60000000 // GT_CNS_INT -- constant is a string handle |
862 | #define GTF_ICON_PSTR_HDL 0x70000000 // GT_CNS_INT -- constant is a ptr to a string handle |
863 | #define GTF_ICON_PTR_HDL 0x80000000 // GT_CNS_INT -- constant is a ldptr handle |
864 | #define GTF_ICON_VARG_HDL 0x90000000 // GT_CNS_INT -- constant is a var arg cookie handle |
865 | #define GTF_ICON_PINVKI_HDL 0xA0000000 // GT_CNS_INT -- constant is a pinvoke calli handle |
866 | #define GTF_ICON_TOKEN_HDL 0xB0000000 // GT_CNS_INT -- constant is a token handle |
867 | #define GTF_ICON_TLS_HDL 0xC0000000 // GT_CNS_INT -- constant is a TLS ref with offset |
868 | #define GTF_ICON_FTN_ADDR 0xD0000000 // GT_CNS_INT -- constant is a function address |
869 | #define GTF_ICON_CIDMID_HDL 0xE0000000 // GT_CNS_INT -- constant is a class ID or a module ID |
870 | #define GTF_ICON_BBC_PTR 0xF0000000 // GT_CNS_INT -- constant is a basic block count pointer |
871 | |
872 | #define GTF_ICON_FIELD_OFF 0x08000000 // GT_CNS_INT -- constant is a field offset |
873 | #define GTF_ICON_SIMD_COUNT 0x04000000 // GT_CNS_INT -- constant is Vector<T>.Count |
874 | |
875 | #define GTF_ICON_INITCLASS 0x02000000 // GT_CNS_INT -- Constant is used to access a static that requires preceding |
876 | // class/static init helper. In some cases, the constant is |
877 | // the address of the static field itself, and in other cases |
878 | // there's an extra layer of indirection and it is the address |
879 | // of the cell that the runtime will fill in with the address |
880 | // of the static field; in both of those cases, the constant |
881 | // is what gets flagged. |
882 | |
883 | #define GTF_BLK_VOLATILE GTF_IND_VOLATILE // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is a volatile block operation |
884 | #define GTF_BLK_UNALIGNED GTF_IND_UNALIGNED // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an unaligned block operation |
885 | |
886 | #define GTF_OVERFLOW 0x10000000 // Supported for: GT_ADD, GT_SUB, GT_MUL and GT_CAST. |
887 | // Requires an overflow check. Use gtOverflow(Ex)() to check this flag. |
888 | |
889 | #define GTF_ARR_BOUND_INBND 0x80000000 // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds |
890 | |
891 | #define GTF_ARRLEN_ARR_IDX 0x80000000 // GT_ARR_LENGTH -- Length which feeds into an array index expression |
892 | #define GTF_ARRLEN_NONFAULTING 0x20000000 // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GT_IND_NONFAULTING. |
893 | |
894 | #define GTF_FIELD_LIST_HEAD 0x80000000 // GT_FIELD_LIST -- Indicates that this is the first field in a list of |
895 | // struct fields constituting a single call argument. |
896 | |
897 | #define GTF_SIMD12_OP 0x80000000 // GT_SIMD -- Indicates that the operands need to be handled as SIMD12 |
898 | // even if they have been retyped as SIMD16. |
899 | |
900 | #define GTF_STMT_CMPADD 0x80000000 // GT_STMT -- added by compiler |
#define GTF_STMT_HAS_CSE 0x40000000 // GT_STMT -- CSE def or use was substituted
902 | |
903 | //--------------------------------------------------------------------- |
904 | // |
905 | // GenTree flags stored in gtDebugFlags. |
906 | // |
907 | //--------------------------------------------------------------------- |
908 | |
909 | #if defined(DEBUG) |
910 | #define GTF_DEBUG_NONE 0x00000000 // No debug flags. |
911 | |
912 | #define GTF_DEBUG_NODE_MORPHED 0x00000001 // the node has been morphed (in the global morphing phase) |
#define GTF_DEBUG_NODE_SMALL 0x00000002 // the node was allocated as a small node (see SMALL_TREE_NODES)
#define GTF_DEBUG_NODE_LARGE 0x00000004 // the node was allocated as a large node
915 | #define GTF_DEBUG_NODE_CG_PRODUCED 0x00000008 // genProduceReg has been called on this node |
916 | #define GTF_DEBUG_NODE_CG_CONSUMED 0x00000010 // genConsumeReg has been called on this node |
917 | #define GTF_DEBUG_NODE_LSRA_ADDED 0x00000020 // This node was added by LSRA |
918 | |
919 | #define GTF_DEBUG_NODE_MASK 0x0000003F // These flags are all node (rather than operation) properties. |
920 | |
921 | #define GTF_DEBUG_VAR_CSE_REF 0x00800000 // GT_LCL_VAR -- This is a CSE LCL_VAR node |
922 | #endif // defined(DEBUG) |
923 | |
924 | //--------------------------------------------------------------------- |
925 | // |
926 | // end of GenTree flags definitions |
927 | // |
928 | //--------------------------------------------------------------------- |
929 | |
930 | // clang-format on |
931 | |
932 | GenTree* gtNext; |
933 | GenTree* gtPrev; |
934 | |
935 | #ifdef DEBUG |
936 | unsigned gtTreeID; |
937 | unsigned gtSeqNum; // liveness traversal order within the current statement |
938 | |
939 | int gtUseNum; // use-ordered traversal within the function |
940 | #endif |
941 | |
942 | static const unsigned short gtOperKindTable[]; |
943 | |
944 | static unsigned OperKind(unsigned gtOper) |
945 | { |
946 | assert(gtOper < GT_COUNT); |
947 | |
948 | return gtOperKindTable[gtOper]; |
949 | } |
950 | |
951 | unsigned OperKind() const |
952 | { |
953 | assert(gtOper < GT_COUNT); |
954 | |
955 | return gtOperKindTable[gtOper]; |
956 | } |
957 | |
958 | static bool IsExOp(unsigned opKind) |
959 | { |
960 | return (opKind & GTK_EXOP) != 0; |
961 | } |
// Returns the operKind with the GTK_EXOP bit removed (the
// kind of operator, unary or binary, that is extended).
964 | static unsigned StripExOp(unsigned opKind) |
965 | { |
966 | return opKind & ~GTK_EXOP; |
967 | } |
968 | |
969 | bool IsValue() const |
970 | { |
971 | if ((OperKind(gtOper) & GTK_NOVALUE) != 0) |
972 | { |
973 | return false; |
974 | } |
975 | |
976 | if (gtType == TYP_VOID) |
977 | { |
978 | // These are the only operators which can produce either VOID or non-VOID results. |
979 | assert(OperIs(GT_NOP, GT_CALL, GT_FIELD_LIST, GT_COMMA) || OperIsCompare() || OperIsLong() || |
980 | OperIsSIMD() || OperIsHWIntrinsic()); |
981 | return false; |
982 | } |
983 | |
984 | if (gtOper == GT_FIELD_LIST) |
985 | { |
986 | return (gtFlags & GTF_FIELD_LIST_HEAD) != 0; |
987 | } |
988 | |
989 | return true; |
990 | } |
991 | |
992 | bool IsLIR() const |
993 | { |
994 | if ((OperKind(gtOper) & GTK_NOTLIR) != 0) |
995 | { |
996 | return false; |
997 | } |
998 | |
999 | switch (gtOper) |
1000 | { |
1001 | case GT_NOP: |
1002 | // NOPs may only be present in LIR if they do not produce a value. |
1003 | return IsNothingNode(); |
1004 | |
1005 | case GT_LIST: |
1006 | // LIST nodes may not be present in a block's LIR sequence, but they may |
1007 | // be present as children of an LIR node. |
1008 | return (gtNext == nullptr) && (gtPrev == nullptr); |
1009 | |
1010 | case GT_FIELD_LIST: |
1011 | // Only the head of the FIELD_LIST is present in the block's LIR sequence. |
1012 | return (((gtFlags & GTF_FIELD_LIST_HEAD) != 0) || ((gtNext == nullptr) && (gtPrev == nullptr))); |
1013 | |
1014 | case GT_ADDR: |
1015 | { |
// ADDR nodes may only be present in LIR if the location they refer to is not a
1017 | // local, class variable, or IND node. |
1018 | GenTree* location = gtGetOp1(); |
1019 | genTreeOps locationOp = location->OperGet(); |
1020 | return !location->IsLocal() && (locationOp != GT_CLS_VAR) && (locationOp != GT_IND); |
1021 | } |
1022 | |
1023 | default: |
1024 | // All other nodes are assumed to be correct. |
1025 | return true; |
1026 | } |
1027 | } |
1028 | |
1029 | // LIR flags |
1030 | // These helper methods, along with the flag values they manipulate, are defined in lir.h |
1031 | // |
1032 | // UnusedValue indicates that, although this node produces a value, it is unused. |
1033 | inline void SetUnusedValue(); |
1034 | inline void ClearUnusedValue(); |
1035 | inline bool IsUnusedValue() const; |
1036 | // RegOptional indicates that codegen can still generate code even if it isn't allocated a register. |
1037 | inline bool IsRegOptional() const; |
1038 | inline void SetRegOptional(); |
1039 | inline void ClearRegOptional(); |
1040 | #ifdef DEBUG |
1041 | void dumpLIRFlags(); |
1042 | #endif |
1043 | |
1044 | bool OperIs(genTreeOps oper) const |
1045 | { |
1046 | return OperGet() == oper; |
1047 | } |
1048 | |
1049 | template <typename... T> |
1050 | bool OperIs(genTreeOps oper, T... rest) const |
1051 | { |
1052 | return OperIs(oper) || OperIs(rest...); |
1053 | } |
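
// Illustrative usage: the variadic overload expands to a short-circuited
// chain of comparisons, so
//
//     node->OperIs(GT_ADD, GT_SUB, GT_MUL)
//
// tests OperGet() against each oper in turn, stopping at the first match.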
1054 | |
1055 | static bool OperIsConst(genTreeOps gtOper) |
1056 | { |
1057 | return (OperKind(gtOper) & GTK_CONST) != 0; |
1058 | } |
1059 | |
1060 | bool OperIsConst() const |
1061 | { |
1062 | return (OperKind(gtOper) & GTK_CONST) != 0; |
1063 | } |
1064 | |
1065 | static bool OperIsLeaf(genTreeOps gtOper) |
1066 | { |
1067 | return (OperKind(gtOper) & GTK_LEAF) != 0; |
1068 | } |
1069 | |
1070 | bool OperIsLeaf() const |
1071 | { |
1072 | return (OperKind(gtOper) & GTK_LEAF) != 0; |
1073 | } |
1074 | |
1075 | static bool OperIsCompare(genTreeOps gtOper) |
1076 | { |
1077 | return (OperKind(gtOper) & GTK_RELOP) != 0; |
1078 | } |
1079 | |
1080 | static bool OperIsLocal(genTreeOps gtOper) |
1081 | { |
1082 | bool result = (OperKind(gtOper) & GTK_LOCAL) != 0; |
1083 | assert(result == (gtOper == GT_LCL_VAR || gtOper == GT_PHI_ARG || gtOper == GT_LCL_FLD || |
1084 | gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD)); |
1085 | return result; |
1086 | } |
1087 | |
1088 | static bool OperIsLocalAddr(genTreeOps gtOper) |
1089 | { |
1090 | return (gtOper == GT_LCL_VAR_ADDR || gtOper == GT_LCL_FLD_ADDR); |
1091 | } |
1092 | |
1093 | static bool OperIsLocalField(genTreeOps gtOper) |
1094 | { |
1095 | return (gtOper == GT_LCL_FLD || gtOper == GT_LCL_FLD_ADDR || gtOper == GT_STORE_LCL_FLD); |
1096 | } |
1097 | |
1098 | inline bool OperIsLocalField() const |
1099 | { |
1100 | return OperIsLocalField(gtOper); |
1101 | } |
1102 | |
1103 | static bool OperIsScalarLocal(genTreeOps gtOper) |
1104 | { |
1105 | return (gtOper == GT_LCL_VAR || gtOper == GT_STORE_LCL_VAR); |
1106 | } |
1107 | |
1108 | static bool OperIsNonPhiLocal(genTreeOps gtOper) |
1109 | { |
1110 | return OperIsLocal(gtOper) && (gtOper != GT_PHI_ARG); |
1111 | } |
1112 | |
1113 | static bool OperIsLocalRead(genTreeOps gtOper) |
1114 | { |
1115 | return (OperIsLocal(gtOper) && !OperIsLocalStore(gtOper)); |
1116 | } |
1117 | |
1118 | static bool OperIsLocalStore(genTreeOps gtOper) |
1119 | { |
1120 | return (gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD); |
1121 | } |
1122 | |
1123 | static bool OperIsAddrMode(genTreeOps gtOper) |
1124 | { |
1125 | return (gtOper == GT_LEA); |
1126 | } |
1127 | |
1128 | static bool OperIsInitVal(genTreeOps gtOper) |
1129 | { |
1130 | return (gtOper == GT_INIT_VAL); |
1131 | } |
1132 | |
1133 | bool OperIsInitVal() const |
1134 | { |
1135 | return OperIsInitVal(OperGet()); |
1136 | } |
1137 | |
1138 | bool IsConstInitVal() |
1139 | { |
1140 | return (gtOper == GT_CNS_INT) || (OperIsInitVal() && (gtGetOp1()->gtOper == GT_CNS_INT)); |
1141 | } |
1142 | |
1143 | bool OperIsBlkOp(); |
1144 | bool OperIsCopyBlkOp(); |
1145 | bool OperIsInitBlkOp(); |
1146 | bool OperIsDynBlkOp(); |
1147 | |
1148 | static bool OperIsBlk(genTreeOps gtOper) |
1149 | { |
1150 | return ((gtOper == GT_BLK) || (gtOper == GT_OBJ) || (gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_BLK) || |
1151 | (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK)); |
1152 | } |
1153 | |
1154 | bool OperIsBlk() const |
1155 | { |
1156 | return OperIsBlk(OperGet()); |
1157 | } |
1158 | |
1159 | static bool OperIsDynBlk(genTreeOps gtOper) |
1160 | { |
1161 | return ((gtOper == GT_DYN_BLK) || (gtOper == GT_STORE_DYN_BLK)); |
1162 | } |
1163 | |
1164 | bool OperIsDynBlk() const |
1165 | { |
1166 | return OperIsDynBlk(OperGet()); |
1167 | } |
1168 | |
1169 | static bool OperIsStoreBlk(genTreeOps gtOper) |
1170 | { |
1171 | return ((gtOper == GT_STORE_BLK) || (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK)); |
1172 | } |
1173 | |
1174 | bool OperIsStoreBlk() const |
1175 | { |
1176 | return OperIsStoreBlk(OperGet()); |
1177 | } |
1178 | |
1179 | bool OperIsPutArgSplit() const |
1180 | { |
1181 | #if FEATURE_ARG_SPLIT |
1182 | return gtOper == GT_PUTARG_SPLIT; |
1183 | #else // !FEATURE_ARG_SPLIT |
1184 | return false; |
1185 | #endif |
1186 | } |
1187 | |
1188 | bool OperIsPutArgStk() const |
1189 | { |
1190 | return gtOper == GT_PUTARG_STK; |
1191 | } |
1192 | |
1193 | bool OperIsPutArgStkOrSplit() const |
1194 | { |
1195 | return OperIsPutArgStk() || OperIsPutArgSplit(); |
1196 | } |
1197 | |
1198 | bool OperIsPutArgReg() const |
1199 | { |
1200 | return gtOper == GT_PUTARG_REG; |
1201 | } |
1202 | |
1203 | bool OperIsPutArg() const |
1204 | { |
1205 | return OperIsPutArgStk() || OperIsPutArgReg() || OperIsPutArgSplit(); |
1206 | } |
1207 | |
1208 | bool OperIsMultiRegOp() const |
1209 | { |
1210 | #if !defined(_TARGET_64BIT_) |
1211 | if (OperIs(GT_MUL_LONG)) |
1212 | { |
1213 | return true; |
1214 | } |
1215 | #if defined(_TARGET_ARM_) |
1216 | if (OperIs(GT_PUTARG_REG, GT_BITCAST)) |
1217 | { |
1218 | return true; |
1219 | } |
1220 | #endif // _TARGET_ARM_ |
1221 | #endif // _TARGET_64BIT_ |
1222 | return false; |
1223 | } |
1224 | |
1225 | bool OperIsAddrMode() const |
1226 | { |
1227 | return OperIsAddrMode(OperGet()); |
1228 | } |
1229 | |
1230 | bool OperIsLocal() const |
1231 | { |
1232 | return OperIsLocal(OperGet()); |
1233 | } |
1234 | |
1235 | bool OperIsLocalAddr() const |
1236 | { |
1237 | return OperIsLocalAddr(OperGet()); |
1238 | } |
1239 | |
1240 | bool OperIsScalarLocal() const |
1241 | { |
1242 | return OperIsScalarLocal(OperGet()); |
1243 | } |
1244 | |
1245 | bool OperIsNonPhiLocal() const |
1246 | { |
1247 | return OperIsNonPhiLocal(OperGet()); |
1248 | } |
1249 | |
1250 | bool OperIsLocalStore() const |
1251 | { |
1252 | return OperIsLocalStore(OperGet()); |
1253 | } |
1254 | |
1255 | bool OperIsLocalRead() const |
1256 | { |
1257 | return OperIsLocalRead(OperGet()); |
1258 | } |
1259 | |
1260 | bool OperIsCompare() const |
1261 | { |
1262 | return (OperKind(gtOper) & GTK_RELOP) != 0; |
1263 | } |
1264 | |
1265 | static bool OperIsLogical(genTreeOps gtOper) |
1266 | { |
1267 | return (OperKind(gtOper) & GTK_LOGOP) != 0; |
1268 | } |
1269 | |
1270 | bool OperIsLogical() const |
1271 | { |
1272 | return (OperKind(gtOper) & GTK_LOGOP) != 0; |
1273 | } |
1274 | |
1275 | static bool OperIsShift(genTreeOps gtOper) |
1276 | { |
1277 | return (gtOper == GT_LSH) || (gtOper == GT_RSH) || (gtOper == GT_RSZ); |
1278 | } |
1279 | |
1280 | bool OperIsShift() const |
1281 | { |
1282 | return OperIsShift(OperGet()); |
1283 | } |
1284 | |
1285 | static bool OperIsShiftLong(genTreeOps gtOper) |
1286 | { |
1287 | #ifdef _TARGET_64BIT_ |
1288 | return false; |
1289 | #else |
1290 | return (gtOper == GT_LSH_HI) || (gtOper == GT_RSH_LO); |
1291 | #endif |
1292 | } |
1293 | |
1294 | bool OperIsShiftLong() const |
1295 | { |
1296 | return OperIsShiftLong(OperGet()); |
1297 | } |
1298 | |
1299 | static bool OperIsRotate(genTreeOps gtOper) |
1300 | { |
1301 | return (gtOper == GT_ROL) || (gtOper == GT_ROR); |
1302 | } |
1303 | |
1304 | bool OperIsRotate() const |
1305 | { |
1306 | return OperIsRotate(OperGet()); |
1307 | } |
1308 | |
1309 | static bool OperIsShiftOrRotate(genTreeOps gtOper) |
1310 | { |
1311 | return OperIsShift(gtOper) || OperIsRotate(gtOper) || OperIsShiftLong(gtOper); |
1312 | } |
1313 | |
1314 | bool OperIsShiftOrRotate() const |
1315 | { |
1316 | return OperIsShiftOrRotate(OperGet()); |
1317 | } |
1318 | |
1319 | static bool OperIsMul(genTreeOps gtOper) |
1320 | { |
1321 | return (gtOper == GT_MUL) || (gtOper == GT_MULHI) |
1322 | #if !defined(_TARGET_64BIT_) |
1323 | || (gtOper == GT_MUL_LONG) |
1324 | #endif |
1325 | ; |
1326 | } |
1327 | |
1328 | bool OperIsMul() const |
1329 | { |
1330 | return OperIsMul(gtOper); |
1331 | } |
1332 | |
1333 | bool OperIsArithmetic() const |
1334 | { |
1335 | genTreeOps op = OperGet(); |
1336 | return op == GT_ADD || op == GT_SUB || op == GT_MUL || op == GT_DIV || op == GT_MOD |
1337 | |
1338 | || op == GT_UDIV || op == GT_UMOD |
1339 | |
1340 | || op == GT_OR || op == GT_XOR || op == GT_AND |
1341 | |
1342 | || OperIsShiftOrRotate(op); |
1343 | } |
1344 | |
1345 | #ifdef _TARGET_XARCH_ |
1346 | static bool OperIsRMWMemOp(genTreeOps gtOper) |
1347 | { |
1348 | // Return if binary op is one of the supported operations for RMW of memory. |
1349 | return (gtOper == GT_ADD || gtOper == GT_SUB || gtOper == GT_AND || gtOper == GT_OR || gtOper == GT_XOR || |
1350 | gtOper == GT_NOT || gtOper == GT_NEG || OperIsShiftOrRotate(gtOper)); |
1351 | } |
1352 | bool OperIsRMWMemOp() const |
1353 | { |
1354 | // Return if binary op is one of the supported operations for RMW of memory. |
1355 | return OperIsRMWMemOp(gtOper); |
1356 | } |
1357 | #endif // _TARGET_XARCH_ |
1358 | |
1359 | static bool OperIsUnary(genTreeOps gtOper) |
1360 | { |
1361 | return (OperKind(gtOper) & GTK_UNOP) != 0; |
1362 | } |
1363 | |
1364 | bool OperIsUnary() const |
1365 | { |
1366 | return OperIsUnary(gtOper); |
1367 | } |
1368 | |
1369 | static bool OperIsBinary(genTreeOps gtOper) |
1370 | { |
1371 | return (OperKind(gtOper) & GTK_BINOP) != 0; |
1372 | } |
1373 | |
1374 | bool OperIsBinary() const |
1375 | { |
1376 | return OperIsBinary(gtOper); |
1377 | } |
1378 | |
1379 | static bool OperIsSimple(genTreeOps gtOper) |
1380 | { |
1381 | return (OperKind(gtOper) & GTK_SMPOP) != 0; |
1382 | } |
1383 | |
1384 | static bool OperIsSpecial(genTreeOps gtOper) |
1385 | { |
1386 | return ((OperKind(gtOper) & GTK_KINDMASK) == GTK_SPECIAL); |
1387 | } |
1388 | |
1389 | bool OperIsSimple() const |
1390 | { |
1391 | return OperIsSimple(gtOper); |
1392 | } |
1393 | |
1394 | #ifdef FEATURE_SIMD |
1395 | bool isCommutativeSIMDIntrinsic(); |
#else // !FEATURE_SIMD
1397 | bool isCommutativeSIMDIntrinsic() |
1398 | { |
1399 | return false; |
1400 | } |
1401 | #endif // FEATURE_SIMD |
1402 | |
1403 | #ifdef FEATURE_HW_INTRINSICS |
1404 | bool isCommutativeHWIntrinsic() const; |
1405 | bool isContainableHWIntrinsic() const; |
1406 | bool isRMWHWIntrinsic(Compiler* comp); |
1407 | #else |
1408 | bool isCommutativeHWIntrinsic() const |
1409 | { |
1410 | return false; |
1411 | } |
1412 | |
1413 | bool isContainableHWIntrinsic() const |
1414 | { |
1415 | return false; |
1416 | } |
1417 | |
1418 | bool isRMWHWIntrinsic(Compiler* comp) |
1419 | { |
1420 | return false; |
1421 | } |
1422 | #endif // FEATURE_HW_INTRINSICS |
1423 | |
1424 | static bool OperIsCommutative(genTreeOps gtOper) |
1425 | { |
1426 | return (OperKind(gtOper) & GTK_COMMUTE) != 0; |
1427 | } |
1428 | |
1429 | bool OperIsCommutative() |
1430 | { |
1431 | return OperIsCommutative(gtOper) || (OperIsSIMD(gtOper) && isCommutativeSIMDIntrinsic()) || |
1432 | (OperIsHWIntrinsic(gtOper) && isCommutativeHWIntrinsic()); |
1433 | } |
1434 | |
1435 | static bool OperMayOverflow(genTreeOps gtOper) |
1436 | { |
1437 | return ((gtOper == GT_ADD) || (gtOper == GT_SUB) || (gtOper == GT_MUL) || (gtOper == GT_CAST) |
1438 | #if !defined(_TARGET_64BIT_) |
1439 | || (gtOper == GT_ADD_HI) || (gtOper == GT_SUB_HI) |
1440 | #endif |
1441 | ); |
1442 | } |
1443 | |
1444 | bool OperMayOverflow() const |
1445 | { |
1446 | return OperMayOverflow(gtOper); |
1447 | } |
1448 | |
1449 | static bool OperIsIndir(genTreeOps gtOper) |
1450 | { |
1451 | return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper); |
1452 | } |
1453 | |
1454 | static bool OperIsIndirOrArrLength(genTreeOps gtOper) |
1455 | { |
1456 | return OperIsIndir(gtOper) || (gtOper == GT_ARR_LENGTH); |
1457 | } |
1458 | |
1459 | bool OperIsIndir() const |
1460 | { |
1461 | return OperIsIndir(gtOper); |
1462 | } |
1463 | |
1464 | bool OperIsIndirOrArrLength() const |
1465 | { |
1466 | return OperIsIndirOrArrLength(gtOper); |
1467 | } |
1468 | |
1469 | bool OperIsImplicitIndir() const; |
1470 | |
1471 | static bool OperIsAtomicOp(genTreeOps gtOper) |
1472 | { |
1473 | return (gtOper == GT_XADD || gtOper == GT_XCHG || gtOper == GT_LOCKADD || gtOper == GT_CMPXCHG); |
1474 | } |
1475 | |
1476 | bool OperIsAtomicOp() const |
1477 | { |
1478 | return OperIsAtomicOp(gtOper); |
1479 | } |
1480 | |
1481 | bool OperIsStore() const |
1482 | { |
1483 | return OperIsStore(gtOper); |
1484 | } |
1485 | |
1486 | static bool OperIsStore(genTreeOps gtOper) |
1487 | { |
1488 | return (gtOper == GT_STOREIND || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD || |
1489 | OperIsStoreBlk(gtOper) || OperIsAtomicOp(gtOper)); |
1490 | } |
1491 | |
1492 | // This is here for cleaner FEATURE_SIMD #ifdefs. |
1493 | static bool OperIsSIMD(genTreeOps gtOper) |
1494 | { |
1495 | #ifdef FEATURE_SIMD |
1496 | return gtOper == GT_SIMD; |
1497 | #else // !FEATURE_SIMD |
1498 | return false; |
1499 | #endif // !FEATURE_SIMD |
1500 | } |
1501 | |
1502 | bool OperIsSIMD() const |
1503 | { |
1504 | return OperIsSIMD(gtOper); |
1505 | } |
1506 | |
1507 | static bool OperIsHWIntrinsic(genTreeOps gtOper) |
1508 | { |
1509 | #ifdef FEATURE_HW_INTRINSICS |
1510 | return gtOper == GT_HWIntrinsic; |
1511 | #else |
1512 | return false; |
1513 | #endif // FEATURE_HW_INTRINSICS |
1514 | } |
1515 | |
1516 | bool OperIsHWIntrinsic() const |
1517 | { |
1518 | return OperIsHWIntrinsic(gtOper); |
1519 | } |
1520 | |
1521 | #ifdef FEATURE_HW_INTRINSICS |
1522 | inline bool OperIsSimdHWIntrinsic() const; |
1523 | #else |
1524 | inline bool OperIsSimdHWIntrinsic() const |
1525 | { |
1526 | return false; |
1527 | } |
1528 | #endif |
1529 | |
1530 | bool OperIsSIMDorSimdHWintrinsic() const |
1531 | { |
1532 | return OperIsSIMD() || OperIsSimdHWIntrinsic(); |
1533 | } |
1534 | |
1535 | // This is here for cleaner GT_LONG #ifdefs. |
1536 | static bool OperIsLong(genTreeOps gtOper) |
1537 | { |
1538 | #if defined(_TARGET_64BIT_) |
1539 | return false; |
1540 | #else |
1541 | return gtOper == GT_LONG; |
1542 | #endif |
1543 | } |
1544 | |
1545 | bool OperIsLong() const |
1546 | { |
1547 | return OperIsLong(gtOper); |
1548 | } |
1549 | |
    bool OperIsFieldListHead() const
1551 | { |
1552 | return (gtOper == GT_FIELD_LIST) && ((gtFlags & GTF_FIELD_LIST_HEAD) != 0); |
1553 | } |
1554 | |
1555 | bool OperIsConditionalJump() const |
1556 | { |
1557 | return (gtOper == GT_JTRUE) || (gtOper == GT_JCMP) || (gtOper == GT_JCC); |
1558 | } |
1559 | |
1560 | static bool OperIsBoundsCheck(genTreeOps op) |
1561 | { |
1562 | if (op == GT_ARR_BOUNDS_CHECK) |
1563 | { |
1564 | return true; |
1565 | } |
1566 | #ifdef FEATURE_SIMD |
1567 | if (op == GT_SIMD_CHK) |
1568 | { |
1569 | return true; |
1570 | } |
1571 | #endif // FEATURE_SIMD |
1572 | #ifdef FEATURE_HW_INTRINSICS |
1573 | if (op == GT_HW_INTRINSIC_CHK) |
1574 | { |
1575 | return true; |
1576 | } |
1577 | #endif // FEATURE_HW_INTRINSICS |
1578 | return false; |
1579 | } |
1580 | |
1581 | bool OperIsBoundsCheck() const |
1582 | { |
1583 | return OperIsBoundsCheck(OperGet()); |
1584 | } |
1585 | |
1586 | #ifdef DEBUG |
1587 | bool NullOp1Legal() const |
1588 | { |
1589 | assert(OperIsSimple(gtOper)); |
1590 | switch (gtOper) |
1591 | { |
1592 | case GT_PHI: |
1593 | case GT_LEA: |
1594 | case GT_RETFILT: |
1595 | case GT_NOP: |
1596 | #ifdef FEATURE_HW_INTRINSICS |
1597 | case GT_HWIntrinsic: |
1598 | #endif // FEATURE_HW_INTRINSICS |
1599 | return true; |
1600 | case GT_RETURN: |
1601 | return gtType == TYP_VOID; |
1602 | default: |
1603 | return false; |
1604 | } |
1605 | } |
1606 | |
1607 | bool NullOp2Legal() const |
1608 | { |
1609 | assert(OperIsSimple(gtOper) || OperIsBlk(gtOper)); |
1610 | if (!OperIsBinary(gtOper)) |
1611 | { |
1612 | return true; |
1613 | } |
1614 | switch (gtOper) |
1615 | { |
1616 | case GT_LIST: |
1617 | case GT_FIELD_LIST: |
1618 | case GT_INTRINSIC: |
1619 | case GT_LEA: |
1620 | #ifdef FEATURE_SIMD |
1621 | case GT_SIMD: |
#endif // FEATURE_SIMD
1623 | |
1624 | #ifdef FEATURE_HW_INTRINSICS |
1625 | case GT_HWIntrinsic: |
1626 | #endif // FEATURE_HW_INTRINSICS |
1627 | |
1628 | #if defined(_TARGET_ARM_) |
1629 | case GT_PUTARG_REG: |
1630 | #endif // defined(_TARGET_ARM_) |
1631 | |
1632 | return true; |
1633 | default: |
1634 | return false; |
1635 | } |
1636 | } |
1637 | |
1638 | static inline bool RequiresNonNullOp2(genTreeOps oper); |
1639 | bool IsValidCallArgument(); |
1640 | #endif // DEBUG |
1641 | |
1642 | inline bool IsFPZero(); |
1643 | inline bool IsIntegralConst(ssize_t constVal); |
1644 | inline bool IsIntegralConstVector(ssize_t constVal); |
1645 | |
1646 | inline bool IsBoxedValue(); |
1647 | |
1648 | inline bool IsSIMDEqualityOrInequality() const; |
1649 | |
1650 | static bool OperIsList(genTreeOps gtOper) |
1651 | { |
1652 | return gtOper == GT_LIST; |
1653 | } |
1654 | |
1655 | bool OperIsList() const |
1656 | { |
1657 | return OperIsList(gtOper); |
1658 | } |
1659 | |
1660 | static bool OperIsFieldList(genTreeOps gtOper) |
1661 | { |
1662 | return gtOper == GT_FIELD_LIST; |
1663 | } |
1664 | |
1665 | bool OperIsFieldList() const |
1666 | { |
1667 | return OperIsFieldList(gtOper); |
1668 | } |
1669 | |
1670 | static bool OperIsAnyList(genTreeOps gtOper) |
1671 | { |
1672 | return OperIsList(gtOper) || OperIsFieldList(gtOper); |
1673 | } |
1674 | |
1675 | bool OperIsAnyList() const |
1676 | { |
1677 | return OperIsAnyList(gtOper); |
1678 | } |
1679 | |
1680 | inline GenTree* MoveNext(); |
1681 | |
1682 | inline GenTree* Current(); |
1683 | |
1684 | inline GenTree** pCurrent(); |
1685 | |
1686 | inline GenTree* gtGetOp1() const; |
1687 | |
1688 | // Directly return op2. Asserts the node is binary. Might return nullptr if the binary node allows |
1689 | // a nullptr op2, such as GT_LIST. This is more efficient than gtGetOp2IfPresent() if you know what |
1690 | // node type you have. |
1691 | inline GenTree* gtGetOp2() const; |
1692 | |
1693 | // The returned pointer might be nullptr if the node is not binary, or if non-null op2 is not required. |
1694 | inline GenTree* gtGetOp2IfPresent() const; |
1695 | |
1696 | // Given a tree node, if this is a child of that node, return the pointer to the child node so that it |
1697 | // can be modified; otherwise, return null. |
1698 | GenTree** gtGetChildPointer(GenTree* parent) const; |
1699 | |
1700 | // Given a tree node, if this node uses that node, return the use as an out parameter and return true. |
1701 | // Otherwise, return false. |
1702 | bool TryGetUse(GenTree* def, GenTree*** use); |
1703 | |
1704 | private: |
1705 | bool TryGetUseList(GenTree* def, GenTree*** use); |
1706 | |
1707 | bool TryGetUseBinOp(GenTree* def, GenTree*** use); |
1708 | |
1709 | public: |
1710 | // Get the parent of this node, and optionally capture the pointer to the child so that it can be modified. |
1711 | GenTree* gtGetParent(GenTree*** parentChildPtrPtr) const; |
1712 | |
1713 | void ReplaceOperand(GenTree** useEdge, GenTree* replacement); |
1714 | |
1715 | inline GenTree* gtEffectiveVal(bool commaOnly = false); |
1716 | |
1717 | // Tunnel through any GT_RET_EXPRs |
1718 | inline GenTree* gtRetExprVal(); |
1719 | |
1720 | // Return the child of this node if it is a GT_RELOAD or GT_COPY; otherwise simply return the node itself |
1721 | inline GenTree* gtSkipReloadOrCopy(); |
1722 | |
1723 | // Returns true if it is a call node returning its value in more than one register |
1724 | inline bool IsMultiRegCall() const; |
1725 | |
1726 | // Returns true if it is a node returning its value in more than one register |
1727 | inline bool IsMultiRegNode() const; |
1728 | |
1729 | // Returns the number of registers defined by a multireg node. |
1730 | unsigned GetMultiRegCount(); |
1731 | |
1732 | // Returns the regIndex'th register defined by a possibly-multireg node. |
1733 | regNumber GetRegByIndex(int regIndex); |
1734 | |
1735 | // Returns the type of the regIndex'th register defined by a multi-reg node. |
1736 | var_types GetRegTypeByIndex(int regIndex); |
1737 | |
1738 | // Returns true if it is a GT_COPY or GT_RELOAD node |
1739 | inline bool IsCopyOrReload() const; |
1740 | |
1741 | // Returns true if it is a GT_COPY or GT_RELOAD of a multi-reg call node |
1742 | inline bool IsCopyOrReloadOfMultiRegCall() const; |
1743 | |
1744 | bool OperRequiresAsgFlag(); |
1745 | |
1746 | bool OperRequiresCallFlag(Compiler* comp); |
1747 | |
1748 | bool OperMayThrow(Compiler* comp); |
1749 | |
1750 | unsigned GetScaleIndexMul(); |
1751 | unsigned GetScaleIndexShf(); |
1752 | unsigned GetScaledIndex(); |
1753 | |
1754 | // Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer |
1755 | // (<= 32 bit) constant. If it returns true, it sets "*offset" to (one of the) constant value(s), and |
1756 | // "*addr" to the other argument. |
1757 | bool IsAddWithI32Const(GenTree** addr, int* offset); |
1758 | |
1759 | public: |
1760 | #if SMALL_TREE_NODES |
1761 | static unsigned char s_gtNodeSizes[]; |
1762 | #if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS |
1763 | static unsigned char s_gtTrueSizes[]; |
1764 | #endif |
1765 | #if COUNT_AST_OPERS |
1766 | static LONG s_gtNodeCounts[]; |
1767 | #endif |
1768 | #endif // SMALL_TREE_NODES |
1769 | |
1770 | static void InitNodeSize(); |
1771 | |
1772 | size_t GetNodeSize() const; |
1773 | |
1774 | bool IsNodeProperlySized() const; |
1775 | |
1776 | void ReplaceWith(GenTree* src, Compiler* comp); |
1777 | |
1778 | static genTreeOps ReverseRelop(genTreeOps relop); |
1779 | |
1780 | static genTreeOps SwapRelop(genTreeOps relop); |
1781 | |
1782 | //--------------------------------------------------------------------- |
1783 | |
1784 | static bool Compare(GenTree* op1, GenTree* op2, bool swapOK = false); |
1785 | |
1786 | //--------------------------------------------------------------------- |
1787 | |
1788 | #if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS |
1789 | static const char* OpName(genTreeOps op); |
1790 | #endif |
1791 | |
1792 | #if MEASURE_NODE_SIZE && SMALL_TREE_NODES |
1793 | static const char* OpStructName(genTreeOps op); |
1794 | #endif |
1795 | |
1796 | //--------------------------------------------------------------------- |
1797 | |
1798 | bool IsNothingNode() const; |
1799 | void gtBashToNOP(); |
1800 | |
1801 | // Value number update action enumeration |
1802 | enum ValueNumberUpdate |
1803 | { |
1804 | CLEAR_VN, // Clear value number |
1805 | PRESERVE_VN // Preserve value number |
1806 | }; |
1807 | |
1808 | void SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN); // set gtOper |
1809 | void SetOperResetFlags(genTreeOps oper); // set gtOper and reset flags |
1810 | |
1811 | void ChangeOperConst(genTreeOps oper); // ChangeOper(constOper) |
1812 | // set gtOper and only keep GTF_COMMON_MASK flags |
1813 | void ChangeOper(genTreeOps oper, ValueNumberUpdate vnUpdate = CLEAR_VN); |
1814 | void ChangeOperUnchecked(genTreeOps oper); |
1815 | void SetOperRaw(genTreeOps oper); |
1816 | |
1817 | void ChangeType(var_types newType) |
1818 | { |
1819 | var_types oldType = gtType; |
1820 | gtType = newType; |
1821 | GenTree* node = this; |
1822 | while (node->gtOper == GT_COMMA) |
1823 | { |
1824 | node = node->gtGetOp2(); |
1825 | assert(node->gtType == oldType); |
1826 | node->gtType = newType; |
1827 | } |
1828 | } |
1829 | |
1830 | #if SMALL_TREE_NODES |
1831 | #if NODEBASH_STATS |
1832 | static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew); |
1833 | static void ReportOperBashing(FILE* fp); |
1834 | #else |
1835 | static void RecordOperBashing(genTreeOps operOld, genTreeOps operNew) |
1836 | { /* do nothing */ |
1837 | } |
1838 | static void ReportOperBashing(FILE* fp) |
1839 | { /* do nothing */ |
1840 | } |
1841 | #endif |
1842 | #endif |
1843 | |
1844 | bool IsLocal() const |
1845 | { |
1846 | return OperIsLocal(OperGet()); |
1847 | } |
1848 | |
1849 | // Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type |
1850 | // is not the same size as the type of the GT_LCL_VAR. |
1851 | bool IsPartialLclFld(Compiler* comp); |
1852 | |
1853 | // Returns "true" iff "this" defines a local variable. Requires "comp" to be the |
1854 | // current compilation. If returns "true", sets "*pLclVarTree" to the |
1855 | // tree for the local that is defined, and, if "pIsEntire" is non-null, sets "*pIsEntire" to |
1856 | // true or false, depending on whether the assignment writes to the entirety of the local |
1857 | // variable, or just a portion of it. |
1858 | bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr); |
1859 | |
1860 | // Returns true if "this" represents the address of a local, or a field of a local. If returns true, sets |
1861 | // "*pLclVarTree" to the node indicating the local variable. If the address is that of a field of this node, |
1862 | // sets "*pFldSeq" to the field sequence representing that field, else null. |
1863 | bool IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq); |
1864 | |
1865 | // Simpler variant of the above which just returns the local node if this is an expression that |
1866 | // yields an address into a local |
1867 | GenTreeLclVarCommon* IsLocalAddrExpr(); |
1868 | |
1869 | // Determine if this is a LclVarCommon node and return some additional info about it in the |
1870 | // two out parameters. |
1871 | bool IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq); |
1872 | |
1873 | // Determine whether this is an assignment tree of the form X = X (op) Y, |
1874 | // where Y is an arbitrary tree, and X is a lclVar. |
1875 | unsigned IsLclVarUpdateTree(GenTree** otherTree, genTreeOps* updateOper); |
1876 | |
1877 | // If returns "true", "this" may represent the address of a static or instance field |
1878 | // (or a field of such a field, in the case of an object field of type struct). |
1879 | // If returns "true", then either "*pObj" is set to the object reference, |
1880 | // or "*pStatic" is set to the baseAddr or offset to be added to the "*pFldSeq" |
1881 | // Only one of "*pObj" or "*pStatic" will be set, the other one will be null. |
1882 | // The boolean return value only indicates that "this" *may* be a field address |
1883 | // -- the field sequence must also be checked. |
1884 | // If it is a field address, the field sequence will be a sequence of length >= 1, |
1885 | // starting with an instance or static field, and optionally continuing with struct fields. |
1886 | bool IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq); |
1887 | |
1888 | // Requires "this" to be the address of an array (the child of a GT_IND labeled with GTF_IND_ARR_INDEX). |
1889 | // Sets "pArr" to the node representing the array (either an array object pointer, or perhaps a byref to the some |
1890 | // element). |
1891 | // Sets "*pArrayType" to the class handle for the array type. |
1892 | // Sets "*inxVN" to the value number inferred for the array index. |
1893 | // Sets "*pFldSeq" to the sequence, if any, of struct fields used to index into the array element. |
1894 | void ParseArrayAddress( |
1895 | Compiler* comp, struct ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq); |
1896 | |
1897 | // Helper method for the above. |
1898 | void ParseArrayAddressWork(Compiler* comp, |
1899 | target_ssize_t inputMul, |
1900 | GenTree** pArr, |
1901 | ValueNum* pInxVN, |
1902 | target_ssize_t* pOffset, |
1903 | FieldSeqNode** pFldSeq); |
1904 | |
1905 | // Requires "this" to be a GT_IND. Requires the outermost caller to set "*pFldSeq" to nullptr. |
1906 | // Returns true if it is an array index expression, or access to a (sequence of) struct field(s) |
1907 | // within a struct array element. If it returns true, sets *arrayInfo to the array information, and sets *pFldSeq |
1908 | // to the sequence of struct field accesses. |
1909 | bool ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq); |
1910 | |
1911 | // Requires "this" to be the address of a (possible) array element (or struct field within that). |
1912 | // If it is, sets "*arrayInfo" to the array access info, "*pFldSeq" to the sequence of struct fields |
1913 | // accessed within the array element, and returns true. If not, returns "false". |
1914 | bool ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq); |
1915 | |
1916 | // Requires "this" to be an int expression. If it is a sequence of one or more integer constants added together, |
1917 | // returns true and sets "*pFldSeq" to the sequence of fields with which those constants are annotated. |
1918 | bool ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq); |
1919 | |
1920 | // Labels "*this" as an array index expression: label all constants and variables that could contribute, as part of |
    // an affine expression, to the value of the index.
1922 | void LabelIndex(Compiler* comp, bool isConst = true); |
1923 | |
1924 | // Assumes that "this" occurs in a context where it is being dereferenced as the LHS of an assignment-like |
1925 | // statement (assignment, initblk, or copyblk). The "width" should be the number of bytes copied by the |
1926 | // operation. Returns "true" if "this" is an address of (or within) |
1927 | // a local variable; sets "*pLclVarTree" to that local variable instance; and, if "pIsEntire" is non-null, |
1928 | // sets "*pIsEntire" to true if this assignment writes the full width of the local. |
1929 | bool DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire); |
1930 | |
1931 | // These are only used for dumping. |
1932 | // The gtRegNum is only valid in LIR, but the dumping methods are not easily |
1933 | // modified to check this. |
1934 | CLANG_FORMAT_COMMENT_ANCHOR; |
1935 | |
1936 | #ifdef DEBUG |
1937 | bool InReg() const |
1938 | { |
        return GetRegTag() != GT_REGTAG_NONE;
1940 | } |
1941 | regNumber GetReg() const |
1942 | { |
1943 | return (GetRegTag() != GT_REGTAG_NONE) ? gtRegNum : REG_NA; |
1944 | } |
1945 | #endif |
1946 | |
1947 | static bool IsContained(unsigned flags) |
1948 | { |
1949 | return ((flags & GTF_CONTAINED) != 0); |
1950 | } |
1951 | |
1952 | void SetContained() |
1953 | { |
1954 | assert(IsValue()); |
1955 | gtFlags |= GTF_CONTAINED; |
1956 | assert(isContained()); |
1957 | } |
1958 | |
1959 | void ClearContained() |
1960 | { |
1961 | assert(IsValue()); |
1962 | gtFlags &= ~GTF_CONTAINED; |
1963 | ClearRegOptional(); |
1964 | } |
1965 | |
1966 | bool IsRegVarDeath() const |
1967 | { |
1968 | unreached(); |
        return (gtFlags & GTF_VAR_DEATH) != 0;
1970 | } |
1971 | bool IsRegVarBirth() const |
1972 | { |
1973 | unreached(); |
        return (gtFlags & GTF_REG_BIRTH) != 0;
1975 | } |
1976 | |
1977 | bool IsReverseOp() const |
1978 | { |
        return (gtFlags & GTF_REVERSE_OPS) != 0;
1980 | } |
1981 | |
1982 | bool IsUnsigned() const |
1983 | { |
1984 | return ((gtFlags & GTF_UNSIGNED) != 0); |
1985 | } |
1986 | |
1987 | inline bool IsCnsIntOrI() const; |
1988 | |
1989 | inline bool IsIntegralConst() const; |
1990 | |
1991 | inline bool IsIntCnsFitsInI32(); // Constant fits in INT32 |
1992 | |
1993 | inline bool IsCnsFltOrDbl() const; |
1994 | |
1995 | inline bool IsCnsNonZeroFltOrDbl(); |
1996 | |
1997 | bool IsIconHandle() const |
1998 | { |
1999 | assert(gtOper == GT_CNS_INT); |
        return (gtFlags & GTF_ICON_HDL_MASK) != 0;
2001 | } |
2002 | |
2003 | bool IsIconHandle(unsigned handleType) const |
2004 | { |
2005 | assert(gtOper == GT_CNS_INT); |
2006 | assert((handleType & GTF_ICON_HDL_MASK) != 0); // check that handleType is one of the valid GTF_ICON_* values |
2007 | assert((handleType & ~GTF_ICON_HDL_MASK) == 0); |
2008 | return (gtFlags & GTF_ICON_HDL_MASK) == handleType; |
2009 | } |
2010 | |
2011 | // Return just the part of the flags corresponding to the GTF_ICON_*_HDL flag. For example, |
2012 | // GTF_ICON_SCOPE_HDL. The tree node must be a const int, but it might not be a handle, in which |
2013 | // case we'll return zero. |
2014 | unsigned GetIconHandleFlag() const |
2015 | { |
2016 | assert(gtOper == GT_CNS_INT); |
2017 | return (gtFlags & GTF_ICON_HDL_MASK); |
2018 | } |
2019 | |
2020 | // Mark this node as no longer being a handle; clear its GTF_ICON_*_HDL bits. |
2021 | void ClearIconHandleMask() |
2022 | { |
2023 | assert(gtOper == GT_CNS_INT); |
2024 | gtFlags &= ~GTF_ICON_HDL_MASK; |
2025 | } |
2026 | |
2027 | // Return true if the two GT_CNS_INT trees have the same handle flag (GTF_ICON_*_HDL). |
2028 | static bool SameIconHandleFlag(GenTree* t1, GenTree* t2) |
2029 | { |
2030 | return t1->GetIconHandleFlag() == t2->GetIconHandleFlag(); |
2031 | } |
2032 | |
2033 | bool IsArgPlaceHolderNode() const |
2034 | { |
2035 | return OperGet() == GT_ARGPLACE; |
2036 | } |
2037 | bool IsCall() const |
2038 | { |
2039 | return OperGet() == GT_CALL; |
2040 | } |
2041 | bool IsStatement() const |
2042 | { |
2043 | return OperGet() == GT_STMT; |
2044 | } |
2045 | inline bool IsHelperCall(); |
2046 | |
2047 | bool IsVarAddr() const; |
2048 | bool gtOverflow() const; |
2049 | bool gtOverflowEx() const; |
2050 | bool gtSetFlags() const; |
2051 | bool gtRequestSetFlags(); |
2052 | |
2053 | #ifdef DEBUG |
2054 | bool gtIsValid64RsltMul(); |
2055 | static int gtDispFlags(unsigned flags, unsigned debugFlags); |
2056 | #endif |
2057 | |
2058 | // cast operations |
2059 | inline var_types CastFromType(); |
2060 | inline var_types& CastToType(); |
2061 | |
2062 | // Returns "true" iff "this" is a phi-related node (i.e. a GT_PHI_ARG, GT_PHI, or a PhiDefn). |
2063 | bool IsPhiNode(); |
2064 | |
2065 | // Returns "true" iff "*this" is an assignment (GT_ASG) tree that defines an SSA name (lcl = phi(...)); |
2066 | bool IsPhiDefn(); |
2067 | |
2068 | // Returns "true" iff "*this" is a statement containing an assignment that defines an SSA name (lcl = phi(...)); |
2069 | bool IsPhiDefnStmt(); |
2070 | |
    // Because we hid the assignment operator of "BitSet" (in DEBUG),
2072 | // we can't synthesize an assignment operator. |
2073 | // TODO-Cleanup: Could change this w/o liveset on tree nodes |
2074 | // (This is also necessary for the VTable trick.) |
2075 | GenTree() |
2076 | { |
2077 | } |
2078 | |
2079 | // Returns the number of children of the current node. |
2080 | unsigned NumChildren(); |
2081 | |
2082 | // Requires "childNum < NumChildren()". Returns the "n"th child of "this." |
2083 | GenTree* GetChild(unsigned childNum); |
2084 | |
2085 | // Returns an iterator that will produce the use edge to each operand of this node. Differs |
2086 | // from the sequence of nodes produced by a loop over `GetChild` in its handling of call, phi, |
2087 | // and block op nodes. |
2088 | GenTreeUseEdgeIterator UseEdgesBegin(); |
2089 | GenTreeUseEdgeIterator UseEdgesEnd(); |
2090 | |
2091 | IteratorPair<GenTreeUseEdgeIterator> UseEdges(); |
2092 | |
2093 | // Returns an iterator that will produce each operand of this node. Differs from the sequence |
2094 | // of nodes produced by a loop over `GetChild` in its handling of call, phi, and block op |
2095 | // nodes. |
2096 | GenTreeOperandIterator OperandsBegin(); |
2097 | GenTreeOperandIterator OperandsEnd(); |
2098 | |
2099 | // Returns a range that will produce the operands of this node in use order. |
2100 | IteratorPair<GenTreeOperandIterator> Operands(); |
2101 | |
2102 | enum class VisitResult |
2103 | { |
2104 | Abort = false, |
2105 | Continue = true |
2106 | }; |
2107 | |
    // Visits each operand of this node. The visitor must be either a lambda, function, or functor with the signature
    // `GenTree::VisitResult VisitorFunction(GenTree* operand)`. Here is a simple example:
    //
    //    unsigned operandCount = 0;
    //    node->VisitOperands([&](GenTree* operand) -> GenTree::VisitResult
    //    {
    //        operandCount++;
    //        return GenTree::VisitResult::Continue;
    //    });
    //
    // This function is generally more efficient than the operand iterator and should be preferred over that API for
    // hot code, as it affords better opportunities for inlining and achieves shorter dynamic path lengths when
2120 | // deciding how operands need to be accessed. |
2121 | // |
2122 | // Note that this function does not respect `GTF_REVERSE_OPS` and `gtEvalSizeFirst`. This is always safe in LIR, |
2123 | // but may be dangerous in HIR if for some reason you need to visit operands in the order in which they will |
2124 | // execute. |
2125 | template <typename TVisitor> |
2126 | void VisitOperands(TVisitor visitor); |
2127 | |
2128 | private: |
2129 | template <typename TVisitor> |
2130 | VisitResult VisitListOperands(TVisitor visitor); |
2131 | |
2132 | template <typename TVisitor> |
2133 | void VisitBinOpOperands(TVisitor visitor); |
2134 | |
2135 | public: |
2136 | bool Precedes(GenTree* other); |
2137 | |
2138 | // The maximum possible # of children of any node. |
2139 | static const int MAX_CHILDREN = 6; |
2140 | |
2141 | bool IsReuseRegVal() const |
2142 | { |
2143 | // This can be extended to non-constant nodes, but not to local or indir nodes. |
2144 | if (OperIsConst() && ((gtFlags & GTF_REUSE_REG_VAL) != 0)) |
2145 | { |
2146 | return true; |
2147 | } |
2148 | return false; |
2149 | } |
2150 | void SetReuseRegVal() |
2151 | { |
2152 | assert(OperIsConst()); |
2153 | gtFlags |= GTF_REUSE_REG_VAL; |
2154 | } |
2155 | void ResetReuseRegVal() |
2156 | { |
2157 | assert(OperIsConst()); |
2158 | gtFlags &= ~GTF_REUSE_REG_VAL; |
2159 | } |
2160 | |
2161 | void SetIndirExceptionFlags(Compiler* comp) |
2162 | { |
2163 | assert(OperIsIndirOrArrLength()); |
2164 | gtFlags |= OperMayThrow(comp) ? GTF_EXCEPT : GTF_IND_NONFAULTING; |
2165 | } |
2166 | |
2167 | #if MEASURE_NODE_SIZE |
2168 | static void DumpNodeSizes(FILE* fp); |
2169 | #endif |
2170 | |
2171 | #ifdef DEBUG |
2172 | |
2173 | private: |
2174 | GenTree& operator=(const GenTree& gt) |
2175 | { |
2176 | assert(!"Don't copy" ); |
2177 | return *this; |
2178 | } |
2179 | #endif // DEBUG |
2180 | |
2181 | #if DEBUGGABLE_GENTREE |
2182 | // In DEBUG builds, add a dummy virtual method, to give the debugger run-time type information. |
2183 | virtual void DummyVirt() |
2184 | { |
2185 | } |
2186 | |
2187 | typedef void* VtablePtr; |
2188 | |
2189 | VtablePtr GetVtableForOper(genTreeOps oper); |
2190 | void SetVtableForOper(genTreeOps oper); |
2191 | |
2192 | static VtablePtr s_vtablesForOpers[GT_COUNT]; |
2193 | static VtablePtr s_vtableForOp; |
2194 | #endif // DEBUGGABLE_GENTREE |
2195 | |
2196 | public: |
2197 | inline void* operator new(size_t sz, class Compiler*, genTreeOps oper); |
2198 | |
2199 | inline GenTree(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false)); |
2200 | }; |
2201 | |
2202 | //------------------------------------------------------------------------ |
2203 | // GenTreeUseEdgeIterator: an iterator that will produce each use edge of a GenTree node in the order in which |
2204 | // they are used. |
2205 | // |
2206 | // The use edges of a node may not correspond exactly to the nodes on the other ends of its use edges: in |
2207 | // particular, GT_LIST nodes are expanded into their component parts. This differs from the behavior of |
2208 | // GenTree::GetChildPointer(), which does not expand lists. |
2209 | // |
2210 | // Operand iteration is common enough in the back end of the compiler that the implementation of this type has |
2211 | // traded some simplicity for speed: |
2212 | // - As much work as is reasonable is done in the constructor rather than during operand iteration |
2213 | // - Node-specific functionality is handled by a small class of "advance" functions called by operator++ |
2214 | // rather than making operator++ itself handle all nodes |
2215 | // - Some specialization has been performed for specific node types/shapes (e.g. the advance function for |
2216 | // binary nodes is specialized based on whether or not the node has the GTF_REVERSE_OPS flag set) |
2217 | // |
2218 | // Valid values of this type may be obtained by calling `GenTree::UseEdgesBegin` and `GenTree::UseEdgesEnd`. |
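//
// A typical use (a sketch; "oldChild" and "newChild" are hypothetical):
//
//    for (GenTreeUseEdgeIterator it = node->UseEdgesBegin(); it != node->UseEdgesEnd(); ++it)
//    {
//        GenTree** useEdge = *it;   // the iterator yields the use edge, i.e. a GenTree**
//        if (*useEdge == oldChild)
//        {
//            *useEdge = newChild;   // rewrite the use in place
//        }
//    }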
2219 | // |
2220 | class GenTreeUseEdgeIterator final |
2221 | { |
2222 | friend class GenTreeOperandIterator; |
2223 | friend GenTreeUseEdgeIterator GenTree::UseEdgesBegin(); |
2224 | friend GenTreeUseEdgeIterator GenTree::UseEdgesEnd(); |
2225 | |
2226 | enum |
2227 | { |
2228 | CALL_INSTANCE = 0, |
2229 | CALL_ARGS = 1, |
2230 | CALL_LATE_ARGS = 2, |
2231 | CALL_CONTROL_EXPR = 3, |
2232 | CALL_COOKIE = 4, |
2233 | CALL_ADDRESS = 5, |
2234 | CALL_TERMINAL = 6, |
2235 | }; |
2236 | |
2237 | typedef void (GenTreeUseEdgeIterator::*AdvanceFn)(); |
2238 | |
2239 | AdvanceFn m_advance; |
2240 | GenTree* m_node; |
2241 | GenTree** m_edge; |
2242 | GenTree* m_argList; |
2243 | int m_state; |
2244 | |
2245 | GenTreeUseEdgeIterator(GenTree* node); |
2246 | |
2247 | // Advance functions for special nodes |
2248 | void AdvanceCmpXchg(); |
2249 | void AdvanceBoundsChk(); |
2250 | void AdvanceArrElem(); |
2251 | void AdvanceArrOffset(); |
2252 | void AdvanceDynBlk(); |
2253 | void AdvanceStoreDynBlk(); |
2254 | |
2255 | template <bool ReverseOperands> |
2256 | void AdvanceBinOp(); |
2257 | void SetEntryStateForBinOp(); |
2258 | |
2259 | // An advance function for list-like nodes (Phi, SIMDIntrinsicInitN, FieldList) |
2260 | void AdvanceList(); |
2261 | void SetEntryStateForList(GenTree* list); |
2262 | |
2263 | // The advance function for call nodes |
2264 | template <int state> |
2265 | void AdvanceCall(); |
2266 | |
2267 | void Terminate(); |
2268 | |
2269 | public: |
2270 | GenTreeUseEdgeIterator(); |
2271 | |
2272 | inline GenTree** operator*() |
2273 | { |
2274 | assert(m_state != -1); |
2275 | return m_edge; |
2276 | } |
2277 | |
2278 | inline GenTree** operator->() |
2279 | { |
2280 | assert(m_state != -1); |
2281 | return m_edge; |
2282 | } |
2283 | |
2284 | inline bool operator==(const GenTreeUseEdgeIterator& other) const |
2285 | { |
2286 | if (m_state == -1 || other.m_state == -1) |
2287 | { |
2288 | return m_state == other.m_state; |
2289 | } |
2290 | |
2291 | return (m_node == other.m_node) && (m_edge == other.m_edge) && (m_argList == other.m_argList) && |
2292 | (m_state == other.m_state); |
2293 | } |
2294 | |
2295 | inline bool operator!=(const GenTreeUseEdgeIterator& other) const |
2296 | { |
2297 | return !(operator==(other)); |
2298 | } |
2299 | |
2300 | GenTreeUseEdgeIterator& operator++(); |
2301 | }; |
2302 | |
2303 | //------------------------------------------------------------------------ |
2304 | // GenTreeOperandIterator: an iterator that will produce each operand of a |
2305 | // GenTree node in the order in which they are |
2306 | // used. This uses `GenTreeUseEdgeIterator` under |
2307 | // the covers and comes with the same caveats |
2308 | // w.r.t. `GetChild`. |
2309 | // |
2310 | // Note: valid values of this type may be obtained by calling |
2311 | // `GenTree::OperandsBegin` and `GenTree::OperandsEnd`. |
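//
// A typical use (a sketch):
//
//    for (GenTree* operand : node->Operands())
//    {
//        // visits each operand in use order; GT_LIST chains are expanded
//    }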
2312 | class GenTreeOperandIterator final |
2313 | { |
2314 | friend GenTreeOperandIterator GenTree::OperandsBegin(); |
2315 | friend GenTreeOperandIterator GenTree::OperandsEnd(); |
2316 | |
2317 | GenTreeUseEdgeIterator m_useEdges; |
2318 | |
2319 | GenTreeOperandIterator(GenTree* node) : m_useEdges(node) |
2320 | { |
2321 | } |
2322 | |
2323 | public: |
2324 | GenTreeOperandIterator() : m_useEdges() |
2325 | { |
2326 | } |
2327 | |
2328 | inline GenTree* operator*() |
2329 | { |
2330 | return *(*m_useEdges); |
2331 | } |
2332 | |
2333 | inline GenTree* operator->() |
2334 | { |
2335 | return *(*m_useEdges); |
2336 | } |
2337 | |
2338 | inline bool operator==(const GenTreeOperandIterator& other) const |
2339 | { |
2340 | return m_useEdges == other.m_useEdges; |
2341 | } |
2342 | |
2343 | inline bool operator!=(const GenTreeOperandIterator& other) const |
2344 | { |
2345 | return !(operator==(other)); |
2346 | } |
2347 | |
2348 | inline GenTreeOperandIterator& operator++() |
2349 | { |
2350 | ++m_useEdges; |
2351 | return *this; |
2352 | } |
2353 | }; |
2354 | |
2355 | /*****************************************************************************/ |
2356 | // In the current design, we never instantiate GenTreeUnOp: it exists only to be |
2357 | // used as a base class. For unary operators, we instantiate GenTreeOp, with a NULL second |
2358 | // argument. We check that this is true dynamically. We could tighten this and get static |
2359 | // checking, but that would entail accessing the first child of a unary operator via something |
2360 | // like gtUnOp.gtOp1 instead of gtOp.gtOp1. |
2361 | struct GenTreeUnOp : public GenTree |
2362 | { |
2363 | GenTree* gtOp1; |
2364 | |
2365 | protected: |
2366 | GenTreeUnOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false)) |
2367 | : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(nullptr) |
2368 | { |
2369 | } |
2370 | |
2371 | GenTreeUnOp(genTreeOps oper, var_types type, GenTree* op1 DEBUGARG(bool largeNode = false)) |
2372 | : GenTree(oper, type DEBUGARG(largeNode)), gtOp1(op1) |
2373 | { |
2374 | assert(op1 != nullptr || NullOp1Legal()); |
2375 | if (op1 != nullptr) |
2376 | { // Propagate effects flags from child. |
2377 | gtFlags |= op1->gtFlags & GTF_ALL_EFFECT; |
2378 | } |
2379 | } |
2380 | |
2381 | #if DEBUGGABLE_GENTREE |
2382 | GenTreeUnOp() : GenTree(), gtOp1(nullptr) |
2383 | { |
2384 | } |
2385 | #endif |
2386 | }; |
2387 | |
2388 | struct GenTreeOp : public GenTreeUnOp |
2389 | { |
2390 | GenTree* gtOp2; |
2391 | |
2392 | GenTreeOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2 DEBUGARG(bool largeNode = false)) |
2393 | : GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)), gtOp2(op2) |
2394 | { |
2395 | // comparisons are always integral types |
2396 | assert(!GenTree::OperIsCompare(oper) || varTypeIsIntegral(type)); |
2397 | // Binary operators, with a few exceptions, require a non-nullptr |
2398 | // second argument. |
2399 | assert(op2 != nullptr || NullOp2Legal()); |
2400 | // Unary operators, on the other hand, require a null second argument. |
2401 | assert(!OperIsUnary(oper) || op2 == nullptr); |
2402 | // Propagate effects flags from child. (UnOp handled this for first child.) |
2403 | if (op2 != nullptr) |
2404 | { |
2405 | gtFlags |= op2->gtFlags & GTF_ALL_EFFECT; |
2406 | } |
2407 | } |
2408 | |
2409 | // A small set of types are unary operators with optional arguments. We use |
2410 | // this constructor to build those. |
2411 | GenTreeOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false)) |
2412 | : GenTreeUnOp(oper, type DEBUGARG(largeNode)), gtOp2(nullptr) |
2413 | { |
2414 | // Unary operators with optional arguments: |
2415 | assert(oper == GT_NOP || oper == GT_RETURN || oper == GT_RETFILT || OperIsBlk(oper)); |
2416 | } |
2417 | |
2418 | #if DEBUGGABLE_GENTREE |
2419 | GenTreeOp() : GenTreeUnOp(), gtOp2(nullptr) |
2420 | { |
2421 | } |
2422 | #endif |
2423 | }; |
2424 | |
2425 | struct GenTreeVal : public GenTree |
2426 | { |
2427 | size_t gtVal1; |
2428 | |
2429 | GenTreeVal(genTreeOps oper, var_types type, ssize_t val) : GenTree(oper, type), gtVal1(val) |
2430 | { |
2431 | } |
2432 | #if DEBUGGABLE_GENTREE |
2433 | GenTreeVal() : GenTree() |
2434 | { |
2435 | } |
2436 | #endif |
2437 | }; |
2438 | |
2439 | struct GenTreeIntConCommon : public GenTree |
2440 | { |
2441 | inline INT64 LngValue(); |
2442 | inline void SetLngValue(INT64 val); |
2443 | inline ssize_t IconValue(); |
2444 | inline void SetIconValue(ssize_t val); |
2445 | inline INT64 IntegralValue(); |
2446 | |
2447 | GenTreeIntConCommon(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false)) |
2448 | : GenTree(oper, type DEBUGARG(largeNode)) |
2449 | { |
2450 | } |
2451 | |
2452 | bool FitsInI8() // IconValue() fits into 8-bit signed storage |
2453 | { |
2454 | return FitsInI8(IconValue()); |
2455 | } |
2456 | |
2457 | static bool FitsInI8(ssize_t val) // Constant fits into 8-bit signed storage |
2458 | { |
2459 | return (int8_t)val == val; |
2460 | } |
2461 | |
2462 | bool FitsInI32() // IconValue() fits into 32-bit signed storage |
2463 | { |
2464 | return FitsInI32(IconValue()); |
2465 | } |
2466 | |
2467 | static bool FitsInI32(ssize_t val) // Constant fits into 32-bit signed storage |
2468 | { |
2469 | #ifdef _TARGET_64BIT_ |
2470 | return (int32_t)val == val; |
2471 | #else |
2472 | return true; |
2473 | #endif |
2474 | } |
2475 | |
2476 | bool ImmedValNeedsReloc(Compiler* comp); |
2477 | bool ImmedValCanBeFolded(Compiler* comp, genTreeOps op); |
2478 | |
2479 | #ifdef _TARGET_XARCH_ |
2480 | bool FitsInAddrBase(Compiler* comp); |
2481 | bool AddrNeedsReloc(Compiler* comp); |
2482 | #endif |
2483 | |
2484 | #if DEBUGGABLE_GENTREE |
2485 | GenTreeIntConCommon() : GenTree() |
2486 | { |
2487 | } |
2488 | #endif |
2489 | }; |
2490 | |
2491 | // node representing a read from a physical register |
2492 | struct GenTreePhysReg : public GenTree |
2493 | { |
2494 | // physregs need a field beyond gtRegNum because |
2495 | // gtRegNum indicates the destination (and can be changed) |
    // whereas gtSrcReg indicates the source
2497 | regNumber gtSrcReg; |
2498 | GenTreePhysReg(regNumber r, var_types type = TYP_I_IMPL) : GenTree(GT_PHYSREG, type), gtSrcReg(r) |
2499 | { |
2500 | } |
2501 | #if DEBUGGABLE_GENTREE |
2502 | GenTreePhysReg() : GenTree() |
2503 | { |
2504 | } |
2505 | #endif |
2506 | }; |
2507 | |
2508 | // gtJumpTable - Switch Jump Table |
2509 | // |
// This node stores a constant that represents the absolute address of a
// jump table used for switches. The code generator emits the destination
// of every case into an array of addresses whose starting position is
// given by this constant.
2515 | struct GenTreeJumpTable : public GenTreeIntConCommon |
2516 | { |
2517 | ssize_t gtJumpTableAddr; |
2518 | |
2519 | GenTreeJumpTable(var_types type DEBUGARG(bool largeNode = false)) |
2520 | : GenTreeIntConCommon(GT_JMPTABLE, type DEBUGARG(largeNode)) |
2521 | { |
2522 | } |
2523 | #if DEBUGGABLE_GENTREE |
2524 | GenTreeJumpTable() : GenTreeIntConCommon() |
2525 | { |
2526 | } |
2527 | #endif // DEBUG |
2528 | }; |
2529 | |
2530 | /* gtIntCon -- integer constant (GT_CNS_INT) */ |
2531 | struct GenTreeIntCon : public GenTreeIntConCommon |
2532 | { |
2533 | /* |
2534 | * This is the GT_CNS_INT struct definition. |
     * It's used to hold both int constants and pointer handle constants.
     * For 64-bit targets we use only GT_CNS_INT, as it can represent all the possible sizes.
     * For 32-bit targets we use GT_CNS_LNG to hold a 64-bit integer constant and GT_CNS_INT for all others.
     * In the future, when we retarget the JIT for x86, we should consider eliminating GT_CNS_LNG.
2539 | */ |
    ssize_t gtIconVal; // Must overlap and have the same offset as the gtLconVal field in GenTreeLngCon below.
2541 | |
2542 | /* The InitializeArray intrinsic needs to go back to the newarray statement |
2543 | to find the class handle of the array so that we can get its size. However, |
2544 | in ngen mode, the handle in that statement does not correspond to the compile |
2545 | time handle (rather it lets you get a handle at run-time). In that case, we also |
2546 | need to store a compile time handle, which goes in this gtCompileTimeHandle field. |
2547 | */ |
2548 | ssize_t gtCompileTimeHandle; |
2549 | |
2550 | // TODO-Cleanup: It's not clear what characterizes the cases where the field |
2551 | // above is used. It may be that its uses and those of the "gtFieldSeq" field below |
2552 | // are mutually exclusive, and they could be put in a union. Or else we should separate |
2553 | // this type into three subtypes. |
2554 | |
2555 | // If this constant represents the offset of one or more fields, "gtFieldSeq" represents that |
2556 | // sequence of fields. |
2557 | FieldSeqNode* gtFieldSeq; |
2558 | |
2559 | GenTreeIntCon(var_types type, ssize_t value DEBUGARG(bool largeNode = false)) |
2560 | : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode)) |
2561 | , gtIconVal(value) |
2562 | , gtCompileTimeHandle(0) |
2563 | , gtFieldSeq(FieldSeqStore::NotAField()) |
2564 | { |
2565 | } |
2566 | |
2567 | GenTreeIntCon(var_types type, ssize_t value, FieldSeqNode* fields DEBUGARG(bool largeNode = false)) |
2568 | : GenTreeIntConCommon(GT_CNS_INT, type DEBUGARG(largeNode)) |
2569 | , gtIconVal(value) |
2570 | , gtCompileTimeHandle(0) |
2571 | , gtFieldSeq(fields) |
2572 | { |
2573 | assert(fields != nullptr); |
2574 | } |
2575 | |
2576 | void FixupInitBlkValue(var_types asgType); |
2577 | |
2578 | #ifdef _TARGET_64BIT_ |
2579 | void TruncateOrSignExtend32() |
2580 | { |
2581 | if (gtFlags & GTF_UNSIGNED) |
2582 | { |
2583 | gtIconVal = UINT32(gtIconVal); |
2584 | } |
2585 | else |
2586 | { |
2587 | gtIconVal = INT32(gtIconVal); |
2588 | } |
2589 | } |
2590 | #endif // _TARGET_64BIT_ |
2591 | |
2592 | #if DEBUGGABLE_GENTREE |
2593 | GenTreeIntCon() : GenTreeIntConCommon() |
2594 | { |
2595 | } |
2596 | #endif |
2597 | }; |
2598 | |
2599 | /* gtLngCon -- long constant (GT_CNS_LNG) */ |
2600 | |
2601 | struct GenTreeLngCon : public GenTreeIntConCommon |
2602 | { |
    INT64 gtLconVal; // Must overlap and have the same offset as the gtIconVal field in GenTreeIntCon above.
2604 | INT32 LoVal() |
2605 | { |
2606 | return (INT32)(gtLconVal & 0xffffffff); |
2607 | } |
2608 | |
2609 | INT32 HiVal() |
2610 | { |
2611 | return (INT32)(gtLconVal >> 32); |
2612 | } |
2613 | |
2614 | GenTreeLngCon(INT64 val) : GenTreeIntConCommon(GT_CNS_NATIVELONG, TYP_LONG) |
2615 | { |
2616 | SetLngValue(val); |
2617 | } |
2618 | #if DEBUGGABLE_GENTREE |
2619 | GenTreeLngCon() : GenTreeIntConCommon() |
2620 | { |
2621 | } |
2622 | #endif |
2623 | }; |
2624 | |
2625 | inline INT64 GenTreeIntConCommon::LngValue() |
2626 | { |
2627 | #ifndef _TARGET_64BIT_ |
2628 | assert(gtOper == GT_CNS_LNG); |
2629 | return AsLngCon()->gtLconVal; |
2630 | #else |
2631 | return IconValue(); |
2632 | #endif |
2633 | } |
2634 | |
2635 | inline void GenTreeIntConCommon::SetLngValue(INT64 val) |
2636 | { |
2637 | #ifndef _TARGET_64BIT_ |
2638 | assert(gtOper == GT_CNS_LNG); |
2639 | AsLngCon()->gtLconVal = val; |
2640 | #else |
2641 | // Compile time asserts that these two fields overlap and have the same offsets: gtIconVal and gtLconVal |
2642 | C_ASSERT(offsetof(GenTreeLngCon, gtLconVal) == offsetof(GenTreeIntCon, gtIconVal)); |
2643 | C_ASSERT(sizeof(AsLngCon()->gtLconVal) == sizeof(AsIntCon()->gtIconVal)); |
2644 | |
2645 | SetIconValue(ssize_t(val)); |
2646 | #endif |
2647 | } |
2648 | |
2649 | inline ssize_t GenTreeIntConCommon::IconValue() |
2650 | { |
2651 | assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target! |
2652 | return AsIntCon()->gtIconVal; |
2653 | } |
2654 | |
2655 | inline void GenTreeIntConCommon::SetIconValue(ssize_t val) |
2656 | { |
2657 | assert(gtOper == GT_CNS_INT); // We should never see a GT_CNS_LNG for a 64-bit target! |
2658 | AsIntCon()->gtIconVal = val; |
2659 | } |
2660 | |
2661 | inline INT64 GenTreeIntConCommon::IntegralValue() |
2662 | { |
2663 | #ifdef _TARGET_64BIT_ |
2664 | return LngValue(); |
2665 | #else |
2666 | return gtOper == GT_CNS_LNG ? LngValue() : (INT64)IconValue(); |
2667 | #endif // _TARGET_64BIT_ |
2668 | } |
2669 | |
2670 | /* gtDblCon -- double constant (GT_CNS_DBL) */ |
2671 | |
2672 | struct GenTreeDblCon : public GenTree |
2673 | { |
2674 | double gtDconVal; |
2675 | |
2676 | bool isBitwiseEqual(GenTreeDblCon* other) |
2677 | { |
        unsigned __int64 bits      = *(unsigned __int64*)(&gtDconVal);
2679 | unsigned __int64 otherBits = *(unsigned __int64*)(&(other->gtDconVal)); |
2680 | return (bits == otherBits); |
2681 | } |
2682 | |
2683 | GenTreeDblCon(double val) : GenTree(GT_CNS_DBL, TYP_DOUBLE), gtDconVal(val) |
2684 | { |
2685 | } |
2686 | #if DEBUGGABLE_GENTREE |
2687 | GenTreeDblCon() : GenTree() |
2688 | { |
2689 | } |
2690 | #endif |
2691 | }; |
2692 | |
2693 | /* gtStrCon -- string constant (GT_CNS_STR) */ |
2694 | |
2695 | struct GenTreeStrCon : public GenTree |
2696 | { |
2697 | unsigned gtSconCPX; |
2698 | CORINFO_MODULE_HANDLE gtScpHnd; |
2699 | |
2700 | // Because this node can come from an inlined method we need to |
2701 | // have the scope handle, since it will become a helper call. |
2702 | GenTreeStrCon(unsigned sconCPX, CORINFO_MODULE_HANDLE mod DEBUGARG(bool largeNode = false)) |
2703 | : GenTree(GT_CNS_STR, TYP_REF DEBUGARG(largeNode)), gtSconCPX(sconCPX), gtScpHnd(mod) |
2704 | { |
2705 | } |
2706 | #if DEBUGGABLE_GENTREE |
2707 | GenTreeStrCon() : GenTree() |
2708 | { |
2709 | } |
2710 | #endif |
2711 | }; |
2712 | |
2713 | // Common supertype of LCL_VAR, LCL_FLD, REG_VAR, PHI_ARG |
2714 | // This inherits from UnOp because lclvar stores are Unops |
2715 | struct GenTreeLclVarCommon : public GenTreeUnOp |
2716 | { |
2717 | private: |
2718 | unsigned _gtLclNum; // The local number. An index into the Compiler::lvaTable array. |
2719 | unsigned _gtSsaNum; // The SSA number. |
2720 | |
2721 | public: |
2722 | GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum DEBUGARG(bool largeNode = false)) |
2723 | : GenTreeUnOp(oper, type DEBUGARG(largeNode)) |
2724 | { |
2725 | SetLclNum(lclNum); |
2726 | } |
2727 | |
2728 | unsigned GetLclNum() const |
2729 | { |
2730 | return _gtLclNum; |
2731 | } |
2732 | __declspec(property(get = GetLclNum)) unsigned gtLclNum; |
2733 | |
2734 | void SetLclNum(unsigned lclNum) |
2735 | { |
2736 | _gtLclNum = lclNum; |
2737 | _gtSsaNum = SsaConfig::RESERVED_SSA_NUM; |
2738 | } |
2739 | |
2740 | unsigned GetSsaNum() const |
2741 | { |
2742 | return _gtSsaNum; |
2743 | } |
2744 | __declspec(property(get = GetSsaNum)) unsigned gtSsaNum; |
2745 | |
2746 | void SetSsaNum(unsigned ssaNum) |
2747 | { |
2748 | _gtSsaNum = ssaNum; |
2749 | } |
2750 | |
2751 | bool HasSsaName() |
2752 | { |
2753 | return (gtSsaNum != SsaConfig::RESERVED_SSA_NUM); |
2754 | } |
2755 | |
2756 | #if DEBUGGABLE_GENTREE |
2757 | GenTreeLclVarCommon() : GenTreeUnOp() |
2758 | { |
2759 | } |
2760 | #endif |
2761 | }; |
2762 | |
2763 | // gtLclVar -- load/store/addr of local variable |
2764 | |
2765 | struct GenTreeLclVar : public GenTreeLclVarCommon |
2766 | { |
2767 | IL_OFFSET gtLclILoffs; // instr offset of ref (only for debug info) |
2768 | |
2769 | GenTreeLclVar(var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false)) |
2770 | : GenTreeLclVarCommon(GT_LCL_VAR, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs) |
2771 | { |
2772 | } |
2773 | |
2774 | GenTreeLclVar(genTreeOps oper, var_types type, unsigned lclNum, IL_OFFSET ilOffs DEBUGARG(bool largeNode = false)) |
2775 | : GenTreeLclVarCommon(oper, type, lclNum DEBUGARG(largeNode)), gtLclILoffs(ilOffs) |
2776 | { |
2777 | assert(OperIsLocal(oper) || OperIsLocalAddr(oper)); |
2778 | } |
2779 | |
2780 | #if DEBUGGABLE_GENTREE |
2781 | GenTreeLclVar() : GenTreeLclVarCommon() |
2782 | { |
2783 | } |
2784 | #endif |
2785 | }; |
2786 | |
2787 | // gtLclFld -- load/store/addr of local variable field |
2788 | |
2789 | struct GenTreeLclFld : public GenTreeLclVarCommon |
2790 | { |
2791 | unsigned gtLclOffs; // offset into the variable to access |
2792 | |
    FieldSeqNode* gtFieldSeq; // This LclFld node represents a sequence of field accesses.
2794 | |
2795 | // old/FE style constructor where load/store/addr share same opcode |
2796 | GenTreeLclFld(var_types type, unsigned lclNum, unsigned lclOffs) |
2797 | : GenTreeLclVarCommon(GT_LCL_FLD, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr) |
2798 | { |
2799 | assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]); |
2800 | } |
2801 | |
2802 | GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs) |
2803 | : GenTreeLclVarCommon(oper, type, lclNum), gtLclOffs(lclOffs), gtFieldSeq(nullptr) |
2804 | { |
2805 | assert(sizeof(*this) <= s_gtNodeSizes[GT_LCL_FLD]); |
2806 | } |
2807 | #if DEBUGGABLE_GENTREE |
2808 | GenTreeLclFld() : GenTreeLclVarCommon() |
2809 | { |
2810 | } |
2811 | #endif |
2812 | }; |
2813 | |
2814 | /* gtCast -- conversion to a different type (GT_CAST) */ |
2815 | |
2816 | struct GenTreeCast : public GenTreeOp |
2817 | { |
2818 | GenTree*& CastOp() |
2819 | { |
2820 | return gtOp1; |
2821 | } |
2822 | var_types gtCastType; |
2823 | |
2824 | GenTreeCast(var_types type, GenTree* op, bool fromUnsigned, var_types castType DEBUGARG(bool largeNode = false)) |
2825 | : GenTreeOp(GT_CAST, type, op, nullptr DEBUGARG(largeNode)), gtCastType(castType) |
2826 | { |
2827 | gtFlags |= fromUnsigned ? GTF_UNSIGNED : 0; |
2828 | } |
2829 | #if DEBUGGABLE_GENTREE |
2830 | GenTreeCast() : GenTreeOp() |
2831 | { |
2832 | } |
2833 | #endif |
2834 | }; |
2835 | |
2836 | // GT_BOX nodes are place markers for boxed values. The "real" tree |
2837 | // for most purposes is in gtBoxOp. |
2838 | struct GenTreeBox : public GenTreeUnOp |
2839 | { |
2840 | // An expanded helper call to implement the "box" if we don't get |
2841 | // rid of it any other way. Must be in same position as op1. |
2842 | |
2843 | GenTree*& BoxOp() |
2844 | { |
2845 | return gtOp1; |
2846 | } |
2847 | // This is the statement that contains the assignment tree when the node is an inlined GT_BOX on a value |
2848 | // type |
2849 | GenTree* gtAsgStmtWhenInlinedBoxValue; |
2850 | // And this is the statement that copies from the value being boxed to the box payload |
2851 | GenTree* gtCopyStmtWhenInlinedBoxValue; |
2852 | |
2853 | GenTreeBox(var_types type, |
2854 | GenTree* boxOp, |
2855 | GenTree* asgStmtWhenInlinedBoxValue, |
2856 | GenTree* copyStmtWhenInlinedBoxValue) |
2857 | : GenTreeUnOp(GT_BOX, type, boxOp) |
2858 | , gtAsgStmtWhenInlinedBoxValue(asgStmtWhenInlinedBoxValue) |
2859 | , gtCopyStmtWhenInlinedBoxValue(copyStmtWhenInlinedBoxValue) |
2860 | { |
2861 | } |
2862 | #if DEBUGGABLE_GENTREE |
2863 | GenTreeBox() : GenTreeUnOp() |
2864 | { |
2865 | } |
2866 | #endif |
2867 | }; |
2868 | |
2869 | /* gtField -- data member ref (GT_FIELD) */ |
2870 | |
2871 | struct GenTreeField : public GenTree |
2872 | { |
2873 | GenTree* gtFldObj; |
2874 | CORINFO_FIELD_HANDLE gtFldHnd; |
2875 | DWORD gtFldOffset; |
2876 | bool gtFldMayOverlap; |
2877 | #ifdef FEATURE_READYTORUN_COMPILER |
2878 | CORINFO_CONST_LOOKUP gtFieldLookup; |
2879 | #endif |
2880 | |
2881 | GenTreeField(var_types type, GenTree* obj, CORINFO_FIELD_HANDLE fldHnd, DWORD offs) |
2882 | : GenTree(GT_FIELD, type), gtFldObj(obj), gtFldHnd(fldHnd), gtFldOffset(offs), gtFldMayOverlap(false) |
2883 | { |
2884 | if (obj != nullptr) |
2885 | { |
2886 | gtFlags |= (obj->gtFlags & GTF_ALL_EFFECT); |
2887 | } |
2888 | |
2889 | #ifdef FEATURE_READYTORUN_COMPILER |
2890 | gtFieldLookup.addr = nullptr; |
2891 | #endif |
2892 | } |
2893 | #if DEBUGGABLE_GENTREE |
2894 | GenTreeField() : GenTree() |
2895 | { |
2896 | } |
2897 | #endif |
2898 | }; |
2899 | |
2900 | // Represents the Argument list of a call node, as a Lisp-style linked list. |
2901 | // (Originally I had hoped that this could have *only* the m_arg/m_rest fields, but it turns out |
2902 | // that enough of the GenTree mechanism is used that it makes sense just to make it a subtype. But |
2903 | // note that in many ways, this is *not* a "real" node of the tree, but rather a mechanism for |
2904 | // giving call nodes a flexible number of children. GenTreeArgListNodes never evaluate to registers, |
2905 | // for example.) |
2906 | |
2907 | // Note that while this extends GenTreeOp, it is *not* an EXOP. We don't add any new fields, and one |
2908 | // is free to allocate a GenTreeOp of type GT_LIST. If you use this type, you get the convenient Current/Rest |
2909 | // method names for the arguments. |
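//
// For example (a sketch): a three-argument list (a, b, c) forms the chain
//
//    GT_LIST(a, GT_LIST(b, GT_LIST(c, nullptr)))
//
// where each node's Current() is the argument and Rest() is the remainder.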
2910 | struct GenTreeArgList : public GenTreeOp |
2911 | { |
2912 | GenTree*& Current() |
2913 | { |
2914 | return gtOp1; |
2915 | } |
2916 | GenTreeArgList*& Rest() |
2917 | { |
2918 | assert(gtOp2 == nullptr || gtOp2->OperIsAnyList()); |
        return *reinterpret_cast<GenTreeArgList**>(&gtOp2);
2920 | } |
2921 | |
2922 | #if DEBUGGABLE_GENTREE |
2923 | GenTreeArgList() : GenTreeOp() |
2924 | { |
2925 | } |
2926 | #endif |
2927 | |
2928 | GenTreeArgList(GenTree* arg) : GenTreeArgList(arg, nullptr) |
2929 | { |
2930 | } |
2931 | |
2932 | GenTreeArgList(GenTree* arg, GenTreeArgList* rest) : GenTreeArgList(GT_LIST, arg, rest) |
2933 | { |
2934 | } |
2935 | |
2936 | GenTreeArgList(genTreeOps oper, GenTree* arg, GenTreeArgList* rest) : GenTreeOp(oper, TYP_VOID, arg, rest) |
2937 | { |
2938 | assert(OperIsAnyList(oper)); |
2939 | assert((arg != nullptr) && arg->IsValidCallArgument()); |
2940 | gtFlags |= arg->gtFlags & GTF_ALL_EFFECT; |
2941 | if (rest != nullptr) |
2942 | { |
2943 | gtFlags |= rest->gtFlags & GTF_ALL_EFFECT; |
2944 | } |
2945 | } |
2946 | }; |
2947 | |
2948 | // Represents a list of fields constituting a struct, when it is passed as an argument. |
2949 | // The first field of the struct is marked with the GTF_FIELD_LIST_HEAD flag, and |
2950 | // in LIR form it is the only member of the list that is threaded into the execution |
2951 | // order. |
2952 | // It differs from the GenTreeArgList in a couple of ways: |
2953 | // - The entire list represents a single argument. |
2954 | // - It contains additional fields to provide the offset and type of the field. |
2955 | // |
2956 | struct GenTreeFieldList : public GenTreeArgList |
2957 | { |
2958 | unsigned gtFieldOffset; |
2959 | var_types gtFieldType; |
2960 | |
2961 | bool IsFieldListHead() const |
2962 | { |
2963 | return (gtFlags & GTF_FIELD_LIST_HEAD) != 0; |
2964 | } |
2965 | |
2966 | #if DEBUGGABLE_GENTREE |
2967 | GenTreeFieldList() : GenTreeArgList() |
2968 | { |
2969 | } |
2970 | #endif |
2971 | |
2972 | GenTreeFieldList*& Rest() |
2973 | { |
2974 | assert(gtOp2 == nullptr || gtOp2->OperGet() == GT_FIELD_LIST); |
        return *reinterpret_cast<GenTreeFieldList**>(&gtOp2);
2976 | } |
2977 | |
2978 | GenTreeFieldList(GenTree* arg, unsigned fieldOffset, var_types fieldType, GenTreeFieldList* prevList) |
2979 | : GenTreeArgList(GT_FIELD_LIST, arg, nullptr) |
2980 | { |
2981 | // While GT_FIELD_LIST can be in a GT_LIST, GT_FIELD_LISTs cannot be nested or have GT_LISTs. |
2982 | assert(!arg->OperIsAnyList()); |
2983 | gtFieldOffset = fieldOffset; |
2984 | gtFieldType = fieldType; |
2985 | gtType = fieldType; |
2986 | if (prevList == nullptr) |
2987 | { |
2988 | gtFlags |= GTF_FIELD_LIST_HEAD; |
2989 | |
2990 | // A GT_FIELD_LIST head is always contained. Other nodes return false from IsValue() |
2991 | // and should not be marked as contained. |
2992 | SetContained(); |
2993 | } |
2994 | else |
2995 | { |
2996 | prevList->gtOp2 = this; |
2997 | } |
2998 | } |
2999 | }; |
3000 | |
3001 | // There was quite a bit of confusion in the code base about which of gtOp1 and gtOp2 was the |
3002 | // 'then' and 'else' clause of a colon node. Adding these accessors, while not enforcing anything, |
3003 | // at least *allows* the programmer to be obviously correct. |
3004 | // However, these conventions seem backward. |
3005 | // TODO-Cleanup: If we could get these accessors used everywhere, then we could switch them. |
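//
// For example (a sketch): for the ternary "cond ? x : y", the colon node is
// built as GenTreeColon(typ, /* thenNode */ x, /* elseNode */ y), which stores
// y in gtOp1 and x in gtOp2; hence ThenNode() returns gtOp2 and ElseNode()
// returns gtOp1.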
3006 | struct GenTreeColon : public GenTreeOp |
3007 | { |
3008 | GenTree*& ThenNode() |
3009 | { |
3010 | return gtOp2; |
3011 | } |
3012 | GenTree*& ElseNode() |
3013 | { |
3014 | return gtOp1; |
3015 | } |
3016 | |
3017 | #if DEBUGGABLE_GENTREE |
3018 | GenTreeColon() : GenTreeOp() |
3019 | { |
3020 | } |
3021 | #endif |
3022 | |
3023 | GenTreeColon(var_types typ, GenTree* thenNode, GenTree* elseNode) : GenTreeOp(GT_COLON, typ, elseNode, thenNode) |
3024 | { |
3025 | } |
3026 | }; |
3027 | |
3028 | // gtCall -- method call (GT_CALL) |
3029 | enum class InlineObservation; |
3030 | |
3031 | // Return type descriptor of a GT_CALL node. |
3032 | // x64 Unix, Arm64, Arm32 and x86 allow a value to be returned in multiple |
3033 | // registers. For such calls this struct provides the following info |
3034 | // on their return type |
3035 | // - type of value returned in each return register |
3036 | // - ABI return register numbers in which the value is returned |
3037 | // - count of return registers in which the value is returned |
3038 | // |
3039 | // TODO-ARM: Update this to meet the needs of Arm64 and Arm32 |
3040 | // |
3041 | // TODO-AllArch: Right now it is used for describing multi-reg returned types. |
3042 | // Eventually we would want to use it for describing even single-reg |
3043 | // returned types (e.g. structs returned in single register x64/arm). |
// This would allow us to avoid lying about, or normalizing, single
// struct return values in importer/morph.
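//
// For example (illustrative): on x64 unix a 16-byte struct { long a; double b; } is
// returned in RAX and XMM0, so m_regType would hold { TYP_LONG, TYP_DOUBLE } and
// GetReturnRegCount() would return 2.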
3046 | struct ReturnTypeDesc |
3047 | { |
3048 | private: |
3049 | var_types m_regType[MAX_RET_REG_COUNT]; |
3050 | bool m_isEnclosingType; |
3051 | |
3052 | #ifdef DEBUG |
3053 | bool m_inited; |
3054 | #endif |
3055 | |
3056 | public: |
3057 | ReturnTypeDesc() |
3058 | { |
3059 | Reset(); |
3060 | } |
3061 | |
3062 | // Initialize the Return Type Descriptor for a method that returns a struct type |
3063 | void InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd); |
3064 | |
3065 | // Initialize the Return Type Descriptor for a method that returns a TYP_LONG |
3066 | // Only needed for X86 |
3067 | void InitializeLongReturnType(Compiler* comp); |
3068 | |
3069 | // Reset type descriptor to defaults |
3070 | void Reset() |
3071 | { |
3072 | for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i) |
3073 | { |
3074 | m_regType[i] = TYP_UNKNOWN; |
3075 | } |
3076 | m_isEnclosingType = false; |
3077 | #ifdef DEBUG |
3078 | m_inited = false; |
3079 | #endif |
3080 | } |
3081 | |
3082 | #ifdef DEBUG |
3083 | // NOTE: we only use this function when writing out IR dumps. These dumps may take place before the ReturnTypeDesc |
3084 | // has been initialized. |
3085 | unsigned TryGetReturnRegCount() const |
3086 | { |
3087 | return m_inited ? GetReturnRegCount() : 0; |
3088 | } |
3089 | #endif // DEBUG |
3090 | |
3091 | //-------------------------------------------------------------------------------------------- |
3092 | // GetReturnRegCount: Get the count of return registers in which the return value is returned. |
3093 | // |
3094 | // Arguments: |
3095 | // None |
3096 | // |
3097 | // Return Value: |
3098 | // Count of return registers. |
3099 | // Returns 0 if the return type is not returned in registers. |
3100 | unsigned GetReturnRegCount() const |
3101 | { |
3102 | assert(m_inited); |
3103 | |
3104 | int regCount = 0; |
3105 | for (unsigned i = 0; i < MAX_RET_REG_COUNT; ++i) |
3106 | { |
3107 | if (m_regType[i] == TYP_UNKNOWN) |
3108 | { |
3109 | break; |
3110 | } |
3111 | // otherwise |
3112 | regCount++; |
3113 | } |
3114 | |
3115 | #ifdef DEBUG |
// Any remaining elements in m_regType[] should also be TYP_UNKNOWN
3117 | for (unsigned i = regCount + 1; i < MAX_RET_REG_COUNT; ++i) |
3118 | { |
3119 | assert(m_regType[i] == TYP_UNKNOWN); |
3120 | } |
3121 | #endif |
3122 | |
3123 | return regCount; |
3124 | } |
3125 | |
3126 | //----------------------------------------------------------------------- |
3127 | // IsMultiRegRetType: check whether the type is returned in multiple |
3128 | // return registers. |
3129 | // |
3130 | // Arguments: |
3131 | // None |
3132 | // |
3133 | // Return Value: |
3134 | // Returns true if the type is returned in multiple return registers. |
3135 | // False otherwise. |
3136 | // Note that we only have to examine the first two values to determine this |
3137 | // |
3138 | bool IsMultiRegRetType() const |
3139 | { |
3140 | if (MAX_RET_REG_COUNT < 2) |
3141 | { |
3142 | return false; |
3143 | } |
3144 | else |
3145 | { |
3146 | return ((m_regType[0] != TYP_UNKNOWN) && (m_regType[1] != TYP_UNKNOWN)); |
3147 | } |
3148 | } |
3149 | |
3150 | //-------------------------------------------------------------------------- |
3151 | // GetReturnRegType: Get var_type of the return register specified by index. |
3152 | // |
3153 | // Arguments: |
3154 | // index - Index of the return register. |
3155 | // First return register will have an index 0 and so on. |
3156 | // |
3157 | // Return Value: |
3158 | // var_type of the return register specified by its index. |
3159 | // asserts if the index does not have a valid register return type. |
3160 | |
3161 | var_types GetReturnRegType(unsigned index) |
3162 | { |
3163 | var_types result = m_regType[index]; |
3164 | assert(result != TYP_UNKNOWN); |
3165 | |
3166 | return result; |
3167 | } |
3168 | |
3169 | // True if this value is returned in integer register |
3170 | // that is larger than the type itself. |
3171 | bool IsEnclosingType() const |
3172 | { |
3173 | return m_isEnclosingType; |
3174 | } |
3175 | |
3176 | // Get ith ABI return register |
3177 | regNumber GetABIReturnReg(unsigned idx); |
3178 | |
3179 | // Get reg mask of ABI return registers |
3180 | regMaskTP GetABIReturnRegs(); |
3181 | }; |
3182 | |
3183 | class fgArgInfo; |
3184 | |
3185 | struct GenTreeCall final : public GenTree |
3186 | { |
3187 | GenTree* gtCallObjp; // The instance argument ('this' pointer) |
3188 | GenTreeArgList* gtCallArgs; // The list of arguments in original evaluation order |
3189 | GenTreeArgList* gtCallLateArgs; // On x86: The register arguments in an optimal order |
3190 | // On ARM/x64: - also includes any outgoing arg space arguments |
3191 | // - that were evaluated into a temp LclVar |
3192 | fgArgInfo* fgArgInfo; |
3193 | |
3194 | #if !FEATURE_FIXED_OUT_ARGS |
3195 | int regArgListCount; |
3196 | regList regArgList; |
3197 | #endif |
3198 | |
// TODO-Throughput: Revisit this; it used to be defined only when
// FEATURE_FIXED_OUT_ARGS was enabled, so defining it unconditionally makes GenTreeCall 4 bytes bigger on x86.
3201 | CORINFO_SIG_INFO* callSig; // Used by tail calls and to register callsites with the EE |
3202 | |
3203 | #if FEATURE_MULTIREG_RET |
3204 | |
3205 | // State required to support multi-reg returning call nodes. |
3206 | // For now it is enabled only for x64 unix. |
3207 | // |
3208 | // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns. |
3209 | ReturnTypeDesc gtReturnTypeDesc; |
3210 | |
// gtRegNum is always the first return reg.
3212 | // The following array holds the other reg numbers of multi-reg return. |
3213 | regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1]; |
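
// For example (illustrative): for a two-reg struct return on x64 unix, gtRegNum holds
// the first return reg (e.g. RAX) and gtOtherRegs[0] the second (e.g. RDX).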
3214 | |
3215 | // GTF_SPILL or GTF_SPILLED flag on a multi-reg call node indicates that one or |
3216 | // more of its result regs are in that state. The spill flag of each of the |
// return registers is stored here. We only need 2 bits per returned register,
3218 | // so this is treated as a 2-bit array. No architecture needs more than 8 bits. |
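//
// For example (illustrative), with MAX_RET_REG_COUNT == 4 the packed layout is
//   bits 0-1: reg 0, bits 2-3: reg 1, bits 4-5: reg 2, bits 6-7: reg 3,
// where each 2-bit field holds PACKED_GTF_SPILL and/or PACKED_GTF_SPILLED.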
3219 | |
3220 | static const unsigned PACKED_GTF_SPILL = 1; |
3221 | static const unsigned PACKED_GTF_SPILLED = 2; |
3222 | unsigned char gtSpillFlags; |
3223 | |
3224 | #endif // FEATURE_MULTIREG_RET |
3225 | |
3226 | //----------------------------------------------------------------------- |
// GetReturnTypeDesc: get the type descriptor of the return value of the call
3228 | // |
3229 | // Arguments: |
3230 | // None |
3231 | // |
3232 | // Returns |
3233 | // Type descriptor of the value returned by call |
3234 | // |
3235 | // Note: |
// Right now this is implemented only for x64 unix; it is yet to be
// implemented for the other multi-reg target architectures (Arm64/Arm32/x86).
3238 | // |
3239 | // TODO-AllArch: enable for all call nodes to unify single-reg and multi-reg returns. |
3240 | ReturnTypeDesc* GetReturnTypeDesc() |
3241 | { |
3242 | #if FEATURE_MULTIREG_RET |
return &gtReturnTypeDesc;
3244 | #else |
3245 | return nullptr; |
3246 | #endif |
3247 | } |
3248 | |
3249 | //--------------------------------------------------------------------------- |
3250 | // GetRegNumByIdx: get ith return register allocated to this call node. |
3251 | // |
3252 | // Arguments: |
3253 | // idx - index of the return register |
3254 | // |
3255 | // Return Value: |
3256 | // Return regNumber of ith return register of call node. |
3257 | // Returns REG_NA if there is no valid return register for the given index. |
3258 | // |
3259 | regNumber GetRegNumByIdx(unsigned idx) const |
3260 | { |
3261 | assert(idx < MAX_RET_REG_COUNT); |
3262 | |
3263 | if (idx == 0) |
3264 | { |
3265 | return gtRegNum; |
3266 | } |
3267 | |
3268 | #if FEATURE_MULTIREG_RET |
3269 | return (regNumber)gtOtherRegs[idx - 1]; |
3270 | #else |
3271 | return REG_NA; |
3272 | #endif |
3273 | } |
3274 | |
3275 | //---------------------------------------------------------------------- |
3276 | // SetRegNumByIdx: set ith return register of this call node |
3277 | // |
3278 | // Arguments: |
3279 | // reg - reg number |
3280 | // idx - index of the return register |
3281 | // |
3282 | // Return Value: |
3283 | // None |
3284 | // |
3285 | void SetRegNumByIdx(regNumber reg, unsigned idx) |
3286 | { |
3287 | assert(idx < MAX_RET_REG_COUNT); |
3288 | |
3289 | if (idx == 0) |
3290 | { |
3291 | gtRegNum = reg; |
3292 | } |
3293 | #if FEATURE_MULTIREG_RET |
3294 | else |
3295 | { |
3296 | gtOtherRegs[idx - 1] = (regNumberSmall)reg; |
3297 | assert(gtOtherRegs[idx - 1] == reg); |
3298 | } |
3299 | #else |
3300 | unreached(); |
3301 | #endif |
3302 | } |
3303 | |
3304 | //---------------------------------------------------------------------------- |
3305 | // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated |
3306 | // |
3307 | // Arguments: |
3308 | // None |
3309 | // |
3310 | // Return Value: |
3311 | // None |
3312 | // |
3313 | void ClearOtherRegs() |
3314 | { |
3315 | #if FEATURE_MULTIREG_RET |
3316 | for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i) |
3317 | { |
3318 | gtOtherRegs[i] = REG_NA; |
3319 | } |
3320 | #endif |
3321 | } |
3322 | |
3323 | //---------------------------------------------------------------------------- |
3324 | // CopyOtherRegs: copy multi-reg state from the given call node to this node |
3325 | // |
3326 | // Arguments: |
3327 | // fromCall - GenTreeCall node from which to copy multi-reg state |
3328 | // |
3329 | // Return Value: |
3330 | // None |
3331 | // |
3332 | void CopyOtherRegs(GenTreeCall* fromCall) |
3333 | { |
3334 | #if FEATURE_MULTIREG_RET |
3335 | for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i) |
3336 | { |
3337 | this->gtOtherRegs[i] = fromCall->gtOtherRegs[i]; |
3338 | } |
3339 | #endif |
3340 | } |
3341 | |
3342 | // Get reg mask of all the valid registers of gtOtherRegs array |
3343 | regMaskTP GetOtherRegMask() const; |
3344 | |
3345 | //---------------------------------------------------------------------- |
3346 | // GetRegSpillFlagByIdx: get spill flag associated with the return register |
3347 | // specified by its index. |
3348 | // |
3349 | // Arguments: |
3350 | // idx - Position or index of the return register |
3351 | // |
3352 | // Return Value: |
3353 | // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered. |
3354 | // |
3355 | unsigned GetRegSpillFlagByIdx(unsigned idx) const |
3356 | { |
3357 | static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE); |
3358 | assert(idx < MAX_RET_REG_COUNT); |
3359 | |
3360 | #if FEATURE_MULTIREG_RET |
3361 | unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here. |
3362 | unsigned spillFlags = 0; |
3363 | if (bits & PACKED_GTF_SPILL) |
3364 | { |
3365 | spillFlags |= GTF_SPILL; |
3366 | } |
3367 | if (bits & PACKED_GTF_SPILLED) |
3368 | { |
3369 | spillFlags |= GTF_SPILLED; |
3370 | } |
3371 | return spillFlags; |
3372 | #else |
assert(!"unreached");
3374 | return 0; |
3375 | #endif |
3376 | } |
3377 | |
3378 | //---------------------------------------------------------------------- |
3379 | // SetRegSpillFlagByIdx: set spill flags for the return register |
3380 | // specified by its index. |
3381 | // |
3382 | // Arguments: |
3383 | // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed. |
3384 | // idx - Position or index of the return register |
3385 | // |
3386 | // Return Value: |
3387 | // None |
3388 | // |
3389 | void SetRegSpillFlagByIdx(unsigned flags, unsigned idx) |
3390 | { |
3391 | static_assert_no_msg(MAX_RET_REG_COUNT * 2 <= sizeof(unsigned char) * BITS_PER_BYTE); |
3392 | assert(idx < MAX_RET_REG_COUNT); |
3393 | |
3394 | #if FEATURE_MULTIREG_RET |
3395 | unsigned bits = 0; |
3396 | if (flags & GTF_SPILL) |
3397 | { |
3398 | bits |= PACKED_GTF_SPILL; |
3399 | } |
3400 | if (flags & GTF_SPILLED) |
3401 | { |
3402 | bits |= PACKED_GTF_SPILLED; |
3403 | } |
3404 | |
3405 | const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED; |
3406 | |
3407 | // Clear anything that was already there by masking out the bits before 'or'ing in what we want there. |
3408 | gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2))); |
3409 | #else |
3410 | unreached(); |
3411 | #endif |
3412 | } |
3413 | |
3414 | //------------------------------------------------------------------- |
3415 | // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs |
3416 | // |
3417 | // Arguments: |
3418 | // None |
3419 | // |
3420 | // Return Value: |
3421 | // None |
3422 | void ClearOtherRegFlags() |
3423 | { |
3424 | #if FEATURE_MULTIREG_RET |
3425 | gtSpillFlags = 0; |
3426 | #endif |
3427 | } |
3428 | |
3429 | //------------------------------------------------------------------------- |
3430 | // CopyOtherRegFlags: copy GTF_* flags associated with gtOtherRegs from |
3431 | // the given call node. |
3432 | // |
3433 | // Arguments: |
3434 | // fromCall - GenTreeCall node from which to copy |
3435 | // |
3436 | // Return Value: |
3437 | // None |
3438 | // |
3439 | void CopyOtherRegFlags(GenTreeCall* fromCall) |
3440 | { |
3441 | #if FEATURE_MULTIREG_RET |
3442 | this->gtSpillFlags = fromCall->gtSpillFlags; |
3443 | #endif |
3444 | } |
3445 | |
3446 | // clang-format off |
3447 | |
3448 | #define GTF_CALL_M_EXPLICIT_TAILCALL 0x00000001 // GT_CALL -- the call is "tail" prefixed and |
3449 | // importer has performed tail call checks |
3450 | #define GTF_CALL_M_TAILCALL 0x00000002 // GT_CALL -- the call is a tailcall |
3451 | #define GTF_CALL_M_VARARGS 0x00000004 // GT_CALL -- the call uses varargs ABI |
3452 | #define GTF_CALL_M_RETBUFFARG 0x00000008 // GT_CALL -- first parameter is the return buffer argument |
3453 | #define GTF_CALL_M_DELEGATE_INV 0x00000010 // GT_CALL -- call to Delegate.Invoke |
#define GTF_CALL_M_NOGCCHECK 0x00000020 // GT_CALL -- not a call for computing full interruptibility
3455 | #define GTF_CALL_M_SPECIAL_INTRINSIC 0x00000040 // GT_CALL -- function that could be optimized as an intrinsic |
3456 | // in special cases. Used to optimize fast way out in morphing |
3457 | #define GTF_CALL_M_UNMGD_THISCALL 0x00000080 // GT_CALL -- "this" pointer (first argument) |
3458 | // should be enregistered (only for GTF_CALL_UNMANAGED) |
3459 | #define GTF_CALL_M_VIRTSTUB_REL_INDIRECT 0x00000080 // the virtstub is indirected through |
3460 | // a relative address (only for GTF_CALL_VIRT_STUB) |
3461 | #define GTF_CALL_M_NONVIRT_SAME_THIS 0x00000080 // GT_CALL -- callee "this" pointer is |
3462 | // equal to caller this pointer (only for GTF_CALL_NONVIRT) |
3463 | #define GTF_CALL_M_FRAME_VAR_DEATH 0x00000100 // GT_CALL -- the compLvFrameListRoot variable dies here (last use) |
3464 | #define GTF_CALL_M_TAILCALL_VIA_HELPER 0x00000200 // GT_CALL -- call is a tail call dispatched via tail call JIT helper. |
3465 | |
3466 | #if FEATURE_TAILCALL_OPT |
3467 | #define GTF_CALL_M_IMPLICIT_TAILCALL 0x00000400 // GT_CALL -- call is an opportunistic |
3468 | // tail call and importer has performed tail call checks |
3469 | #define GTF_CALL_M_TAILCALL_TO_LOOP 0x00000800 // GT_CALL -- call is a fast recursive tail call |
3470 | // that can be converted into a loop |
3471 | #endif |
3472 | |
3473 | #define GTF_CALL_M_PINVOKE 0x00001000 // GT_CALL -- call is a pinvoke. This mirrors VM flag CORINFO_FLG_PINVOKE. |
3474 | // A call marked as Pinvoke is not necessarily a GT_CALL_UNMANAGED. For e.g. |
3475 | // an IL Stub dynamically generated for a PInvoke declaration is flagged as |
3476 | // a Pinvoke but not as an unmanaged call. See impCheckForPInvokeCall() to |
3477 | // know when these flags are set. |
3478 | |
3479 | #define GTF_CALL_M_R2R_REL_INDIRECT 0x00002000 // GT_CALL -- ready to run call is indirected through a relative address |
3480 | #define GTF_CALL_M_DOES_NOT_RETURN 0x00004000 // GT_CALL -- call does not return |
3481 | #define GTF_CALL_M_SECURE_DELEGATE_INV 0x00008000 // GT_CALL -- call is in secure delegate |
3482 | #define GTF_CALL_M_FAT_POINTER_CHECK 0x00010000 // GT_CALL -- CoreRT managed calli needs transformation, that checks |
3483 | // special bit in calli address. If it is set, then it is necessary |
3484 | // to restore real function address and load hidden argument |
3485 | // as the first argument for calli. It is CoreRT replacement for instantiating |
3486 | // stubs, because executable code cannot be generated at runtime. |
3487 | #define GTF_CALL_M_HELPER_SPECIAL_DCE 0x00020000 // GT_CALL -- this helper call can be removed if it is part of a comma and |
3488 | // the comma result is unused. |
3489 | #define GTF_CALL_M_DEVIRTUALIZED 0x00040000 // GT_CALL -- this call was devirtualized |
3490 | #define GTF_CALL_M_UNBOXED 0x00080000 // GT_CALL -- this call was optimized to use the unboxed entry point |
3491 | #define GTF_CALL_M_GUARDED_DEVIRT 0x00100000 // GT_CALL -- this call is a candidate for guarded devirtualization |
3492 | #define GTF_CALL_M_GUARDED 0x00200000 // GT_CALL -- this call was transformed by guarded devirtualization |
3493 | #define GTF_CALL_M_ALLOC_SIDE_EFFECTS 0x00400000 // GT_CALL -- this is a call to an allocator with side effects |
3494 | |
3495 | // clang-format on |
3496 | |
3497 | bool IsUnmanaged() const |
3498 | { |
3499 | return (gtFlags & GTF_CALL_UNMANAGED) != 0; |
3500 | } |
3501 | bool NeedsNullCheck() const |
3502 | { |
3503 | return (gtFlags & GTF_CALL_NULLCHECK) != 0; |
3504 | } |
3505 | bool CallerPop() const |
3506 | { |
3507 | return (gtFlags & GTF_CALL_POP_ARGS) != 0; |
3508 | } |
3509 | bool IsVirtual() const |
3510 | { |
3511 | return (gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT; |
3512 | } |
3513 | bool IsVirtualStub() const |
3514 | { |
3515 | return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB; |
3516 | } |
3517 | bool IsVirtualVtable() const |
3518 | { |
3519 | return (gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_VTABLE; |
3520 | } |
3521 | bool IsInlineCandidate() const |
3522 | { |
3523 | return (gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0; |
3524 | } |
3525 | |
3526 | bool HasNonStandardAddedArgs(Compiler* compiler) const; |
3527 | int GetNonStandardAddedArgCount(Compiler* compiler) const; |
3528 | |
// Returns true if this call uses a retBuf argument as part of its calling convention
3530 | bool HasRetBufArg() const |
3531 | { |
3532 | return (gtCallMoreFlags & GTF_CALL_M_RETBUFFARG) != 0; |
3533 | } |
3534 | |
3535 | //------------------------------------------------------------------------- |
3536 | // TreatAsHasRetBufArg: |
3537 | // |
3538 | // Arguments: |
3539 | // compiler, the compiler instance so that we can call eeGetHelperNum |
3540 | // |
3541 | // Return Value: |
// Returns true if we treat the call as if it has a retBuf argument.
// The call may actually have a retBuf argument, or it could be a
// JIT helper that we are still transforming during the importer phase.
3546 | // |
3547 | // Notes: |
// On ARM64, marking the call with the GTF_CALL_M_RETBUFFARG flag
3549 | // will make HasRetBufArg() return true, but will also force the |
3550 | // use of register x8 to pass the RetBuf argument. |
3551 | // |
3552 | bool TreatAsHasRetBufArg(Compiler* compiler) const; |
3553 | |
3554 | //----------------------------------------------------------------------------------------- |
3555 | // HasMultiRegRetVal: whether the call node returns its value in multiple return registers. |
3556 | // |
3557 | // Arguments: |
3558 | // None |
3559 | // |
3560 | // Return Value: |
3561 | // True if the call is returning a multi-reg return value. False otherwise. |
3562 | // |
3563 | bool HasMultiRegRetVal() const |
3564 | { |
3565 | #if defined(_TARGET_X86_) |
3566 | return varTypeIsLong(gtType); |
3567 | #elif FEATURE_MULTIREG_RET && defined(_TARGET_ARM_) |
3568 | return varTypeIsLong(gtType) || (varTypeIsStruct(gtType) && !HasRetBufArg()); |
3569 | #elif FEATURE_MULTIREG_RET |
3570 | return varTypeIsStruct(gtType) && !HasRetBufArg(); |
3571 | #else |
3572 | return false; |
3573 | #endif |
3574 | } |
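
// For example (illustrative): on x86 a call returning TYP_LONG is multi-reg (the value
// comes back in an integer register pair), while on x64 unix a 16-byte struct returned
// without a retBuf argument is multi-reg.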
3575 | |
3576 | // Returns true if VM has flagged this method as CORINFO_FLG_PINVOKE. |
3577 | bool IsPInvoke() const |
3578 | { |
3579 | return (gtCallMoreFlags & GTF_CALL_M_PINVOKE) != 0; |
3580 | } |
3581 | |
// Note that the distinction between a "tail" prefixed call and an implicit tail call
// is maintained on a call node until fgMorphCall(), after which it will be
// either a tail call (i.e. IsTailCall() is true) or a non-tail call.
3585 | bool IsTailPrefixedCall() const |
3586 | { |
3587 | return (gtCallMoreFlags & GTF_CALL_M_EXPLICIT_TAILCALL) != 0; |
3588 | } |
3589 | |
// This method returning "true" implies that tail call flowgraph morphing has
// performed its final checks and committed to making a tail call.
3592 | bool IsTailCall() const |
3593 | { |
3594 | return (gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0; |
3595 | } |
3596 | |
// This method returning "true" implies that the importer has performed tail call checks
// and is providing a hint that this call can be converted to a tail call.
3599 | bool CanTailCall() const |
3600 | { |
3601 | return IsTailPrefixedCall() || IsImplicitTailCall(); |
3602 | } |
3603 | |
3604 | bool IsTailCallViaHelper() const |
3605 | { |
3606 | return IsTailCall() && (gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER); |
3607 | } |
3608 | |
3609 | #if FEATURE_FASTTAILCALL |
3610 | bool IsFastTailCall() const |
3611 | { |
3612 | return IsTailCall() && !(gtCallMoreFlags & GTF_CALL_M_TAILCALL_VIA_HELPER); |
3613 | } |
3614 | #else // !FEATURE_FASTTAILCALL |
3615 | bool IsFastTailCall() const |
3616 | { |
3617 | return false; |
3618 | } |
3619 | #endif // !FEATURE_FASTTAILCALL |
3620 | |
3621 | #if FEATURE_TAILCALL_OPT |
3622 | // Returns true if this is marked for opportunistic tail calling. |
3623 | // That is, can be tail called though not explicitly prefixed with "tail" prefix. |
3624 | bool IsImplicitTailCall() const |
3625 | { |
3626 | return (gtCallMoreFlags & GTF_CALL_M_IMPLICIT_TAILCALL) != 0; |
3627 | } |
3628 | bool IsTailCallConvertibleToLoop() const |
3629 | { |
3630 | return (gtCallMoreFlags & GTF_CALL_M_TAILCALL_TO_LOOP) != 0; |
3631 | } |
3632 | #else // !FEATURE_TAILCALL_OPT |
3633 | bool IsImplicitTailCall() const |
3634 | { |
3635 | return false; |
3636 | } |
3637 | bool IsTailCallConvertibleToLoop() const |
3638 | { |
3639 | return false; |
3640 | } |
3641 | #endif // !FEATURE_TAILCALL_OPT |
3642 | |
3643 | bool IsSameThis() const |
3644 | { |
3645 | return (gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS) != 0; |
3646 | } |
3647 | bool IsDelegateInvoke() const |
3648 | { |
3649 | return (gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) != 0; |
3650 | } |
3651 | bool IsVirtualStubRelativeIndir() const |
3652 | { |
3653 | return (gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0; |
3654 | } |
3655 | |
3656 | #ifdef FEATURE_READYTORUN_COMPILER |
3657 | bool IsR2RRelativeIndir() const |
3658 | { |
3659 | return (gtCallMoreFlags & GTF_CALL_M_R2R_REL_INDIRECT) != 0; |
3660 | } |
3661 | void setEntryPoint(CORINFO_CONST_LOOKUP entryPoint) |
3662 | { |
3663 | gtEntryPoint = entryPoint; |
3664 | if (gtEntryPoint.accessType == IAT_PVALUE) |
3665 | { |
3666 | gtCallMoreFlags |= GTF_CALL_M_R2R_REL_INDIRECT; |
3667 | } |
3668 | } |
3669 | #endif // FEATURE_READYTORUN_COMPILER |
3670 | |
3671 | bool IsVarargs() const |
3672 | { |
3673 | return (gtCallMoreFlags & GTF_CALL_M_VARARGS) != 0; |
3674 | } |
3675 | |
3676 | bool IsNoReturn() const |
3677 | { |
3678 | return (gtCallMoreFlags & GTF_CALL_M_DOES_NOT_RETURN) != 0; |
3679 | } |
3680 | |
3681 | bool IsFatPointerCandidate() const |
3682 | { |
3683 | return (gtCallMoreFlags & GTF_CALL_M_FAT_POINTER_CHECK) != 0; |
3684 | } |
3685 | |
3686 | bool IsGuardedDevirtualizationCandidate() const |
3687 | { |
3688 | return (gtCallMoreFlags & GTF_CALL_M_GUARDED_DEVIRT) != 0; |
3689 | } |
3690 | |
3691 | bool IsPure(Compiler* compiler) const; |
3692 | |
3693 | bool HasSideEffects(Compiler* compiler, bool ignoreExceptions = false, bool ignoreCctors = false) const; |
3694 | |
3695 | void ClearFatPointerCandidate() |
3696 | { |
3697 | gtCallMoreFlags &= ~GTF_CALL_M_FAT_POINTER_CHECK; |
3698 | } |
3699 | |
3700 | void SetFatPointerCandidate() |
3701 | { |
3702 | gtCallMoreFlags |= GTF_CALL_M_FAT_POINTER_CHECK; |
3703 | } |
3704 | |
3705 | bool IsDevirtualized() const |
3706 | { |
3707 | return (gtCallMoreFlags & GTF_CALL_M_DEVIRTUALIZED) != 0; |
3708 | } |
3709 | |
3710 | bool IsGuarded() const |
3711 | { |
3712 | return (gtCallMoreFlags & GTF_CALL_M_GUARDED) != 0; |
3713 | } |
3714 | |
3715 | bool IsUnboxed() const |
3716 | { |
3717 | return (gtCallMoreFlags & GTF_CALL_M_UNBOXED) != 0; |
3718 | } |
3719 | |
3720 | void ClearGuardedDevirtualizationCandidate() |
3721 | { |
3722 | gtCallMoreFlags &= ~GTF_CALL_M_GUARDED_DEVIRT; |
3723 | } |
3724 | |
3725 | void SetGuardedDevirtualizationCandidate() |
3726 | { |
3727 | gtCallMoreFlags |= GTF_CALL_M_GUARDED_DEVIRT; |
3728 | } |
3729 | |
3730 | void SetIsGuarded() |
3731 | { |
3732 | gtCallMoreFlags |= GTF_CALL_M_GUARDED; |
3733 | } |
3734 | |
3735 | unsigned gtCallMoreFlags; // in addition to gtFlags |
3736 | |
3737 | unsigned char gtCallType : 3; // value from the gtCallTypes enumeration |
3738 | unsigned char gtReturnType : 5; // exact return type |
3739 | |
3740 | CORINFO_CLASS_HANDLE gtRetClsHnd; // The return type handle of the call if it is a struct; always available |
3741 | |
3742 | union { |
3743 | // only used for CALLI unmanaged calls (CT_INDIRECT) |
3744 | GenTree* gtCallCookie; |
3745 | // gtInlineCandidateInfo is only used when inlining methods |
3746 | InlineCandidateInfo* gtInlineCandidateInfo; |
3747 | GuardedDevirtualizationCandidateInfo* gtGuardedDevirtualizationCandidateInfo; |
3748 | void* gtStubCallStubAddr; // GTF_CALL_VIRT_STUB - these are never inlined |
3749 | CORINFO_GENERIC_HANDLE compileTimeHelperArgumentHandle; // Used to track type handle argument of dynamic helpers |
3750 | void* gtDirectCallAddress; // Used to pass direct call address between lower and codegen |
3751 | }; |
3752 | |
3753 | // expression evaluated after args are placed which determines the control target |
3754 | GenTree* gtControlExpr; |
3755 | |
3756 | union { |
3757 | CORINFO_METHOD_HANDLE gtCallMethHnd; // CT_USER_FUNC |
3758 | GenTree* gtCallAddr; // CT_INDIRECT |
3759 | }; |
3760 | |
3761 | #ifdef FEATURE_READYTORUN_COMPILER |
3762 | // Call target lookup info for method call from a Ready To Run module |
3763 | CORINFO_CONST_LOOKUP gtEntryPoint; |
3764 | #endif |
3765 | |
3766 | #if defined(DEBUG) || defined(INLINE_DATA) |
3767 | // For non-inline candidates, track the first observation |
3768 | // that blocks candidacy. |
3769 | InlineObservation gtInlineObservation; |
3770 | |
3771 | // IL offset of the call wrt its parent method. |
3772 | IL_OFFSET gtRawILOffset; |
3773 | #endif // defined(DEBUG) || defined(INLINE_DATA) |
3774 | |
3775 | bool IsHelperCall() const |
3776 | { |
3777 | return gtCallType == CT_HELPER; |
3778 | } |
3779 | |
3780 | bool IsHelperCall(CORINFO_METHOD_HANDLE callMethHnd) const |
3781 | { |
3782 | return IsHelperCall() && (callMethHnd == gtCallMethHnd); |
3783 | } |
3784 | |
3785 | bool IsHelperCall(Compiler* compiler, unsigned helper) const; |
3786 | |
3787 | void ReplaceCallOperand(GenTree** operandUseEdge, GenTree* replacement); |
3788 | |
3789 | bool AreArgsComplete() const; |
3790 | |
3791 | GenTreeCall(var_types type) : GenTree(GT_CALL, type) |
3792 | { |
3793 | fgArgInfo = nullptr; |
3794 | } |
3795 | #if DEBUGGABLE_GENTREE |
3796 | GenTreeCall() : GenTree() |
3797 | { |
3798 | } |
3799 | #endif |
3800 | }; |
3801 | |
3802 | struct GenTreeCmpXchg : public GenTree |
3803 | { |
3804 | GenTree* gtOpLocation; |
3805 | GenTree* gtOpValue; |
3806 | GenTree* gtOpComparand; |
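
// Semantically (illustrative): atomically { old = *gtOpLocation; if (old == gtOpComparand)
// *gtOpLocation = gtOpValue; }, with the node's value being 'old' -- the shape produced
// for Interlocked.CompareExchange.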
3807 | |
3808 | GenTreeCmpXchg(var_types type, GenTree* loc, GenTree* val, GenTree* comparand) |
3809 | : GenTree(GT_CMPXCHG, type), gtOpLocation(loc), gtOpValue(val), gtOpComparand(comparand) |
3810 | { |
3811 | // There's no reason to do a compare-exchange on a local location, so we'll assume that all of these |
3812 | // have global effects. |
3813 | gtFlags |= (GTF_GLOB_REF | GTF_ASG); |
3814 | |
3815 | // Merge in flags from operands |
3816 | gtFlags |= gtOpLocation->gtFlags & GTF_ALL_EFFECT; |
3817 | gtFlags |= gtOpValue->gtFlags & GTF_ALL_EFFECT; |
3818 | gtFlags |= gtOpComparand->gtFlags & GTF_ALL_EFFECT; |
3819 | } |
3820 | #if DEBUGGABLE_GENTREE |
3821 | GenTreeCmpXchg() : GenTree() |
3822 | { |
3823 | } |
3824 | #endif |
3825 | }; |
3826 | |
3827 | #if !defined(_TARGET_64BIT_) |
3828 | struct GenTreeMultiRegOp : public GenTreeOp |
3829 | { |
3830 | regNumber gtOtherReg; |
3831 | |
3832 | // GTF_SPILL or GTF_SPILLED flag on a multi-reg node indicates that one or |
3833 | // more of its result regs are in that state. The spill flag of each of the |
// return registers is stored here. We only need 2 bits per returned register,
3835 | // so this is treated as a 2-bit array. No architecture needs more than 8 bits. |
3836 | |
3837 | static const unsigned PACKED_GTF_SPILL = 1; |
3838 | static const unsigned PACKED_GTF_SPILLED = 2; |
3839 | unsigned char gtSpillFlags; |
3840 | |
3841 | GenTreeMultiRegOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2) |
3842 | : GenTreeOp(oper, type, op1, op2), gtOtherReg(REG_NA) |
3843 | { |
3844 | ClearOtherRegFlags(); |
3845 | } |
3846 | |
3847 | unsigned GetRegCount() const |
3848 | { |
3849 | if (gtRegNum == REG_NA || gtRegNum == REG_STK) |
3850 | { |
3851 | return 0; |
3852 | } |
3853 | return (gtOtherReg == REG_NA || gtOtherReg == REG_STK) ? 1 : 2; |
3854 | } |
3855 | |
3856 | //--------------------------------------------------------------------------- |
3857 | // GetRegNumByIdx: get ith register allocated to this struct argument. |
3858 | // |
3859 | // Arguments: |
3860 | // idx - index of the register |
3861 | // |
3862 | // Return Value: |
3863 | // Return regNumber of ith register of this register argument |
3864 | // |
3865 | regNumber GetRegNumByIdx(unsigned idx) const |
3866 | { |
3867 | assert(idx < 2); |
3868 | |
3869 | if (idx == 0) |
3870 | { |
3871 | return gtRegNum; |
3872 | } |
3873 | |
3874 | return gtOtherReg; |
3875 | } |
3876 | |
3877 | //---------------------------------------------------------------------- |
3878 | // GetRegSpillFlagByIdx: get spill flag associated with the register |
3879 | // specified by its index. |
3880 | // |
3881 | // Arguments: |
3882 | // idx - Position or index of the register |
3883 | // |
3884 | // Return Value: |
3885 | // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered. |
3886 | // |
3887 | unsigned GetRegSpillFlagByIdx(unsigned idx) const |
3888 | { |
3889 | assert(idx < MAX_REG_ARG); |
3890 | |
3891 | unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here. |
3892 | unsigned spillFlags = 0; |
3893 | if (bits & PACKED_GTF_SPILL) |
3894 | { |
3895 | spillFlags |= GTF_SPILL; |
3896 | } |
3897 | if (bits & PACKED_GTF_SPILLED) |
3898 | { |
3899 | spillFlags |= GTF_SPILLED; |
3900 | } |
3901 | |
3902 | return spillFlags; |
3903 | } |
3904 | |
3905 | //---------------------------------------------------------------------- |
3906 | // SetRegSpillFlagByIdx: set spill flags for the register |
3907 | // specified by its index. |
3908 | // |
3909 | // Arguments: |
3910 | // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed. |
3911 | // idx - Position or index of the register |
3912 | // |
3913 | // Return Value: |
3914 | // None |
3915 | // |
3916 | void SetRegSpillFlagByIdx(unsigned flags, unsigned idx) |
3917 | { |
3918 | assert(idx < MAX_REG_ARG); |
3919 | |
3920 | unsigned bits = 0; |
3921 | if (flags & GTF_SPILL) |
3922 | { |
3923 | bits |= PACKED_GTF_SPILL; |
3924 | } |
3925 | if (flags & GTF_SPILLED) |
3926 | { |
3927 | bits |= PACKED_GTF_SPILLED; |
3928 | } |
3929 | |
3930 | const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED; |
3931 | |
3932 | // Clear anything that was already there by masking out the bits before 'or'ing in what we want there. |
3933 | gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2))); |
3934 | } |
3935 | |
3936 | //-------------------------------------------------------------------------- |
3937 | // GetRegType: Get var_type of the register specified by index. |
3938 | // |
3939 | // Arguments: |
3940 | // index - Index of the register. |
3941 | // First register will have an index 0 and so on. |
3942 | // |
3943 | // Return Value: |
3944 | // var_type of the register specified by its index. |
3945 | |
3946 | var_types GetRegType(unsigned index) |
3947 | { |
3948 | assert(index < 2); |
// The register type is usually the same as the GenTree type, since GenTreeMultiRegOp usually defines a single
// reg.
// The special case is TYP_LONG, which may be a MUL_LONG or a DOUBLE arg passed as LONG,
// in which case we report TYP_INT for each of the two registers.
3953 | var_types result = TypeGet(); |
3954 | if (result == TYP_LONG) |
3955 | { |
3956 | result = TYP_INT; |
3957 | } |
3958 | return result; |
3959 | } |
3960 | |
3961 | //------------------------------------------------------------------- |
3962 | // clearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs |
3963 | // |
3964 | // Arguments: |
3965 | // None |
3966 | // |
3967 | // Return Value: |
3968 | // None |
3969 | // |
3970 | void ClearOtherRegFlags() |
3971 | { |
3972 | gtSpillFlags = 0; |
3973 | } |
3974 | |
3975 | #if DEBUGGABLE_GENTREE |
3976 | GenTreeMultiRegOp() : GenTreeOp() |
3977 | { |
3978 | } |
3979 | #endif |
3980 | }; |
3981 | #endif // !defined(_TARGET_64BIT_) |
3982 | |
3983 | struct GenTreeFptrVal : public GenTree |
3984 | { |
3985 | CORINFO_METHOD_HANDLE gtFptrMethod; |
3986 | |
3987 | #ifdef FEATURE_READYTORUN_COMPILER |
3988 | CORINFO_CONST_LOOKUP gtEntryPoint; |
3989 | #endif |
3990 | |
3991 | GenTreeFptrVal(var_types type, CORINFO_METHOD_HANDLE meth) : GenTree(GT_FTN_ADDR, type), gtFptrMethod(meth) |
3992 | { |
3993 | } |
3994 | #if DEBUGGABLE_GENTREE |
3995 | GenTreeFptrVal() : GenTree() |
3996 | { |
3997 | } |
3998 | #endif |
3999 | }; |
4000 | |
4001 | /* gtQmark */ |
4002 | struct GenTreeQmark : public GenTreeOp |
4003 | { |
4004 | // The "Compiler*" argument is not a DEBUGARG here because we use it to keep track of the set of |
4005 | // (possible) QMark nodes. |
4006 | GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, class Compiler* comp); |
4007 | |
4008 | #if DEBUGGABLE_GENTREE |
4009 | GenTreeQmark() : GenTreeOp(GT_QMARK, TYP_INT, nullptr, nullptr) |
4010 | { |
4011 | } |
4012 | #endif |
4013 | }; |
4014 | |
4015 | /* gtIntrinsic -- intrinsic (possibly-binary op [NULL op2 is allowed] with an additional field) */ |
4016 | |
4017 | struct GenTreeIntrinsic : public GenTreeOp |
4018 | { |
4019 | CorInfoIntrinsics gtIntrinsicId; |
4020 | CORINFO_METHOD_HANDLE gtMethodHandle; // Method handle of the method which is treated as an intrinsic. |
4021 | |
4022 | #ifdef FEATURE_READYTORUN_COMPILER |
4023 | // Call target lookup info for method call from a Ready To Run module |
4024 | CORINFO_CONST_LOOKUP gtEntryPoint; |
4025 | #endif |
4026 | |
4027 | GenTreeIntrinsic(var_types type, GenTree* op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle) |
4028 | : GenTreeOp(GT_INTRINSIC, type, op1, nullptr), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle) |
4029 | { |
4030 | } |
4031 | |
4032 | GenTreeIntrinsic( |
4033 | var_types type, GenTree* op1, GenTree* op2, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle) |
4034 | : GenTreeOp(GT_INTRINSIC, type, op1, op2), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle) |
4035 | { |
4036 | } |
4037 | |
4038 | #if DEBUGGABLE_GENTREE |
4039 | GenTreeIntrinsic() : GenTreeOp() |
4040 | { |
4041 | } |
4042 | #endif |
4043 | }; |
4044 | |
4045 | struct GenTreeJitIntrinsic : public GenTreeOp |
4046 | { |
4047 | var_types gtSIMDBaseType; // SIMD vector base type |
4048 | unsigned gtSIMDSize; // SIMD vector size in bytes, use 0 for scalar intrinsics |
4049 | |
4050 | GenTreeJitIntrinsic(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2, var_types baseType, unsigned size) |
4051 | : GenTreeOp(oper, type, op1, op2), gtSIMDBaseType(baseType), gtSIMDSize(size) |
4052 | { |
4053 | } |
4054 | |
4055 | bool isSIMD() const |
4056 | { |
4057 | return gtSIMDSize != 0; |
4058 | } |
4059 | |
4060 | #if DEBUGGABLE_GENTREE |
4061 | GenTreeJitIntrinsic() : GenTreeOp() |
4062 | { |
4063 | } |
4064 | #endif |
4065 | }; |
4066 | |
4067 | #ifdef FEATURE_SIMD |
4068 | |
4069 | /* gtSIMD -- SIMD intrinsic (possibly-binary op [NULL op2 is allowed] with additional fields) */ |
4070 | struct GenTreeSIMD : public GenTreeJitIntrinsic |
4071 | { |
4072 | SIMDIntrinsicID gtSIMDIntrinsicID; // operation Id |
4073 | |
4074 | GenTreeSIMD(var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size) |
4075 | : GenTreeJitIntrinsic(GT_SIMD, type, op1, nullptr, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID) |
4076 | { |
4077 | } |
4078 | |
4079 | GenTreeSIMD( |
4080 | var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size) |
4081 | : GenTreeJitIntrinsic(GT_SIMD, type, op1, op2, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID) |
4082 | { |
4083 | } |
4084 | |
4085 | #if DEBUGGABLE_GENTREE |
4086 | GenTreeSIMD() : GenTreeJitIntrinsic() |
4087 | { |
4088 | } |
4089 | #endif |
4090 | }; |
4091 | #endif // FEATURE_SIMD |
4092 | |
4093 | #ifdef FEATURE_HW_INTRINSICS |
4094 | struct GenTreeHWIntrinsic : public GenTreeJitIntrinsic |
4095 | { |
4096 | NamedIntrinsic gtHWIntrinsicId; |
4097 | var_types gtIndexBaseType; // for AVX2 Gather* intrinsics |
4098 | |
4099 | GenTreeHWIntrinsic(var_types type, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size) |
4100 | : GenTreeJitIntrinsic(GT_HWIntrinsic, type, nullptr, nullptr, baseType, size) |
4101 | , gtHWIntrinsicId(hwIntrinsicID) |
4102 | , gtIndexBaseType(TYP_UNKNOWN) |
4103 | { |
4104 | } |
4105 | |
4106 | GenTreeHWIntrinsic(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size) |
4107 | : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, nullptr, baseType, size) |
4108 | , gtHWIntrinsicId(hwIntrinsicID) |
4109 | , gtIndexBaseType(TYP_UNKNOWN) |
4110 | { |
4111 | } |
4112 | |
4113 | GenTreeHWIntrinsic( |
4114 | var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned size) |
4115 | : GenTreeJitIntrinsic(GT_HWIntrinsic, type, op1, op2, baseType, size) |
4116 | , gtHWIntrinsicId(hwIntrinsicID) |
4117 | , gtIndexBaseType(TYP_UNKNOWN) |
4118 | { |
4119 | } |
4120 | |
// Note that HW Intrinsic instructions are a subclass of GenTreeOp, which only supports two operands.
// However, there are HW Intrinsic instructions that have 3 or even 4 operands; this is
// supported by using a single op1 and an ArgList for it: gtNewArgList(op1, op2, op3)

bool OperIsMemoryLoad(); // Returns true for the HW Intrinsic instructions that have MemoryLoad semantics,
// false otherwise
bool OperIsMemoryStore(); // Returns true for the HW Intrinsic instructions that have MemoryStore semantics,
// false otherwise
bool OperIsMemoryLoadOrStore(); // Returns true for the HW Intrinsic instructions that have MemoryLoad or
// MemoryStore semantics, false otherwise
4131 | |
4132 | #if DEBUGGABLE_GENTREE |
4133 | GenTreeHWIntrinsic() : GenTreeJitIntrinsic() |
4134 | { |
4135 | } |
4136 | #endif |
4137 | }; |
4138 | |
4139 | inline bool GenTree::OperIsSimdHWIntrinsic() const |
4140 | { |
4141 | if (gtOper == GT_HWIntrinsic) |
4142 | { |
4143 | return this->AsHWIntrinsic()->isSIMD(); |
4144 | } |
4145 | return false; |
4146 | } |
4147 | #endif // FEATURE_HW_INTRINSICS |
4148 | |
4149 | /* gtIndex -- array access */ |
4150 | |
4151 | struct GenTreeIndex : public GenTreeOp |
4152 | { |
4153 | GenTree*& Arr() |
4154 | { |
4155 | return gtOp1; |
4156 | } |
4157 | GenTree*& Index() |
4158 | { |
4159 | return gtOp2; |
4160 | } |
4161 | |
4162 | unsigned gtIndElemSize; // size of elements in the array |
4163 | CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type. |
4164 | |
4165 | GenTreeIndex(var_types type, GenTree* arr, GenTree* ind, unsigned indElemSize) |
4166 | : GenTreeOp(GT_INDEX, type, arr, ind) |
4167 | , gtIndElemSize(indElemSize) |
4168 | , gtStructElemClass(nullptr) // We always initialize this after construction. |
4169 | { |
4170 | #ifdef DEBUG |
4171 | if (JitConfig.JitSkipArrayBoundCheck() == 1) |
4172 | { |
4173 | // Skip bounds check |
4174 | } |
4175 | else |
4176 | #endif |
4177 | { |
4178 | // Do bounds check |
4179 | gtFlags |= GTF_INX_RNGCHK; |
4180 | } |
4181 | |
4182 | if (type == TYP_REF) |
4183 | { |
4184 | gtFlags |= GTF_INX_REFARR_LAYOUT; |
4185 | } |
4186 | |
4187 | gtFlags |= GTF_EXCEPT | GTF_GLOB_REF; |
4188 | } |
4189 | #if DEBUGGABLE_GENTREE |
4190 | GenTreeIndex() : GenTreeOp() |
4191 | { |
4192 | } |
4193 | #endif |
4194 | }; |
4195 | |
4196 | // gtIndexAddr: given an array object and an index, checks that the index is within the bounds of the array if |
4197 | // necessary and produces the address of the value at that index of the array. |
4198 | struct GenTreeIndexAddr : public GenTreeOp |
4199 | { |
4200 | GenTree*& Arr() |
4201 | { |
4202 | return gtOp1; |
4203 | } |
4204 | GenTree*& Index() |
4205 | { |
4206 | return gtOp2; |
4207 | } |
4208 | |
4209 | CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type. |
4210 | |
4211 | GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range |
4212 | |
4213 | var_types gtElemType; // The element type of the array. |
4214 | unsigned gtElemSize; // size of elements in the array |
4215 | unsigned gtLenOffset; // The offset from the array's base address to its length. |
4216 | unsigned gtElemOffset; // The offset from the array's base address to its first element. |
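
// Illustratively, the address produced is: Arr() + gtElemOffset + Index() * gtElemSize,
// computed after the bounds check of Index() against the length stored at Arr() + gtLenOffset.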
4217 | |
4218 | GenTreeIndexAddr(GenTree* arr, |
4219 | GenTree* ind, |
4220 | var_types elemType, |
4221 | CORINFO_CLASS_HANDLE structElemClass, |
4222 | unsigned elemSize, |
4223 | unsigned lenOffset, |
4224 | unsigned elemOffset) |
4225 | : GenTreeOp(GT_INDEX_ADDR, TYP_BYREF, arr, ind) |
4226 | , gtStructElemClass(structElemClass) |
4227 | , gtIndRngFailBB(nullptr) |
4228 | , gtElemType(elemType) |
4229 | , gtElemSize(elemSize) |
4230 | , gtLenOffset(lenOffset) |
4231 | , gtElemOffset(elemOffset) |
4232 | { |
4233 | #ifdef DEBUG |
4234 | if (JitConfig.JitSkipArrayBoundCheck() == 1) |
4235 | { |
4236 | // Skip bounds check |
4237 | } |
4238 | else |
4239 | #endif |
4240 | { |
4241 | // Do bounds check |
4242 | gtFlags |= GTF_INX_RNGCHK; |
4243 | } |
4244 | |
4245 | gtFlags |= GTF_EXCEPT | GTF_GLOB_REF; |
4246 | } |
4247 | |
4248 | #if DEBUGGABLE_GENTREE |
4249 | GenTreeIndexAddr() : GenTreeOp() |
4250 | { |
4251 | } |
4252 | #endif |
4253 | }; |
4254 | |
4255 | /* gtArrLen -- array length (GT_ARR_LENGTH) |
4256 | GT_ARR_LENGTH is used for "arr.length" */ |
4257 | |
4258 | struct GenTreeArrLen : public GenTreeUnOp |
4259 | { |
4260 | GenTree*& ArrRef() |
4261 | { |
4262 | return gtOp1; |
4263 | } // the array address node |
4264 | private: |
4265 | int gtArrLenOffset; // constant to add to "gtArrRef" to get the address of the array length. |
4266 | |
4267 | public: |
4268 | inline int ArrLenOffset() |
4269 | { |
4270 | return gtArrLenOffset; |
4271 | } |
4272 | |
4273 | GenTreeArrLen(var_types type, GenTree* arrRef, int lenOffset) |
4274 | : GenTreeUnOp(GT_ARR_LENGTH, type, arrRef), gtArrLenOffset(lenOffset) |
4275 | { |
4276 | } |
4277 | |
4278 | #if DEBUGGABLE_GENTREE |
4279 | GenTreeArrLen() : GenTreeUnOp() |
4280 | { |
4281 | } |
4282 | #endif |
4283 | }; |
4284 | |
4285 | // This takes: |
4286 | // - a comparison value (generally an array length), |
4287 | // - an index value, and |
4288 | // - the label to jump to if the index is out of range. |
4289 | // - the "kind" of the throw block to branch to on failure |
4290 | // It generates no result. |
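// For example (illustrative), for "arr[i]" the importer emits a bounds check whose
// gtIndex is "i", whose gtArrLen is GT_ARR_LENGTH(arr), and whose gtThrowKind is
// SCK_RNGCHK_FAIL.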
4291 | |
4292 | struct GenTreeBoundsChk : public GenTree |
4293 | { |
4294 | GenTree* gtIndex; // The index expression. |
4295 | GenTree* gtArrLen; // An expression for the length of the array being indexed. |
4296 | |
4297 | GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range |
4298 | SpecialCodeKind gtThrowKind; // Kind of throw block to branch to on failure |
4299 | |
4300 | GenTreeBoundsChk(genTreeOps oper, var_types type, GenTree* index, GenTree* arrLen, SpecialCodeKind kind) |
4301 | : GenTree(oper, type), gtIndex(index), gtArrLen(arrLen), gtIndRngFailBB(nullptr), gtThrowKind(kind) |
4302 | { |
4303 | // Effects flags propagate upwards. |
4304 | gtFlags |= (index->gtFlags & GTF_ALL_EFFECT); |
4305 | gtFlags |= (arrLen->gtFlags & GTF_ALL_EFFECT); |
4306 | gtFlags |= GTF_EXCEPT; |
4307 | } |
4308 | #if DEBUGGABLE_GENTREE |
4309 | GenTreeBoundsChk() : GenTree() |
4310 | { |
4311 | } |
4312 | #endif |
4313 | |
// If gtArrLen is really an array length, returns the array reference, else nullptr.
4315 | GenTree* GetArray() |
4316 | { |
4317 | if (gtArrLen->OperGet() == GT_ARR_LENGTH) |
4318 | { |
4319 | return gtArrLen->gtArrLen.ArrRef(); |
4320 | } |
4321 | else |
4322 | { |
4323 | return nullptr; |
4324 | } |
4325 | } |
4326 | }; |
4327 | |
4328 | // gtArrElem -- general array element (GT_ARR_ELEM), for non "SZ_ARRAYS" |
4329 | // -- multidimensional arrays, or 1-d arrays with non-zero lower bounds. |
4330 | |
4331 | struct GenTreeArrElem : public GenTree |
4332 | { |
4333 | GenTree* gtArrObj; |
4334 | |
4335 | #define GT_ARR_MAX_RANK 3 |
4336 | GenTree* gtArrInds[GT_ARR_MAX_RANK]; // Indices |
4337 | unsigned char gtArrRank; // Rank of the array |
4338 | |
4339 | unsigned char gtArrElemSize; // !!! Caution, this is an "unsigned char", it is used only |
// on the optimization path of array intrinsics.
4341 | // It stores the size of array elements WHEN it can fit |
4342 | // into an "unsigned char". |
4343 | // This has caused VSW 571394. |
4344 | var_types gtArrElemType; // The array element type |
4345 | |
4346 | // Requires that "inds" is a pointer to an array of "rank" GenTreePtrs for the indices. |
4347 | GenTreeArrElem( |
4348 | var_types type, GenTree* arr, unsigned char rank, unsigned char elemSize, var_types elemType, GenTree** inds) |
4349 | : GenTree(GT_ARR_ELEM, type), gtArrObj(arr), gtArrRank(rank), gtArrElemSize(elemSize), gtArrElemType(elemType) |
4350 | { |
4351 | gtFlags |= (arr->gtFlags & GTF_ALL_EFFECT); |
4352 | for (unsigned char i = 0; i < rank; i++) |
4353 | { |
4354 | gtArrInds[i] = inds[i]; |
4355 | gtFlags |= (inds[i]->gtFlags & GTF_ALL_EFFECT); |
4356 | } |
4357 | gtFlags |= GTF_EXCEPT; |
4358 | } |
4359 | #if DEBUGGABLE_GENTREE |
4360 | GenTreeArrElem() : GenTree() |
4361 | { |
4362 | } |
4363 | #endif |
4364 | }; |
4365 | |
4366 | //-------------------------------------------- |
4367 | // |
4368 | // GenTreeArrIndex (gtArrIndex): Expression to bounds-check the index for one dimension of a |
// multi-dimensional or non-zero-based array, and to compute the effective index
4370 | // (i.e. subtracting the lower bound). |
4371 | // |
4372 | // Notes: |
4373 | // This node is similar in some ways to GenTreeBoundsChk, which ONLY performs the check. |
4374 | // The reason that this node incorporates the check into the effective index computation is |
4375 | // to avoid duplicating the codegen, as the effective index is required to compute the |
4376 | // offset anyway. |
4377 | // TODO-CQ: Enable optimization of the lower bound and length by replacing this: |
4378 | // /--* <arrObj> |
4379 | // +--* <index0> |
4380 | // +--* ArrIndex[i, ] |
4381 | // with something like: |
4382 | // /--* <arrObj> |
4383 | // /--* ArrLowerBound[i, ] |
4384 | // | /--* <arrObj> |
4385 | // +--* ArrLen[i, ] (either generalize GT_ARR_LENGTH or add a new node) |
4386 | // +--* <index0> |
4387 | // +--* ArrIndex[i, ] |
4388 | // Which could, for example, be optimized to the following when known to be within bounds: |
4389 | // /--* TempForLowerBoundDim0 |
4390 | // +--* <index0> |
4391 | // +--* - (GT_SUB) |
4392 | // |
4393 | struct GenTreeArrIndex : public GenTreeOp |
4394 | { |
4395 | // The array object - may be any expression producing an Array reference, but is likely to be a lclVar. |
4396 | GenTree*& ArrObj() |
4397 | { |
4398 | return gtOp1; |
4399 | } |
4400 | // The index expression - may be any integral expression. |
4401 | GenTree*& IndexExpr() |
4402 | { |
4403 | return gtOp2; |
4404 | } |
4405 | unsigned char gtCurrDim; // The current dimension |
4406 | unsigned char gtArrRank; // Rank of the array |
4407 | var_types gtArrElemType; // The array element type |
4408 | |
4409 | GenTreeArrIndex(var_types type, |
4410 | GenTree* arrObj, |
4411 | GenTree* indexExpr, |
4412 | unsigned char currDim, |
4413 | unsigned char arrRank, |
4414 | var_types elemType) |
4415 | : GenTreeOp(GT_ARR_INDEX, type, arrObj, indexExpr) |
4416 | , gtCurrDim(currDim) |
4417 | , gtArrRank(arrRank) |
4418 | , gtArrElemType(elemType) |
4419 | { |
4420 | gtFlags |= GTF_EXCEPT; |
4421 | } |
4422 | #if DEBUGGABLE_GENTREE |
4423 | protected: |
4424 | friend GenTree; |
4425 | // Used only for GenTree::GetVtableForOper() |
4426 | GenTreeArrIndex() : GenTreeOp() |
4427 | { |
4428 | } |
4429 | #endif |
4430 | }; |
4431 | |
4432 | //-------------------------------------------- |
4433 | // |
4434 | // GenTreeArrOffset (gtArrOffset): Expression to compute the accumulated offset for the address |
4435 | // of an element of a multi-dimensional or non-zero-based array. |
4436 | // |
4437 | // Notes: |
4438 | // The result of this expression is (gtOffset * dimSize) + gtIndex |
4439 | // where dimSize is the length/stride/size of the dimension, and is obtained from gtArrObj. |
4440 | // This node is generated in conjunction with the GenTreeArrIndex node, which computes the |
4441 | // effective index for a single dimension. The sub-trees can be separately optimized, e.g. |
4442 | // within a loop body where the expression for the 0th dimension may be invariant. |
4443 | // |
4444 | // Here is an example of how the tree might look for a two-dimension array reference: |
4445 | // /--* const 0 |
4446 | // | /--* <arrObj> |
4447 | // | +--* <index0> |
4448 | // +--* ArrIndex[i, ] |
4449 | // +--* <arrObj> |
4450 | // /--| arrOffs[i, ] |
4451 | // | +--* <arrObj> |
4452 | // | +--* <index1> |
4453 | // +--* ArrIndex[*,j] |
4454 | // +--* <arrObj> |
4455 | // /--| arrOffs[*,j] |
4456 | // TODO-CQ: see comment on GenTreeArrIndex for how its representation may change. When that |
4457 | // is done, we will also want to replace the <arrObj> argument to arrOffs with the |
4458 | // ArrLen as for GenTreeArrIndex. |
4459 | // |
4460 | struct GenTreeArrOffs : public GenTree |
4461 | { |
4462 | GenTree* gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and |
4463 | // will either be a CSE temp, the constant 0, or another GenTreeArrOffs node. |
4464 | GenTree* gtIndex; // The effective index for the current dimension - must be non-negative |
4465 | // and can be any expression (though it is likely to be either a GenTreeArrIndex, |
4466 | // node, a lclVar, or a constant). |
4467 | GenTree* gtArrObj; // The array object - may be any expression producing an Array reference, |
4468 | // but is likely to be a lclVar. |
4469 | unsigned char gtCurrDim; // The current dimension |
4470 | unsigned char gtArrRank; // Rank of the array |
4471 | var_types gtArrElemType; // The array element type |
4472 | |
4473 | GenTreeArrOffs(var_types type, |
4474 | GenTree* offset, |
4475 | GenTree* index, |
4476 | GenTree* arrObj, |
4477 | unsigned char currDim, |
4478 | unsigned char rank, |
4479 | var_types elemType) |
4480 | : GenTree(GT_ARR_OFFSET, type) |
4481 | , gtOffset(offset) |
4482 | , gtIndex(index) |
4483 | , gtArrObj(arrObj) |
4484 | , gtCurrDim(currDim) |
4485 | , gtArrRank(rank) |
4486 | , gtArrElemType(elemType) |
4487 | { |
4488 | assert(index->gtFlags & GTF_EXCEPT); |
4489 | gtFlags |= GTF_EXCEPT; |
4490 | } |
4491 | #if DEBUGGABLE_GENTREE |
4492 | GenTreeArrOffs() : GenTree() |
4493 | { |
4494 | } |
4495 | #endif |
4496 | }; |
4497 | |
4498 | /* gtAddrMode -- Target-specific canonicalized addressing expression (GT_LEA) */ |
4499 | |
4500 | struct GenTreeAddrMode : public GenTreeOp |
4501 | { |
4502 | // Address is Base + Index*Scale + Offset. |
4503 | // These are the legal patterns: |
4504 | // |
4505 | // Base // Base != nullptr && Index == nullptr && Scale == 0 && Offset == 0 |
4506 | // Base + Index*Scale // Base != nullptr && Index != nullptr && Scale != 0 && Offset == 0 |
4507 | // Base + Offset // Base != nullptr && Index == nullptr && Scale == 0 && Offset != 0 |
4508 | // Base + Index*Scale + Offset // Base != nullptr && Index != nullptr && Scale != 0 && Offset != 0 |
4509 | // Index*Scale // Base == nullptr && Index != nullptr && Scale > 1 && Offset == 0 |
4510 | // Index*Scale + Offset // Base == nullptr && Index != nullptr && Scale > 1 && Offset != 0 |
4511 | // Offset // Base == nullptr && Index == nullptr && Scale == 0 && Offset != 0 |
4512 | // |
4513 | // So, for example: |
4514 | // 1. Base + Index is legal with Scale==1 |
// 2. If Index is null, Scale should be zero (or uninitialized / unused)
4516 | // 3. If Scale==1, then we should have "Base" instead of "Index*Scale", and "Base + Offset" instead of |
4517 | // "Index*Scale + Offset". |
4518 | |
4519 | // First operand is base address/pointer |
4520 | bool HasBase() const |
4521 | { |
4522 | return gtOp1 != nullptr; |
4523 | } |
4524 | GenTree*& Base() |
4525 | { |
4526 | return gtOp1; |
4527 | } |
4528 | |
4529 | // Second operand is scaled index value |
4530 | bool HasIndex() const |
4531 | { |
4532 | return gtOp2 != nullptr; |
4533 | } |
4534 | GenTree*& Index() |
4535 | { |
4536 | return gtOp2; |
4537 | } |
4538 | |
4539 | int Offset() |
4540 | { |
4541 | return static_cast<int>(gtOffset); |
4542 | } |
4543 | |
4544 | unsigned gtScale; // The scale factor |
4545 | |
4546 | private: |
4547 | ssize_t gtOffset; // The offset to add |
4548 | |
4549 | public: |
4550 | GenTreeAddrMode(var_types type, GenTree* base, GenTree* index, unsigned scale, ssize_t offset) |
4551 | : GenTreeOp(GT_LEA, type, base, index) |
4552 | { |
4553 | assert(base != nullptr || index != nullptr); |
4554 | gtScale = scale; |
4555 | gtOffset = offset; |
4556 | } |
4557 | #if DEBUGGABLE_GENTREE |
4558 | protected: |
4559 | friend GenTree; |
4560 | // Used only for GenTree::GetVtableForOper() |
4561 | GenTreeAddrMode() : GenTreeOp() |
4562 | { |
4563 | } |
4564 | #endif |
4565 | }; |
4566 | |
4567 | // Indir is just an op, no additional data, but some additional abstractions |
4568 | struct GenTreeIndir : public GenTreeOp |
4569 | { |
4570 | // The address for the indirection. |
4571 | // Since GenTreeDynBlk derives from this, but is an "EXOP" (i.e. it has extra fields), |
4572 | // we can't access Op1 and Op2 in the normal manner if we may have a DynBlk. |
4573 | GenTree*& Addr() |
4574 | { |
4575 | return gtOp1; |
4576 | } |
4577 | |
// These methods provide an interface to the indirection's address expression, which may be a GT_LEA address mode.
4579 | bool HasBase(); |
4580 | bool HasIndex(); |
4581 | GenTree* Base(); |
4582 | GenTree* Index(); |
4583 | unsigned Scale(); |
4584 | ssize_t Offset(); |
4585 | |
4586 | GenTreeIndir(genTreeOps oper, var_types type, GenTree* addr, GenTree* data) : GenTreeOp(oper, type, addr, data) |
4587 | { |
4588 | } |
4589 | |
4590 | #if DEBUGGABLE_GENTREE |
4591 | protected: |
4592 | friend GenTree; |
4593 | // Used only for GenTree::GetVtableForOper() |
4594 | GenTreeIndir() : GenTreeOp() |
4595 | { |
4596 | } |
4597 | #endif |
4598 | }; |
4599 | |
4600 | // gtBlk -- 'block' (GT_BLK, GT_STORE_BLK). |
4601 | // |
4602 | // This is the base type for all of the nodes that represent block or struct |
4603 | // values. |
4604 | // Since it can be a store, it includes gtBlkOpKind to specify the type of |
4605 | // code generation that will be used for the block operation. |
4606 | |
4607 | struct GenTreeBlk : public GenTreeIndir |
4608 | { |
4609 | public: |
4610 | // The data to be stored (null for GT_BLK) |
4611 | GenTree*& Data() |
4612 | { |
4613 | return gtOp2; |
4614 | } |
4615 | void SetData(GenTree* dataNode) |
4616 | { |
4617 | gtOp2 = dataNode; |
4618 | } |
4619 | |
4620 | // The size of the buffer to be copied. |
4621 | unsigned Size() const |
4622 | { |
4623 | return gtBlkSize; |
4624 | } |
4625 | |
4626 | unsigned gtBlkSize; |
4627 | |
4628 | // Return true iff the object being copied contains one or more GC pointers. |
4629 | bool HasGCPtr(); |
4630 | |
4631 | // True if this BlkOpNode is a volatile memory operation. |
4632 | bool IsVolatile() const |
4633 | { |
4634 | return (gtFlags & GTF_BLK_VOLATILE) != 0; |
4635 | } |
4636 | |
4637 | // True if this BlkOpNode is an unaligned memory operation. |
4638 | bool IsUnaligned() const |
4639 | { |
4640 | return (gtFlags & GTF_BLK_UNALIGNED) != 0; |
4641 | } |
4642 | |
    // Instruction selection: the code sequence that codegen will use to
    // encode this operation.
    enum
    {
        BlkOpKindInvalid,  // Not yet determined.
        BlkOpKindHelper,   // Call a JIT helper to perform the operation.
        BlkOpKindRepInstr, // Use a rep stos/movs style instruction sequence.
        BlkOpKindUnroll,   // Emit an unrolled sequence of loads and stores.
    } gtBlkOpKind;
4652 | |
    bool gtBlkOpGcUnsafe; // True if this block op must execute without GC (codegen emits it in a no-GC region).
4654 | |
4655 | GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, unsigned size) |
4656 | : GenTreeIndir(oper, type, addr, nullptr) |
4657 | , gtBlkSize(size) |
4658 | , gtBlkOpKind(BlkOpKindInvalid) |
4659 | , gtBlkOpGcUnsafe(false) |
4660 | { |
4661 | assert(OperIsBlk(oper)); |
4662 | gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT); |
4663 | } |
4664 | |
4665 | GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, GenTree* data, unsigned size) |
4666 | : GenTreeIndir(oper, type, addr, data), gtBlkSize(size), gtBlkOpKind(BlkOpKindInvalid), gtBlkOpGcUnsafe(false) |
4667 | { |
4668 | assert(OperIsBlk(oper)); |
4669 | gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT); |
4670 | gtFlags |= (data->gtFlags & GTF_ALL_EFFECT); |
4671 | } |
4672 | |
4673 | #if DEBUGGABLE_GENTREE |
4674 | protected: |
4675 | friend GenTree; |
4676 | GenTreeBlk() : GenTreeIndir() |
4677 | { |
4678 | } |
4679 | #endif // DEBUGGABLE_GENTREE |
4680 | }; |
4681 | |
4682 | // gtObj -- 'object' (GT_OBJ). |
4683 | // |
4684 | // This node is used for block values that may have GC pointers. |
4685 | |
4686 | struct GenTreeObj : public GenTreeBlk |
4687 | { |
4688 | CORINFO_CLASS_HANDLE gtClass; // the class of the object |
4689 | |
4690 | // If non-null, this array represents the gc-layout of the class. |
4691 | // This may be simply copied when cloning this node, because it is not changed once computed. |
4692 | BYTE* gtGcPtrs; |
4693 | |
4694 | // If non-zero, this is the number of slots in the class layout that |
4695 | // contain gc-pointers. |
4696 | __declspec(property(get = GetGcPtrCount)) unsigned gtGcPtrCount; |
4697 | unsigned GetGcPtrCount() const |
4698 | { |
4699 | assert(_gtGcPtrCount != UINT32_MAX); |
4700 | return _gtGcPtrCount; |
4701 | } |
4702 | unsigned _gtGcPtrCount; |
4703 | |
    // If non-zero, the number of pointer-sized slots that constitute the class layout.
4705 | unsigned gtSlots; |
4706 | |
4707 | bool IsGCInfoInitialized() |
4708 | { |
4709 | return (_gtGcPtrCount != UINT32_MAX); |
4710 | } |
4711 | |
4712 | void SetGCInfo(BYTE* gcPtrs, unsigned gcPtrCount, unsigned slots) |
4713 | { |
4714 | gtGcPtrs = gcPtrs; |
4715 | _gtGcPtrCount = gcPtrCount; |
4716 | gtSlots = slots; |
4717 | if (gtGcPtrCount != 0) |
4718 | { |
4719 | // We assume that we cannot have a struct with GC pointers that is not a multiple |
4720 | // of the register size. |
4721 | // The EE currently does not allow this, but it could change. |
4722 | // Let's assert it just to be safe. |
4723 | noway_assert(roundUp(gtBlkSize, REGSIZE_BYTES) == gtBlkSize); |
4724 | } |
4725 | else |
4726 | { |
4727 | genTreeOps newOper = GT_BLK; |
4728 | if (gtOper == GT_STORE_OBJ) |
4729 | { |
4730 | newOper = GT_STORE_BLK; |
4731 | } |
4732 | else |
4733 | { |
4734 | assert(gtOper == GT_OBJ); |
4735 | } |
4736 | SetOper(newOper); |
4737 | } |
4738 | } |
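
    // For example (illustrative): for a struct { object o; void* p; } on a 64-bit target,
    // SetGCInfo would be passed gcPtrCount == 1, slots == 2, and a layout array marking
    // slot 0 as a GC reference.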
4739 | |
4740 | void CopyGCInfo(GenTreeObj* srcObj) |
4741 | { |
4742 | if (srcObj->IsGCInfoInitialized()) |
4743 | { |
4744 | gtGcPtrs = srcObj->gtGcPtrs; |
4745 | _gtGcPtrCount = srcObj->gtGcPtrCount; |
4746 | gtSlots = srcObj->gtSlots; |
4747 | } |
4748 | } |
4749 | |
4750 | GenTreeObj(var_types type, GenTree* addr, CORINFO_CLASS_HANDLE cls, unsigned size) |
4751 | : GenTreeBlk(GT_OBJ, type, addr, size), gtClass(cls) |
4752 | { |
4753 | // By default, an OBJ is assumed to be a global reference. |
4754 | gtFlags |= GTF_GLOB_REF; |
4755 | noway_assert(cls != NO_CLASS_HANDLE); |
4756 | _gtGcPtrCount = UINT32_MAX; |
4757 | } |
4758 | |
4759 | GenTreeObj(var_types type, GenTree* addr, GenTree* data, CORINFO_CLASS_HANDLE cls, unsigned size) |
4760 | : GenTreeBlk(GT_STORE_OBJ, type, addr, data, size), gtClass(cls) |
4761 | { |
4762 | // By default, an OBJ is assumed to be a global reference. |
4763 | gtFlags |= GTF_GLOB_REF; |
4764 | noway_assert(cls != NO_CLASS_HANDLE); |
4765 | _gtGcPtrCount = UINT32_MAX; |
4766 | } |
4767 | |
4768 | #if DEBUGGABLE_GENTREE |
4769 | GenTreeObj() : GenTreeBlk() |
4770 | { |
4771 | } |
4772 | #endif |
4773 | }; |
4774 | |
4775 | // gtDynBlk -- 'dynamic block' (GT_DYN_BLK). |
4776 | // |
4777 | // This node is used for block values that have a dynamic size. |
4778 | // Note that such a value can never have GC pointers. |
4779 | |
4780 | struct GenTreeDynBlk : public GenTreeBlk |
4781 | { |
4782 | public: |
4783 | GenTree* gtDynamicSize; |
    bool gtEvalSizeFirst; // True if the size operand should be evaluated before the other operands.
4785 | |
4786 | GenTreeDynBlk(GenTree* addr, GenTree* dynamicSize) |
4787 | : GenTreeBlk(GT_DYN_BLK, TYP_STRUCT, addr, 0), gtDynamicSize(dynamicSize), gtEvalSizeFirst(false) |
4788 | { |
4789 | // Conservatively the 'addr' could be null or point into the global heap. |
4790 | gtFlags |= GTF_EXCEPT | GTF_GLOB_REF; |
4791 | gtFlags |= (dynamicSize->gtFlags & GTF_ALL_EFFECT); |
4792 | } |
4793 | |
4794 | #if DEBUGGABLE_GENTREE |
4795 | protected: |
4796 | friend GenTree; |
4797 | GenTreeDynBlk() : GenTreeBlk() |
4798 | { |
4799 | } |
4800 | #endif // DEBUGGABLE_GENTREE |
4801 | }; |
4802 | |
4803 | // Read-modify-write status of a RMW memory op rooted at a storeInd |
4804 | enum RMWStatus |
4805 | { |
4806 | STOREIND_RMW_STATUS_UNKNOWN, // RMW status of storeInd unknown |
4807 | // Default status unless modified by IsRMWMemOpRootedAtStoreInd() |
4808 | |
    // One of these denotes that the storeInd is an RMW memory operation.
4810 | STOREIND_RMW_DST_IS_OP1, // StoreInd is known to be a RMW memory op and dst candidate is op1 |
4811 | STOREIND_RMW_DST_IS_OP2, // StoreInd is known to be a RMW memory op and dst candidate is op2 |
4812 | |
    // One of these denotes the reason the storeInd is marked as a non-RMW operation.
4814 | STOREIND_RMW_UNSUPPORTED_ADDR, // Addr mode is not yet supported for RMW memory |
4815 | STOREIND_RMW_UNSUPPORTED_OPER, // Operation is not supported for RMW memory |
4816 | STOREIND_RMW_UNSUPPORTED_TYPE, // Type is not supported for RMW memory |
4817 | STOREIND_RMW_INDIR_UNEQUAL // Indir to read value is not equivalent to indir that writes the value |
4818 | }; |
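
// As an illustrative example: on xarch, a tree such as
//     STOREIND(addr, ADD(IND(addr), CNS_INT 1))
// can be encoded as a single "add [addr], 1" instruction. In that case the storeInd is
// marked STOREIND_RMW_DST_IS_OP1, since the indir matching the store address is op1 of
// the ADD. (Sketch only; see the lowering code for the exact conditions checked.)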
4819 | |
4820 | // StoreInd is just a BinOp, with additional RMW status |
4821 | struct GenTreeStoreInd : public GenTreeIndir |
4822 | { |
4823 | #if !CPU_LOAD_STORE_ARCH |
4824 | // The below flag is set and used during lowering |
4825 | RMWStatus gtRMWStatus; |
4826 | |
4827 | bool IsRMWStatusUnknown() |
4828 | { |
4829 | return gtRMWStatus == STOREIND_RMW_STATUS_UNKNOWN; |
4830 | } |
4831 | bool IsNonRMWMemoryOp() |
4832 | { |
4833 | return gtRMWStatus == STOREIND_RMW_UNSUPPORTED_ADDR || gtRMWStatus == STOREIND_RMW_UNSUPPORTED_OPER || |
4834 | gtRMWStatus == STOREIND_RMW_UNSUPPORTED_TYPE || gtRMWStatus == STOREIND_RMW_INDIR_UNEQUAL; |
4835 | } |
4836 | bool IsRMWMemoryOp() |
4837 | { |
4838 | return gtRMWStatus == STOREIND_RMW_DST_IS_OP1 || gtRMWStatus == STOREIND_RMW_DST_IS_OP2; |
4839 | } |
4840 | bool IsRMWDstOp1() |
4841 | { |
4842 | return gtRMWStatus == STOREIND_RMW_DST_IS_OP1; |
4843 | } |
4844 | bool IsRMWDstOp2() |
4845 | { |
4846 | return gtRMWStatus == STOREIND_RMW_DST_IS_OP2; |
4847 | } |
4848 | #endif //! CPU_LOAD_STORE_ARCH |
4849 | |
4850 | RMWStatus GetRMWStatus() |
4851 | { |
4852 | #if !CPU_LOAD_STORE_ARCH |
4853 | return gtRMWStatus; |
4854 | #else |
4855 | return STOREIND_RMW_STATUS_UNKNOWN; |
4856 | #endif |
4857 | } |
4858 | |
4859 | void SetRMWStatusDefault() |
4860 | { |
4861 | #if !CPU_LOAD_STORE_ARCH |
4862 | gtRMWStatus = STOREIND_RMW_STATUS_UNKNOWN; |
4863 | #endif |
4864 | } |
4865 | |
4866 | void SetRMWStatus(RMWStatus status) |
4867 | { |
4868 | #if !CPU_LOAD_STORE_ARCH |
4869 | gtRMWStatus = status; |
4870 | #endif |
4871 | } |
4872 | |
4873 | GenTree*& Data() |
4874 | { |
4875 | return gtOp2; |
4876 | } |
4877 | |
4878 | GenTreeStoreInd(var_types type, GenTree* destPtr, GenTree* data) : GenTreeIndir(GT_STOREIND, type, destPtr, data) |
4879 | { |
4880 | SetRMWStatusDefault(); |
4881 | } |
4882 | |
4883 | #if DEBUGGABLE_GENTREE |
4884 | protected: |
4885 | friend GenTree; |
4886 | // Used only for GenTree::GetVtableForOper() |
4887 | GenTreeStoreInd() : GenTreeIndir() |
4888 | { |
4889 | SetRMWStatusDefault(); |
4890 | } |
4891 | #endif |
4892 | }; |
4893 | |
/* gtRetExp -- Placeholder for the return expression from an inline candidate (GT_RET_EXPR) */
4895 | |
4896 | struct GenTreeRetExpr : public GenTree |
4897 | { |
    GenTree* gtInlineCandidate; // The candidate call; replaced by the inlinee's return value
                                // expression if the inline succeeds.

    CORINFO_CLASS_HANDLE gtRetClsHnd; // Class handle of the return value.
4901 | |
4902 | GenTreeRetExpr(var_types type) : GenTree(GT_RET_EXPR, type) |
4903 | { |
4904 | } |
4905 | #if DEBUGGABLE_GENTREE |
4906 | GenTreeRetExpr() : GenTree() |
4907 | { |
4908 | } |
4909 | #endif |
4910 | }; |
4911 | |
4912 | /* gtStmt -- 'statement expr' (GT_STMT) */ |
4913 | |
4914 | class InlineContext; |
4915 | |
4916 | struct GenTreeStmt : public GenTree |
4917 | { |
4918 | GenTree* gtStmtExpr; // root of the expression tree |
4919 | GenTree* gtStmtList; // first node (for forward walks) |
4920 | InlineContext* gtInlineContext; // The inline context for this statement. |
4921 | IL_OFFSETX gtStmtILoffsx; // instr offset (if available) |
4922 | |
4923 | #ifdef DEBUG |
4924 | IL_OFFSET gtStmtLastILoffs; // instr offset at end of stmt |
4925 | #endif |
4926 | |
4927 | __declspec(property(get = getNextStmt)) GenTreeStmt* gtNextStmt; |
4928 | |
4929 | __declspec(property(get = getPrevStmt)) GenTreeStmt* gtPrevStmt; |
4930 | |
4931 | GenTreeStmt* getNextStmt() |
4932 | { |
4933 | if (gtNext == nullptr) |
4934 | { |
4935 | return nullptr; |
4936 | } |
4937 | else |
4938 | { |
4939 | return gtNext->AsStmt(); |
4940 | } |
4941 | } |
4942 | |
4943 | GenTreeStmt* getPrevStmt() |
4944 | { |
4945 | if (gtPrev == nullptr) |
4946 | { |
4947 | return nullptr; |
4948 | } |
4949 | else |
4950 | { |
4951 | return gtPrev->AsStmt(); |
4952 | } |
4953 | } |
4954 | |
4955 | GenTreeStmt(GenTree* expr, IL_OFFSETX offset) |
4956 | : GenTree(GT_STMT, TYP_VOID) |
4957 | , gtStmtExpr(expr) |
4958 | , gtStmtList(nullptr) |
4959 | , gtInlineContext(nullptr) |
4960 | , gtStmtILoffsx(offset) |
4961 | #ifdef DEBUG |
4962 | , gtStmtLastILoffs(BAD_IL_OFFSET) |
4963 | #endif |
4964 | { |
4965 | // Statements can't have statements as part of their expression tree. |
4966 | assert(expr->gtOper != GT_STMT); |
4967 | |
4968 | // Set the statement to have the same costs as the top node of the tree. |
4969 | // This is used long before costs have been assigned, so we need to copy |
4970 | // the raw costs. |
4971 | CopyRawCosts(expr); |
4972 | } |
4973 | |
4974 | #if DEBUGGABLE_GENTREE |
4975 | GenTreeStmt() : GenTree(GT_STMT, TYP_VOID) |
4976 | { |
4977 | } |
4978 | #endif |
4979 | }; |
4980 | |
4981 | /* NOTE: Any tree nodes that are larger than 8 bytes (two ints or |
4982 | pointers) must be flagged as 'large' in GenTree::InitNodeSize(). |
4983 | */ |
4984 | |
4985 | /* gtClsVar -- 'static data member' (GT_CLS_VAR) */ |
4986 | |
4987 | struct GenTreeClsVar : public GenTree |
4988 | { |
4989 | CORINFO_FIELD_HANDLE gtClsVarHnd; |
4990 | FieldSeqNode* gtFieldSeq; |
4991 | |
4992 | GenTreeClsVar(var_types type, CORINFO_FIELD_HANDLE clsVarHnd, FieldSeqNode* fldSeq) |
4993 | : GenTree(GT_CLS_VAR, type), gtClsVarHnd(clsVarHnd), gtFieldSeq(fldSeq) |
4994 | { |
4995 | gtFlags |= GTF_GLOB_REF; |
4996 | } |
4997 | #if DEBUGGABLE_GENTREE |
4998 | GenTreeClsVar() : GenTree() |
4999 | { |
5000 | } |
5001 | #endif |
5002 | }; |
5003 | |
5004 | /* gtArgPlace -- 'register argument placeholder' (GT_ARGPLACE) */ |
5005 | |
5006 | struct GenTreeArgPlace : public GenTree |
5007 | { |
5008 | CORINFO_CLASS_HANDLE gtArgPlaceClsHnd; // Needed when we have a TYP_STRUCT argument |
5009 | |
5010 | GenTreeArgPlace(var_types type, CORINFO_CLASS_HANDLE clsHnd) : GenTree(GT_ARGPLACE, type), gtArgPlaceClsHnd(clsHnd) |
5011 | { |
5012 | } |
5013 | #if DEBUGGABLE_GENTREE |
5014 | GenTreeArgPlace() : GenTree() |
5015 | { |
5016 | } |
5017 | #endif |
5018 | }; |
5019 | |
5020 | /* gtLabel -- code label target (GT_LABEL) */ |
5021 | |
5022 | struct GenTreeLabel : public GenTree |
5023 | { |
5024 | BasicBlock* gtLabBB; |
5025 | |
5026 | GenTreeLabel(BasicBlock* bb) : GenTree(GT_LABEL, TYP_VOID), gtLabBB(bb) |
5027 | { |
5028 | } |
5029 | #if DEBUGGABLE_GENTREE |
5030 | GenTreeLabel() : GenTree() |
5031 | { |
5032 | } |
5033 | #endif |
5034 | }; |
5035 | |
5036 | /* gtPhiArg -- phi node rhs argument, var = phi(phiarg, phiarg, phiarg...); GT_PHI_ARG */ |
5037 | struct GenTreePhiArg : public GenTreeLclVarCommon |
5038 | { |
5039 | BasicBlock* gtPredBB; |
5040 | |
5041 | GenTreePhiArg(var_types type, unsigned lclNum, unsigned snum, BasicBlock* block) |
5042 | : GenTreeLclVarCommon(GT_PHI_ARG, type, lclNum), gtPredBB(block) |
5043 | { |
5044 | SetSsaNum(snum); |
5045 | } |
5046 | |
5047 | #if DEBUGGABLE_GENTREE |
5048 | GenTreePhiArg() : GenTreeLclVarCommon() |
5049 | { |
5050 | } |
5051 | #endif |
5052 | }; |
5053 | |
5054 | /* gtPutArgStk -- Argument passed on stack (GT_PUTARG_STK) */ |
5055 | |
5056 | struct GenTreePutArgStk : public GenTreeUnOp |
5057 | { |
5058 | unsigned gtSlotNum; // Slot number of the argument to be passed on stack |
5059 | #if defined(UNIX_X86_ABI) |
5060 | unsigned gtPadAlign; // Number of padding slots for stack alignment |
5061 | #endif |
5062 | |
5063 | // Don't let clang-format mess with the GenTreePutArgStk constructor. |
5064 | // clang-format off |
5065 | |
5066 | GenTreePutArgStk(genTreeOps oper, |
5067 | var_types type, |
5068 | GenTree* op1, |
5069 | unsigned slotNum |
5070 | PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots), |
5071 | bool putInIncomingArgArea = false, |
5072 | GenTreeCall* callNode = nullptr) |
5073 | : GenTreeUnOp(oper, type, op1 DEBUGARG(/*largeNode*/ false)) |
5074 | , gtSlotNum(slotNum) |
5075 | #if defined(UNIX_X86_ABI) |
5076 | , gtPadAlign(0) |
5077 | #endif |
5078 | #if FEATURE_FASTTAILCALL |
5079 | , gtPutInIncomingArgArea(putInIncomingArgArea) |
5080 | #endif // FEATURE_FASTTAILCALL |
5081 | #ifdef FEATURE_PUT_STRUCT_ARG_STK |
5082 | , gtPutArgStkKind(Kind::Invalid) |
5083 | , gtNumSlots(numSlots) |
5084 | , gtNumberReferenceSlots(0) |
5085 | , gtGcPtrs(nullptr) |
5086 | #endif // FEATURE_PUT_STRUCT_ARG_STK |
5087 | #if defined(DEBUG) || defined(UNIX_X86_ABI) |
5088 | , gtCall(callNode) |
5089 | #endif |
5090 | { |
5091 | } |
5092 | |
5093 | // clang-format on |
5094 | |
5095 | #if FEATURE_FASTTAILCALL |
5096 | |
5097 | bool gtPutInIncomingArgArea; // Whether this arg needs to be placed in incoming arg area. |
                                 // By default this is false and the arg is placed in the out-going arg area.
                                 // Fast tail calls set this to true.
                                 // In the future, if we need to add more such bool fields, consider bit fields.
5101 | |
5102 | bool putInIncomingArgArea() const |
5103 | { |
5104 | return gtPutInIncomingArgArea; |
5105 | } |
5106 | |
5107 | #else // !FEATURE_FASTTAILCALL |
5108 | |
5109 | bool putInIncomingArgArea() const |
5110 | { |
5111 | return false; |
5112 | } |
5113 | |
5114 | #endif // !FEATURE_FASTTAILCALL |
5115 | |
5116 | unsigned getArgOffset() |
5117 | { |
5118 | return gtSlotNum * TARGET_POINTER_SIZE; |
5119 | } |
5120 | |
5121 | #if defined(UNIX_X86_ABI) |
5122 | unsigned getArgPadding() |
5123 | { |
5124 | return gtPadAlign; |
5125 | } |
5126 | |
5127 | void setArgPadding(unsigned padAlign) |
5128 | { |
5129 | gtPadAlign = padAlign; |
5130 | } |
5131 | #endif |
5132 | |
5133 | #ifdef FEATURE_PUT_STRUCT_ARG_STK |
5134 | |
5135 | unsigned getArgSize() |
5136 | { |
5137 | return gtNumSlots * TARGET_POINTER_SIZE; |
5138 | } |
5139 | |
5140 | // Return true if this is a PutArgStk of a SIMD12 struct. |
5141 | // This is needed because such values are re-typed to SIMD16, and the type of PutArgStk is VOID. |
    bool isSIMD12()
    {
        return (varTypeIsSIMD(gtOp1) && (gtNumSlots == 3));
    }
5146 | |
5147 | //------------------------------------------------------------------------ |
    // setGcPointers: Sets the number of GC references and the GC layout of the struct, as returned by the VM.
    //
    // Arguments:
    //    numPointers - Number of pointer references.
    //    pointers    - layout of the struct (with pointers marked).
    //
    // Return Value:
    //    None
    //
    // Notes:
    //    This data is used by the codegen for GT_PUTARG_STK to decide how to copy the struct to the stack by value.
    //    If the struct contains no pointer references, block copying instructions are used.
    //    Otherwise the pointer-reference slots are copied with individual, atomic pointer-sized moves,
    //    so that GC info is emitted for them; any non-pointer slots between the pointer-reference slots
    //    are copied in block fashion.
5162 | // |
5163 | void setGcPointers(unsigned numPointers, BYTE* pointers) |
5164 | { |
5165 | gtNumberReferenceSlots = numPointers; |
5166 | gtGcPtrs = pointers; |
5167 | } |
5168 | |
    // Instruction selection: the code sequence that codegen will use to
    // encode this operation.
5171 | // TODO-Throughput: The following information should be obtained from the child |
5172 | // block node. |
5173 | |
    enum class Kind : __int8
    {
        Invalid,      // Not yet determined.
        RepInstr,     // Copy using a rep movs/stos style instruction sequence.
        Unroll,       // Copy using an unrolled sequence of instructions.
        Push,         // Push the slots to the stack individually (x86).
        PushAllSlots, // Push all of the slots to the stack (x86).
    };
5177 | |
5178 | Kind gtPutArgStkKind; |
5179 | bool isPushKind() |
5180 | { |
5181 | return (gtPutArgStkKind == Kind::Push) || (gtPutArgStkKind == Kind::PushAllSlots); |
5182 | } |
5183 | |
5184 | unsigned gtNumSlots; // Number of slots for the argument to be passed on stack |
5185 | unsigned gtNumberReferenceSlots; // Number of reference slots. |
5186 | BYTE* gtGcPtrs; // gcPointers |
5187 | |
5188 | #else // !FEATURE_PUT_STRUCT_ARG_STK |
5189 | unsigned getArgSize(); |
5190 | #endif // !FEATURE_PUT_STRUCT_ARG_STK |
5191 | |
5192 | #if defined(DEBUG) || defined(UNIX_X86_ABI) |
5193 | GenTreeCall* gtCall; // the call node to which this argument belongs |
5194 | #endif |
5195 | |
5196 | #if DEBUGGABLE_GENTREE |
5197 | GenTreePutArgStk() : GenTreeUnOp() |
5198 | { |
5199 | } |
5200 | #endif |
5201 | }; |
5202 | |
5203 | #if FEATURE_ARG_SPLIT |
// Represents a struct argument whose value is split between register(s) and the stack
5205 | struct GenTreePutArgSplit : public GenTreePutArgStk |
5206 | { |
5207 | unsigned gtNumRegs; |
5208 | |
5209 | GenTreePutArgSplit(GenTree* op1, |
5210 | unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots), |
5211 | unsigned numRegs, |
5212 | bool putIncomingArgArea = false, |
5213 | GenTreeCall* callNode = nullptr) |
5214 | : GenTreePutArgStk(GT_PUTARG_SPLIT, |
5215 | TYP_STRUCT, |
5216 | op1, |
5217 | slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(numSlots), |
5218 | putIncomingArgArea, |
5219 | callNode) |
5220 | , gtNumRegs(numRegs) |
5221 | { |
5222 | ClearOtherRegs(); |
5223 | ClearOtherRegFlags(); |
5224 | } |
5225 | |
5226 | // Type required to support multi-reg struct arg. |
5227 | var_types m_regType[MAX_REG_ARG]; |
5228 | |
5229 | // First reg of struct is always given by gtRegNum. |
5230 | // gtOtherRegs holds the other reg numbers of struct. |
5231 | regNumberSmall gtOtherRegs[MAX_REG_ARG - 1]; |
5232 | |
    // A GTF_SPILL or GTF_SPILLED flag on a multi-reg struct node indicates that one or
    // more of its result regs are in that state. The spill flag of each of the
    // result registers is stored here. We only need 2 bits per register,
    // so this is treated as a 2-bit array.
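    // For example, gtSpillFlags == 0x9 (binary 1001) means register 0 has PACKED_GTF_SPILL
    // set (bits [1:0] == 01) and register 1 has PACKED_GTF_SPILLED set (bits [3:2] == 10).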
5237 | static const unsigned PACKED_GTF_SPILL = 1; |
5238 | static const unsigned PACKED_GTF_SPILLED = 2; |
5239 | unsigned char gtSpillFlags; |
5240 | |
5241 | //--------------------------------------------------------------------------- |
5242 | // GetRegNumByIdx: get ith register allocated to this struct argument. |
5243 | // |
5244 | // Arguments: |
    //     idx   -   index of the register
5246 | // |
5247 | // Return Value: |
5248 | // Return regNumber of ith register of this struct argument |
5249 | // |
5250 | regNumber GetRegNumByIdx(unsigned idx) const |
5251 | { |
5252 | assert(idx < MAX_REG_ARG); |
5253 | |
5254 | if (idx == 0) |
5255 | { |
5256 | return gtRegNum; |
5257 | } |
5258 | |
5259 | return (regNumber)gtOtherRegs[idx - 1]; |
5260 | } |
5261 | |
5262 | //---------------------------------------------------------------------- |
5263 | // SetRegNumByIdx: set ith register of this struct argument |
5264 | // |
5265 | // Arguments: |
5266 | // reg - reg number |
    //     idx   -   index of the register
5268 | // |
5269 | // Return Value: |
5270 | // None |
5271 | // |
5272 | void SetRegNumByIdx(regNumber reg, unsigned idx) |
5273 | { |
5274 | assert(idx < MAX_REG_ARG); |
5275 | if (idx == 0) |
5276 | { |
5277 | gtRegNum = reg; |
5278 | } |
5279 | else |
5280 | { |
5281 | gtOtherRegs[idx - 1] = (regNumberSmall)reg; |
5282 | assert(gtOtherRegs[idx - 1] == reg); |
5283 | } |
5284 | } |
5285 | |
5286 | //---------------------------------------------------------------------------- |
5287 | // ClearOtherRegs: clear multi-reg state to indicate no regs are allocated |
5288 | // |
5289 | // Arguments: |
5290 | // None |
5291 | // |
5292 | // Return Value: |
5293 | // None |
5294 | // |
5295 | void ClearOtherRegs() |
5296 | { |
5297 | for (unsigned i = 0; i < MAX_REG_ARG - 1; ++i) |
5298 | { |
5299 | gtOtherRegs[i] = REG_NA; |
5300 | } |
5301 | } |
5302 | |
5303 | //---------------------------------------------------------------------- |
5304 | // GetRegSpillFlagByIdx: get spill flag associated with the register |
5305 | // specified by its index. |
5306 | // |
5307 | // Arguments: |
5308 | // idx - Position or index of the register |
5309 | // |
5310 | // Return Value: |
5311 | // Returns GTF_* flags associated with the register. Only GTF_SPILL and GTF_SPILLED are considered. |
5312 | // |
5313 | unsigned GetRegSpillFlagByIdx(unsigned idx) const |
5314 | { |
5315 | assert(idx < MAX_REG_ARG); |
5316 | |
5317 | unsigned bits = gtSpillFlags >> (idx * 2); // It doesn't matter that we possibly leave other high bits here. |
5318 | unsigned spillFlags = 0; |
5319 | if (bits & PACKED_GTF_SPILL) |
5320 | { |
5321 | spillFlags |= GTF_SPILL; |
5322 | } |
5323 | if (bits & PACKED_GTF_SPILLED) |
5324 | { |
5325 | spillFlags |= GTF_SPILLED; |
5326 | } |
5327 | |
5328 | return spillFlags; |
5329 | } |
5330 | |
5331 | //---------------------------------------------------------------------- |
5332 | // SetRegSpillFlagByIdx: set spill flags for the register |
5333 | // specified by its index. |
5334 | // |
5335 | // Arguments: |
5336 | // flags - GTF_* flags. Only GTF_SPILL and GTF_SPILLED are allowed. |
5337 | // idx - Position or index of the register |
5338 | // |
5339 | // Return Value: |
5340 | // None |
5341 | // |
5342 | void SetRegSpillFlagByIdx(unsigned flags, unsigned idx) |
5343 | { |
5344 | assert(idx < MAX_REG_ARG); |
5345 | |
5346 | unsigned bits = 0; |
5347 | if (flags & GTF_SPILL) |
5348 | { |
5349 | bits |= PACKED_GTF_SPILL; |
5350 | } |
5351 | if (flags & GTF_SPILLED) |
5352 | { |
5353 | bits |= PACKED_GTF_SPILLED; |
5354 | } |
5355 | |
5356 | const unsigned char packedFlags = PACKED_GTF_SPILL | PACKED_GTF_SPILLED; |
5357 | |
5358 | // Clear anything that was already there by masking out the bits before 'or'ing in what we want there. |
5359 | gtSpillFlags = (unsigned char)((gtSpillFlags & ~(packedFlags << (idx * 2))) | (bits << (idx * 2))); |
5360 | } |
5361 | |
5362 | //-------------------------------------------------------------------------- |
5363 | // GetRegType: Get var_type of the register specified by index. |
5364 | // |
5365 | // Arguments: |
5366 | // index - Index of the register. |
5367 | // First register will have an index 0 and so on. |
5368 | // |
5369 | // Return Value: |
5370 | // var_type of the register specified by its index. |
5371 | |
5372 | var_types GetRegType(unsigned index) |
5373 | { |
5374 | assert(index < gtNumRegs); |
5375 | var_types result = m_regType[index]; |
5376 | return result; |
5377 | } |
5378 | |
5379 | //------------------------------------------------------------------- |
    // ClearOtherRegFlags: clear GTF_* flags associated with gtOtherRegs
5381 | // |
5382 | // Arguments: |
5383 | // None |
5384 | // |
5385 | // Return Value: |
5386 | // None |
5387 | // |
5388 | void ClearOtherRegFlags() |
5389 | { |
5390 | gtSpillFlags = 0; |
5391 | } |
5392 | |
5393 | #ifdef FEATURE_PUT_STRUCT_ARG_STK |
5394 | unsigned getArgSize() |
5395 | { |
5396 | return (gtNumSlots + gtNumRegs) * TARGET_POINTER_SIZE; |
5397 | } |
5398 | #endif // FEATURE_PUT_STRUCT_ARG_STK |
5399 | |
5400 | #if DEBUGGABLE_GENTREE |
5401 | GenTreePutArgSplit() : GenTreePutArgStk() |
5402 | { |
5403 | } |
5404 | #endif |
5405 | }; |
5406 | #endif // FEATURE_ARG_SPLIT |
5407 | |
5408 | // Represents GT_COPY or GT_RELOAD node |
5409 | // |
// These are needed on any target that has any multi-reg ops, though as it turns out the
// targets with multi-reg ops are currently exactly the targets with multi-reg returns.
// It is just coincidence that the two sets are the same (and there isn't a
// FEATURE_MULTIREG_OPS, so FEATURE_MULTIREG_RET is used below).
5413 | // |
5414 | struct GenTreeCopyOrReload : public GenTreeUnOp |
5415 | { |
5416 | #if FEATURE_MULTIREG_RET |
5417 | // State required to support copy/reload of a multi-reg call node. |
5418 | // The first register is always given by gtRegNum. |
5419 | // |
5420 | regNumberSmall gtOtherRegs[MAX_RET_REG_COUNT - 1]; |
5421 | #endif |
5422 | |
5423 | //---------------------------------------------------------- |
5424 | // ClearOtherRegs: set gtOtherRegs to REG_NA. |
5425 | // |
5426 | // Arguments: |
5427 | // None |
5428 | // |
5429 | // Return Value: |
5430 | // None |
5431 | // |
5432 | void ClearOtherRegs() |
5433 | { |
5434 | #if FEATURE_MULTIREG_RET |
5435 | for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i) |
5436 | { |
5437 | gtOtherRegs[i] = REG_NA; |
5438 | } |
5439 | #endif |
5440 | } |
5441 | |
5442 | //----------------------------------------------------------- |
5443 | // GetRegNumByIdx: Get regNumber of ith position. |
5444 | // |
5445 | // Arguments: |
5446 | // idx - register position. |
5447 | // |
5448 | // Return Value: |
5449 | // Returns regNumber assigned to ith position. |
5450 | // |
5451 | regNumber GetRegNumByIdx(unsigned idx) const |
5452 | { |
5453 | assert(idx < MAX_RET_REG_COUNT); |
5454 | |
5455 | if (idx == 0) |
5456 | { |
5457 | return gtRegNum; |
5458 | } |
5459 | |
5460 | #if FEATURE_MULTIREG_RET |
5461 | return (regNumber)gtOtherRegs[idx - 1]; |
5462 | #else |
5463 | return REG_NA; |
5464 | #endif |
5465 | } |
5466 | |
5467 | //----------------------------------------------------------- |
5468 | // SetRegNumByIdx: Set the regNumber for ith position. |
5469 | // |
5470 | // Arguments: |
5471 | // reg - reg number |
5472 | // idx - register position. |
5473 | // |
5474 | // Return Value: |
5475 | // None. |
5476 | // |
5477 | void SetRegNumByIdx(regNumber reg, unsigned idx) |
5478 | { |
5479 | assert(idx < MAX_RET_REG_COUNT); |
5480 | |
5481 | if (idx == 0) |
5482 | { |
5483 | gtRegNum = reg; |
5484 | } |
5485 | #if FEATURE_MULTIREG_RET |
5486 | else |
5487 | { |
5488 | gtOtherRegs[idx - 1] = (regNumberSmall)reg; |
5489 | assert(gtOtherRegs[idx - 1] == reg); |
5490 | } |
5491 | #else |
5492 | else |
5493 | { |
5494 | unreached(); |
5495 | } |
5496 | #endif |
5497 | } |
5498 | |
5499 | //---------------------------------------------------------------------------- |
5500 | // CopyOtherRegs: copy multi-reg state from the given copy/reload node to this |
5501 | // node. |
5502 | // |
5503 | // Arguments: |
5504 | // from - GenTree node from which to copy multi-reg state |
5505 | // |
5506 | // Return Value: |
5507 | // None |
5508 | // |
5509 | // TODO-ARM: Implement this routine for Arm64 and Arm32 |
5510 | // TODO-X86: Implement this routine for x86 |
5511 | void CopyOtherRegs(GenTreeCopyOrReload* from) |
5512 | { |
5513 | assert(OperGet() == from->OperGet()); |
5514 | |
5515 | #ifdef UNIX_AMD64_ABI |
5516 | for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i) |
5517 | { |
5518 | gtOtherRegs[i] = from->gtOtherRegs[i]; |
5519 | } |
5520 | #endif |
5521 | } |
5522 | |
5523 | unsigned GetRegCount() |
5524 | { |
5525 | #if FEATURE_MULTIREG_RET |
5526 | // We need to return the highest index for which we have a valid register. |
5527 | // Note that the gtOtherRegs array is off by one (the 0th register is gtRegNum). |
5528 | // If there's no valid register in gtOtherRegs, gtRegNum must be valid. |
5529 | // Note that for most nodes, the set of valid registers must be contiguous, |
5530 | // but for COPY or RELOAD there is only a valid register for the register positions |
5531 | // that must be copied or reloaded. |
5532 | // |
5533 | for (unsigned i = MAX_RET_REG_COUNT; i > 1; i--) |
5534 | { |
5535 | if (gtOtherRegs[i - 2] != REG_NA) |
5536 | { |
5537 | return i; |
5538 | } |
5539 | } |
5540 | #endif |
5541 | // We should never have a COPY or RELOAD with no valid registers. |
5542 | assert(gtRegNum != REG_NA); |
5543 | return 1; |
5544 | } |
5545 | |
5546 | GenTreeCopyOrReload(genTreeOps oper, var_types type, GenTree* op1) : GenTreeUnOp(oper, type, op1) |
5547 | { |
5548 | gtRegNum = REG_NA; |
5549 | ClearOtherRegs(); |
5550 | } |
5551 | |
5552 | #if DEBUGGABLE_GENTREE |
5553 | GenTreeCopyOrReload() : GenTreeUnOp() |
5554 | { |
5555 | } |
5556 | #endif |
5557 | }; |
5558 | |
5559 | // Represents GT_ALLOCOBJ node |
5560 | |
5561 | struct GenTreeAllocObj final : public GenTreeUnOp |
5562 | { |
5563 | unsigned int gtNewHelper; // Value returned by ICorJitInfo::getNewHelper |
5564 | bool gtHelperHasSideEffects; |
5565 | CORINFO_CLASS_HANDLE gtAllocObjClsHnd; |
5566 | #ifdef FEATURE_READYTORUN_COMPILER |
5567 | CORINFO_CONST_LOOKUP gtEntryPoint; |
5568 | #endif |
5569 | |
5570 | GenTreeAllocObj( |
5571 | var_types type, unsigned int helper, bool helperHasSideEffects, CORINFO_CLASS_HANDLE clsHnd, GenTree* op) |
5572 | : GenTreeUnOp(GT_ALLOCOBJ, type, op DEBUGARG(/*largeNode*/ TRUE)) |
5573 | , // This node in most cases will be changed to a call node |
5574 | gtNewHelper(helper) |
5575 | , gtHelperHasSideEffects(helperHasSideEffects) |
5576 | , gtAllocObjClsHnd(clsHnd) |
5577 | { |
5578 | #ifdef FEATURE_READYTORUN_COMPILER |
5579 | gtEntryPoint.addr = nullptr; |
5580 | #endif |
5581 | } |
5582 | #if DEBUGGABLE_GENTREE |
5583 | GenTreeAllocObj() : GenTreeUnOp() |
5584 | { |
5585 | } |
5586 | #endif |
5587 | }; |
5588 | |
5589 | // Represents GT_RUNTIMELOOKUP node |
5590 | |
5591 | struct GenTreeRuntimeLookup final : public GenTreeUnOp |
5592 | { |
5593 | CORINFO_GENERIC_HANDLE gtHnd; |
5594 | CorInfoGenericHandleType gtHndType; |
5595 | |
5596 | GenTreeRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfoGenericHandleType hndTyp, GenTree* tree) |
5597 | : GenTreeUnOp(GT_RUNTIMELOOKUP, tree->gtType, tree DEBUGARG(/*largeNode*/ FALSE)), gtHnd(hnd), gtHndType(hndTyp) |
5598 | { |
5599 | assert(hnd != nullptr); |
5600 | } |
5601 | #if DEBUGGABLE_GENTREE |
5602 | GenTreeRuntimeLookup() : GenTreeUnOp() |
5603 | { |
5604 | } |
5605 | #endif |
5606 | |
5607 | // Return reference to the actual tree that does the lookup |
5608 | GenTree*& Lookup() |
5609 | { |
5610 | return gtOp1; |
5611 | } |
5612 | |
5613 | bool IsClassHandle() const |
5614 | { |
5615 | return gtHndType == CORINFO_HANDLETYPE_CLASS; |
5616 | } |
5617 | bool IsMethodHandle() const |
5618 | { |
5619 | return gtHndType == CORINFO_HANDLETYPE_METHOD; |
5620 | } |
5621 | bool IsFieldHandle() const |
5622 | { |
5623 | return gtHndType == CORINFO_HANDLETYPE_FIELD; |
5624 | } |
5625 | |
5626 | // Note these operations describe the handle that is input to the |
5627 | // lookup, not the handle produced by the lookup. |
5628 | CORINFO_CLASS_HANDLE GetClassHandle() const |
5629 | { |
5630 | assert(IsClassHandle()); |
5631 | return (CORINFO_CLASS_HANDLE)gtHnd; |
5632 | } |
5633 | CORINFO_METHOD_HANDLE GetMethodHandle() const |
5634 | { |
5635 | assert(IsMethodHandle()); |
5636 | return (CORINFO_METHOD_HANDLE)gtHnd; |
5637 | } |
5638 | CORINFO_FIELD_HANDLE GetFieldHandle() const |
5639 | { |
        assert(IsFieldHandle());
5641 | return (CORINFO_FIELD_HANDLE)gtHnd; |
5642 | } |
5643 | }; |
5644 | |
5645 | // Represents a GT_JCC or GT_SETCC node. |
5646 | |
5647 | struct GenTreeCC final : public GenTree |
5648 | { |
5649 | genTreeOps gtCondition; // any relop |
5650 | |
5651 | GenTreeCC(genTreeOps oper, genTreeOps condition, var_types type = TYP_VOID) |
5652 | : GenTree(oper, type DEBUGARG(/*largeNode*/ FALSE)), gtCondition(condition) |
5653 | { |
5654 | assert(OperIs(GT_JCC, GT_SETCC)); |
5655 | assert(OperIsCompare(condition)); |
5656 | } |
5657 | |
5658 | #if DEBUGGABLE_GENTREE |
5659 | GenTreeCC() : GenTree() |
5660 | { |
5661 | } |
5662 | #endif // DEBUGGABLE_GENTREE |
5663 | }; |
5664 | |
5665 | //------------------------------------------------------------------------ |
5666 | // Deferred inline functions of GenTree -- these need the subtypes above to |
5667 | // be defined already. |
5668 | //------------------------------------------------------------------------ |
5669 | |
5670 | inline bool GenTree::OperIsBlkOp() |
5671 | { |
5672 | return ((gtOper == GT_ASG) && varTypeIsStruct(gtOp.gtOp1)) || (OperIsBlk() && (AsBlk()->Data() != nullptr)); |
5673 | } |
5674 | |
5675 | inline bool GenTree::OperIsDynBlkOp() |
5676 | { |
5677 | if (gtOper == GT_ASG) |
5678 | { |
5679 | return gtGetOp1()->OperGet() == GT_DYN_BLK; |
5680 | } |
5681 | else if (gtOper == GT_STORE_DYN_BLK) |
5682 | { |
5683 | return true; |
5684 | } |
5685 | return false; |
5686 | } |
5687 | |
5688 | inline bool GenTree::OperIsInitBlkOp() |
5689 | { |
5690 | if (!OperIsBlkOp()) |
5691 | { |
5692 | return false; |
5693 | } |
5694 | GenTree* src; |
5695 | if (gtOper == GT_ASG) |
5696 | { |
5697 | src = gtGetOp2(); |
5698 | } |
5699 | else |
5700 | { |
5701 | src = AsBlk()->Data()->gtSkipReloadOrCopy(); |
5702 | } |
5703 | return src->OperIsInitVal() || src->OperIsConst(); |
5704 | } |
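
// For example, an assignment whose source is GT_INIT_VAL (or a bare constant) is an init
// block op, while an assignment whose source is another struct value is a copy block op.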
5705 | |
5706 | inline bool GenTree::OperIsCopyBlkOp() |
5707 | { |
5708 | return OperIsBlkOp() && !OperIsInitBlkOp(); |
5709 | } |
5710 | |
5711 | //------------------------------------------------------------------------ |
5712 | // IsFPZero: Checks whether this is a floating point constant with value 0.0 |
5713 | // |
5714 | // Return Value: |
//    Returns true iff the tree is a GT_CNS_DBL with a value of 0.0.
5716 | |
5717 | inline bool GenTree::IsFPZero() |
5718 | { |
5719 | if ((gtOper == GT_CNS_DBL) && (gtDblCon.gtDconVal == 0.0)) |
5720 | { |
5721 | return true; |
5722 | } |
5723 | return false; |
5724 | } |
5725 | |
5726 | //------------------------------------------------------------------------ |
5727 | // IsIntegralConst: Checks whether this is a constant node with the given value |
5728 | // |
5729 | // Arguments: |
5730 | // constVal - the value of interest |
5731 | // |
5732 | // Return Value: |
5733 | // Returns true iff the tree is an integral constant opcode, with |
5734 | // the given value. |
5735 | // |
5736 | // Notes: |
//    Like gtIconVal, the argument is of type ssize_t, so this cannot check for
//    long constants in a target-independent way.
5739 | |
inline bool GenTree::IsIntegralConst(ssize_t constVal)
{
5743 | if ((gtOper == GT_CNS_INT) && (gtIntConCommon.IconValue() == constVal)) |
5744 | { |
5745 | return true; |
5746 | } |
5747 | |
5748 | if ((gtOper == GT_CNS_LNG) && (gtIntConCommon.LngValue() == constVal)) |
5749 | { |
5750 | return true; |
5751 | } |
5752 | |
5753 | return false; |
5754 | } |
5755 | |
5756 | //------------------------------------------------------------------- |
// IsIntegralConstVector: returns true if this is a SIMD vector
5758 | // with all its elements equal to an integral constant. |
5759 | // |
5760 | // Arguments: |
5761 | // constVal - const value of vector element |
5762 | // |
5763 | // Returns: |
5764 | // True if this represents an integral const SIMD vector. |
5765 | // |
5766 | inline bool GenTree::IsIntegralConstVector(ssize_t constVal) |
5767 | { |
5768 | #ifdef FEATURE_SIMD |
5769 | // SIMDIntrinsicInit intrinsic with a const value as initializer |
5770 | // represents a const vector. |
5771 | if ((gtOper == GT_SIMD) && (gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInit) && gtGetOp1()->IsIntegralConst(constVal)) |
5772 | { |
5773 | assert(varTypeIsIntegral(gtSIMD.gtSIMDBaseType)); |
5774 | assert(gtGetOp2IfPresent() == nullptr); |
5775 | return true; |
5776 | } |
5777 | #endif |
5778 | |
5779 | return false; |
5780 | } |
5781 | |
5782 | inline bool GenTree::IsBoxedValue() |
5783 | { |
5784 | assert(gtOper != GT_BOX || gtBox.BoxOp() != nullptr); |
5785 | return (gtOper == GT_BOX) && (gtFlags & GTF_BOX_VALUE); |
5786 | } |
5787 | |
5788 | inline bool GenTree::IsSIMDEqualityOrInequality() const |
5789 | { |
5790 | #ifdef FEATURE_SIMD |
5791 | if (gtOper == GT_SIMD) |
5792 | { |
5793 | SIMDIntrinsicID id = AsSIMD()->gtSIMDIntrinsicID; |
5794 | return (id == SIMDIntrinsicOpEquality) || (id == SIMDIntrinsicOpInEquality); |
5795 | } |
5796 | #endif |
5797 | |
5798 | return false; |
5799 | } |
5800 | |
5801 | inline GenTree* GenTree::MoveNext() |
5802 | { |
5803 | assert(OperIsAnyList()); |
5804 | return gtOp.gtOp2; |
5805 | } |
5806 | |
5807 | #ifdef DEBUG |
5808 | //------------------------------------------------------------------------ |
// IsValidCallArgument: Given a GenTree node that represents a call argument,
//                      check whether it satisfies the invariant described in the Notes.
5811 | // |
5812 | // Arguments: |
5813 | // instance method for a GenTree node |
5814 | // |
5815 | // Return values: |
5816 | // true: the GenTree node is accepted as a valid argument |
//    false: the GenTree node is not accepted as a valid argument
5818 | // |
5819 | // Notes: |
5820 | // For targets that don't support arguments as a list of fields, we do not support GT_FIELD_LIST. |
5821 | // |
//    Currently for AMD64 UNIX we allow a limited case: a GT_FIELD_LIST is
//    allowed, but every element must be a GT_LCL_FLD.
5824 | // |
//    For targets that allow multi-reg args (which includes the current ARM64 target),
//    or that allow passing promoted structs, we allow a GT_FIELD_LIST of arbitrary nodes.
5827 | // These would typically start out as GT_LCL_VARs or GT_LCL_FLDS or GT_INDs, |
5828 | // but could be changed into constants or GT_COMMA trees by the later |
5829 | // optimization phases. |
5830 | |
5831 | inline bool GenTree::IsValidCallArgument() |
5832 | { |
5833 | if (OperIsList()) |
5834 | { |
5835 | // GT_FIELD_LIST is the only list allowed. |
5836 | return false; |
5837 | } |
5838 | if (OperIsFieldList()) |
5839 | { |
5840 | #if !FEATURE_MULTIREG_ARGS && !FEATURE_PUT_STRUCT_ARG_STK |
5841 | |
5842 | return false; |
5843 | |
5844 | #else // FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK |
5845 | |
5846 | // We allow this GT_FIELD_LIST as an argument |
5847 | return true; |
5848 | |
5849 | #endif // FEATURE_MULTIREG_ARGS or FEATURE_PUT_STRUCT_ARG_STK |
5850 | } |
5851 | // We don't have either kind of list, so it satisfies the invariant. |
5852 | return true; |
5853 | } |
5854 | #endif // DEBUG |
5855 | |
5856 | inline GenTree* GenTree::Current() |
5857 | { |
5858 | assert(OperIsAnyList()); |
5859 | return gtOp.gtOp1; |
5860 | } |
5861 | |
5862 | inline GenTree** GenTree::pCurrent() |
5863 | { |
5864 | assert(OperIsAnyList()); |
5865 | return &(gtOp.gtOp1); |
5866 | } |
5867 | |
5868 | inline GenTree* GenTree::gtGetOp1() const |
5869 | { |
5870 | return AsOp()->gtOp1; |
5871 | } |
5872 | |
5873 | #ifdef DEBUG |
5874 | /* static */ |
5875 | inline bool GenTree::RequiresNonNullOp2(genTreeOps oper) |
5876 | { |
5877 | switch (oper) |
5878 | { |
5879 | case GT_ADD: |
5880 | case GT_SUB: |
5881 | case GT_MUL: |
5882 | case GT_DIV: |
5883 | case GT_MOD: |
5884 | case GT_UDIV: |
5885 | case GT_UMOD: |
5886 | case GT_OR: |
5887 | case GT_XOR: |
5888 | case GT_AND: |
5889 | case GT_LSH: |
5890 | case GT_RSH: |
5891 | case GT_RSZ: |
5892 | case GT_ROL: |
5893 | case GT_ROR: |
5894 | case GT_INDEX: |
5895 | case GT_ASG: |
5896 | case GT_EQ: |
5897 | case GT_NE: |
5898 | case GT_LT: |
5899 | case GT_LE: |
5900 | case GT_GE: |
5901 | case GT_GT: |
5902 | case GT_COMMA: |
5903 | case GT_QMARK: |
5904 | case GT_COLON: |
5905 | case GT_MKREFANY: |
5906 | return true; |
5907 | default: |
5908 | return false; |
5909 | } |
5910 | } |
5911 | #endif // DEBUG |
5912 | |
5913 | inline GenTree* GenTree::gtGetOp2() const |
5914 | { |
5915 | assert(OperIsBinary()); |
5916 | |
5917 | GenTree* op2 = AsOp()->gtOp2; |
5918 | |
5919 | // Only allow null op2 if the node type allows it, e.g. GT_LIST. |
5920 | assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper)); |
5921 | |
5922 | return op2; |
5923 | } |
5924 | |
5925 | inline GenTree* GenTree::gtGetOp2IfPresent() const |
5926 | { |
5927 | /* gtOp.gtOp2 is only valid for GTK_BINOP nodes. */ |
5928 | |
5929 | GenTree* op2 = OperIsBinary() ? AsOp()->gtOp2 : nullptr; |
5930 | |
5931 | // This documents the genTreeOps for which gtOp.gtOp2 cannot be nullptr. |
5932 | // This helps prefix in its analysis of code which calls gtGetOp2() |
5933 | |
5934 | assert((op2 != nullptr) || !RequiresNonNullOp2(gtOper)); |
5935 | |
5936 | return op2; |
5937 | } |
5938 | |
5939 | inline GenTree* GenTree::gtEffectiveVal(bool commaOnly) |
5940 | { |
5941 | GenTree* effectiveVal = this; |
5942 | for (;;) |
5943 | { |
5944 | if (effectiveVal->gtOper == GT_COMMA) |
5945 | { |
5946 | effectiveVal = effectiveVal->gtOp.gtOp2; |
5947 | } |
5948 | else if (!commaOnly && (effectiveVal->gtOper == GT_NOP) && (effectiveVal->gtOp.gtOp1 != nullptr)) |
5949 | { |
5950 | effectiveVal = effectiveVal->gtOp.gtOp1; |
5951 | } |
5952 | else |
5953 | { |
5954 | return effectiveVal; |
5955 | } |
5956 | } |
5957 | } |
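
// For example, given COMMA(asg, COMMA(call, lclVar)) this returns the lclVar; and unless
// commaOnly is true, a NOP wrapper such as NOP(tree) is also skipped to return the tree.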
5958 | |
5959 | //------------------------------------------------------------------------- |
5960 | // gtRetExprVal - walk back through GT_RET_EXPRs |
5961 | // |
5962 | // Returns: |
5963 | // tree representing return value from a successful inline, |
5964 | // or original call for failed or yet to be determined inline. |
5965 | // |
5966 | // Notes: |
5967 | // Multi-level inlines can form chains of GT_RET_EXPRs. |
5968 | // This method walks back to the root of the chain. |
5969 | |
5970 | inline GenTree* GenTree::gtRetExprVal() |
5971 | { |
5972 | GenTree* retExprVal = this; |
5973 | for (;;) |
5974 | { |
5975 | if (retExprVal->gtOper == GT_RET_EXPR) |
5976 | { |
5977 | retExprVal = retExprVal->gtRetExpr.gtInlineCandidate; |
5978 | } |
5979 | else |
5980 | { |
5981 | return retExprVal; |
5982 | } |
5983 | } |
5984 | } |
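
// For example (illustrative), if call A inlines B and B in turn inlines C, the tree for
// A's return value can be RET_EXPR -> RET_EXPR -> <inlinee return expression>; this
// walks the gtInlineCandidate links until it reaches a node that is not a GT_RET_EXPR.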
5985 | |
5986 | inline GenTree* GenTree::gtSkipReloadOrCopy() |
5987 | { |
5988 | // There can be only one reload or copy (we can't have a reload/copy of a reload/copy) |
5989 | if (gtOper == GT_RELOAD || gtOper == GT_COPY) |
5990 | { |
5991 | assert(gtGetOp1()->OperGet() != GT_RELOAD && gtGetOp1()->OperGet() != GT_COPY); |
5992 | return gtGetOp1(); |
5993 | } |
5994 | return this; |
5995 | } |
5996 | |
5997 | //----------------------------------------------------------------------------------- |
// IsMultiRegCall: whether this is a call node that returns its value in more than one register
5999 | // |
6000 | // Arguments: |
6001 | // None |
6002 | // |
6003 | // Return Value: |
6004 | // Returns true if this GenTree is a multi register returning call |
6005 | inline bool GenTree::IsMultiRegCall() const |
6006 | { |
6007 | if (this->IsCall()) |
6008 | { |
6009 | // We cannot use AsCall() as it is not declared const |
6010 | const GenTreeCall* call = reinterpret_cast<const GenTreeCall*>(this); |
6011 | return call->HasMultiRegRetVal(); |
6012 | } |
6013 | |
6014 | return false; |
6015 | } |
6016 | |
6017 | //----------------------------------------------------------------------------------- |
// IsMultiRegNode: whether this node returns its value in more than one register
6019 | // |
6020 | // Arguments: |
6021 | // None |
6022 | // |
6023 | // Return Value: |
6024 | // Returns true if this GenTree is a multi-reg node. |
6025 | // |
6026 | // Notes: |
6027 | // All targets that support multi-reg ops of any kind also support multi-reg return |
6028 | // values for calls. Should that change with a future target, this method will need |
6029 | // to change accordingly. |
6030 | // |
6031 | inline bool GenTree::IsMultiRegNode() const |
6032 | { |
6033 | #if FEATURE_MULTIREG_RET |
6034 | if (IsMultiRegCall()) |
6035 | { |
6036 | return true; |
6037 | } |
6038 | |
6039 | #if FEATURE_ARG_SPLIT |
6040 | if (OperIsPutArgSplit()) |
6041 | { |
6042 | return true; |
6043 | } |
6044 | #endif |
6045 | |
6046 | #if !defined(_TARGET_64BIT_) |
6047 | if (OperIsMultiRegOp()) |
6048 | { |
6049 | return true; |
6050 | } |
6051 | #endif |
6052 | |
6053 | if (OperIs(GT_COPY, GT_RELOAD)) |
6054 | { |
6055 | return true; |
6056 | } |
6057 | #endif // FEATURE_MULTIREG_RET |
6058 | return false; |
}

//-----------------------------------------------------------------------------------
6061 | // GetMultiRegCount: Return the register count for a multi-reg node. |
6062 | // |
6063 | // Arguments: |
6064 | // None |
6065 | // |
6066 | // Return Value: |
6067 | // Returns the number of registers defined by this node. |
6068 | // |
6069 | // Notes: |
6070 | // All targets that support multi-reg ops of any kind also support multi-reg return |
6071 | // values for calls. Should that change with a future target, this method will need |
6072 | // to change accordingly. |
6073 | // |
6074 | inline unsigned GenTree::GetMultiRegCount() |
6075 | { |
6076 | #if FEATURE_MULTIREG_RET |
6077 | if (IsMultiRegCall()) |
6078 | { |
6079 | return AsCall()->GetReturnTypeDesc()->GetReturnRegCount(); |
6080 | } |
6081 | |
6082 | #if FEATURE_ARG_SPLIT |
6083 | if (OperIsPutArgSplit()) |
6084 | { |
6085 | return AsPutArgSplit()->gtNumRegs; |
6086 | } |
6087 | #endif |
6088 | |
6089 | #if !defined(_TARGET_64BIT_) |
6090 | if (OperIsMultiRegOp()) |
6091 | { |
6092 | return AsMultiRegOp()->GetRegCount(); |
6093 | } |
6094 | #endif |
6095 | |
6096 | if (OperIs(GT_COPY, GT_RELOAD)) |
6097 | { |
6098 | return AsCopyOrReload()->GetRegCount(); |
6099 | } |
6100 | #endif // FEATURE_MULTIREG_RET |
    assert(!"GetMultiRegCount called with non-multireg node");
6102 | return 1; |
6103 | } |
6104 | |
6105 | //----------------------------------------------------------------------------------- |
6106 | // GetRegByIndex: Get a specific register, based on regIndex, that is produced |
6107 | // by this node. |
6108 | // |
6109 | // Arguments: |
6110 | // regIndex - which register to return (must be 0 for non-multireg nodes) |
6111 | // |
6112 | // Return Value: |
6113 | // The register, if any, assigned to this index for this node. |
6114 | // |
6115 | // Notes: |
6116 | // All targets that support multi-reg ops of any kind also support multi-reg return |
6117 | // values for calls. Should that change with a future target, this method will need |
6118 | // to change accordingly. |
6119 | // |
6120 | inline regNumber GenTree::GetRegByIndex(int regIndex) |
6121 | { |
6122 | if (regIndex == 0) |
6123 | { |
6124 | return gtRegNum; |
6125 | } |
6126 | |
6127 | #if FEATURE_MULTIREG_RET |
6128 | |
6129 | if (IsMultiRegCall()) |
6130 | { |
6131 | return AsCall()->GetRegNumByIdx(regIndex); |
6132 | } |
6133 | |
6134 | #if FEATURE_ARG_SPLIT |
6135 | if (OperIsPutArgSplit()) |
6136 | { |
6137 | return AsPutArgSplit()->GetRegNumByIdx(regIndex); |
6138 | } |
6139 | #endif |
6140 | #if !defined(_TARGET_64BIT_) |
6141 | if (OperIsMultiRegOp()) |
6142 | { |
6143 | return AsMultiRegOp()->GetRegNumByIdx(regIndex); |
6144 | } |
6145 | #endif |
6146 | |
6147 | if (OperIs(GT_COPY, GT_RELOAD)) |
6148 | { |
6149 | return AsCopyOrReload()->GetRegNumByIdx(regIndex); |
6150 | } |
6151 | #endif // FEATURE_MULTIREG_RET |
6152 | |
    assert(!"Invalid regIndex for GetRegByIndex");
6154 | return REG_NA; |
6155 | } |
6156 | |
6157 | //----------------------------------------------------------------------------------- |
6158 | // GetRegTypeByIndex: Get a specific register's type, based on regIndex, that is produced |
6159 | // by this multi-reg node. |
6160 | // |
6161 | // Arguments: |
6162 | // regIndex - which register type to return |
6163 | // |
6164 | // Return Value: |
6165 | // The register type assigned to this index for this node. |
6166 | // |
6167 | // Notes: |
6168 | // This must be a multireg node that is *not* a copy or reload (which must retrieve the |
6169 | // type from its source), and 'regIndex' must be a valid index for this node. |
6170 | // |
6171 | // All targets that support multi-reg ops of any kind also support multi-reg return |
6172 | // values for calls. Should that change with a future target, this method will need |
6173 | // to change accordingly. |
6174 | // |
6175 | inline var_types GenTree::GetRegTypeByIndex(int regIndex) |
6176 | { |
6177 | #if FEATURE_MULTIREG_RET |
6178 | if (IsMultiRegCall()) |
6179 | { |
        return AsCall()->GetReturnTypeDesc()->GetReturnRegType(regIndex);
6181 | } |
6182 | |
6183 | #if FEATURE_ARG_SPLIT |
6184 | if (OperIsPutArgSplit()) |
6185 | { |
6186 | return AsPutArgSplit()->GetRegType(regIndex); |
6187 | } |
6188 | #endif |
6189 | #if !defined(_TARGET_64BIT_) |
6190 | if (OperIsMultiRegOp()) |
6191 | { |
6192 | return AsMultiRegOp()->GetRegType(regIndex); |
6193 | } |
6194 | #endif |
6195 | |
6196 | #endif // FEATURE_MULTIREG_RET |
    assert(!"Invalid node type for GetRegTypeByIndex");
6198 | return TYP_UNDEF; |
6199 | } |
6200 | |
6201 | //------------------------------------------------------------------------- |
6202 | // IsCopyOrReload: whether this is a GT_COPY or GT_RELOAD node. |
6203 | // |
6204 | // Arguments: |
6205 | // None |
6206 | // |
6207 | // Return Value: |
6208 | // Returns true if this GenTree is a copy or reload node. |
6209 | inline bool GenTree::IsCopyOrReload() const |
6210 | { |
6211 | return (gtOper == GT_COPY || gtOper == GT_RELOAD); |
6212 | } |
6213 | |
6214 | //----------------------------------------------------------------------------------- |
6215 | // IsCopyOrReloadOfMultiRegCall: whether this is a GT_COPY or GT_RELOAD of a multi-reg |
6216 | // call node. |
6217 | // |
6218 | // Arguments: |
6219 | // None |
6220 | // |
6221 | // Return Value: |
6222 | // Returns true if this GenTree is a copy or reload of multi-reg call node. |
6223 | inline bool GenTree::IsCopyOrReloadOfMultiRegCall() const |
6224 | { |
6225 | if (IsCopyOrReload()) |
6226 | { |
6227 | return gtGetOp1()->IsMultiRegCall(); |
6228 | } |
6229 | |
6230 | return false; |
6231 | } |
6232 | |
6233 | inline bool GenTree::IsCnsIntOrI() const |
6234 | { |
6235 | return (gtOper == GT_CNS_INT); |
6236 | } |
6237 | |
6238 | inline bool GenTree::IsIntegralConst() const |
6239 | { |
6240 | #ifdef _TARGET_64BIT_ |
6241 | return IsCnsIntOrI(); |
6242 | #else // !_TARGET_64BIT_ |
6243 | return ((gtOper == GT_CNS_INT) || (gtOper == GT_CNS_LNG)); |
6244 | #endif // !_TARGET_64BIT_ |
6245 | } |
6246 | |
6247 | // Is this node an integer constant that fits in a 32-bit signed integer (INT32) |
6248 | inline bool GenTree::IsIntCnsFitsInI32() |
6249 | { |
6250 | #ifdef _TARGET_64BIT_ |
6251 | return IsCnsIntOrI() && AsIntCon()->FitsInI32(); |
6252 | #else // !_TARGET_64BIT_ |
6253 | return IsCnsIntOrI(); |
6254 | #endif // !_TARGET_64BIT_ |
6255 | } |
6256 | |
6257 | inline bool GenTree::IsCnsFltOrDbl() const |
6258 | { |
6259 | return OperGet() == GT_CNS_DBL; |
6260 | } |
6261 | |
6262 | inline bool GenTree::IsCnsNonZeroFltOrDbl() |
6263 | { |
6264 | if (OperGet() == GT_CNS_DBL) |
6265 | { |
6266 | double constValue = gtDblCon.gtDconVal; |
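        // Compare bit patterns rather than using "constValue != 0.0": -0.0 compares equal
        // to 0.0, but its bit pattern is non-zero, so it is treated as non-zero here
        // (presumably because codegen cannot materialize -0.0 the way it does +0.0).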
6267 | return *(__int64*)&constValue != 0; |
6268 | } |
6269 | |
6270 | return false; |
6271 | } |
6272 | |
6273 | inline bool GenTree::IsHelperCall() |
6274 | { |
6275 | return OperGet() == GT_CALL && gtCall.gtCallType == CT_HELPER; |
6276 | } |
6277 | |
6278 | inline var_types GenTree::CastFromType() |
6279 | { |
6280 | return this->gtCast.CastOp()->TypeGet(); |
6281 | } |
6282 | inline var_types& GenTree::CastToType() |
6283 | { |
6284 | return this->gtCast.gtCastType; |
6285 | } |
6286 | |
6287 | //----------------------------------------------------------------------------------- |
6288 | // HasGCPtr: determine whether this block op involves GC pointers |
6289 | // |
6290 | // Arguments: |
6291 | // None |
6292 | // |
6293 | // Return Value: |
6294 | // Returns true iff the object being copied contains one or more GC pointers. |
6295 | // |
6296 | // Notes: |
//    Of the block nodes, only GT_OBJ and GT_STORE_OBJ are allowed to have GC pointers.
6298 | // |
6299 | inline bool GenTreeBlk::HasGCPtr() |
6300 | { |
6301 | if ((gtOper == GT_OBJ) || (gtOper == GT_STORE_OBJ)) |
6302 | { |
6303 | return (AsObj()->gtGcPtrCount != 0); |
6304 | } |
6305 | return false; |
6306 | } |
6307 | |
6308 | inline bool GenTree::isUsedFromSpillTemp() const |
6309 | { |
6310 | // If spilled and no reg at use, then it is used from the spill temp location rather than being reloaded. |
6311 | if (((gtFlags & GTF_SPILLED) != 0) && ((gtFlags & GTF_NOREG_AT_USE) != 0)) |
6312 | { |
6313 | return true; |
6314 | } |
6315 | |
6316 | return false; |
6317 | } |
6318 | |
6319 | /*****************************************************************************/ |
6320 | |
6321 | #ifndef _HOST_64BIT_ |
6322 | #include <poppack.h> |
6323 | #endif |
6324 | |
6325 | /*****************************************************************************/ |
6326 | |
6327 | #if SMALL_TREE_NODES |
6328 | |
6329 | // In debug, on some platforms (e.g., when LATE_DISASM is defined), GenTreeIntCon is bigger than GenTreeLclFld. |
6330 | const size_t TREE_NODE_SZ_SMALL = max(sizeof(GenTreeIntCon), sizeof(GenTreeLclFld)); |
6331 | |
6332 | #endif // SMALL_TREE_NODES |
6333 | |
6334 | const size_t TREE_NODE_SZ_LARGE = sizeof(GenTreeCall); |
6335 | |
6336 | enum varRefKinds |
6337 | { |
6338 | VR_INVARIANT = 0x00, // an invariant value |
6339 | VR_NONE = 0x00, |
6340 | VR_IND_REF = 0x01, // an object reference |
6341 | VR_IND_SCL = 0x02, // a non-object reference |
6342 | VR_GLB_VAR = 0x04, // a global (clsVar) |
6343 | }; |
6344 | |
6345 | /*****************************************************************************/ |
6346 | #endif // !GENTREE_H |
6347 | /*****************************************************************************/ |
6348 | |