// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

#include "jitpch.h"
#ifdef _MSC_VER
#pragma hdrstop
#endif

// return op that is the store equivalent of the given load opcode
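// (e.g. storeForm(GT_LCL_VAR) yields GT_STORE_LCL_VAR)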
genTreeOps storeForm(genTreeOps loadForm)
{
    switch (loadForm)
    {
        case GT_LCL_VAR:
            return GT_STORE_LCL_VAR;
        case GT_LCL_FLD:
            return GT_STORE_LCL_FLD;
        default:
            noway_assert(!"not a data load opcode\n");
            unreached();
    }
}

// return op that is the addr equivalent of the given load opcode
genTreeOps addrForm(genTreeOps loadForm)
{
    switch (loadForm)
    {
        case GT_LCL_VAR:
            return GT_LCL_VAR_ADDR;
        case GT_LCL_FLD:
            return GT_LCL_FLD_ADDR;
        default:
            noway_assert(!"not a data load opcode\n");
            unreached();
    }
}

// return op that is the load equivalent of the given addr opcode
genTreeOps loadForm(genTreeOps addrForm)
{
    switch (addrForm)
    {
        case GT_LCL_VAR_ADDR:
            return GT_LCL_VAR;
        case GT_LCL_FLD_ADDR:
            return GT_LCL_FLD;
        default:
            noway_assert(!"not a local address opcode\n");
            unreached();
    }
}

// copy the flags determined by mask from src to dst
void copyFlags(GenTree* dst, GenTree* src, unsigned mask)
{
    dst->gtFlags &= ~mask;
    dst->gtFlags |= (src->gtFlags & mask);
}
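// (e.g. copyFlags(store, var, GTF_LIVENESS_MASK) copies only the liveness bits from var to store)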

// Rewrite a SIMD indirection as GT_IND(GT_LEA(obj.op1)), or as a simple
// lclVar if possible.
//
// Arguments:
//    use     - A use reference for a block node
//    keepBlk - True if this should remain a block node if it is not a lclVar
//
// Return Value:
//    None.
//
// TODO-1stClassStructs: These should be eliminated earlier, once we can handle
// lclVars in all the places that used to have GT_OBJ.
//
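// For example (a sketch; exact IR shapes vary):
//    GT_IND(GT_LCL_VAR_ADDR V02)   =>  GT_LCL_VAR V02   (when V02 is a SIMD-typed local)
//    GT_IND(GT_ADDR(GT_SIMD ...))  =>  GT_SIMD ...
//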
void Rationalizer::RewriteSIMDOperand(LIR::Use& use, bool keepBlk)
{
#ifdef FEATURE_SIMD
    // No lowering is needed for non-SIMD nodes, so early out if featureSIMD is not enabled.
    if (!comp->featureSIMD)
    {
        return;
    }

    GenTree* tree = use.Def();
    if (!tree->OperIsIndir())
    {
        return;
    }
    var_types simdType = tree->TypeGet();

    if (!varTypeIsSIMD(simdType))
    {
        return;
    }

    // If we have GT_IND(GT_LCL_VAR_ADDR) and the GT_LCL_VAR_ADDR is TYP_BYREF/TYP_I_IMPL,
    // and the var is a SIMD type, replace the expression by GT_LCL_VAR.
    GenTree* addr = tree->AsIndir()->Addr();
    if (addr->OperIsLocalAddr() && comp->isAddrOfSIMDType(addr))
    {
        BlockRange().Remove(tree);

        addr->SetOper(loadForm(addr->OperGet()));
        addr->gtType = simdType;
        use.ReplaceWith(comp, addr);
    }
    else if ((addr->OperGet() == GT_ADDR) && (addr->gtGetOp1()->OperIsSIMDorSimdHWintrinsic()))
    {
        // If we have GT_IND(GT_ADDR(GT_SIMD)), remove the GT_IND(GT_ADDR()), leaving just the GT_SIMD.
        BlockRange().Remove(tree);
        BlockRange().Remove(addr);

        use.ReplaceWith(comp, addr->gtGetOp1());
    }
    else if (!keepBlk)
    {
        tree->SetOper(GT_IND);
        tree->gtType = simdType;
    }
#endif // FEATURE_SIMD
}

// RewriteNodeAsCall : Replace the given tree node by a GT_CALL.
//
// Arguments:
//    use        - A pointer to the tree node's use edge
//    parents    - A stack of the node's ancestors, innermost first
//    callHnd    - The method handle of the call to be generated
//    entryPoint - The method entrypoint of the call to be generated
//    args       - The argument list of the call to be generated
//
// Return Value:
//    None.
//

void Rationalizer::RewriteNodeAsCall(GenTree**             use,
                                     ArrayStack<GenTree*>& parents,
                                     CORINFO_METHOD_HANDLE callHnd,
#ifdef FEATURE_READYTORUN_COMPILER
                                     CORINFO_CONST_LOOKUP entryPoint,
#endif
                                     GenTreeArgList* args)
{
    GenTree* const tree           = *use;
    GenTree* const treeFirstNode  = comp->fgGetFirstNode(tree);
    GenTree* const insertionPoint = treeFirstNode->gtPrev;

    BlockRange().Remove(treeFirstNode, tree);

    // Create the call node
    GenTreeCall* call = comp->gtNewCallNode(CT_USER_FUNC, callHnd, tree->gtType, args);

#if DEBUG
    CORINFO_SIG_INFO sig;
    comp->eeGetMethodSig(callHnd, &sig);
    assert(JITtype2varType(sig.retType) == tree->gtType);
#endif // DEBUG

#ifdef FEATURE_READYTORUN_COMPILER
    call->gtCall.setEntryPoint(entryPoint);
#endif

    call = comp->fgMorphArgs(call);

    // Replace "tree" with "call"
    if (parents.Height() > 1)
    {
        parents.Index(1)->ReplaceOperand(use, call);
    }
    else
    {
        // If there's no parent, the tree being replaced is the root of the
        // statement (and no special handling is necessary).
        *use = call;
    }

    comp->gtSetEvalOrder(call);
    BlockRange().InsertAfter(insertionPoint, LIR::Range(comp->fgSetTreeSeq(call), call));

    // Propagate flags of "call" to its parents.
    // 0 is current node, so start at 1
    for (int i = 1; i < parents.Height(); i++)
    {
        parents.Index(i)->gtFlags |= (call->gtFlags & GTF_ALL_EFFECT) | GTF_CALL;
    }

    // Since "tree" is replaced with "call", pop "tree" node (i.e., the current node)
    // and replace it with "call" on the parent stack.
    assert(parents.Top() == tree);
    (void)parents.Pop();
    parents.Push(call);
}

// RewriteIntrinsicAsUserCall : Rewrite an intrinsic operator as a GT_CALL to the original method.
//
// Arguments:
//    use     - A pointer to the intrinsic node's use edge
//    parents - A stack of the node's ancestors, innermost first
//
// Return Value:
//    None.
//
// Some intrinsics, such as Sqrt, are rewritten back into user calls; others are not.
// The ones that are not rewritten here must be handled in Codegen.
// Conceptually, Lowering would be the natural place to do this rewrite, but it is kept
// in Rationalization mainly for throughput reasons.
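//
// For example (a sketch; exact IR shapes vary):
//    GT_INTRINSIC Sqrt(x)  =>  GT_CALL System.Math.Sqrt(x)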

void Rationalizer::RewriteIntrinsicAsUserCall(GenTree** use, ArrayStack<GenTree*>& parents)
{
    GenTreeIntrinsic* intrinsic = (*use)->AsIntrinsic();

    GenTreeArgList* args;
    if (intrinsic->gtOp.gtOp2 == nullptr)
    {
        args = comp->gtNewArgList(intrinsic->gtGetOp1());
    }
    else
    {
        args = comp->gtNewArgList(intrinsic->gtGetOp1(), intrinsic->gtGetOp2());
    }

    RewriteNodeAsCall(use, parents, intrinsic->gtMethodHandle,
#ifdef FEATURE_READYTORUN_COMPILER
                      intrinsic->gtEntryPoint,
#endif
                      args);
}

#ifdef DEBUG

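// ValidateStatement: Check that the given statement's IR node links are intact.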
void Rationalizer::ValidateStatement(GenTree* tree, BasicBlock* block)
{
    assert(tree->gtOper == GT_STMT);
    DBEXEC(TRUE, JitTls::GetCompiler()->fgDebugCheckNodeLinks(block, tree));
}

// sanity checks that apply to all kinds of IR
void Rationalizer::SanityCheck()
{
    // TODO: assert(!IsLIR());
    BasicBlock* block;
    foreach_block(comp, block)
    {
        for (GenTree* statement = block->bbTreeList; statement != nullptr; statement = statement->gtNext)
        {
            ValidateStatement(statement, block);

            for (GenTree* tree = statement->gtStmt.gtStmtList; tree; tree = tree->gtNext)
            {
                // QMARK nodes should have been removed before this phase.
                assert(tree->OperGet() != GT_QMARK);

                if (tree->OperGet() == GT_ASG)
                {
                    if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
                    {
                        assert(tree->gtGetOp1()->gtFlags & GTF_VAR_DEF);
                    }
                    else if (tree->gtGetOp2()->OperGet() == GT_LCL_VAR)
                    {
                        assert(!(tree->gtGetOp2()->gtFlags & GTF_VAR_DEF));
                    }
                }
            }
        }
    }
}

void Rationalizer::SanityCheckRational()
{
    // TODO-Cleanup: check that the tree is rational here
    // then do normal checks
    SanityCheck();
}

#endif // DEBUG

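// Rewrite a GT_ASG(GT_LCL_VAR/GT_LCL_FLD, value) node in place into its
// GT_STORE_LCL_VAR/GT_STORE_LCL_FLD store form.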
static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment,
                                              GenTree*   location,
                                              GenTree*   value,
                                              genTreeOps locationOp)
{
    assert(assignment != nullptr);
    assert(assignment->OperGet() == GT_ASG);
    assert(location != nullptr);
    assert(value != nullptr);

    genTreeOps storeOp = storeForm(locationOp);

#ifdef DEBUG
    JITDUMP("rewriting asg(%s, X) to %s(X)\n", GenTree::OpName(locationOp), GenTree::OpName(storeOp));
#endif // DEBUG

    assignment->SetOper(storeOp);
    GenTreeLclVarCommon* store = assignment->AsLclVarCommon();

    GenTreeLclVarCommon* var = location->AsLclVarCommon();
    store->SetLclNum(var->gtLclNum);
    store->SetSsaNum(var->gtSsaNum);

    if (locationOp == GT_LCL_FLD)
    {
        store->gtLclFld.gtLclOffs  = var->gtLclFld.gtLclOffs;
        store->gtLclFld.gtFieldSeq = var->gtLclFld.gtFieldSeq;
    }

    copyFlags(store, var, GTF_LIVENESS_MASK);
    store->gtFlags &= ~GTF_REVERSE_OPS;

    store->gtType = var->TypeGet();
    store->gtOp1  = value;

    DISPNODE(store);
    JITDUMP("\n");
}

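// Rewrite the given assignment, whose location must be a local, into a store to that local.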
void Rationalizer::RewriteAssignmentIntoStoreLcl(GenTreeOp* assignment)
{
    assert(assignment != nullptr);
    assert(assignment->OperGet() == GT_ASG);

    GenTree* location = assignment->gtGetOp1();
    GenTree* value    = assignment->gtGetOp2();

    RewriteAssignmentIntoStoreLclCore(assignment, location, value, location->OperGet());
}

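// RewriteAssignment: Rewrite a GT_ASG node into the LIR store form appropriate to its
// location: a local store, a GT_STOREIND, or a block store node.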
void Rationalizer::RewriteAssignment(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeOp* assignment = use.Def()->AsOp();
    assert(assignment->OperGet() == GT_ASG);

    GenTree* location = assignment->gtGetOp1();
    GenTree* value    = assignment->gtGetOp2();

    genTreeOps locationOp = location->OperGet();

    if (assignment->OperIsBlkOp())
    {
#ifdef FEATURE_SIMD
        if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
        {
            if (location->OperGet() == GT_LCL_VAR)
            {
                var_types simdType = location->TypeGet();
                GenTree*  initVal  = assignment->gtOp.gtOp2;
                var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
                if (baseType != TYP_UNKNOWN)
                {
                    GenTreeSIMD* simdTree = new (comp, GT_SIMD)
                        GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
                    assignment->gtOp.gtOp2 = simdTree;
                    value                  = simdTree;
                    initVal->gtNext        = simdTree;
                    simdTree->gtPrev       = initVal;

                    simdTree->gtNext = location;
                    location->gtPrev = simdTree;
                }
            }
        }
#endif // FEATURE_SIMD
        if ((location->TypeGet() == TYP_STRUCT) && !assignment->IsPhiDefn() && !value->IsMultiRegCall())
        {
            if (location->OperGet() == GT_LCL_VAR)
            {
                // We need to construct a block node for the location.
                // Modify lcl to be the address form.
                location->SetOper(addrForm(locationOp));
                LclVarDsc* varDsc     = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
                location->gtType      = TYP_BYREF;
                GenTreeBlk*  storeBlk = nullptr;
                unsigned int size     = varDsc->lvExactSize;

                if (varDsc->lvStructGcCount != 0)
                {
                    CORINFO_CLASS_HANDLE structHnd = varDsc->lvVerTypeInfo.GetClassHandle();
                    GenTreeObj*          objNode   = comp->gtNewObjNode(structHnd, location)->AsObj();
                    unsigned int         slots     = roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE;

                    objNode->SetGCInfo(varDsc->lvGcLayout, varDsc->lvStructGcCount, slots);
                    objNode->ChangeOper(GT_STORE_OBJ);
                    objNode->SetData(value);
                    comp->fgMorphUnsafeBlk(objNode);
                    storeBlk = objNode;
                }
                else
                {
                    storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
                }
                storeBlk->gtFlags |= GTF_ASG;
                storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);

                GenTree* insertionPoint = location->gtNext;
                BlockRange().InsertBefore(insertionPoint, storeBlk);
                use.ReplaceWith(comp, storeBlk);
                BlockRange().Remove(assignment);
                JITDUMP("After transforming local struct assignment into a block op:\n");
                DISPTREERANGE(BlockRange(), use.Def());
                JITDUMP("\n");
                return;
            }
            else
            {
                assert(location->OperIsBlk());
            }
        }
    }

    switch (locationOp)
    {
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_PHI_ARG:
            RewriteAssignmentIntoStoreLclCore(assignment, location, value, locationOp);
            BlockRange().Remove(location);
            break;

        case GT_IND:
        {
            GenTreeStoreInd* store =
                new (comp, GT_STOREIND) GenTreeStoreInd(location->TypeGet(), location->gtGetOp1(), value);

            copyFlags(store, assignment, GTF_ALL_EFFECT);
            copyFlags(store, location, GTF_IND_FLAGS);

            // TODO: JIT dump

            // Remove the GT_IND node and replace the assignment node with the store
            BlockRange().Remove(location);
            BlockRange().InsertBefore(assignment, store);
            use.ReplaceWith(comp, store);
            BlockRange().Remove(assignment);
        }
        break;

        case GT_CLS_VAR:
        {
            location->SetOper(GT_CLS_VAR_ADDR);
            location->gtType = TYP_BYREF;

            assignment->SetOper(GT_STOREIND);
            assignment->AsStoreInd()->SetRMWStatusDefault();

            // TODO: JIT dump
        }
        break;

        case GT_BLK:
        case GT_OBJ:
        case GT_DYN_BLK:
        {
            assert(varTypeIsStruct(location));
            GenTreeBlk* storeBlk = location->AsBlk();
            genTreeOps  storeOper;
            switch (location->gtOper)
            {
                case GT_BLK:
                    storeOper = GT_STORE_BLK;
                    break;
                case GT_OBJ:
                    storeOper = GT_STORE_OBJ;
                    break;
                case GT_DYN_BLK:
                    storeOper                             = GT_STORE_DYN_BLK;
                    storeBlk->AsDynBlk()->gtEvalSizeFirst = false;
                    break;
                default:
                    unreached();
            }
            JITDUMP("Rewriting GT_ASG(%s(X), Y) to %s(X,Y):\n", GenTree::OpName(location->gtOper),
                    GenTree::OpName(storeOper));
            storeBlk->SetOperRaw(storeOper);
            storeBlk->gtFlags &= ~GTF_DONT_CSE;
            storeBlk->gtFlags |=
                (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED | GTF_DONT_CSE));
            storeBlk->gtBlk.Data() = value;

            // Remove the block node from its current position and replace the assignment node with it
            // (now in its store form).
            BlockRange().Remove(storeBlk);
            BlockRange().InsertBefore(assignment, storeBlk);
            use.ReplaceWith(comp, storeBlk);
            BlockRange().Remove(assignment);
            DISPTREERANGE(BlockRange(), use.Def());
            JITDUMP("\n");
        }
        break;

        default:
            unreached();
            break;
    }
}

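// RewriteAddress: Rewrite a GT_ADDR node into the address form of its location.
//
// For example (a sketch; exact IR shapes vary):
//    GT_ADDR(GT_LCL_VAR V02)  =>  GT_LCL_VAR_ADDR V02
//    GT_ADDR(GT_IND(X))       =>  X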
void Rationalizer::RewriteAddress(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeUnOp* address = use.Def()->AsUnOp();
    assert(address->OperGet() == GT_ADDR);

    GenTree*   location   = address->gtGetOp1();
    genTreeOps locationOp = location->OperGet();

    if (location->IsLocal())
    {
        // We are changing the child from GT_LCL_VAR to GT_LCL_VAR_ADDR.
        // Therefore the gtType of the child needs to be changed to TYP_BYREF.
#ifdef DEBUG
        if (locationOp == GT_LCL_VAR)
        {
            JITDUMP("Rewriting GT_ADDR(GT_LCL_VAR) to GT_LCL_VAR_ADDR:\n");
        }
        else
        {
            assert(locationOp == GT_LCL_FLD);
            JITDUMP("Rewriting GT_ADDR(GT_LCL_FLD) to GT_LCL_FLD_ADDR:\n");
        }
#endif // DEBUG

        location->SetOper(addrForm(locationOp));
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);
    }
    else if (locationOp == GT_CLS_VAR)
    {
        location->SetOper(GT_CLS_VAR_ADDR);
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_CLS_VAR) to GT_CLS_VAR_ADDR:\n");
    }
    else if (location->OperIsIndir())
    {
        use.ReplaceWith(comp, location->gtGetOp1());
        BlockRange().Remove(location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_IND(X)) to X:\n");
    }

    DISPTREERANGE(BlockRange(), use.Def());
    JITDUMP("\n");
}

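// RewriteNode: Rewrite the given node into its LIR form as part of the post-order
// tree walk, removing any interior list and comma plumbing along the way.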
Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, ArrayStack<GenTree*>& parentStack)
{
    assert(useEdge != nullptr);

    GenTree* node = *useEdge;
    assert(node != nullptr);

#ifdef DEBUG
    const bool isLateArg = (node->gtFlags & GTF_LATE_ARG) != 0;
#endif

    // First, remove any preceding list nodes, which are not otherwise visited by the tree walk.
    //
    // NOTE: GT_FIELD_LIST head nodes and GT_LIST nodes used by phi nodes will in fact be visited.
    for (GenTree* prev = node->gtPrev; prev != nullptr && prev->OperIsAnyList() && !(prev->OperIsFieldListHead());
         prev = node->gtPrev)
    {
        prev->gtFlags &= ~GTF_REVERSE_OPS;
        BlockRange().Remove(prev);
    }

    // Now clear the REVERSE_OPS flag on the current node.
    node->gtFlags &= ~GTF_REVERSE_OPS;

    // In addition, remove the current node if it is a GT_LIST node that is not a GT_FIELD_LIST head.
    if (node->OperIsAnyList())
    {
        GenTreeArgList* list = node->AsArgList();
        if (!list->OperIsFieldListHead())
        {
            BlockRange().Remove(list);
        }
        return Compiler::WALK_CONTINUE;
    }

    LIR::Use use;
    if (parentStack.Height() < 2)
    {
        use = LIR::Use::GetDummyUse(BlockRange(), *useEdge);
    }
    else
    {
        use = LIR::Use(BlockRange(), useEdge, parentStack.Index(1));
    }

    assert(node == use.Def());
    switch (node->OperGet())
    {
        case GT_ASG:
            RewriteAssignment(use);
            break;

        case GT_BOX:
            // GT_BOX at this level just passes through so get rid of it
            use.ReplaceWith(comp, node->gtGetOp1());
            BlockRange().Remove(node);
            break;

        case GT_ADDR:
            RewriteAddress(use);
            break;

        case GT_IND:
            // Clear the `GTF_IND_ASG_LHS` flag, which overlaps with `GTF_IND_REQ_ADDR_IN_REG`.
            node->gtFlags &= ~GTF_IND_ASG_LHS;

            if (varTypeIsSIMD(node))
            {
                RewriteSIMDOperand(use, false);
            }
            else
            {
                // Due to promotion of structs containing fields of type struct with a
                // single scalar type field, we could potentially see IR nodes of the
                // form GT_IND(GT_ADD(lclvarAddr, 0)) where 0 is an offset representing
                // a field-seq. These get folded here.
                //
                // TODO: This code can be removed once JIT implements recursive struct
                // promotion instead of lying about the type of struct field as the type
                // of its single scalar field.
                GenTree* addr = node->AsIndir()->Addr();
                if (addr->OperGet() == GT_ADD && addr->gtGetOp1()->OperGet() == GT_LCL_VAR_ADDR &&
                    addr->gtGetOp2()->IsIntegralConst(0))
                {
                    GenTreeLclVarCommon* lclVarNode = addr->gtGetOp1()->AsLclVarCommon();
                    unsigned             lclNum     = lclVarNode->GetLclNum();
                    LclVarDsc*           varDsc     = comp->lvaTable + lclNum;
                    if (node->TypeGet() == varDsc->TypeGet())
                    {
                        JITDUMP("Rewriting GT_IND(GT_ADD(LCL_VAR_ADDR,0)) to LCL_VAR\n");
                        lclVarNode->SetOper(GT_LCL_VAR);
                        lclVarNode->gtType = node->TypeGet();
                        use.ReplaceWith(comp, lclVarNode);
                        BlockRange().Remove(addr);
                        BlockRange().Remove(addr->gtGetOp2());
                        BlockRange().Remove(node);
                    }
                }
            }
            break;

        case GT_NOP:
            // fgMorph sometimes inserts NOP nodes between defs and uses
            // supposedly 'to prevent constant folding'. In this case, remove the
            // NOP.
            if (node->gtGetOp1() != nullptr)
            {
                use.ReplaceWith(comp, node->gtGetOp1());
                BlockRange().Remove(node);
                node = node->gtGetOp1();
            }
            break;

        case GT_COMMA:
        {
            GenTree*           op1         = node->gtGetOp1();
            bool               isClosed    = false;
            unsigned           sideEffects = 0;
            LIR::ReadOnlyRange lhsRange    = BlockRange().GetTreeRange(op1, &isClosed, &sideEffects);

            if ((sideEffects & GTF_ALL_EFFECT) == 0)
            {
                // The LHS has no side effects. Remove it.
                // None of the transforms performed herein violate tree order, so isClosed
                // should always be true.
                assert(isClosed);

                BlockRange().Delete(comp, m_block, std::move(lhsRange));
            }
            else if (op1->IsValue())
            {
                op1->SetUnusedValue();
            }

            BlockRange().Remove(node);

            GenTree* replacement = node->gtGetOp2();
            if (!use.IsDummyUse())
            {
                use.ReplaceWith(comp, replacement);
                node = replacement;
            }
            else
            {
                // This is a top-level comma. If the RHS has no side effects we can remove
                // it as well.
                bool               isClosed    = false;
                unsigned           sideEffects = 0;
                LIR::ReadOnlyRange rhsRange    = BlockRange().GetTreeRange(replacement, &isClosed, &sideEffects);

                if ((sideEffects & GTF_ALL_EFFECT) == 0)
                {
                    // None of the transforms performed herein violate tree order, so isClosed
                    // should always be true.
                    assert(isClosed);

                    BlockRange().Delete(comp, m_block, std::move(rhsRange));
                }
                else
                {
                    node = replacement;
                }
            }
        }
        break;

        case GT_ARGPLACE:
            // Remove argplace and list nodes from the execution order.
            //
            // TODO: remove phi args and phi nodes as well?
            BlockRange().Remove(node);
            break;

#if defined(_TARGET_XARCH_) || defined(_TARGET_ARM_)
        case GT_CLS_VAR:
        {
            // Class vars that are the target of an assignment will get rewritten into
            // GT_STOREIND(GT_CLS_VAR_ADDR, val) by RewriteAssignment. This check is
            // not strictly necessary--the GT_IND(GT_CLS_VAR_ADDR) pattern that would
            // otherwise be generated would also be picked up by RewriteAssignment--but
            // skipping the rewrite here saves an allocation and a bit of extra work.
            const bool isLHSOfAssignment = (use.User()->OperGet() == GT_ASG) && (use.User()->gtGetOp1() == node);
            if (!isLHSOfAssignment)
            {
                GenTree* ind = comp->gtNewOperNode(GT_IND, node->TypeGet(), node);

                node->SetOper(GT_CLS_VAR_ADDR);
                node->gtType = TYP_BYREF;

                BlockRange().InsertAfter(node, ind);
                use.ReplaceWith(comp, ind);

                // TODO: JIT dump
            }
        }
        break;
#endif // _TARGET_XARCH_ || _TARGET_ARM_

        case GT_INTRINSIC:
            // Non-target intrinsics should have already been rewritten back into user calls.
            assert(comp->IsTargetIntrinsic(node->gtIntrinsic.gtIntrinsicId));
            break;

#ifdef FEATURE_SIMD
        case GT_BLK:
        case GT_OBJ:
        {
            // TODO-1stClassStructs: These should have been transformed to GT_INDs, but in order
            // to preserve existing behavior, we will keep this as a block node if this is the
            // lhs of a block assignment, and either:
            // - It is a "generic" TYP_STRUCT assignment, OR
            // - It is an initblk, OR
            // - Neither the lhs nor the rhs is known to be of SIMD type.

            GenTree* parent  = use.User();
            bool     keepBlk = false;
            if ((parent->OperGet() == GT_ASG) && (node == parent->gtGetOp1()))
            {
                if ((node->TypeGet() == TYP_STRUCT) || parent->OperIsInitBlkOp())
                {
                    keepBlk = true;
                }
                else if (!comp->isAddrOfSIMDType(node->AsBlk()->Addr()))
                {
                    GenTree* dataSrc = parent->gtGetOp2();
                    if (!dataSrc->IsLocal() && (dataSrc->OperGet() != GT_SIMD) && (!dataSrc->OperIsHWIntrinsic()))
                    {
                        noway_assert(dataSrc->OperIsIndir());
                        keepBlk = !comp->isAddrOfSIMDType(dataSrc->AsIndir()->Addr());
                    }
                }
            }
            RewriteSIMDOperand(use, keepBlk);
        }
        break;

        case GT_SIMD:
        {
            noway_assert(comp->featureSIMD);
            GenTreeSIMD* simdNode = node->AsSIMD();
            unsigned     simdSize = simdNode->gtSIMDSize;
            var_types    simdType = comp->getSIMDTypeForSize(simdSize);

            // TODO-1stClassStructs: This should be handled more generally for enregistered or promoted
            // structs that are passed or returned in a different register type than their enregistered
            // type(s).
            if (simdNode->gtType == TYP_I_IMPL && simdNode->gtSIMDSize == TARGET_POINTER_SIZE)
            {
                // This happens when it is consumed by a GT_RET_EXPR.
                // It can only be a Vector2f or Vector2i.
                assert(genTypeSize(simdNode->gtSIMDBaseType) == 4);
                simdNode->gtType = TYP_SIMD8;
            }
            // Certain SIMD trees require rationalizing.
            if (simdNode->gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInitArray)
            {
                // Rewrite this as an explicit load.
                JITDUMP("Rewriting GT_SIMD array init as an explicit load:\n");
                unsigned int baseTypeSize = genTypeSize(simdNode->gtSIMDBaseType);
                GenTree* address = new (comp, GT_LEA) GenTreeAddrMode(TYP_BYREF, simdNode->gtOp1, simdNode->gtOp2,
                                                                      baseTypeSize, OFFSETOF__CORINFO_Array__data);
                GenTree* ind = comp->gtNewOperNode(GT_IND, simdType, address);

                BlockRange().InsertBefore(simdNode, address, ind);
                use.ReplaceWith(comp, ind);
                BlockRange().Remove(simdNode);

                DISPTREERANGE(BlockRange(), use.Def());
                JITDUMP("\n");
            }
            else
            {
                // This code depends on the fact that NONE of the SIMD intrinsics take vector operands
                // of a different width. If that assumption changes, we will EITHER have to make these type
                // transformations during importation, and plumb the types all the way through the JIT,
                // OR add a lot of special handling here.
                GenTree* op1 = simdNode->gtGetOp1();
                if (op1 != nullptr && op1->gtType == TYP_STRUCT)
                {
                    op1->gtType = simdType;
                }

                GenTree* op2 = simdNode->gtGetOp2IfPresent();
                if (op2 != nullptr && op2->gtType == TYP_STRUCT)
                {
                    op2->gtType = simdType;
                }
            }
        }
        break;
#endif // FEATURE_SIMD

        default:
            // These nodes should not be present in HIR.
            assert(!node->OperIs(GT_CMP, GT_SETCC, GT_JCC, GT_JCMP, GT_LOCKADD));
            break;
    }

    // Do some extra processing on top-level nodes to remove unused local reads.
    if (node->OperIsLocalRead())
    {
        if (use.IsDummyUse())
        {
            BlockRange().Remove(node);
        }
        else
        {
            // Local reads are side-effect-free; clear any flags leftover from frontend transformations.
            node->gtFlags &= ~GTF_ALL_EFFECT;
        }
    }
    else
    {
        if (!node->OperIsStore())
        {
            // Clear the GTF_ASG flag for all nodes but stores
            node->gtFlags &= ~GTF_ASG;
        }

        if (!node->IsCall())
        {
            // Clear the GTF_CALL flag for all nodes but calls
            node->gtFlags &= ~GTF_CALL;
        }

        if (node->IsValue() && use.IsDummyUse())
        {
            node->SetUnusedValue();
        }

        if (node->TypeGet() == TYP_LONG)
        {
            comp->compLongUsed = true;
        }
    }

    assert(isLateArg == ((use.Def()->gtFlags & GTF_LATE_ARG) != 0));

    return Compiler::WALK_CONTINUE;
}

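// DoPhase: Run the rationalization phase: rewrite each basic block's statement-based
// HIR into a linear stream of LIR nodes, rewriting HIR-only nodes along the way.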
void Rationalizer::DoPhase()
{
    class RationalizeVisitor final : public GenTreeVisitor<RationalizeVisitor>
    {
        Rationalizer& m_rationalizer;

    public:
        enum
        {
            ComputeStack      = true,
            DoPreOrder        = true,
            DoPostOrder       = true,
            UseExecutionOrder = true,
        };

        RationalizeVisitor(Rationalizer& rationalizer)
            : GenTreeVisitor<RationalizeVisitor>(rationalizer.comp), m_rationalizer(rationalizer)
        {
        }

        // Rewrite intrinsics that are not supported by the target back into user calls.
        // This needs to be done before the transition to LIR because it relies on the use
        // of fgMorphArgs, which is designed to operate on HIR. Once this is done for a
        // particular statement, link that statement's nodes into the current basic block.
        fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
        {
            GenTree* const node = *use;
            if (node->OperGet() == GT_INTRINSIC &&
                m_rationalizer.comp->IsIntrinsicImplementedByUserCall(node->gtIntrinsic.gtIntrinsicId))
            {
                m_rationalizer.RewriteIntrinsicAsUserCall(use, this->m_ancestors);
            }

            return Compiler::WALK_CONTINUE;
        }

        // Rewrite HIR nodes into LIR nodes.
        fgWalkResult PostOrderVisit(GenTree** use, GenTree* user)
        {
            return m_rationalizer.RewriteNode(use, this->m_ancestors);
        }
    };

    DBEXEC(TRUE, SanityCheck());

    comp->compCurBB = nullptr;
    comp->fgOrder   = Compiler::FGOrderLinear;

    RationalizeVisitor visitor(*this);
    for (BasicBlock* block = comp->fgFirstBB; block != nullptr; block = block->bbNext)
    {
        comp->compCurBB = block;
        m_block         = block;

        GenTreeStmt* firstStatement = block->firstStmt();
        block->MakeLIR(nullptr, nullptr);

        // Establish the first and last nodes for the block. This is necessary in order for the LIR
        // utilities that hang off the BasicBlock type to work correctly.
        if (firstStatement == nullptr)
        {
            // No statements in this block; skip it.
            continue;
        }

        for (GenTreeStmt *statement = firstStatement, *nextStatement; statement != nullptr; statement = nextStatement)
        {
            assert(statement->gtStmtList != nullptr);
            assert(statement->gtStmtList->gtPrev == nullptr);
            assert(statement->gtStmtExpr != nullptr);
            assert(statement->gtStmtExpr->gtNext == nullptr);

            BlockRange().InsertAtEnd(LIR::Range(statement->gtStmtList, statement->gtStmtExpr));

            nextStatement     = statement->getNextStmt();
            statement->gtNext = nullptr;
            statement->gtPrev = nullptr;

            // If this statement has correct offset information, change it into an IL offset
            // node and insert it into the LIR.
            if (statement->gtStmtILoffsx != BAD_IL_OFFSET)
            {
                assert(!statement->IsPhiDefnStmt());
                statement->SetOper(GT_IL_OFFSET);

                BlockRange().InsertBefore(statement->gtStmtList, statement);
            }

            m_block = block;
            visitor.WalkTree(&statement->gtStmtExpr, nullptr);
        }

        assert(BlockRange().CheckLIR(comp, true));
    }

    comp->compRationalIRForm = true;
}