1 | // Licensed to the .NET Foundation under one or more agreements. |
2 | // The .NET Foundation licenses this file to you under the MIT license. |
3 | // See the LICENSE file in the project root for more information. |
4 | |
5 | /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
6 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
XX                                                                           XX
XX                                  GSChecks                                 XX
XX                                                                           XX
10 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
11 | XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX |
12 | */ |
13 | |
14 | #include "jitpch.h" |
15 | #ifdef _MSC_VER |
16 | #pragma hdrstop |
17 | #endif |
18 | |
19 | /***************************************************************************** |
20 | * gsGSChecksInitCookie |
21 | * Grabs the cookie for detecting overflow of unsafe buffers. |
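 *
 * The cookie itself is consumed later by codegen, not in this file: roughly, the
 * prolog stores the cookie value into the lvaGSSecurityCookie slot, and the epilog
 * re-checks it before returning. An illustrative sketch (not the actual emitted code;
 * the failure-path name below is hypothetical):
 *
 *     frame.cookie = global_cookie;         // prolog
 *     ...                                   // method body; an overrun of an unsafe
 *                                           // buffer on this frame clobbers the cookie
 *     if (frame.cookie != global_cookie)    // epilog check
 *         ReportGSFailureAndAbort();        // hypothetical name for the failure path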
22 | */ |
23 | void Compiler::gsGSChecksInitCookie() |
24 | { |
25 | var_types type = TYP_I_IMPL; |
26 | |
    lvaGSSecurityCookie = lvaGrabTemp(false DEBUGARG("GSSecurityCookie"));
28 | |
29 | // Prevent cookie init/check from being optimized |
30 | lvaSetVarAddrExposed(lvaGSSecurityCookie); |
31 | lvaTable[lvaGSSecurityCookie].lvType = type; |
32 | |
33 | info.compCompHnd->getGSCookie(&gsGlobalSecurityCookieVal, &gsGlobalSecurityCookieAddr); |
34 | } |
35 | |
36 | const unsigned NO_SHADOW_COPY = UINT_MAX; |
37 | |
38 | /***************************************************************************** |
39 | * gsCopyShadowParams |
40 | * The current function has an unsafe buffer on the stack. Search for vulnerable |
41 | * parameters which could be used to modify a code address and take over the process |
42 | * in the case of a buffer overrun. Create a safe local copy for each vulnerable parameter, |
 * which will be allocated below the unsafe buffer. Change uses of the param to the
44 | * shadow copy. |
45 | * |
46 | * A pointer under indirection is considered vulnerable. A malicious user could read from |
47 | * protected memory or write to it. If a parameter is assigned/computed into another variable, |
48 | * and is a pointer (i.e., under indirection), then we consider the variable to be part of the |
49 | * equivalence class with the parameter. All parameters in the equivalence class are shadowed. |
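 *
 * For illustration only (a hypothetical source fragment, not from this codebase):
 *
 *     void f(char* p, size_t n)
 *     {
 *         char buf[8];        // unsafe buffer on the stack
 *         char* q = p;        // q and p end up in the same assign group
 *         *q = 0;             // q is dereferenced, so the whole group becomes lvIsPtr
 *         memcpy(buf, p, n);  // a potential overrun of buf
 *     }
 *
 * Here p gets a shadow copy allocated below buf, and uses of p are redirected to the
 * copy, so an overrun of buf cannot change where the store through q/p goes.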
50 | */ |
51 | void Compiler::gsCopyShadowParams() |
52 | { |
53 | if (info.compIsVarArgs) |
54 | { |
55 | return; |
56 | } |
57 | |
58 | // Allocate array for shadow param info |
59 | gsShadowVarInfo = new (this, CMK_Unknown) ShadowParamVarInfo[lvaCount](); |
60 | |
    // Find groups of variables assigned to each other, track variables
    // that are dereferenced, and mark them as ptrs.
    // Look for assignments to *p, and for ptrs passed to functions.
64 | if (gsFindVulnerableParams()) |
65 | { |
66 | // Replace vulnerable params by shadow copies. |
67 | gsParamsToShadows(); |
68 | } |
69 | } |
70 | |
71 | // This struct tracks how a tree is being used |
72 | |
73 | struct MarkPtrsInfo |
74 | { |
75 | Compiler* comp; |
76 | unsigned lvAssignDef; // Which local variable is the tree being assigned to? |
77 | bool isAssignSrc; // Is this the source value for an assignment? |
78 | bool isUnderIndir; // Is this a pointer value tree that is being dereferenced? |
79 | bool skipNextNode; // Skip a single node during the tree-walk |
80 | |
81 | #ifdef DEBUG |
82 | void Print() |
83 | { |
84 | printf( |
            "[MarkPtrsInfo] = {comp = %p, lvAssignDef = %d, isAssignSrc = %d, isUnderIndir = %d, skipNextNode = %d}\n",
86 | comp, lvAssignDef, isAssignSrc, isUnderIndir, skipNextNode); |
87 | } |
88 | #endif |
89 | }; |
90 | |
91 | /***************************************************************************** |
92 | * gsMarkPtrsAndAssignGroups |
93 | * Walk a tree looking for assignment groups, variables whose value is used |
 * in a *p store or use, and variables passed to calls. This info is then used
 * to determine parameters which are vulnerable.
 * This function carries state to know whether it is under an assign node, call node
 * or indirection node. It starts a new tree walk for its subtrees when the state
 * changes.
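 *
 * A rough sketch of the state handling, using hypothetical IR shapes and local numbers:
 *
 *     ASG(LCL_VAR V02, LCL_VAR V01)    // src walked with isAssignSrc/lvAssignDef set:
 *                                      //   V01 and V02 join the same assign group
 *     ASG(IND(LCL_VAR V02), CNS_INT 0) // dst walked with isUnderIndir set:
 *                                      //   V02 gets lvIsPtr = 1
 *     CALL ind (via LCL_VAR V03)       // the address of an indirect call is walked
 *                                      //   with isUnderIndir set as well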
99 | */ |
100 | Compiler::fgWalkResult Compiler::gsMarkPtrsAndAssignGroups(GenTree** pTree, fgWalkData* data) |
101 | { |
102 | struct MarkPtrsInfo* pState = (MarkPtrsInfo*)data->pCallbackData; |
103 | struct MarkPtrsInfo newState = *pState; |
104 | Compiler* comp = data->compiler; |
105 | GenTree* tree = *pTree; |
106 | ShadowParamVarInfo* shadowVarInfo = pState->comp->gsShadowVarInfo; |
107 | assert(shadowVarInfo); |
108 | bool fIsBlk = false; |
109 | unsigned lclNum; |
110 | |
111 | assert(!pState->isAssignSrc || pState->lvAssignDef != (unsigned)-1); |
112 | |
113 | if (pState->skipNextNode) |
114 | { |
115 | pState->skipNextNode = false; |
116 | return WALK_CONTINUE; |
117 | } |
118 | |
119 | switch (tree->OperGet()) |
120 | { |
121 | // Indirections - look for *p uses and defs |
122 | case GT_IND: |
123 | case GT_OBJ: |
124 | case GT_ARR_ELEM: |
125 | case GT_ARR_INDEX: |
126 | case GT_ARR_OFFSET: |
127 | case GT_FIELD: |
128 | |
129 | newState.isUnderIndir = true; |
130 | { |
                newState.skipNextNode = true; // Skip the indirection node itself on the recursive walk;
                                              // only its operands matter here.
132 | comp->fgWalkTreePre(&tree, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
133 | } |
134 | |
135 | return WALK_SKIP_SUBTREES; |
136 | |
137 | // local vars and param uses |
138 | case GT_LCL_VAR: |
139 | case GT_LCL_FLD: |
140 | lclNum = tree->gtLclVarCommon.gtLclNum; |
141 | |
142 | if (pState->isUnderIndir) |
143 | { |
144 | // The variable is being dereferenced for a read or a write. |
145 | comp->lvaTable[lclNum].lvIsPtr = 1; |
146 | } |
147 | |
148 | if (pState->isAssignSrc) |
149 | { |
150 | // |
151 | // Add lvAssignDef and lclNum to a common assign group |
152 | if (shadowVarInfo[pState->lvAssignDef].assignGroup) |
153 | { |
154 | if (shadowVarInfo[lclNum].assignGroup) |
155 | { |
                        // OR both bit vectors
157 | shadowVarInfo[pState->lvAssignDef].assignGroup->bitVectOr(shadowVarInfo[lclNum].assignGroup); |
158 | } |
159 | else |
160 | { |
161 | shadowVarInfo[pState->lvAssignDef].assignGroup->bitVectSet(lclNum); |
162 | } |
163 | |
164 | // Point both to the same bit vector |
165 | shadowVarInfo[lclNum].assignGroup = shadowVarInfo[pState->lvAssignDef].assignGroup; |
166 | } |
167 | else if (shadowVarInfo[lclNum].assignGroup) |
168 | { |
169 | shadowVarInfo[lclNum].assignGroup->bitVectSet(pState->lvAssignDef); |
170 | |
171 | // Point both to the same bit vector |
172 | shadowVarInfo[pState->lvAssignDef].assignGroup = shadowVarInfo[lclNum].assignGroup; |
173 | } |
174 | else |
175 | { |
176 | FixedBitVect* bv = FixedBitVect::bitVectInit(pState->comp->lvaCount, pState->comp); |
177 | |
                    // Neither shadowVarInfo[pState->lvAssignDef].assignGroup nor
                    // shadowVarInfo[lclNum].assignGroup is set yet. Make a new group.
180 | shadowVarInfo[pState->lvAssignDef].assignGroup = bv; |
181 | shadowVarInfo[lclNum].assignGroup = bv; |
182 | bv->bitVectSet(pState->lvAssignDef); |
183 | bv->bitVectSet(lclNum); |
184 | } |
185 | } |
186 | return WALK_CONTINUE; |
187 | |
188 | // Calls - Mark arg variables |
189 | case GT_CALL: |
190 | |
191 | newState.isUnderIndir = false; |
192 | newState.isAssignSrc = false; |
193 | { |
194 | if (tree->gtCall.gtCallObjp) |
195 | { |
196 | newState.isUnderIndir = true; |
197 | comp->fgWalkTreePre(&tree->gtCall.gtCallObjp, gsMarkPtrsAndAssignGroups, (void*)&newState); |
198 | } |
199 | |
200 | for (GenTreeArgList* args = tree->gtCall.gtCallArgs; args; args = args->Rest()) |
201 | { |
202 | comp->fgWalkTreePre(&args->Current(), gsMarkPtrsAndAssignGroups, (void*)&newState); |
203 | } |
204 | for (GenTreeArgList* args = tree->gtCall.gtCallLateArgs; args; args = args->Rest()) |
205 | { |
206 | comp->fgWalkTreePre(&args->Current(), gsMarkPtrsAndAssignGroups, (void*)&newState); |
207 | } |
208 | |
209 | if (tree->gtCall.gtCallType == CT_INDIRECT) |
210 | { |
211 | newState.isUnderIndir = true; |
212 | |
213 | // A function pointer is treated like a write-through pointer since |
214 | // it controls what code gets executed, and so indirectly can cause |
215 | // a write to memory. |
216 | comp->fgWalkTreePre(&tree->gtCall.gtCallAddr, gsMarkPtrsAndAssignGroups, (void*)&newState); |
217 | } |
218 | } |
219 | return WALK_SKIP_SUBTREES; |
220 | |
221 | case GT_ADDR: |
222 | newState.isUnderIndir = false; |
223 | // We'll assume p in "**p = " can be vulnerable because by changing 'p', someone |
224 | // could control where **p stores to. |
225 | { |
226 | comp->fgWalkTreePre(&tree->gtOp.gtOp1, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
227 | } |
228 | return WALK_SKIP_SUBTREES; |
229 | |
230 | default: |
231 | // Assignments - track assign groups and *p defs. |
232 | if (tree->OperIs(GT_ASG)) |
233 | { |
234 | bool isLocVar; |
235 | bool isLocFld; |
236 | |
237 | if (tree->OperIsBlkOp()) |
238 | { |
239 | // Blk assignments are always handled as if they have implicit indirections. |
240 | // TODO-1stClassStructs: improve this. |
241 | newState.isUnderIndir = true; |
242 | comp->fgWalkTreePre(&tree->gtOp.gtOp1, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
243 | |
244 | if (tree->OperIsInitBlkOp()) |
245 | { |
246 | newState.isUnderIndir = false; |
247 | } |
248 | comp->fgWalkTreePre(&tree->gtOp.gtOp2, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
249 | } |
250 | else |
251 | { |
252 | // Walk dst side |
253 | comp->fgWalkTreePre(&tree->gtOp.gtOp1, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
254 | |
255 | // Now handle src side |
256 | isLocVar = tree->gtOp.gtOp1->OperGet() == GT_LCL_VAR; |
257 | isLocFld = tree->gtOp.gtOp1->OperGet() == GT_LCL_FLD; |
258 | |
259 | if ((isLocVar || isLocFld) && tree->gtOp.gtOp2) |
260 | { |
261 | lclNum = tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum; |
262 | newState.lvAssignDef = lclNum; |
263 | newState.isAssignSrc = true; |
264 | } |
265 | |
266 | comp->fgWalkTreePre(&tree->gtOp.gtOp2, comp->gsMarkPtrsAndAssignGroups, (void*)&newState); |
267 | } |
268 | |
269 | return WALK_SKIP_SUBTREES; |
270 | } |
271 | } |
272 | |
273 | return WALK_CONTINUE; |
274 | } |
275 | |
276 | /***************************************************************************** |
277 | * gsFindVulnerableParams |
278 | * Walk all the trees looking for ptrs, args, assign groups, *p stores, etc. |
279 | * Then use that info to figure out vulnerable pointers. |
280 | * |
 * It returns true if it found at least one vulnerable pointer parameter that
282 | * needs to be shadow-copied. |
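 *
 * A small worked example (hypothetical locals):
 *
 *     V01 (param)  lvIsPtr = 0   assignGroup = {V01, V02, V05}
 *     V02          lvIsPtr = 1   assignGroup = {V01, V02, V05}
 *     V05          lvIsPtr = 0   assignGroup = {V01, V02, V05}
 *
 * The first pass over the group sees V02's lvIsPtr bit, so the second pass marks
 * V01, V02 and V05 all as lvIsPtr; V01, being a param, will then get a shadow copy.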
283 | */ |
284 | |
285 | bool Compiler::gsFindVulnerableParams() |
286 | { |
287 | MarkPtrsInfo info; |
288 | |
289 | info.comp = this; |
290 | info.lvAssignDef = (unsigned)-1; |
291 | info.isUnderIndir = false; |
292 | info.isAssignSrc = false; |
293 | info.skipNextNode = false; |
294 | |
    // Walk all the trees, setting lvIsPtr and assignGroup for the locals involved.
296 | fgWalkAllTreesPre(gsMarkPtrsAndAssignGroups, &info); |
297 | |
    // Set to true in the loop below if we find at least one vulnerable variable.
299 | bool hasOneVulnerable = false; |
300 | |
301 | // Initialize propagated[v0...vn] = {0}^n, so we can skip the ones propagated through |
302 | // some assign group. |
303 | FixedBitVect* propagated = (lvaCount > 0) ? FixedBitVect::bitVectInit(lvaCount, this) : nullptr; |
304 | |
305 | for (UINT lclNum = 0; lclNum < lvaCount; lclNum++) |
306 | { |
307 | LclVarDsc* varDsc = &lvaTable[lclNum]; |
308 | ShadowParamVarInfo* shadowInfo = &gsShadowVarInfo[lclNum]; |
309 | |
        // If the variable was dereferenced or is an unsafe buffer, consider it vulnerable.
311 | if (varDsc->lvIsPtr || varDsc->lvIsUnsafeBuffer) |
312 | { |
313 | hasOneVulnerable = true; |
314 | } |
315 | |
        // Now, propagate the info through the assign group
        // (an equivalence class of vars transitively assigned to each other).
317 | if (shadowInfo->assignGroup == nullptr || propagated->bitVectTest(lclNum)) |
318 | { |
319 | continue; |
320 | } |
321 | |
322 | // Propagate lvIsPtr, so that: |
323 | // 1. Any parameter in the equivalence class can be identified as lvIsPtr and hence shadowed. |
324 | // 2. Buffers with pointers are placed at lower memory addresses than buffers without pointers. |
325 | UINT isUnderIndir = varDsc->lvIsPtr; |
326 | |
327 | // First pass -- find if any variable is vulnerable. |
328 | FixedBitVect* assignGroup = shadowInfo->assignGroup; |
329 | for (UINT lclNum = assignGroup->bitVectGetFirst(); lclNum != (unsigned)-1 && !isUnderIndir; |
330 | lclNum = assignGroup->bitVectGetNext(lclNum)) |
331 | { |
332 | isUnderIndir |= lvaTable[lclNum].lvIsPtr; |
333 | } |
334 | |
335 | // Vulnerable, so propagate to all members of the equivalence class. |
336 | if (isUnderIndir) |
337 | { |
338 | hasOneVulnerable = true; |
339 | } |
340 | // Nothing to propagate. |
341 | else |
342 | { |
343 | continue; |
344 | } |
345 | |
        // Second pass -- mark all members of the group as vulnerable.
347 | assert(isUnderIndir); |
348 | for (UINT lclNum = assignGroup->bitVectGetFirst(); lclNum != (unsigned)-1; |
349 | lclNum = assignGroup->bitVectGetNext(lclNum)) |
350 | { |
351 | lvaTable[lclNum].lvIsPtr = TRUE; |
352 | propagated->bitVectSet(lclNum); |
353 | } |
354 | |
355 | #ifdef DEBUG |
356 | if (verbose) |
357 | { |
            printf("Equivalence assign group %s: ", isUnderIndir ? "isPtr " : "");
359 | for (UINT lclNum = assignGroup->bitVectGetFirst(); lclNum != (unsigned)-1; |
360 | lclNum = assignGroup->bitVectGetNext(lclNum)) |
361 | { |
362 | gtDispLclVar(lclNum, false); |
                printf(" ");
            }
            printf("\n");
366 | } |
367 | #endif |
368 | } |
369 | |
370 | return hasOneVulnerable; |
371 | } |
372 | |
373 | /***************************************************************************** |
374 | * gsParamsToShadows |
375 | * Copy each vulnerable param ptr or buffer to a local shadow copy and replace |
376 | * uses of the param by the shadow copy |
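 *
 * Conceptually (hypothetical local numbers), for a shadowed param V01 with shadow
 * copy V07 this inserts at the start of the first block:
 *
 *     ASG(LCL_VAR V07, LCL_VAR V01)   // or a CpObj copy for TYP_STRUCT params
 *
 * and, if the method uses GT_JMP, the reverse copy V01 = V07 is inserted just
 * before the jump so the callee sees the current values of the parameters.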
377 | */ |
378 | void Compiler::gsParamsToShadows() |
379 | { |
380 | // Cache old count since we'll add new variables, and |
    // gsShadowVarInfo will not grow to accommodate the new ones.
382 | UINT lvaOldCount = lvaCount; |
383 | |
384 | // Create shadow copy for each param candidate |
385 | for (UINT lclNum = 0; lclNum < lvaOldCount; lclNum++) |
386 | { |
387 | LclVarDsc* varDsc = &lvaTable[lclNum]; |
388 | gsShadowVarInfo[lclNum].shadowCopy = NO_SHADOW_COPY; |
389 | |
390 | // Only care about params whose values are on the stack |
391 | if (!ShadowParamVarInfo::mayNeedShadowCopy(varDsc)) |
392 | { |
393 | continue; |
394 | } |
395 | |
396 | if (!varDsc->lvIsPtr && !varDsc->lvIsUnsafeBuffer) |
397 | { |
398 | continue; |
399 | } |
400 | |
        int shadowVar = lvaGrabTemp(false DEBUGARG("shadowVar"));
402 | // reload varDsc as lvaGrabTemp may realloc the lvaTable[] |
403 | varDsc = &lvaTable[lclNum]; |
404 | |
405 | // Copy some info |
406 | |
407 | var_types type = varTypeIsSmall(varDsc->TypeGet()) ? TYP_INT : varDsc->TypeGet(); |
408 | lvaTable[shadowVar].lvType = type; |
409 | |
410 | #ifdef FEATURE_SIMD |
411 | lvaTable[shadowVar].lvSIMDType = varDsc->lvSIMDType; |
412 | lvaTable[shadowVar].lvUsedInSIMDIntrinsic = varDsc->lvUsedInSIMDIntrinsic; |
413 | if (varDsc->lvSIMDType) |
414 | { |
415 | lvaTable[shadowVar].lvExactSize = varDsc->lvExactSize; |
416 | lvaTable[shadowVar].lvBaseType = varDsc->lvBaseType; |
417 | } |
418 | #endif |
419 | lvaTable[shadowVar].lvRegStruct = varDsc->lvRegStruct; |
420 | |
421 | lvaTable[shadowVar].lvAddrExposed = varDsc->lvAddrExposed; |
422 | lvaTable[shadowVar].lvDoNotEnregister = varDsc->lvDoNotEnregister; |
423 | #ifdef DEBUG |
424 | lvaTable[shadowVar].lvVMNeedsStackAddr = varDsc->lvVMNeedsStackAddr; |
425 | lvaTable[shadowVar].lvLiveInOutOfHndlr = varDsc->lvLiveInOutOfHndlr; |
426 | lvaTable[shadowVar].lvLclFieldExpr = varDsc->lvLclFieldExpr; |
427 | lvaTable[shadowVar].lvLiveAcrossUCall = varDsc->lvLiveAcrossUCall; |
428 | #endif |
429 | lvaTable[shadowVar].lvVerTypeInfo = varDsc->lvVerTypeInfo; |
430 | lvaTable[shadowVar].lvGcLayout = varDsc->lvGcLayout; |
431 | lvaTable[shadowVar].lvIsUnsafeBuffer = varDsc->lvIsUnsafeBuffer; |
432 | lvaTable[shadowVar].lvIsPtr = varDsc->lvIsPtr; |
433 | |
434 | #ifdef DEBUG |
435 | if (verbose) |
436 | { |
            printf("Var V%02u is a shadow param candidate. Shadow copy is V%02u.\n", lclNum, shadowVar);
438 | } |
439 | #endif |
440 | |
441 | gsShadowVarInfo[lclNum].shadowCopy = shadowVar; |
442 | } |
443 | |
444 | // Replace param uses with shadow copy |
445 | fgWalkAllTreesPre(gsReplaceShadowParams, (void*)this); |
446 | |
447 | // Now insert code to copy the params to their shadow copy. |
448 | for (UINT lclNum = 0; lclNum < lvaOldCount; lclNum++) |
449 | { |
450 | LclVarDsc* varDsc = &lvaTable[lclNum]; |
451 | |
452 | unsigned shadowVar = gsShadowVarInfo[lclNum].shadowCopy; |
453 | if (shadowVar == NO_SHADOW_COPY) |
454 | { |
455 | continue; |
456 | } |
457 | |
458 | var_types type = lvaTable[shadowVar].TypeGet(); |
459 | |
460 | GenTree* src = gtNewLclvNode(lclNum, varDsc->TypeGet()); |
461 | GenTree* dst = gtNewLclvNode(shadowVar, type); |
462 | |
463 | src->gtFlags |= GTF_DONT_CSE; |
464 | dst->gtFlags |= GTF_DONT_CSE; |
465 | |
466 | GenTree* opAssign = nullptr; |
467 | if (type == TYP_STRUCT) |
468 | { |
469 | CORINFO_CLASS_HANDLE clsHnd = varDsc->lvVerTypeInfo.GetClassHandle(); |
470 | |
471 | // We don't need unsafe value cls check here since we are copying the params and this flag |
472 | // would have been set on the original param before reaching here. |
473 | lvaSetStruct(shadowVar, clsHnd, false); |
474 | |
475 | src = gtNewOperNode(GT_ADDR, TYP_BYREF, src); |
476 | dst = gtNewOperNode(GT_ADDR, TYP_BYREF, dst); |
477 | |
478 | opAssign = gtNewCpObjNode(dst, src, clsHnd, false); |
479 | lvaTable[shadowVar].lvIsMultiRegArg = lvaTable[lclNum].lvIsMultiRegArg; |
480 | lvaTable[shadowVar].lvIsMultiRegRet = lvaTable[lclNum].lvIsMultiRegRet; |
481 | } |
482 | else |
483 | { |
484 | opAssign = gtNewAssignNode(dst, src); |
485 | } |
486 | fgEnsureFirstBBisScratch(); |
487 | (void)fgInsertStmtAtBeg(fgFirstBB, fgMorphTree(opAssign)); |
488 | } |
489 | |
490 | // If the method has "Jmp CalleeMethod", then we need to copy shadow params back to original |
491 | // params before "jmp" to CalleeMethod. |
492 | if (compJmpOpUsed) |
493 | { |
494 | // There could be more than one basic block ending with a "Jmp" type tail call. |
        // We need to insert the assignments in all such blocks, just before the GT_JMP statement.
496 | for (BasicBlock* block = fgFirstBB; block; block = block->bbNext) |
497 | { |
498 | if (block->bbJumpKind != BBJ_RETURN) |
499 | { |
500 | continue; |
501 | } |
502 | |
503 | if ((block->bbFlags & BBF_HAS_JMP) == 0) |
504 | { |
505 | continue; |
506 | } |
507 | |
508 | for (UINT lclNum = 0; lclNum < info.compArgsCount; lclNum++) |
509 | { |
510 | LclVarDsc* varDsc = &lvaTable[lclNum]; |
511 | |
512 | unsigned shadowVar = gsShadowVarInfo[lclNum].shadowCopy; |
513 | if (shadowVar == NO_SHADOW_COPY) |
514 | { |
515 | continue; |
516 | } |
517 | |
518 | GenTree* src = gtNewLclvNode(shadowVar, lvaTable[shadowVar].TypeGet()); |
519 | GenTree* dst = gtNewLclvNode(lclNum, varDsc->TypeGet()); |
520 | |
521 | src->gtFlags |= GTF_DONT_CSE; |
522 | dst->gtFlags |= GTF_DONT_CSE; |
523 | |
524 | GenTree* opAssign = nullptr; |
525 | if (varDsc->TypeGet() == TYP_STRUCT) |
526 | { |
527 | CORINFO_CLASS_HANDLE clsHnd = varDsc->lvVerTypeInfo.GetClassHandle(); |
528 | src = gtNewOperNode(GT_ADDR, TYP_BYREF, src); |
529 | dst = gtNewOperNode(GT_ADDR, TYP_BYREF, dst); |
530 | |
531 | opAssign = gtNewCpObjNode(dst, src, clsHnd, false); |
532 | } |
533 | else |
534 | { |
535 | opAssign = gtNewAssignNode(dst, src); |
536 | } |
537 | |
538 | (void)fgInsertStmtNearEnd(block, fgMorphTree(opAssign)); |
539 | } |
540 | } |
541 | } |
542 | } |
543 | |
544 | /***************************************************************************** |
545 | * gsReplaceShadowParams (tree-walk call-back) |
 * Replace all vulnerable param uses with their shadow copies.
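 *
 * For instance (hypothetical local numbers), a use or def of param V01 becomes a
 * use or def of its shadow V07:
 *
 *     ASG(LCL_VAR V01, ...)  ==>  ASG(LCL_VAR V07, ...)
 *
 * If V01 is a small type, the shadow was created as TYP_INT in gsParamsToShadows(),
 * so the local var node (and an enclosing assignment) are retyped to TYP_INT here.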
547 | */ |
548 | |
549 | Compiler::fgWalkResult Compiler::gsReplaceShadowParams(GenTree** pTree, fgWalkData* data) |
550 | { |
551 | Compiler* comp = data->compiler; |
552 | GenTree* tree = *pTree; |
553 | GenTree* asg = nullptr; |
554 | |
555 | if (tree->gtOper == GT_ASG) |
556 | { |
557 | asg = tree; // "asg" is the assignment tree. |
        tree = tree->gtOp.gtOp1; // "tree" is the local var tree at the left-hand side of the assignment.
559 | } |
560 | |
561 | if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD) |
562 | { |
563 | UINT paramNum = tree->gtLclVarCommon.gtLclNum; |
564 | |
565 | if (!ShadowParamVarInfo::mayNeedShadowCopy(&comp->lvaTable[paramNum]) || |
566 | comp->gsShadowVarInfo[paramNum].shadowCopy == NO_SHADOW_COPY) |
567 | { |
568 | return WALK_CONTINUE; |
569 | } |
570 | |
571 | tree->gtLclVarCommon.SetLclNum(comp->gsShadowVarInfo[paramNum].shadowCopy); |
572 | |
573 | // In gsParamsToShadows(), we create a shadow var of TYP_INT for every small type param. |
574 | // Make sure we update the type of the local var tree as well. |
575 | if (varTypeIsSmall(comp->lvaTable[paramNum].TypeGet())) |
576 | { |
577 | tree->gtType = TYP_INT; |
578 | if (asg) |
579 | { |
580 | // If this is an assignment tree, propagate the type to it as well. |
581 | asg->gtType = TYP_INT; |
582 | } |
583 | } |
584 | } |
585 | |
586 | return WALK_CONTINUE; |
587 | } |
588 | |