// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/backend/constant_propagator.h"

#include "vm/compiler/backend/block_builder.h"
#include "vm/compiler/backend/il_test_helper.h"
#include "vm/compiler/backend/type_propagator.h"
#include "vm/unit_test.h"

namespace dart {

// Tests issue https://github.com/flutter/flutter/issues/53903.
//
// If the graph contains a cyclic phi that participates in an EqualityCompare
// or StrictCompare with one of its inputs, e.g. phi(x, ...) == x, then
// constant propagation might fail to converge, constantly revisiting this phi
// and its uses (which include the comparison and the phi itself).
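// The graph below encodes exactly this pattern: v1 = phi(v0, v1) is compared
// against its own input v0, so a propagator that keeps unwrapping the phi
// would re-enqueue the comparison (and the phi) indefinitely.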
ISOLATE_UNIT_TEST_CASE(ConstantPropagation_PhiUnwrappingAndConvergence) {
  using compiler::BlockBuilder;
  CompilerState S(thread, /*is_aot=*/false);
  FlowGraphBuilderHelper H;

  // We are going to build the following graph:
  //
  //   B0[graph_entry]
  //   B1[function_entry]:
  //     v0 <- Constant(0)
  //     goto B2
  //   B2:
  //     v1 <- phi(v0, v1)
  //     v2 <- EqualityCompare(v1 == v0)
  //     if v2 == true then B4 else B3
  //   B3:
  //     goto B2
  //   B4:
  //     Return(v1)

  PhiInstr* v1;
  ConstantInstr* v0 = H.IntConstant(0);
  auto b1 = H.flow_graph()->graph_entry()->normal_entry();
  auto b2 = H.JoinEntry();
  auto b3 = H.TargetEntry();
  auto b4 = H.TargetEntry();

  {
    BlockBuilder builder(H.flow_graph(), b1);
    builder.AddInstruction(new GotoInstr(b2, S.GetNextDeoptId()));
  }

  {
    BlockBuilder builder(H.flow_graph(), b2);
    v1 = H.Phi(b2, {{b1, v0}, {b3, FlowGraphBuilderHelper::kPhiSelfReference}});
    builder.AddPhi(v1);
    auto v2 = builder.AddDefinition(new EqualityCompareInstr(
        TokenPosition::kNoSource, Token::kEQ, new Value(v1), new Value(v0),
        kSmiCid, S.GetNextDeoptId()));
    builder.AddBranch(
        new StrictCompareInstr(
            TokenPosition::kNoSource, Token::kEQ_STRICT, new Value(v2),
            new Value(H.flow_graph()->GetConstant(Bool::True())),
            /*needs_number_check=*/false, S.GetNextDeoptId()),
        b4, b3);
  }

  {
    BlockBuilder builder(H.flow_graph(), b3);
    builder.AddInstruction(new GotoInstr(b2, S.GetNextDeoptId()));
  }

  {
    BlockBuilder builder(H.flow_graph(), b4);
    builder.AddReturn(new Value(v1));
  }

  H.FinishGraph();

  // Graph transformations will attempt to copy deopt information from
  // branches and block entries, which we did not assign. To disable this
  // copying we mark the graph as disallowing LICM.
  H.flow_graph()->disallow_licm();

  ConstantPropagator::Optimize(H.flow_graph());

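  // Constant propagation should prove that v1 equals v0 (the constant 0),
  // fold the branch away and collapse the loop into a plain return of the
  // constant.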
  auto& blocks = H.flow_graph()->reverse_postorder();
  EXPECT_EQ(2, blocks.length());
  EXPECT_PROPERTY(blocks[0], it.IsGraphEntry());
  EXPECT_PROPERTY(blocks[1], it.IsFunctionEntry());
  EXPECT_PROPERTY(blocks[1]->next(), it.IsReturn());
  EXPECT_PROPERTY(blocks[1]->next()->AsReturn(),
                  it.value()->definition() == v0);
}

// This test does not work on 32-bit platforms because it requires
// kUnboxedInt64 constants.
#if defined(ARCH_IS_64_BIT)
namespace {
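// Describes the expected outcome of constant folding the op under test:
// either the op must remain in the graph (NoFold) or it must be replaced
// with the given constant value (FoldsTo).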
struct FoldingResult {
  static FoldingResult NoFold() { return {false, 0}; }

  static FoldingResult FoldsTo(int64_t result) { return {true, result}; }

  bool should_fold;
  int64_t result;
};

static void ConstantPropagatorUnboxedOpTest(
    Thread* thread,
    int64_t lhs,
    int64_t rhs,
    std::function<Definition*(Definition*, Definition*, intptr_t)> make_op,
    bool redundant_phi,
    FoldingResult expected) {
  using compiler::BlockBuilder;

  CompilerState S(thread, /*is_aot=*/false);
  FlowGraphBuilderHelper H;

  // Add a variable to the scope to provide a static type for the parameter.
  LocalVariable* v0_var =
      new LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
                        String::Handle(Symbols::New(thread, "v0")),
                        AbstractType::ZoneHandle(Type::IntType()));
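  // The parameter is treated as already type checked by the caller, so its
  // declared type can be trusted without an explicit check.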
  v0_var->set_type_check_mode(LocalVariable::kTypeCheckedByCaller);
  H.flow_graph()->parsed_function().scope()->AddVariable(v0_var);

  // We are going to build the following graph:
  //
  //   B0[graph_entry]
  //   B1[function_entry]:
  //     v0 <- Parameter(0)
  //     if 1 == ${redundant_phi ? 1 : v0} then B2 else B3
  //   B2:
  //     goto B4
  //   B3:
  //     goto B4
  //   B4:
  //     v1 <- Phi(lhs, ${redundant_phi ? -1 : lhs}) repr
  //     v2 <- Constant(rhs)
  //     v3 <- make_op(v1, v2)
  //     Return(v3)
  //
  // Note that we test both the case when v1 is fully redundant (has a single
  // live predecessor) and the case when it is not redundant but has a
  // constant value. These two cases are handled by different code paths, so
  // we need to cover both to ensure that we properly insert any unbox
  // operations that are needed.

  auto b1 = H.flow_graph()->graph_entry()->normal_entry();
  auto b2 = H.TargetEntry();
  auto b3 = H.TargetEntry();
  auto b4 = H.JoinEntry();
  ReturnInstr* ret;

  {
    BlockBuilder builder(H.flow_graph(), b1);
    auto v0 = builder.AddParameter(/*index=*/0, /*param_offset=*/0,
                                   /*with_frame=*/true, kTagged);
    builder.AddBranch(new StrictCompareInstr(
                          TokenPosition::kNoSource, Token::kEQ_STRICT,
                          new Value(H.IntConstant(1)),
                          new Value(redundant_phi ? H.IntConstant(1) : v0),
                          /*needs_number_check=*/false, S.GetNextDeoptId()),
                      b2, b3);
  }

  {
    BlockBuilder builder(H.flow_graph(), b2);
    builder.AddInstruction(new GotoInstr(b4, S.GetNextDeoptId()));
  }

  {
    BlockBuilder builder(H.flow_graph(), b3);
    builder.AddInstruction(new GotoInstr(b4, S.GetNextDeoptId()));
  }

  PhiInstr* v1;
  Definition* op;
  {
    BlockBuilder builder(H.flow_graph(), b4);
    v1 = H.Phi(b4, {{b2, H.IntConstant(lhs)},
                    {b3, H.IntConstant(redundant_phi ? -1 : lhs)}});
    builder.AddPhi(v1);
    op = builder.AddDefinition(
        make_op(v1, H.IntConstant(rhs), S.GetNextDeoptId()));
    ret = builder.AddReturn(new Value(op));
  }

  H.FinishGraph();
  FlowGraphTypePropagator::Propagate(H.flow_graph());
  // Force phi unboxing independent of heuristics.
  v1->set_representation(op->representation());
  H.flow_graph()->SelectRepresentations();
  H.flow_graph()->Canonicalize();
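  // Before constant propagation runs, the return value must already be a box
  // of the operation's unboxed representation.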
  EXPECT_PROPERTY(ret->value()->definition(),
                  it.IsBoxInteger() && it.RequiredInputRepresentation(0) ==
                                           op->representation());

  ConstantPropagator::Optimize(H.flow_graph());

  // If |should_fold| then check that the resulting graph is
  //
  //   Return(Box(Unbox(Constant(result))))
  //
  // otherwise check that the graph is
  //
  //   Return(Box(op))
  //
  {
    auto ret_val = ret->value()->definition();
    EXPECT_PROPERTY(
        ret_val, it.IsBoxInteger() && it.RequiredInputRepresentation(0) ==
                                          op->representation());
    auto boxed_value = ret_val->AsBoxInteger()->value()->definition();
    if (expected.should_fold) {
      EXPECT_PROPERTY(
          boxed_value,
          it.IsUnboxInteger() && it.representation() == op->representation());
      auto unboxed_value = boxed_value->AsUnboxInteger()->value()->definition();
      EXPECT_PROPERTY(unboxed_value,
                      it.IsConstant() && it.AsConstant()->value().IsInteger());
      EXPECT_EQ(
          expected.result,
          Integer::Cast(unboxed_value->AsConstant()->value()).AsInt64Value());
    } else {
      EXPECT_PROPERTY(boxed_value, &it == op);
    }
  }
}

void ConstantPropagatorUnboxedOpTest(
    Thread* thread,
    int64_t lhs,
    int64_t rhs,
    std::function<Definition*(Definition*, Definition*, intptr_t)> make_op,
    FoldingResult expected) {
  ConstantPropagatorUnboxedOpTest(thread, lhs, rhs, make_op,
                                  /*redundant_phi=*/false, expected);
  ConstantPropagatorUnboxedOpTest(thread, lhs, rhs, make_op,
                                  /*redundant_phi=*/true, expected);
}

}  // namespace

// This test verifies that constant propagation respects representations when
// replacing unboxed operations.
ISOLATE_UNIT_TEST_CASE(ConstantPropagator_Regress35371) {
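  // Helpers that construct each flavor of unboxed integer addition under
  // test; ConstantPropagatorUnboxedOpTest wires them into the graph.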
  auto make_int64_add = [](Definition* lhs, Definition* rhs,
                           intptr_t deopt_id) {
    return new BinaryInt64OpInstr(Token::kADD, new Value(lhs), new Value(rhs),
                                  deopt_id, Instruction::kNotSpeculative);
  };

  auto make_int32_add = [](Definition* lhs, Definition* rhs,
                           intptr_t deopt_id) {
    return new BinaryInt32OpInstr(Token::kADD, new Value(lhs), new Value(rhs),
                                  deopt_id);
  };

  auto make_int32_truncating_add = [](Definition* lhs, Definition* rhs,
                                      intptr_t deopt_id) {
    auto op = new BinaryInt32OpInstr(Token::kADD, new Value(lhs),
                                     new Value(rhs), deopt_id);
    op->mark_truncating();
    return op;
  };

  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/1, /*rhs=*/2, make_int64_add,
                                  FoldingResult::FoldsTo(3));
  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt64, /*rhs=*/1,
                                  make_int64_add,
                                  FoldingResult::FoldsTo(kMinInt64));

  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/1, /*rhs=*/2, make_int32_add,
                                  FoldingResult::FoldsTo(3));
  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32 - 1, /*rhs=*/1,
                                  make_int32_add,
                                  FoldingResult::FoldsTo(kMaxInt32));

  // The result overflows the int32 representation and the operation is not
  // marked as truncating, so it must not be folded.
  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32, /*rhs=*/1,
                                  make_int32_add, FoldingResult::NoFold());

  // The result overflows the int32 representation but the operation is
  // marked as truncating: kMaxInt32 + 1 wraps around to kMinInt32.
  ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32, /*rhs=*/1,
                                  make_int32_truncating_add,
                                  FoldingResult::FoldsTo(kMinInt32));
}
#endif  // defined(ARCH_IS_64_BIT)

}  // namespace dart