// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/backend/redundancy_elimination.h"

#include <functional>

#include "vm/compiler/backend/block_builder.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/backend/il_test_helper.h"
#include "vm/compiler/backend/inliner.h"
#include "vm/compiler/backend/loops.h"
#include "vm/compiler/backend/type_propagator.h"
#include "vm/compiler/compiler_pass.h"
#include "vm/compiler/frontend/bytecode_reader.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/compiler/jit/jit_call_specializer.h"
#include "vm/flags.h"
#include "vm/kernel_isolate.h"
#include "vm/log.h"
#include "vm/object.h"
#include "vm/parser.h"
#include "vm/symbols.h"
#include "vm/unit_test.h"

namespace dart {

static void NoopNative(Dart_NativeArguments args) {}

static Dart_NativeFunction NoopNativeLookup(Dart_Handle name,
                                            int argument_count,
                                            bool* auto_setup_scope) {
  ASSERT(auto_setup_scope != nullptr);
  *auto_setup_scope = false;
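  // Every lookup resolves to the same no-op native; the test scripts below
  // only need their native functions (e.g. 'BlackholeNative') to be callable.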
  return reinterpret_cast<Dart_NativeFunction>(&NoopNative);
}

// Flatten all non-captured LocalVariables from the given scope and its
// children and siblings into the given array based on their environment index.
static void FlattenScopeIntoEnvironment(FlowGraph* graph,
                                        LocalScope* scope,
                                        GrowableArray<LocalVariable*>* env) {
  for (intptr_t i = 0; i < scope->num_variables(); i++) {
    auto var = scope->VariableAt(i);
    if (var->is_captured()) {
      continue;
    }

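    // EnvIndex maps the variable's frame allocation index to its slot in
    // instruction environments.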
    auto index = graph->EnvIndex(var);
    env->EnsureLength(index + 1, nullptr);
    (*env)[index] = var;
  }

  if (scope->sibling() != nullptr) {
    FlattenScopeIntoEnvironment(graph, scope->sibling(), env);
  }
  if (scope->child() != nullptr) {
    FlattenScopeIntoEnvironment(graph, scope->child(), env);
  }
}

#if !defined(PRODUCT)
void PopulateEnvironmentFromBytecodeLocalVariables(
    const Function& function,
    FlowGraph* graph,
    GrowableArray<LocalVariable*>* env) {
  const auto& bytecode = Bytecode::Handle(function.bytecode());
  ASSERT(!bytecode.IsNull());

  kernel::BytecodeLocalVariablesIterator iter(Thread::Current()->zone(),
                                              bytecode);
  while (iter.MoveNext()) {
    if (iter.IsVariableDeclaration() && !iter.IsCaptured()) {
      LocalVariable* const var = new LocalVariable(
          TokenPosition::kNoSource, TokenPosition::kNoSource,
          String::ZoneHandle(graph->zone(), iter.Name()),
          AbstractType::ZoneHandle(graph->zone(), iter.Type()));
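      // Bytecode records parameters at negative indices; translate both
      // parameters and locals into the VM's VariableIndex convention so that
      // EnvIndex below yields the right environment slot.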
      if (iter.Index() < 0) {  // Parameter.
        var->set_index(VariableIndex(-iter.Index() - kKBCParamEndSlotFromFp));
      } else {
        var->set_index(VariableIndex(-iter.Index()));
      }
      const intptr_t env_index = graph->EnvIndex(var);
      env->EnsureLength(env_index + 1, nullptr);
      (*env)[env_index] = var;
    }
  }
}
#endif  // !defined(PRODUCT)

// Run the TryCatchAnalyzer optimization on the function foo from the given
// script and check that only the variables from the given list are
// synchronized on catch entry.
static void TryCatchOptimizerTest(
    Thread* thread,
    const char* script_chars,
    std::initializer_list<const char*> synchronized) {
  // Load the script and exercise the code once.
  const auto& root_library =
      Library::Handle(LoadTestScript(script_chars, &NoopNativeLookup));
  Invoke(root_library, "main");

  // Build the flow graph.
  std::initializer_list<CompilerPass::Id> passes = {
      CompilerPass::kComputeSSA,      CompilerPass::kTypePropagation,
      CompilerPass::kApplyICData,     CompilerPass::kSelectRepresentations,
      CompilerPass::kTypePropagation, CompilerPass::kCanonicalize,
  };
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* graph = pipeline.RunPasses(passes);

  // Finally run TryCatchAnalyzer on the graph (in AOT mode).
  OptimizeCatchEntryStates(graph, /*is_aot=*/true);

  EXPECT_EQ(1, graph->graph_entry()->catch_entries().length());
  auto scope = graph->parsed_function().scope();

  GrowableArray<LocalVariable*> env;
  if (function.is_declared_in_bytecode()) {
#if defined(PRODUCT)
    // In product mode information about local variables is not retained in
    // bytecode, so we can't find variables by name.
    return;
#else
    PopulateEnvironmentFromBytecodeLocalVariables(function, graph, &env);
#endif
  } else {
    FlattenScopeIntoEnvironment(graph, scope, &env);
  }

  for (intptr_t i = 0; i < env.length(); i++) {
    bool found = false;
    for (auto name : synchronized) {
      if (env[i]->name().Equals(name)) {
        found = true;
        break;
      }
    }
    if (!found) {
      env[i] = nullptr;
    }
  }

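  // At this point env contains only the variables that are expected to be
  // synchronized; every other slot has been cleared to nullptr.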
  CatchBlockEntryInstr* catch_entry = graph->graph_entry()->catch_entries()[0];

  // We should only synchronize state for variables from the synchronized list.
  for (auto defn : *catch_entry->initial_definitions()) {
    if (ParameterInstr* param = defn->AsParameter()) {
      EXPECT(0 <= param->index() && param->index() < env.length());
      EXPECT(env[param->index()] != nullptr);
    }
  }
}

//
// Tests for TryCatchOptimizer.
//

ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple1) {
  const char* script_chars = R"(
      dynamic blackhole([dynamic val]) native 'BlackholeNative';
      foo(int p) {
        var a = blackhole(), b = blackhole();
        try {
          blackhole([a, b]);
        } catch (e) {
          // nothing is used
        }
      }
      main() {
        foo(42);
      }
  )";

  TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{});
}

ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple2) {
  const char* script_chars = R"(
      dynamic blackhole([dynamic val]) native 'BlackholeNative';
      foo(int p) {
        var a = blackhole(), b = blackhole();
        try {
          blackhole([a, b]);
        } catch (e) {
          // a should be synchronized
          blackhole(a);
        }
      }
      main() {
        foo(42);
      }
  )";

  TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a"});
}

ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic1) {
  const char* script_chars = R"(
      dynamic blackhole([dynamic val]) native 'BlackholeNative';
      foo(int p) {
        var a = blackhole(), b;
        for (var i = 0; i < 42; i++) {
          b = blackhole();
          try {
            blackhole([a, b]);
          } catch (e) {
            // a and i should be synchronized
          }
        }
      }
      main() {
        foo(42);
      }
  )";

  TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "i"});
}

ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic2) {
  const char* script_chars = R"(
      dynamic blackhole([dynamic val]) native 'BlackholeNative';
      foo(int p) {
        var a = blackhole(), b = blackhole();
        for (var i = 0; i < 42; i++) {
          try {
            blackhole([a, b]);
          } catch (e) {
            // a, b and i should be synchronized
          }
        }
      }
      main() {
        foo(42);
      }
  )";

  TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "b", "i"});
}

// LoadOptimizer tests
// This family of tests verifies the behavior of load forwarding when an alias
// for an allocation A is created by inserting a redefinition of it and then
// letting the redefinition escape.
static void TestAliasingViaRedefinition(
    Thread* thread,
    bool make_it_escape,
    std::function<Definition*(CompilerState* S, FlowGraph*, Definition*)>
        make_redefinition) {
  const char* script_chars = R"(
      dynamic blackhole([a, b, c, d, e, f]) native 'BlackholeNative';
      class K {
        var field;
      }
  )";
  const Library& lib =
      Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));

  const Class& cls = Class::Handle(
      lib.LookupLocalClass(String::Handle(Symbols::New(thread, "K"))));
  const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
  EXPECT(err.IsNull());

  const Field& field = Field::Handle(
      cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
  EXPECT(!field.IsNull());

  const Function& blackhole =
      Function::ZoneHandle(GetFunction(lib, "blackhole"));

  using compiler::BlockBuilder;
  CompilerState S(thread, /*is_aot=*/false);
  FlowGraphBuilderHelper H;

  // We are going to build the following graph:
  //
  //   B0[graph_entry]
  //   B1[function_entry]:
  //     v0 <- AllocateObject(class K)
  //     v1 <- LoadField(v0, K.field)
  //     v2 <- make_redefinition(v0)
  //     PushArgument(v1)
  //     #if make_it_escape
  //     PushArgument(v2)
  //     #endif
  //     v3 <- StaticCall(blackhole, v1, v2)
  //     v4 <- LoadField(v2, K.field)
  //     Return v4

  auto b1 = H.flow_graph()->graph_entry()->normal_entry();
  AllocateObjectInstr* v0;
  LoadFieldInstr* v1;
  StaticCallInstr* call;
  LoadFieldInstr* v4;
  ReturnInstr* ret;

  {
    BlockBuilder builder(H.flow_graph(), b1);
    auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
    v0 = builder.AddDefinition(
        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
    v1 = builder.AddDefinition(
        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
    auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v0));
    auto args = new InputsArray(2);
    args->Add(new Value(v1));
    if (make_it_escape) {
      args->Add(new Value(v2));
    }
    call = builder.AddInstruction(new StaticCallInstr(
        TokenPosition::kNoSource, blackhole, 0, Array::empty_array(), args,
        S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
    v4 = builder.AddDefinition(
        new LoadFieldInstr(new Value(v2), slot, TokenPosition::kNoSource));
    ret = builder.AddInstruction(new ReturnInstr(
        TokenPosition::kNoSource, new Value(v4), S.GetNextDeoptId()));
  }
  H.FinishGraph();
  DominatorBasedCSE::Optimize(H.flow_graph());

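  // EXPECT_PROPERTY(entity, expr) from il_test_helper.h evaluates |expr| with
  // |it| bound to |entity|, so the checks below read as properties of v0.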
  if (make_it_escape) {
    // Allocation must be considered aliased.
    EXPECT_PROPERTY(v0, !it.Identity().IsNotAliased());
  } else {
    // Allocation must be considered not-aliased.
    EXPECT_PROPERTY(v0, it.Identity().IsNotAliased());
  }

  // v1 should have been removed from the graph and replaced with
  // constant_null.
  EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
  EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());

  if (make_it_escape) {
    // v4 however should not be removed from the graph, because v0 escapes into
    // blackhole.
    EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
    EXPECT_PROPERTY(ret, it.value()->definition() == v4);
  } else {
    // If v0 is not aliased then v4 should also be removed from the graph.
    EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
    EXPECT_PROPERTY(
        ret, it.value()->definition() == H.flow_graph()->constant_null());
  }
}

static Definition* MakeCheckNull(CompilerState* S,
                                 FlowGraph* flow_graph,
                                 Definition* defn) {
  return new CheckNullInstr(new Value(defn), String::ZoneHandle(),
                            S->GetNextDeoptId(), TokenPosition::kNoSource);
}

static Definition* MakeRedefinition(CompilerState* S,
                                    FlowGraph* flow_graph,
                                    Definition* defn) {
  return new RedefinitionInstr(new Value(defn));
}

static Definition* MakeAssertAssignable(CompilerState* S,
                                        FlowGraph* flow_graph,
                                        Definition* defn) {
  const auto& dst_type = AbstractType::ZoneHandle(Type::ObjectType());
  return new AssertAssignableInstr(TokenPosition::kNoSource, new Value(defn),
                                   new Value(flow_graph->GetConstant(dst_type)),
                                   new Value(flow_graph->constant_null()),
                                   new Value(flow_graph->constant_null()),
                                   Symbols::Empty(), S->GetNextDeoptId());
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_NoEscape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/false, MakeCheckNull);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_Escape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/true, MakeCheckNull);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_RedefinitionAliasing_Redefinition_NoEscape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
                              MakeRedefinition);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_Redefinition_Escape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
                              MakeRedefinition);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_RedefinitionAliasing_AssertAssignable_NoEscape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
                              MakeAssertAssignable);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_RedefinitionAliasing_AssertAssignable_Escape) {
  TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
                              MakeAssertAssignable);
}

// This family of tests verifies the behavior of load forwarding when an alias
// for an allocation A is created by storing it into another object B, after
// which A is either loaded back from B ([make_it_escape] is true) or object B
// itself escapes ([make_host_escape] is true).
// We insert a redefinition of object B to check that use-list traversal
// correctly discovers all loads from and stores to B.
static void TestAliasingViaStore(
    Thread* thread,
    bool make_it_escape,
    bool make_host_escape,
    std::function<Definition*(CompilerState* S, FlowGraph*, Definition*)>
        make_redefinition) {
  const char* script_chars = R"(
      dynamic blackhole([a, b, c, d, e, f]) native 'BlackholeNative';
      class K {
        var field;
      }
  )";
  const Library& lib =
      Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));

  const Class& cls = Class::Handle(
      lib.LookupLocalClass(String::Handle(Symbols::New(thread, "K"))));
  const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
  EXPECT(err.IsNull());

  const Field& field = Field::Handle(
      cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
  EXPECT(!field.IsNull());

  const Function& blackhole =
      Function::ZoneHandle(GetFunction(lib, "blackhole"));

  using compiler::BlockBuilder;
  CompilerState S(thread, /*is_aot=*/false);
  FlowGraphBuilderHelper H;

  // We are going to build the following graph:
  //
  //   B0[graph_entry]
  //   B1[function_entry]:
  //     v0 <- AllocateObject(class K)
  //     v5 <- AllocateObject(class K)
  //     #if !make_host_escape
  //     StoreField(v5 . K.field = v0)
  //     #endif
  //     v1 <- LoadField(v0, K.field)
  //     v2 <- REDEFINITION(v5)
  //     PushArgument(v1)
  //     #if make_it_escape
  //     v6 <- LoadField(v2, K.field)
  //     PushArgument(v6)
  //     #elif make_host_escape
  //     StoreField(v2 . K.field = v0)
  //     PushArgument(v5)
  //     #endif
  //     v3 <- StaticCall(blackhole, v1, v6)
  //     v4 <- LoadField(v0, K.field)
  //     Return v4

  auto b1 = H.flow_graph()->graph_entry()->normal_entry();
  AllocateObjectInstr* v0;
  AllocateObjectInstr* v5;
  LoadFieldInstr* v1;
  StaticCallInstr* call;
  LoadFieldInstr* v4;
  ReturnInstr* ret;

  {
    BlockBuilder builder(H.flow_graph(), b1);
    auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
    v0 = builder.AddDefinition(
        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
    v5 = builder.AddDefinition(
        new AllocateObjectInstr(TokenPosition::kNoSource, cls));
    if (!make_host_escape) {
      builder.AddInstruction(new StoreInstanceFieldInstr(
          slot, new Value(v5), new Value(v0), kEmitStoreBarrier,
          TokenPosition::kNoSource));
    }
    v1 = builder.AddDefinition(
        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
    auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v5));
    auto args = new InputsArray(2);
    args->Add(new Value(v1));
    if (make_it_escape) {
      auto v6 = builder.AddDefinition(
          new LoadFieldInstr(new Value(v2), slot, TokenPosition::kNoSource));
      args->Add(new Value(v6));
    } else if (make_host_escape) {
      builder.AddInstruction(new StoreInstanceFieldInstr(
          slot, new Value(v2), new Value(v0), kEmitStoreBarrier,
          TokenPosition::kNoSource));
      args->Add(new Value(v5));
    }
    call = builder.AddInstruction(new StaticCallInstr(
        TokenPosition::kNoSource, blackhole, 0, Array::empty_array(), args,
        S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
    v4 = builder.AddDefinition(
        new LoadFieldInstr(new Value(v0), slot, TokenPosition::kNoSource));
    ret = builder.AddInstruction(new ReturnInstr(
        TokenPosition::kNoSource, new Value(v4), S.GetNextDeoptId()));
  }
  H.FinishGraph();
  DominatorBasedCSE::Optimize(H.flow_graph());

  if (make_it_escape || make_host_escape) {
    // Allocation must be considered aliased.
    EXPECT_PROPERTY(v0, !it.Identity().IsNotAliased());
  } else {
    // Allocation must not be considered aliased.
    EXPECT_PROPERTY(v0, it.Identity().IsNotAliased());
  }

  if (make_host_escape) {
    EXPECT_PROPERTY(v5, !it.Identity().IsNotAliased());
  } else {
    EXPECT_PROPERTY(v5, it.Identity().IsNotAliased());
  }

  // v1 should have been removed from the graph and replaced with
  // constant_null.
  EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
  EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());

  if (make_it_escape || make_host_escape) {
    // v4 however should not be removed from the graph, because v0 escapes into
    // blackhole.
    EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
    EXPECT_PROPERTY(ret, it.value()->definition() == v4);
  } else {
    // If v0 is not aliased then v4 should also be removed from the graph.
    EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
    EXPECT_PROPERTY(
        ret, it.value()->definition() == H.flow_graph()->constant_null());
  }
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_NoEscape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/false, MakeCheckNull);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_Escape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/true,
                       /*make_host_escape=*/false, MakeCheckNull);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_EscapeViaHost) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/true, MakeCheckNull);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_NoEscape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/false, MakeRedefinition);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_Escape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/true,
                       /*make_host_escape=*/false, MakeRedefinition);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_AliasingViaStore_Redefinition_EscapeViaHost) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/true, MakeRedefinition);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_AliasingViaStore_AssertAssignable_NoEscape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/false, MakeAssertAssignable);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_AssertAssignable_Escape) {
  TestAliasingViaStore(thread, /*make_it_escape=*/true,
                       /*make_host_escape=*/false, MakeAssertAssignable);
}

ISOLATE_UNIT_TEST_CASE(
    LoadOptimizer_AliasingViaStore_AssertAssignable_EscapeViaHost) {
  TestAliasingViaStore(thread, /*make_it_escape=*/false,
                       /*make_host_escape=*/true, MakeAssertAssignable);
}

// This is a regression test for
// https://github.com/flutter/flutter/issues/48114.
ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
  using compiler::BlockBuilder;
  CompilerState S(thread, /*is_aot=*/false);
  FlowGraphBuilderHelper H;

  const auto& lib = Library::Handle(Library::TypedDataLibrary());
  const Class& cls =
      Class::Handle(lib.LookupLocalClass(Symbols::Uint32List()));
  const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
  EXPECT(err.IsNull());

  const Function& function = Function::ZoneHandle(
      cls.LookupFactory(String::Handle(String::New("Uint32List."))));
  EXPECT(!function.IsNull());

  auto zone = H.flow_graph()->zone();

  // We are going to build the following graph:
  //
  //   B0[graph_entry] {
  //     vc0 <- Constant(0)
  //     vc42 <- Constant(42)
  //   }
  //
  //   B1[function_entry] {
  //   }
  //   array <- StaticCall(...) {_Uint32List}
  //   v1 <- LoadIndexed(array)
  //   v2 <- LoadUntagged(array)
  //   StoreIndexed(v2, index=vc0, value=vc42)
  //   v3 <- LoadIndexed(array)
  //   return v3
  // }

  auto vc0 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(0)));
  auto vc42 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(42)));
  auto b1 = H.flow_graph()->graph_entry()->normal_entry();

  StaticCallInstr* array;
  LoadIndexedInstr* v1;
  LoadUntaggedInstr* v2;
  StoreIndexedInstr* store;
  LoadIndexedInstr* v3;
  ReturnInstr* ret;

  {
    BlockBuilder builder(H.flow_graph(), b1);

    // array <- StaticCall(...) {_Uint32List}
    array = builder.AddDefinition(new StaticCallInstr(
        TokenPosition::kNoSource, function, 0, Array::empty_array(),
        new InputsArray(), DeoptId::kNone, 0, ICData::kNoRebind));
    array->UpdateType(CompileType::FromCid(kTypedDataUint32ArrayCid));
    array->SetResultType(zone, CompileType::FromCid(kTypedDataUint32ArrayCid));
    array->set_is_known_list_constructor(true);
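    // These annotations let alias analysis treat the call result as a freshly
    // allocated _Uint32List rather than as an opaque object.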

    // v1 <- LoadIndexed(array)
    v1 = builder.AddDefinition(new LoadIndexedInstr(
        new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
        kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
        TokenPosition::kNoSource));

    // v2 <- LoadUntagged(array)
    // StoreIndexed(v2, index=0, value=42)
    v2 = builder.AddDefinition(new LoadUntaggedInstr(new Value(array), 0));
    store = builder.AddInstruction(new StoreIndexedInstr(
        new Value(v2), new Value(vc0), new Value(vc42), kNoStoreBarrier,
        /*index_unboxed=*/false, 1, kTypedDataUint32ArrayCid, kAlignedAccess,
        DeoptId::kNone, TokenPosition::kNoSource));
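    // This store goes through the untagged payload pointer, so the load
    // optimizer must treat it as aliasing the LoadIndexed accesses on array:
    // forwarding v1 to v3 across it would be unsound.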

    // v3 <- LoadIndexed(array)
    v3 = builder.AddDefinition(new LoadIndexedInstr(
        new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
        kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
        TokenPosition::kNoSource));

    // return v3
    ret = builder.AddInstruction(new ReturnInstr(
        TokenPosition::kNoSource, new Value(v3), S.GetNextDeoptId()));
  }
  H.FinishGraph();

  DominatorBasedCSE::Optimize(H.flow_graph());
  {
    Instruction* sc = nullptr;
    Instruction* li = nullptr;
    Instruction* lu = nullptr;
    Instruction* s = nullptr;
    Instruction* li2 = nullptr;
    Instruction* r = nullptr;
    ILMatcher cursor(H.flow_graph(), b1, true);
    RELEASE_ASSERT(cursor.TryMatch({
        kMatchAndMoveFunctionEntry,
        {kMatchAndMoveStaticCall, &sc},
        {kMatchAndMoveLoadIndexed, &li},
        {kMatchAndMoveLoadUntagged, &lu},
        {kMatchAndMoveStoreIndexed, &s},
        {kMatchAndMoveLoadIndexed, &li2},
        {kMatchReturn, &r},
    }));
    EXPECT(array == sc);
    EXPECT(v1 == li);
    EXPECT(v2 == lu);
    EXPECT(store == s);
    EXPECT(v3 == li2);
    EXPECT(ret == r);
  }
}

// This test verifies the behavior of load forwarding when an alias for an
// allocation A only appears after an earlier load has been eliminated. That
// is, allocation A is stored and later retrieved via load B, B is used in
// store C (with a different constant index/index_scale than in B, but one
// that overlaps), and then A is retrieved again (with the same index as in B)
// in load D.
//
// When B gets eliminated and replaced in C and D with A, the store in C
// should stop the load D from being eliminated. This is a scenario that came
// up when forwarding typed data view factory arguments.
//
// Here, the entire scenario happens within a single basic block.
ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaLoadElimination_SingleBlock) {
  const char* kScript = R"(
    import 'dart:typed_data';

    testViewAliasing1() {
      final f64 = new Float64List(1);
      final f32 = new Float32List.view(f64.buffer);
      f64[0] = 1.0;  // Should not be forwarded.
      f32[1] = 2.0;  // upper 32 bits for 2.0f and 2.0 are the same
      return f64[0];
    }
  )";
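
  // f32 and f64 share one buffer: on little-endian targets, storing 2.0f to
  // f32[1] rewrites the upper 32 bits of f64[0] (0x3FF00000 -> 0x40000000;
  // the lower 32 bits are already zero), turning the double 1.0 into 2.0.
  // Forwarding the f64[0] store to the final load would wrongly return 1.0.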

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function =
      Function::Handle(GetFunction(root_library, "testViewAliasing1"));

  Invoke(root_library, "testViewAliasing1");

  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  StaticCallInstr* list_factory = nullptr;
  UnboxedConstantInstr* double_one = nullptr;
  StoreIndexedInstr* first_store = nullptr;
  StoreIndexedInstr* second_store = nullptr;
  LoadIndexedInstr* final_load = nullptr;
  BoxInstr* boxed_result = nullptr;

  ILMatcher cursor(flow_graph, entry);
  RELEASE_ASSERT(cursor.TryMatch(
      {
          {kMatchAndMoveStaticCall, &list_factory},
          {kMatchAndMoveUnboxedConstant, &double_one},
          {kMatchAndMoveStoreIndexed, &first_store},
          {kMatchAndMoveStoreIndexed, &second_store},
          {kMatchAndMoveLoadIndexed, &final_load},
          {kMatchAndMoveBox, &boxed_result},
          kMatchReturn,
      },
      /*insert_before=*/kMoveGlob));

  EXPECT(first_store->array()->definition() == list_factory);
  EXPECT(second_store->array()->definition() == list_factory);
  EXPECT(boxed_result->value()->definition() != double_one);
  EXPECT(boxed_result->value()->definition() == final_load);
}

// This test verifies the behavior of load forwarding when an alias for an
// allocation A only appears after an earlier load has been eliminated. That
// is, allocation A is stored and later retrieved via load B, B is used in
// store C (with a different constant index/index_scale than in B, but one
// that overlaps), and then A is retrieved again (with the same index as in B)
// in load D.
//
// When B gets eliminated and replaced in C and D with A, the store in C
// should stop the load D from being eliminated. This is a scenario that came
// up when forwarding typed data view factory arguments.
//
// Here, the scenario is split across basic blocks. This is a cut-down version
// of language_2/vm/load_to_load_forwarding_vm_test.dart with just enough extra
// code to keep testViewAliasing1 from being optimized into a single basic
// block. Thus, this test may be more brittle than the previous one if future
// work causes it to end up compiled into a single basic block (or a simpler
// set of basic blocks).
ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaLoadElimination_AcrossBlocks) {
  const char* kScript = R"(
    import 'dart:typed_data';

    class Expect {
      static void equals(var a, var b) {}
      static void listEquals(var a, var b) {}
    }

    testViewAliasing1() {
      final f64 = new Float64List(1);
      final f32 = new Float32List.view(f64.buffer);
      f64[0] = 1.0;  // Should not be forwarded.
      f32[1] = 2.0;  // upper 32 bits for 2.0f and 2.0 are the same
      return f64[0];
    }

    testViewAliasing2() {
      final f64 = new Float64List(2);
      final f64v = new Float64List.view(f64.buffer,
                                        Float64List.bytesPerElement);
      f64[1] = 1.0;  // Should not be forwarded.
      f64v[0] = 2.0;
      return f64[1];
    }

    testViewAliasing3() {
      final u8 = new Uint8List(Float64List.bytesPerElement * 2);
      final f64 = new Float64List.view(u8.buffer, Float64List.bytesPerElement);
      f64[0] = 1.0;  // Should not be forwarded.
      u8[15] = 0x40;
      u8[14] = 0x00;
      return f64[0];
    }

    main() {
      for (var i = 0; i < 20; i++) {
        Expect.equals(2.0, testViewAliasing1());
        Expect.equals(2.0, testViewAliasing2());
        Expect.equals(2.0, testViewAliasing3());
      }
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function =
      Function::Handle(GetFunction(root_library, "testViewAliasing1"));

  Invoke(root_library, "main");

  TestPipeline pipeline(function, CompilerPass::kJIT);
  // Recent changes actually compile the function into a single basic block,
  // so we need to test right after the load optimizer has run. We have
  // verified that this test still fails as expected when run against the load
  // optimizer from before the fix (commit 2a237327).
  FlowGraph* flow_graph = pipeline.RunPasses({
      CompilerPass::kComputeSSA,
      CompilerPass::kApplyICData,
      CompilerPass::kTryOptimizePatterns,
      CompilerPass::kSetOuterInliningId,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyClassIds,
      CompilerPass::kInlining,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyClassIds,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyICData,
      CompilerPass::kCanonicalize,
      CompilerPass::kBranchSimplify,
      CompilerPass::kIfConvert,
      CompilerPass::kCanonicalize,
      CompilerPass::kConstantPropagation,
      CompilerPass::kOptimisticallySpecializeSmiPhis,
      CompilerPass::kTypePropagation,
      CompilerPass::kWidenSmiToInt32,
      CompilerPass::kSelectRepresentations,
      CompilerPass::kCSE,
  });

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  StaticCallInstr* list_factory = nullptr;
  UnboxedConstantInstr* double_one = nullptr;
  StoreIndexedInstr* first_store = nullptr;
  StoreIndexedInstr* second_store = nullptr;
  LoadIndexedInstr* final_load = nullptr;
  BoxInstr* boxed_result = nullptr;

  ILMatcher cursor(flow_graph, entry);
  RELEASE_ASSERT(cursor.TryMatch(
      {
          {kMatchAndMoveStaticCall, &list_factory},
          kMatchAndMoveBranchTrue,
          kMatchAndMoveBranchTrue,
          kMatchAndMoveBranchFalse,
          kMatchAndMoveBranchFalse,
          {kMatchAndMoveUnboxedConstant, &double_one},
          {kMatchAndMoveStoreIndexed, &first_store},
          kMatchAndMoveBranchFalse,
          {kMatchAndMoveStoreIndexed, &second_store},
          {kMatchAndMoveLoadIndexed, &final_load},
          {kMatchAndMoveBox, &boxed_result},
          kMatchReturn,
      },
      /*insert_before=*/kMoveGlob));

  EXPECT(first_store->array()->definition() == list_factory);
  EXPECT(second_store->array()->definition() == list_factory);
  EXPECT(boxed_result->value()->definition() != double_one);
  EXPECT(boxed_result->value()->definition() == final_load);
}

static void CountLoadsStores(FlowGraph* flow_graph,
                             intptr_t* loads,
                             intptr_t* stores) {
  for (BlockIterator block_it = flow_graph->reverse_postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
         it.Advance()) {
      if (it.Current()->IsLoadField()) {
        (*loads)++;
      } else if (it.Current()->IsStoreInstanceField()) {
        (*stores)++;
      }
    }
  }
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStoresAndLoads) {
  const char* kScript = R"(
    class Bar {
      Bar() { a = null; }
      dynamic a;
    }

    Bar foo() {
      Bar bar = new Bar();
      bar.a = null;
      bar.a = bar;
      bar.a = bar.a;
      return bar.a;
    }

    main() {
      foo();
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({
      CompilerPass::kComputeSSA,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyICData,
      CompilerPass::kInlining,
      CompilerPass::kTypePropagation,
      CompilerPass::kSelectRepresentations,
      CompilerPass::kCanonicalize,
      CompilerPass::kConstantPropagation,
  });

  ASSERT(flow_graph != nullptr);

  // Before CSE, we have 2 loads and 4 stores.
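  // (Bar's inlined constructor stores null, then foo stores null, bar, and
  // bar.a; the two loads are the reads of bar.a in 'bar.a = bar.a' and in
  // 'return bar.a'.)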
  intptr_t bef_loads = 0;
  intptr_t bef_stores = 0;
  CountLoadsStores(flow_graph, &bef_loads, &bef_stores);
  EXPECT_EQ(2, bef_loads);
  EXPECT_EQ(4, bef_stores);

  DominatorBasedCSE::Optimize(flow_graph);

  // After CSE, no loads and only one store remain.
  intptr_t aft_loads = 0;
  intptr_t aft_stores = 0;
  CountLoadsStores(flow_graph, &aft_loads, &aft_stores);
  EXPECT_EQ(0, aft_loads);
  EXPECT_EQ(1, aft_stores);
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStaticFieldInitialization) {
  const char* kScript = R"(
    int getX() => 2;
    int x = getX();

    foo() => x + x;

    main() {
      foo();
    }
  )";

  // Make sure the static field initialization is not removed on the grounds
  // that the field is already initialized.
  SetFlagScope<bool> sfs(&FLAG_fields_may_be_reset, true);

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

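  // Load forwarding should leave a single LoadStaticField whose result feeds
  // both inputs of the BinarySmiOp computing 'x + x'.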
  ILMatcher cursor(flow_graph, entry);
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveFunctionEntry,
      kMatchAndMoveCheckStackOverflow,
      kMatchAndMoveLoadStaticField,
      kMoveParallelMoves,
      kMatchAndMoveCheckSmi,
      kMoveParallelMoves,
      kMatchAndMoveBinarySmiOp,
      kMoveParallelMoves,
      kMatchReturn,
  }));
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallAfterIf) {
  const char* kScript = R"(
    int x = int.parse('1');

    @pragma('vm:never-inline')
    use(int arg) {}

    foo(bool condition) {
      if (condition) {
        x = 3;
      } else {
        use(x);
      }
      use(x);
    }

    main() {
      foo(true);
    }
  )";

  // Make sure the static field initialization is not removed on the grounds
  // that the field is already initialized.
  SetFlagScope<bool> sfs(&FLAG_fields_may_be_reset, true);

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  LoadStaticFieldInstr* load_static_after_if = nullptr;

  ILMatcher cursor(flow_graph, entry);
  RELEASE_ASSERT(cursor.TryMatch({
      kMoveGlob,
      kMatchAndMoveBranchTrue,
      kMoveGlob,
      kMatchAndMoveGoto,
      kMatchAndMoveJoinEntry,
      kMoveParallelMoves,
      {kMatchAndMoveLoadStaticField, &load_static_after_if},
      kMoveGlob,
      kMatchReturn,
  }));
  EXPECT(!load_static_after_if->calls_initializer());
}

ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallInLoop) {
  if (!TestCase::IsNNBD()) {
    return;
  }

  const char* kScript = R"(
    class A {
      late int x = int.parse('1');
      A? next;
    }

    @pragma('vm:never-inline')
    use(int arg) {}

    foo(A obj) {
      use(obj.x);
      for (;;) {
        use(obj.x);
        final next = obj.next;
        if (next == null) {
          break;
        }
        obj = next;
        use(obj.x);
      }
    }

    main() {
      foo(A()..next = A());
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  LoadFieldInstr* load_field_before_loop = nullptr;
  LoadFieldInstr* load_field_in_loop1 = nullptr;
  LoadFieldInstr* load_field_in_loop2 = nullptr;

  ILMatcher cursor(flow_graph, entry);
  RELEASE_ASSERT(cursor.TryMatch({
      kMoveGlob,
      {kMatchAndMoveLoadField, &load_field_before_loop},
      kMoveGlob,
      kMatchAndMoveGoto,
      kMatchAndMoveJoinEntry,
      kMoveGlob,
      {kMatchAndMoveLoadField, &load_field_in_loop1},
      kMoveGlob,
      kMatchAndMoveBranchFalse,
      kMoveGlob,
      {kMatchAndMoveLoadField, &load_field_in_loop2},
  }));

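  // The load before the loop must call the initializer. The load at the loop
  // head is dominated by an initializing load on every incoming path (the
  // load before the loop and the one after 'obj = next'), so it must not.
  // The load after 'obj = next' reads a fresh object, so it must call the
  // initializer again.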
  EXPECT(load_field_before_loop->calls_initializer());
  EXPECT(!load_field_in_loop1->calls_initializer());
  EXPECT(load_field_in_loop2->calls_initializer());
}

#if !defined(TARGET_ARCH_IA32)

ISOLATE_UNIT_TEST_CASE(DelayAllocations_DelayAcrossCalls) {
  const char* kScript = R"(
    class A {
      dynamic x, y;
      A(this.x, this.y);
    }

    int count = 0;

    @pragma("vm:never-inline")
    dynamic foo(int i) => count++ < 2 ? i : '$i';

    @pragma("vm:never-inline")
    dynamic use(v) {}

    void test() {
      A a = new A(foo(1), foo(2));
      use(a);
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function = Function::Handle(GetFunction(root_library, "test"));

  // Run the code twice to drive the guarded class ids of the fields A.x and
  // A.y to kDynamicCid (foo returns an int on the first two calls, then a
  // String).
  Invoke(root_library, "test");
  Invoke(root_library, "test");

  TestPipeline pipeline(function, CompilerPass::kAOT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  auto entry = flow_graph->graph_entry()->normal_entry();

  StaticCallInstr* call1;
  StaticCallInstr* call2;
  AllocateObjectInstr* allocate;
  StoreInstanceFieldInstr* store1;
  StoreInstanceFieldInstr* store2;

  ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMoveGlob,
      {kMatchAndMoveStaticCall, &call1},
      kMoveGlob,
      {kMatchAndMoveStaticCall, &call2},
      kMoveGlob,
      {kMatchAndMoveAllocateObject, &allocate},
      {kMatchAndMoveStoreInstanceField, &store1},
      {kMatchAndMoveStoreInstanceField, &store2},
  }));

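  // Since the allocation was delayed until after both calls, nothing that can
  // trigger a GC runs between AllocateObject and the stores into it, so both
  // stores can omit the write barrier.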
  EXPECT(strcmp(call1->function().UserVisibleNameCString(), "foo") == 0);
  EXPECT(strcmp(call2->function().UserVisibleNameCString(), "foo") == 0);
  EXPECT(store1->instance()->definition() == allocate);
  EXPECT(!store1->ShouldEmitStoreBarrier());
  EXPECT(store2->instance()->definition() == allocate);
  EXPECT(!store2->ShouldEmitStoreBarrier());
}

ISOLATE_UNIT_TEST_CASE(DelayAllocations_DontDelayIntoLoop) {
  const char* kScript = R"(
    void test() {
      Object o = new Object();
      for (int i = 0; i < 10; i++) {
        use(o);
      }
    }

    @pragma('vm:never-inline')
    void use(Object o) {
      print(o.hashCode);
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function = Function::Handle(GetFunction(root_library, "test"));

  TestPipeline pipeline(function, CompilerPass::kAOT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  auto entry = flow_graph->graph_entry()->normal_entry();

  AllocateObjectInstr* allocate;
  StaticCallInstr* call;

  ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMoveGlob,
      {kMatchAndMoveAllocateObject, &allocate},
      kMoveGlob,
      kMatchAndMoveBranchTrue,
      kMoveGlob,
      {kMatchAndMoveStaticCall, &call},
  }));

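  // The matcher above verified the ordering: the allocation stays in front of
  // the loop, since delaying it into the loop body would re-execute
  // AllocateObject on every iteration.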
  EXPECT(strcmp(call->function().UserVisibleNameCString(), "use") == 0);
  EXPECT(call->Receiver()->definition() == allocate);
}

#endif  // !defined(TARGET_ARCH_IA32)

}  // namespace dart