1 | // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/compiler/aot/aot_call_specializer.h" |
6 | |
7 | #include "vm/bit_vector.h" |
8 | #include "vm/compiler/aot/precompiler.h" |
9 | #include "vm/compiler/backend/branch_optimizer.h" |
10 | #include "vm/compiler/backend/flow_graph_compiler.h" |
11 | #include "vm/compiler/backend/il.h" |
12 | #include "vm/compiler/backend/il_printer.h" |
13 | #include "vm/compiler/backend/inliner.h" |
14 | #include "vm/compiler/backend/range_analysis.h" |
15 | #include "vm/compiler/cha.h" |
16 | #include "vm/compiler/compiler_state.h" |
17 | #include "vm/compiler/frontend/flow_graph_builder.h" |
18 | #include "vm/compiler/jit/compiler.h" |
19 | #include "vm/compiler/jit/jit_call_specializer.h" |
20 | #include "vm/cpu.h" |
21 | #include "vm/dart_entry.h" |
22 | #include "vm/exceptions.h" |
23 | #include "vm/hash_map.h" |
24 | #include "vm/object.h" |
25 | #include "vm/object_store.h" |
26 | #include "vm/parser.h" |
27 | #include "vm/resolver.h" |
28 | #include "vm/scopes.h" |
29 | #include "vm/stack_frame.h" |
30 | #include "vm/symbols.h" |
31 | |
32 | namespace dart { |
33 | |
// Threshold for replacing a megamorphic dispatch with an explicit sequence
// of per-class tests when the receiver's class set is small and known.
DEFINE_FLAG(int,
            max_exhaustive_polymorphic_checks,
            5,
            "If a call receiver is known to be of at most this many classes, "
            "generate exhaustive class tests instead of a megamorphic call");

// Quick access to the current isolate and zone.
#define I (isolate())
#define Z (zone())
43 | |
44 | #ifdef DART_PRECOMPILER |
45 | |
46 | // Returns named function that is a unique dynamic target, i.e., |
47 | // - the target is identified by its name alone, since it occurs only once. |
48 | // - target's class has no subclasses, and neither is subclassed, i.e., |
49 | // the receiver type can be only the function's class. |
50 | // Returns Function::null() if there is no unique dynamic target for |
51 | // given 'fname'. 'fname' must be a symbol. |
52 | static void GetUniqueDynamicTarget(Isolate* isolate, |
53 | const String& fname, |
54 | Object* function) { |
55 | UniqueFunctionsMap functions_map( |
56 | isolate->object_store()->unique_dynamic_targets()); |
57 | ASSERT(fname.IsSymbol()); |
58 | *function = functions_map.GetOrNull(fname); |
59 | ASSERT(functions_map.Release().raw() == |
60 | isolate->object_store()->unique_dynamic_targets()); |
61 | } |
62 | |
AotCallSpecializer::AotCallSpecializer(
    Precompiler* precompiler,
    FlowGraph* flow_graph,
    SpeculativeInliningPolicy* speculative_policy)
    : CallSpecializer(flow_graph,
                      speculative_policy,
                      /* should_clone_fields=*/false),
      precompiler_(precompiler),
      has_unique_no_such_method_(false) {
  // Cache whether noSuchMethod resolves to a single target in the whole
  // program; TryCreateICDataForUniqueTarget uses this to mark calls as
  // having a unique selector.
  Function& target_function = Function::Handle();
  if (isolate()->object_store()->unique_dynamic_targets() != Array::null()) {
    GetUniqueDynamicTarget(isolate(), Symbols::NoSuchMethod(),
                           &target_function);
    has_unique_no_such_method_ = !target_function.IsNull();
  }
}
79 | |
// Tries to devirtualize 'call' by looking up its selector in the
// precompiler's unique-dynamic-targets table (selectors that resolve to
// exactly one method in the whole program). On success attaches a
// monomorphic CallTargets to the call and returns true.
bool AotCallSpecializer::TryCreateICDataForUniqueTarget(
    InstanceCallInstr* call) {
  if (isolate()->object_store()->unique_dynamic_targets() == Array::null()) {
    return false;
  }

  // Check if the target is unique.
  Function& target_function = Function::Handle(Z);
  GetUniqueDynamicTarget(isolate(), call->function_name(), &target_function);

  if (target_function.IsNull()) {
    return false;
  }

  // Calls passing named arguments and calls to a function taking named
  // arguments must be resolved/checked at runtime.
  // Calls passing a type argument vector and calls to a generic function must
  // be resolved/checked at runtime.
  if (target_function.HasOptionalNamedParameters() ||
      target_function.IsGeneric() ||
      !target_function.AreValidArgumentCounts(
          call->type_args_len(), call->ArgumentCountWithoutTypeArgs(),
          call->argument_names().IsNull() ? 0 : call->argument_names().Length(),
          /* error_message = */ NULL)) {
    return false;
  }

  // If the target's class is implemented or subclassed, the receiver could
  // be an instance of a different class at run time.
  const Class& cls = Class::Handle(Z, target_function.Owner());
  if (CHA::IsImplemented(cls) || CHA::HasSubclasses(cls)) {
    return false;
  }

  call->SetTargets(
      CallTargets::CreateMonomorphic(Z, cls.id(), target_function));
  ASSERT(call->Targets().IsMonomorphic());

  // If we know that the only noSuchMethod is Object.noSuchMethod then
  // this call is guaranteed to either succeed or throw.
  if (has_unique_no_such_method_) {
    call->set_has_unique_selector(true);

    // Add redefinition of the receiver to prevent code motion across
    // this call.
    const intptr_t receiver_index = call->FirstArgIndex();
    RedefinitionInstr* redefinition = new (Z)
        RedefinitionInstr(new (Z) Value(call->ArgumentAt(receiver_index)));
    redefinition->set_ssa_temp_index(flow_graph()->alloc_ssa_temp_index());
    // Representations that occupy a register pair consume two SSA temps.
    if (FlowGraph::NeedsPairLocation(redefinition->representation())) {
      flow_graph()->alloc_ssa_temp_index();
    }
    redefinition->InsertAfter(call);
    // Replace all uses of the receiver dominated by this call.
    FlowGraph::RenameDominatedUses(call->ArgumentAt(receiver_index),
                                   redefinition, redefinition);
    if (!redefinition->HasUses()) {
      redefinition->RemoveFromGraph();
    }
  }

  return true;
}
141 | |
142 | bool AotCallSpecializer::TryCreateICData(InstanceCallInstr* call) { |
143 | if (TryCreateICDataForUniqueTarget(call)) { |
144 | return true; |
145 | } |
146 | |
147 | return CallSpecializer::TryCreateICData(call); |
148 | } |
149 | |
150 | bool AotCallSpecializer::RecognizeRuntimeTypeGetter(InstanceCallInstr* call) { |
151 | if ((precompiler_ == NULL) || !precompiler_->get_runtime_type_is_unique()) { |
152 | return false; |
153 | } |
154 | |
155 | if (call->function_name().raw() != Symbols::GetRuntimeType().raw()) { |
156 | return false; |
157 | } |
158 | |
159 | // There is only a single function Object.get:runtimeType that can be invoked |
160 | // by this call. Convert dynamic invocation to a static one. |
161 | const Class& cls = Class::Handle(Z, I->object_store()->object_class()); |
162 | const Function& function = |
163 | Function::Handle(Z, call->ResolveForReceiverClass(cls)); |
164 | ASSERT(!function.IsNull()); |
165 | const Function& target = Function::ZoneHandle(Z, function.raw()); |
166 | StaticCallInstr* static_call = |
167 | StaticCallInstr::FromCall(Z, call, target, call->CallCount()); |
168 | static_call->SetResultType(Z, CompileType::FromCid(kTypeCid)); |
169 | call->ReplaceWith(static_call, current_iterator()); |
170 | return true; |
171 | } |
172 | |
173 | static bool IsGetRuntimeType(Definition* defn) { |
174 | StaticCallInstr* call = defn->AsStaticCall(); |
175 | return (call != NULL) && (call->function().recognized_kind() == |
176 | MethodRecognizer::kObjectRuntimeType); |
177 | } |
178 | |
// Recognize a.runtimeType == b.runtimeType and fold it into
// Object._haveSameRuntimeType(a, b).
// Note: this optimization is not speculative.
bool AotCallSpecializer::TryReplaceWithHaveSameRuntimeType(
    TemplateDartCall<0>* call) {
  ASSERT((call->IsInstanceCall() &&
          (call->AsInstanceCall()->ic_data()->NumArgsTested() == 2)) ||
         call->IsStaticCall());
  ASSERT(call->type_args_len() == 0);
  ASSERT(call->ArgumentCount() == 2);

  Definition* left = call->ArgumentAt(0);
  Definition* right = call->ArgumentAt(1);

  // Both operands must be runtimeType getter calls whose results feed only
  // this comparison, so the getters can be absorbed into the fold.
  if (IsGetRuntimeType(left) && left->input_use_list()->IsSingleUse() &&
      IsGetRuntimeType(right) && right->input_use_list()->IsSingleUse()) {
    const Class& cls = Class::Handle(Z, I->object_store()->object_class());
    const Function& have_same_runtime_type = Function::ZoneHandle(
        Z,
        cls.LookupStaticFunctionAllowPrivate(Symbols::HaveSameRuntimeType()));
    ASSERT(!have_same_runtime_type.IsNull());

    // Build _haveSameRuntimeType(a, b) from the receivers of both getters.
    InputsArray* args = new (Z) InputsArray(Z, 2);
    args->Add(left->ArgumentValueAt(0)->CopyWithType(Z));
    args->Add(right->ArgumentValueAt(0)->CopyWithType(Z));
    const intptr_t kTypeArgsLen = 0;
    StaticCallInstr* static_call = new (Z) StaticCallInstr(
        call->token_pos(), have_same_runtime_type, kTypeArgsLen,
        Object::null_array(),  // argument_names
        args, call->deopt_id(), call->CallCount(), ICData::kOptimized);
    static_call->SetResultType(Z, CompileType::FromCid(kBoolCid));
    ReplaceCall(call, static_call);
    // ReplaceCall moved environment from 'call' to 'static_call'.
    // Update arguments of 'static_call' in the environment.
    Environment* env = static_call->env();
    env->ValueAt(env->Length() - 2)
        ->BindToEnvironment(static_call->ArgumentAt(0));
    env->ValueAt(env->Length() - 1)
        ->BindToEnvironment(static_call->ArgumentAt(1));
    return true;
  }

  return false;
}
223 | |
// Heuristic: returns true if the binary instance call 'instr' is likely to
// see a Smi operand at run time, making a checked-Smi fast path worthwhile.
static bool HasLikelySmiOperand(InstanceCallInstr* instr) {
  ASSERT(instr->type_args_len() == 0);

  // If Smi is not assignable to the interface target of the call, the receiver
  // is definitely not a Smi.
  if (!instr->CanReceiverBeSmiBasedOnInterfaceTarget(
          Thread::Current()->zone())) {
    return false;
  }

  // Phis with at least one known smi are guessed to be likely smi as well.
  for (intptr_t i = 0; i < instr->ArgumentCount(); ++i) {
    PhiInstr* phi = instr->ArgumentAt(i)->AsPhi();
    if (phi != NULL) {
      for (intptr_t j = 0; j < phi->InputCount(); ++j) {
        if (phi->InputAt(j)->Type()->ToCid() == kSmiCid) return true;
      }
    }
  }
  // If all of the inputs are known smis or the result of CheckedSmiOp,
  // we guess the operand to be likely smi.
  for (intptr_t i = 0; i < instr->ArgumentCount(); ++i) {
    if (!instr->ArgumentAt(i)->IsCheckedSmiOp()) return false;
  }
  return true;
}
250 | |
251 | bool AotCallSpecializer::TryInlineFieldAccess(InstanceCallInstr* call) { |
252 | const Token::Kind op_kind = call->token_kind(); |
253 | if ((op_kind == Token::kGET) && TryInlineInstanceGetter(call)) { |
254 | return true; |
255 | } |
256 | if ((op_kind == Token::kSET) && TryInlineInstanceSetter(call)) { |
257 | return true; |
258 | } |
259 | return false; |
260 | } |
261 | |
262 | bool AotCallSpecializer::TryInlineFieldAccess(StaticCallInstr* call) { |
263 | if (call->function().IsImplicitGetterFunction()) { |
264 | Field& field = Field::ZoneHandle(call->function().accessor_field()); |
265 | if (field.is_late()) { |
266 | // TODO(dartbug.com/40447): Inline implicit getters for late fields. |
267 | return false; |
268 | } |
269 | if (should_clone_fields_) { |
270 | field = field.CloneFromOriginal(); |
271 | } |
272 | InlineImplicitInstanceGetter(call, field); |
273 | return true; |
274 | } |
275 | |
276 | return false; |
277 | } |
278 | |
279 | bool AotCallSpecializer::IsSupportedIntOperandForStaticDoubleOp( |
280 | CompileType* operand_type) { |
281 | if (operand_type->IsNullableInt()) { |
282 | if (operand_type->ToNullableCid() == kSmiCid) { |
283 | return true; |
284 | } |
285 | |
286 | if (FlowGraphCompiler::SupportsUnboxedInt64() && |
287 | FlowGraphCompiler::CanConvertInt64ToDouble()) { |
288 | return true; |
289 | } |
290 | } |
291 | |
292 | return false; |
293 | } |
294 | |
// Prepares 'input' for consumption by a non-speculative static operation
// on class 'cid' (double or mint): inserts a null check when the static
// type is nullable, and an int->double conversion when a double operation
// receives an integer operand. Returns the value to use as the operand.
Value* AotCallSpecializer::PrepareStaticOpInput(Value* input,
                                                intptr_t cid,
                                                Instruction* call) {
  ASSERT((cid == kDoubleCid) || (cid == kMintCid));

  if (input->Type()->is_nullable()) {
    // The selector name is used for the error reported by the null check.
    const String& function_name =
        (call->IsInstanceCall()
             ? call->AsInstanceCall()->function_name()
             : String::ZoneHandle(Z, call->AsStaticCall()->function().name()));
    AddCheckNull(input, function_name, call->deopt_id(), call->env(), call);
  }

  input = input->CopyWithType(Z);

  if (cid == kDoubleCid && input->Type()->IsNullableInt()) {
    Definition* conversion = NULL;

    if (input->Type()->ToNullableCid() == kSmiCid) {
      conversion = new (Z) SmiToDoubleInstr(input, call->token_pos());
    } else if (FlowGraphCompiler::SupportsUnboxedInt64() &&
               FlowGraphCompiler::CanConvertInt64ToDouble()) {
      conversion = new (Z) Int64ToDoubleInstr(input, DeoptId::kNone,
                                              Instruction::kNotSpeculative);
    } else {
      // Callers are expected to have filtered unsupported int operands via
      // IsSupportedIntOperandForStaticDoubleOp.
      UNREACHABLE();
    }

    if (FLAG_trace_strong_mode_types) {
      THR_Print("[Strong mode] Inserted %s\n", conversion->ToCString());
    }
    InsertBefore(call, conversion, /* env = */ NULL, FlowGraph::kValue);
    return new (Z) Value(conversion);
  }

  return input;
}
332 | |
333 | CompileType AotCallSpecializer::BuildStrengthenedReceiverType(Value* input, |
334 | intptr_t cid) { |
335 | CompileType* old_type = input->Type(); |
336 | CompileType* refined_type = old_type; |
337 | |
338 | CompileType type = CompileType::None(); |
339 | if (cid == kSmiCid) { |
340 | type = CompileType::NullableSmi(); |
341 | refined_type = CompileType::ComputeRefinedType(old_type, &type); |
342 | } else if (cid == kMintCid) { |
343 | type = CompileType::NullableMint(); |
344 | refined_type = CompileType::ComputeRefinedType(old_type, &type); |
345 | } else if (cid == kIntegerCid && !input->Type()->IsNullableInt()) { |
346 | type = CompileType::NullableInt(); |
347 | refined_type = CompileType::ComputeRefinedType(old_type, &type); |
348 | } else if (cid == kDoubleCid && !input->Type()->IsNullableDouble()) { |
349 | type = CompileType::NullableDouble(); |
350 | refined_type = CompileType::ComputeRefinedType(old_type, &type); |
351 | } |
352 | |
353 | if (refined_type != old_type) { |
354 | return *refined_type; |
355 | } |
356 | return CompileType::None(); |
357 | } |
358 | |
359 | // After replacing a call with a specialized instruction, make sure to |
360 | // update types at all uses, as specialized instruction can provide a more |
361 | // specific type. |
362 | static void RefineUseTypes(Definition* instr) { |
363 | CompileType* new_type = instr->Type(); |
364 | for (Value::Iterator it(instr->input_use_list()); !it.Done(); it.Advance()) { |
365 | it.Current()->RefineReachingType(new_type); |
366 | } |
367 | } |
368 | |
369 | bool AotCallSpecializer::TryOptimizeInstanceCallUsingStaticTypes( |
370 | InstanceCallInstr* instr) { |
371 | const Token::Kind op_kind = instr->token_kind(); |
372 | return TryOptimizeIntegerOperation(instr, op_kind) || |
373 | TryOptimizeDoubleOperation(instr, op_kind); |
374 | } |
375 | |
// Strength-reduces a static call (typically a TFA-devirtualized instance
// call) using static type information, after tightening the receiver type
// for targets owned by int/double classes.
bool AotCallSpecializer::TryOptimizeStaticCallUsingStaticTypes(
    StaticCallInstr* instr) {
  const String& name = String::Handle(Z, instr->function().name());
  const Token::Kind op_kind = MethodTokenRecognizer::RecognizeTokenKind(name);

  if (op_kind == Token::kEQ && TryReplaceWithHaveSameRuntimeType(instr)) {
    return true;
  }

  // We only specialize instance methods for int/double operations.
  const auto& target = instr->function();
  if (!target.IsDynamicFunction()) {
    return false;
  }

  // For de-virtualized instance calls, we strengthen the type here manually
  // because it might not be attached to the receiver.
  // See http://dartbug.com/35179 for preserving the receiver type information.
  const Class& owner = Class::Handle(Z, target.Owner());
  const intptr_t cid = owner.id();
  if (cid == kSmiCid || cid == kMintCid || cid == kIntegerCid ||
      cid == kDoubleCid) {
    // Sometimes TFA de-virtualizes instance calls to static calls. In such
    // cases the VM might have a looser type on the receiver, so we explicitly
    // tighten it (this is safe since it was proven that the receiver is either
    // null or will end up with that target).
    const intptr_t receiver_index = instr->FirstArgIndex();
    const intptr_t argument_count = instr->ArgumentCountWithoutTypeArgs();
    if (argument_count >= 1) {
      auto receiver_value = instr->ArgumentValueAt(receiver_index);
      auto receiver = receiver_value->definition();
      auto type = BuildStrengthenedReceiverType(receiver_value, cid);
      if (!type.IsNone()) {
        // Pin the tightened type with a redefinition so it propagates to
        // all dominated uses of the receiver.
        auto redefinition =
            flow_graph()->EnsureRedefinition(instr->previous(), receiver, type);
        if (redefinition != nullptr) {
          RefineUseTypes(redefinition);
        }
      }
    }
  }

  return TryOptimizeIntegerOperation(instr, op_kind) ||
         TryOptimizeDoubleOperation(instr, op_kind);
}
421 | |
// Modulo against a constant power-of-two can be optimized into a mask.
// x % y -> x & (|y| - 1) for smi masks only
Definition* AotCallSpecializer::TryOptimizeMod(TemplateDartCall<0>* instr,
                                               Token::Kind op_kind,
                                               Value* left_value,
                                               Value* right_value) {
  if (!right_value->BindsToConstant()) {
    return nullptr;
  }

  const Object& rhs = right_value->BoundConstant();
  const int64_t value = Integer::Cast(rhs).AsInt64Value();  // smi and mint
  // Abs(kMinInt64) would overflow, and its mask would not be a smi anyway.
  if (value == kMinInt64) {
    return nullptr;  // non-smi mask
  }
  const int64_t modulus = Utils::Abs(value);
  if (!Utils::IsPowerOfTwo(modulus) || !compiler::target::IsSmi(modulus - 1)) {
    return nullptr;
  }

  left_value = PrepareStaticOpInput(left_value, kMintCid, instr);

#if defined(TARGET_ARCH_ARM)
  // On ARM, materialize the mask as a 32-bit constant and widen it to
  // int64 for the AND below.
  Definition* right_definition = new (Z) UnboxedConstantInstr(
      Smi::ZoneHandle(Z, Smi::New(modulus - 1)), kUnboxedInt32);
  InsertBefore(instr, right_definition, /*env=*/NULL, FlowGraph::kValue);
  right_definition = new (Z)
      IntConverterInstr(kUnboxedInt32, kUnboxedInt64,
                        new (Z) Value(right_definition), DeoptId::kNone);
#else
  Definition* right_definition = new (Z) UnboxedConstantInstr(
      Smi::ZoneHandle(Z, Smi::New(modulus - 1)), kUnboxedInt64);
#endif
  // x % 1 is always 0: the zero-mask constant itself is the replacement.
  if (modulus == 1) return right_definition;
  InsertBefore(instr, right_definition, /*env=*/NULL, FlowGraph::kValue);
  right_value = new (Z) Value(right_definition);
  return new (Z)
      BinaryInt64OpInstr(Token::kBIT_AND, left_value, right_value,
                         DeoptId::kNone, Instruction::kNotSpeculative);
}
462 | |
// Tries to replace a unary/binary integer operation call with cheaper IL:
// a strict or unboxed comparison, unboxed int64 arithmetic, or a checked
// Smi fast path. Returns true if the call was replaced.
bool AotCallSpecializer::TryOptimizeIntegerOperation(TemplateDartCall<0>* instr,
                                                     Token::Kind op_kind) {
  if (instr->type_args_len() != 0) {
    // Arithmetic operations don't have type arguments.
    return false;
  }

  Definition* replacement = NULL;
  if (instr->ArgumentCount() == 2) {
    Value* left_value = instr->ArgumentValueAt(0);
    Value* right_value = instr->ArgumentValueAt(1);
    CompileType* left_type = left_value->Type();
    CompileType* right_type = right_value->Type();

    const bool is_equality_op = Token::IsEqualityOperator(op_kind);
    bool has_nullable_int_args =
        left_type->IsNullableInt() && right_type->IsNullableInt();

    // An instance call whose interface target rules out a Smi receiver
    // cannot actually be an int operation at run time.
    if (auto* call = instr->AsInstanceCall()) {
      if (!call->CanReceiverBeSmiBasedOnInterfaceTarget(zone())) {
        has_nullable_int_args = false;
      }
    }

    // NOTE: We cannot use strict comparisons if the receiver has an overridden
    // == operator or if either side can be a double, since 1.0 == 1.
    const bool can_use_strict_compare =
        is_equality_op && has_nullable_int_args &&
        (left_type->IsNullableSmi() || right_type->IsNullableSmi());

    // We only support binary operations if both operands are nullable integers
    // or when we can use a cheap strict comparison operation.
    if (!has_nullable_int_args) {
      return false;
    }

    switch (op_kind) {
      case Token::kEQ:
      case Token::kNE:
      case Token::kLT:
      case Token::kLTE:
      case Token::kGT:
      case Token::kGTE: {
        const bool supports_unboxed_int =
            FlowGraphCompiler::SupportsUnboxedInt64();
        const bool can_use_equality_compare =
            supports_unboxed_int && is_equality_op && left_type->IsInt() &&
            right_type->IsInt();

        // We prefer equality compare, since it doesn't require boxing.
        if (!can_use_equality_compare && can_use_strict_compare) {
          replacement = new (Z) StrictCompareInstr(
              instr->token_pos(),
              (op_kind == Token::kEQ) ? Token::kEQ_STRICT : Token::kNE_STRICT,
              left_value->CopyWithType(Z), right_value->CopyWithType(Z),
              /*needs_number_check=*/false, DeoptId::kNone);
          break;
        }

        if (supports_unboxed_int) {
          if (can_use_equality_compare) {
            replacement = new (Z) EqualityCompareInstr(
                instr->token_pos(), op_kind, left_value->CopyWithType(Z),
                right_value->CopyWithType(Z), kMintCid, DeoptId::kNone,
                Instruction::kNotSpeculative);
            break;
          } else if (Token::IsRelationalOperator(op_kind)) {
            left_value = PrepareStaticOpInput(left_value, kMintCid, instr);
            right_value = PrepareStaticOpInput(right_value, kMintCid, instr);
            replacement = new (Z) RelationalOpInstr(
                instr->token_pos(), op_kind, left_value, right_value, kMintCid,
                DeoptId::kNone, Instruction::kNotSpeculative);
            break;
          } else {
            // TODO(dartbug.com/30480): Figure out how to handle null in
            // equality comparisons.
            replacement = new (Z)
                CheckedSmiComparisonInstr(op_kind, left_value->CopyWithType(Z),
                                          right_value->CopyWithType(Z), instr);
            break;
          }
        } else {
          replacement = new (Z)
              CheckedSmiComparisonInstr(op_kind, left_value->CopyWithType(Z),
                                        right_value->CopyWithType(Z), instr);
          break;
        }
        break;
      }
      case Token::kMOD:
        // Power-of-two modulus can become a mask; otherwise fall through to
        // the generic integer-operation handling below.
        replacement = TryOptimizeMod(instr, op_kind, left_value, right_value);
        if (replacement != nullptr) break;
        FALL_THROUGH;
      case Token::kTRUNCDIV:
#if !defined(TARGET_ARCH_X64) && !defined(TARGET_ARCH_ARM64)
        // TODO(ajcbik): 32-bit archs too?
        break;
#else
        FALL_THROUGH;
#endif
      case Token::kSHL:
        FALL_THROUGH;
      case Token::kSHR:
        FALL_THROUGH;
      case Token::kBIT_OR:
        FALL_THROUGH;
      case Token::kBIT_XOR:
        FALL_THROUGH;
      case Token::kBIT_AND:
        FALL_THROUGH;
      case Token::kADD:
        FALL_THROUGH;
      case Token::kSUB:
        FALL_THROUGH;
      case Token::kMUL: {
        if (FlowGraphCompiler::SupportsUnboxedInt64()) {
          if (op_kind == Token::kSHR || op_kind == Token::kSHL) {
            left_value = PrepareStaticOpInput(left_value, kMintCid, instr);
            right_value = PrepareStaticOpInput(right_value, kMintCid, instr);
            replacement = new (Z) ShiftInt64OpInstr(
                op_kind, left_value, right_value, DeoptId::kNone);
            break;
          } else {
            left_value = PrepareStaticOpInput(left_value, kMintCid, instr);
            right_value = PrepareStaticOpInput(right_value, kMintCid, instr);
            replacement = new (Z) BinaryInt64OpInstr(
                op_kind, left_value, right_value, DeoptId::kNone,
                Instruction::kNotSpeculative);
            break;
          }
        }
        // Without unboxed int64 support fall back to a checked Smi fast
        // path (not available for kMOD/kTRUNCDIV).
        if (op_kind != Token::kMOD && op_kind != Token::kTRUNCDIV) {
          replacement =
              new (Z) CheckedSmiOpInstr(op_kind, left_value->CopyWithType(Z),
                                        right_value->CopyWithType(Z), instr);
          break;
        }
        break;
      }

      default:
        break;
    }
  } else if (instr->ArgumentCount() == 1) {
    Value* left_value = instr->ArgumentValueAt(0);
    CompileType* left_type = left_value->Type();

    // We only support unary operations on nullable integers.
    if (!left_type->IsNullableInt()) {
      return false;
    }

    if (FlowGraphCompiler::SupportsUnboxedInt64()) {
      if (op_kind == Token::kNEGATE || op_kind == Token::kBIT_NOT) {
        left_value = PrepareStaticOpInput(left_value, kMintCid, instr);
        replacement = new (Z) UnaryInt64OpInstr(
            op_kind, left_value, DeoptId::kNone, Instruction::kNotSpeculative);
      }
    }
  }

  // Commit only to a replacement that cannot deoptimize.
  if (replacement != nullptr && !replacement->ComputeCanDeoptimize()) {
    if (FLAG_trace_strong_mode_types) {
      THR_Print("[Strong mode] Optimization: replacing %s with %s\n",
                instr->ToCString(), replacement->ToCString());
    }
    ReplaceCall(instr, replacement);
    RefineUseTypes(replacement);
    return true;
  }

  return false;
}
636 | |
// Tries to replace a unary/binary double operation call with an unboxed
// double IL instruction. Returns true if the call was replaced.
bool AotCallSpecializer::TryOptimizeDoubleOperation(TemplateDartCall<0>* instr,
                                                    Token::Kind op_kind) {
  if (instr->type_args_len() != 0) {
    // Arithmetic operations don't have type arguments.
    return false;
  }

  if (!FlowGraphCompiler::SupportsUnboxedDoubles()) {
    return false;
  }

  Definition* replacement = NULL;

  if (instr->ArgumentCount() == 2) {
    Value* left_value = instr->ArgumentValueAt(0);
    Value* right_value = instr->ArgumentValueAt(1);
    CompileType* left_type = left_value->Type();
    CompileType* right_type = right_value->Type();

    // Both operands must be doubles, or integers convertible to double on
    // this target.
    if (!left_type->IsNullableDouble() &&
        !IsSupportedIntOperandForStaticDoubleOp(left_type)) {
      return false;
    }
    if (!right_type->IsNullableDouble() &&
        !IsSupportedIntOperandForStaticDoubleOp(right_type)) {
      return false;
    }

    switch (op_kind) {
      case Token::kEQ:
        FALL_THROUGH;
      case Token::kNE: {
        // TODO(dartbug.com/32166): Support EQ, NE for nullable doubles.
        // (requires null-aware comparison instruction).
        if (left_type->IsDouble() && right_type->IsDouble()) {
          left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
          right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
          replacement = new (Z) EqualityCompareInstr(
              instr->token_pos(), op_kind, left_value, right_value, kDoubleCid,
              DeoptId::kNone, Instruction::kNotSpeculative);
          break;
        }
        break;
      }
      case Token::kLT:
        FALL_THROUGH;
      case Token::kLTE:
        FALL_THROUGH;
      case Token::kGT:
        FALL_THROUGH;
      case Token::kGTE: {
        left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
        right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
        replacement = new (Z) RelationalOpInstr(
            instr->token_pos(), op_kind, left_value, right_value, kDoubleCid,
            DeoptId::kNone, Instruction::kNotSpeculative);
        break;
      }
      case Token::kADD:
        FALL_THROUGH;
      case Token::kSUB:
        FALL_THROUGH;
      case Token::kMUL:
        FALL_THROUGH;
      case Token::kDIV: {
        if (op_kind == Token::kDIV &&
            !FlowGraphCompiler::SupportsHardwareDivision()) {
          return false;
        }
        left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
        right_value = PrepareStaticOpInput(right_value, kDoubleCid, instr);
        replacement = new (Z) BinaryDoubleOpInstr(
            op_kind, left_value, right_value, DeoptId::kNone,
            instr->token_pos(), Instruction::kNotSpeculative);
        break;
      }

      // Bitwise and integer-division operators are not specialized for
      // double operands.
      case Token::kBIT_OR:
        FALL_THROUGH;
      case Token::kBIT_XOR:
        FALL_THROUGH;
      case Token::kBIT_AND:
        FALL_THROUGH;
      case Token::kMOD:
        FALL_THROUGH;
      case Token::kTRUNCDIV:
        FALL_THROUGH;
      default:
        break;
    }
  } else if (instr->ArgumentCount() == 1) {
    Value* left_value = instr->ArgumentValueAt(0);
    CompileType* left_type = left_value->Type();

    // We only support unary operations on nullable doubles.
    if (!left_type->IsNullableDouble()) {
      return false;
    }

    if (op_kind == Token::kNEGATE) {
      left_value = PrepareStaticOpInput(left_value, kDoubleCid, instr);
      replacement = new (Z)
          UnaryDoubleOpInstr(Token::kNEGATE, left_value, instr->deopt_id(),
                             Instruction::kNotSpeculative);
    }
  }

  // Commit only to a replacement that cannot deoptimize.
  if (replacement != NULL && !replacement->ComputeCanDeoptimize()) {
    if (FLAG_trace_strong_mode_types) {
      THR_Print("[Strong mode] Optimization: replacing %s with %s\n",
                instr->ToCString(), replacement->ToCString());
    }
    ReplaceCall(instr, replacement);
    RefineUseTypes(replacement);
    return true;
  }

  return false;
}
756 | |
757 | static void EnsureICData(Zone* zone, |
758 | const Function& function, |
759 | InstanceCallInstr* call) { |
760 | if (!call->HasICData()) { |
761 | const Array& arguments_descriptor = |
762 | Array::Handle(zone, call->GetArgumentsDescriptor()); |
763 | const ICData& ic_data = ICData::ZoneHandle( |
764 | zone, ICData::New(function, call->function_name(), arguments_descriptor, |
765 | call->deopt_id(), call->checked_argument_count(), |
766 | ICData::kInstance)); |
767 | call->set_ic_data(&ic_data); |
768 | } |
769 | } |
770 | |
// Tries to optimize instance call by replacing it with a faster instruction
// (e.g, binary op, field load, ..).
//
// The optimizations are attempted in order of decreasing payoff:
//   1. inlined type tests, field accesses and runtimeType getters;
//   2. devirtualization to a StaticCall when a unique target is known;
//   3. checked Smi arithmetic/comparison for likely-Smi operands;
//   4. devirtualization via the receiver's propagated cid;
//   5. `==`/`!=` -> StrictCompare when no class overrides Object.==;
//   6. CHA-based exhaustive class tests / complete polymorphic calls;
//   7. expansion of calls through getters;
//   8. a generic (incomplete) polymorphic call as the fallback.
//
// TODO(dartbug.com/30635) Evaluate how much this can be shared with
// JitCallSpecializer.
void AotCallSpecializer::VisitInstanceCall(InstanceCallInstr* instr) {
  // Type test is special as it always gets converted into inlined code.
  const Token::Kind op_kind = instr->token_kind();
  if (Token::IsTypeTestOperator(op_kind)) {
    ReplaceWithInstanceOf(instr);
    return;
  }

  if (TryInlineFieldAccess(instr)) {
    return;
  }

  if (RecognizeRuntimeTypeGetter(instr)) {
    return;
  }

  if ((op_kind == Token::kEQ) && TryReplaceWithHaveSameRuntimeType(instr)) {
    return;
  }

  const CallTargets& targets = instr->Targets();
  const intptr_t receiver_idx = instr->FirstArgIndex();

  if (TryOptimizeInstanceCallUsingStaticTypes(instr)) {
    return;
  }

  bool has_one_target = targets.HasSingleTarget();
  if (has_one_target) {
    // Check if the single target is a polymorphic target, if it is,
    // we don't have one target.
    const Function& target = targets.FirstTarget();
    has_one_target = !target.is_polymorphic_target();
  }

  if (has_one_target) {
    const Function& target = targets.FirstTarget();
    FunctionLayout::Kind function_kind = target.kind();
    // Replace the instance call with a static call only if no receiver
    // class check would be required to guard it.
    if (flow_graph()->CheckForInstanceCall(instr, function_kind) ==
        FlowGraph::ToCheck::kNoCheck) {
      StaticCallInstr* call = StaticCallInstr::FromCall(
          Z, instr, target, targets.AggregateCallCount());
      instr->ReplaceWith(call, current_iterator());
      return;
    }
  }

  // Strength-reduce arithmetic and comparison operators to checked Smi
  // operations when type feedback or static types suggest Smi operands.
  switch (instr->token_kind()) {
    case Token::kEQ:
    case Token::kNE:
    case Token::kLT:
    case Token::kLTE:
    case Token::kGT:
    case Token::kGTE: {
      if (instr->BinaryFeedback().OperandsAre(kSmiCid) ||
          HasLikelySmiOperand(instr)) {
        ASSERT(receiver_idx == 0);
        Definition* left = instr->ArgumentAt(0);
        Definition* right = instr->ArgumentAt(1);
        CheckedSmiComparisonInstr* smi_op = new (Z)
            CheckedSmiComparisonInstr(instr->token_kind(), new (Z) Value(left),
                                      new (Z) Value(right), instr);
        ReplaceCall(instr, smi_op);
        return;
      }
      break;
    }
    case Token::kSHL:
    case Token::kSHR:
    case Token::kBIT_OR:
    case Token::kBIT_XOR:
    case Token::kBIT_AND:
    case Token::kADD:
    case Token::kSUB:
    case Token::kMUL: {
      if (instr->BinaryFeedback().OperandsAre(kSmiCid) ||
          HasLikelySmiOperand(instr)) {
        ASSERT(receiver_idx == 0);
        Definition* left = instr->ArgumentAt(0);
        Definition* right = instr->ArgumentAt(1);
        CheckedSmiOpInstr* smi_op =
            new (Z) CheckedSmiOpInstr(instr->token_kind(), new (Z) Value(left),
                                      new (Z) Value(right), instr);

        ReplaceCall(instr, smi_op);
        return;
      }
      break;
    }
    default:
      break;
  }

  // No IC data checks. Try resolve target using the propagated cid.
  const intptr_t receiver_cid =
      instr->ArgumentValueAt(receiver_idx)->Type()->ToCid();
  if (receiver_cid != kDynamicCid) {
    const Class& receiver_class =
        Class::Handle(Z, isolate()->class_table()->At(receiver_cid));
    const Function& function =
        Function::Handle(Z, instr->ResolveForReceiverClass(receiver_class));
    if (!function.IsNull()) {
      const Function& target = Function::ZoneHandle(Z, function.raw());
      StaticCallInstr* call =
          StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
      instr->ReplaceWith(call, current_iterator());
      return;
    }
  }

  // Check for x == y, where x has type T?, there are no subtypes of T, and
  // T does not override ==. Replace with StrictCompare.
  if (instr->token_kind() == Token::kEQ || instr->token_kind() == Token::kNE) {
    GrowableArray<intptr_t> class_ids(6);
    if (instr->ArgumentValueAt(receiver_idx)->Type()->Specialize(&class_ids)) {
      bool is_object_eq = true;
      for (intptr_t i = 0; i < class_ids.length(); i++) {
        const intptr_t cid = class_ids[i];
        // Skip sentinel cid. It may appear in the unreachable code after
        // inlining a method which doesn't return.
        if (cid == kNeverCid) continue;
        const Class& cls = Class::Handle(Z, isolate()->class_table()->At(cid));
        const Function& target =
            Function::Handle(Z, instr->ResolveForReceiverClass(cls));
        if (target.recognized_kind() != MethodRecognizer::kObjectEquals) {
          is_object_eq = false;
          break;
        }
      }
      if (is_object_eq) {
        // All possible receiver classes use Object.==, so identity
        // comparison is equivalent; no number check needed in AOT here.
        auto* replacement = new (Z) StrictCompareInstr(
            instr->token_pos(),
            (instr->token_kind() == Token::kEQ) ? Token::kEQ_STRICT
                                                : Token::kNE_STRICT,
            instr->ArgumentValueAt(0)->CopyWithType(Z),
            instr->ArgumentValueAt(1)->CopyWithType(Z),
            /*needs_number_check=*/false, DeoptId::kNone);
        ReplaceCall(instr, replacement);
        RefineUseTypes(replacement);
        return;
      }
    }
  }

  // Determine a receiver class to use for CHA-based devirtualization:
  // either the enclosing method's own receiver, or a non-nullable,
  // non-implemented static type of the call's receiver argument.
  Definition* callee_receiver = instr->ArgumentAt(receiver_idx);
  const Function& function = flow_graph()->function();
  Class& receiver_class = Class::Handle(Z);

  if (function.IsDynamicFunction() &&
      flow_graph()->IsReceiver(callee_receiver)) {
    // Call receiver is method receiver.
    receiver_class = function.Owner();
  } else {
    // Check if we have an non-nullable compile type for the receiver.
    CompileType* type = instr->ArgumentAt(receiver_idx)->Type();
    if (type->ToAbstractType()->IsType() &&
        !type->ToAbstractType()->IsDynamicType() && !type->is_nullable()) {
      receiver_class = type->ToAbstractType()->type_class();
      if (receiver_class.is_implemented()) {
        // An implemented class may have unrelated implementors, so its
        // concrete subclasses do not bound the possible receivers.
        receiver_class = Class::null();
      }
    }
  }
  if (!receiver_class.IsNull()) {
    GrowableArray<intptr_t> class_ids(6);
    if (thread()->compiler_state().cha().ConcreteSubclasses(receiver_class,
                                                            &class_ids)) {
      // First check if all subclasses end up calling the same method.
      // If this is the case we will replace instance call with a direct
      // static call.
      // Otherwise we will try to create ICData that contains all possible
      // targets with appropriate checks.
      //
      // Loop invariant: at most one of |single_target| and |ic_data| is
      // non-null; both null means the optimization was abandoned.
      Function& single_target = Function::Handle(Z);
      ICData& ic_data = ICData::Handle(Z);
      const Array& args_desc_array =
          Array::Handle(Z, instr->GetArgumentsDescriptor());
      Function& target = Function::Handle(Z);
      Class& cls = Class::Handle(Z);
      for (intptr_t i = 0; i < class_ids.length(); i++) {
        const intptr_t cid = class_ids[i];
        cls = isolate()->class_table()->At(cid);
        target = instr->ResolveForReceiverClass(cls);
        ASSERT(target.IsNull() || !target.IsInvokeFieldDispatcher());
        if (target.IsNull()) {
          single_target = Function::null();
          ic_data = ICData::null();
          break;
        } else if (ic_data.IsNull()) {
          // First we are trying to compute a single target for all subclasses.
          if (single_target.IsNull()) {
            ASSERT(i == 0);
            single_target = target.raw();
            continue;
          } else if (single_target.raw() == target.raw()) {
            continue;
          }

          // The call does not resolve to a single target within the hierarchy.
          // If we have too many subclasses abort the optimization.
          if (class_ids.length() > FLAG_max_exhaustive_polymorphic_checks) {
            single_target = Function::null();
            break;
          }

          // Create an ICData and map all previously seen classes (< i) to
          // the computed single_target.
          ic_data = ICData::New(function, instr->function_name(),
                                args_desc_array, DeoptId::kNone,
                                /* args_tested = */ 1, ICData::kOptimized);
          for (intptr_t j = 0; j < i; j++) {
            ic_data.AddReceiverCheck(class_ids[j], single_target);
          }

          single_target = Function::null();
        }

        ASSERT(ic_data.raw() != ICData::null());
        ASSERT(single_target.raw() == Function::null());
        ic_data.AddReceiverCheck(cid, target);
      }

      if (single_target.raw() != Function::null()) {
        // If this is a getter or setter invocation try inlining it right away
        // instead of replacing it with a static call.
        if ((op_kind == Token::kGET) || (op_kind == Token::kSET)) {
          // Create fake IC data with the resolved target.
          const ICData& ic_data = ICData::Handle(
              ICData::New(flow_graph()->function(), instr->function_name(),
                          args_desc_array, DeoptId::kNone,
                          /* args_tested = */ 1, ICData::kOptimized));
          cls = single_target.Owner();
          ic_data.AddReceiverCheck(cls.id(), single_target);
          instr->set_ic_data(&ic_data);

          if (TryInlineFieldAccess(instr)) {
            return;
          }
        }

        // We have computed that there is only a single target for this call
        // within the whole hierarchy. Replace InstanceCall with StaticCall.
        const Function& target = Function::ZoneHandle(Z, single_target.raw());
        StaticCallInstr* call =
            StaticCallInstr::FromCall(Z, instr, target, instr->CallCount());
        instr->ReplaceWith(call, current_iterator());
        return;
      } else if ((ic_data.raw() != ICData::null()) &&
                 !ic_data.NumberOfChecksIs(0)) {
        // The hierarchy is fully enumerated, so the polymorphic call can
        // be marked complete (no megamorphic fallback needed).
        const CallTargets* targets = CallTargets::Create(Z, ic_data);
        ASSERT(!targets->is_empty());
        PolymorphicInstanceCallInstr* call =
            PolymorphicInstanceCallInstr::FromCall(Z, instr, *targets,
                                                   /* complete = */ true);
        instr->ReplaceWith(call, current_iterator());
        return;
      }
    }

    // Detect if o.m(...) is a call through a getter and expand it
    // into o.get:m().call(...).
    if (TryExpandCallThroughGetter(receiver_class, instr)) {
      return;
    }
  }

  // More than one target. Generate generic polymorphic call without
  // deoptimization.
  if (targets.length() > 0) {
    ASSERT(!FLAG_polymorphic_with_deopt);
    // OK to use checks with PolymorphicInstanceCallInstr since no
    // deoptimization is allowed.
    PolymorphicInstanceCallInstr* call =
        PolymorphicInstanceCallInstr::FromCall(Z, instr, targets,
                                               /* complete = */ false);
    instr->ReplaceWith(call, current_iterator());
    return;
  }
}
1053 | |
1054 | void AotCallSpecializer::VisitStaticCall(StaticCallInstr* instr) { |
1055 | if (TryInlineFieldAccess(instr)) { |
1056 | return; |
1057 | } |
1058 | CallSpecializer::VisitStaticCall(instr); |
1059 | } |
1060 | |
// Detects o.m(...) invocations that resolve to a getter `get:m` on the
// receiver class (i.e. calls through a getter-returned closure) and expands
// them into the two-call sequence o.get:m().call(...). Returns true if the
// expansion was performed, false if this call site is not such a call.
bool AotCallSpecializer::TryExpandCallThroughGetter(const Class& receiver_class,
                                                    InstanceCallInstr* call) {
  // If it's an accessor call it can't be a call through getter.
  if (call->token_kind() == Token::kGET || call->token_kind() == Token::kSET) {
    return false;
  }

  // Ignore callsites like f.call() for now. Those need to be handled
  // specially if f is a closure.
  if (call->function_name().raw() == Symbols::Call().raw()) {
    return false;
  }

  Function& target = Function::Handle(Z);

  // A null lookup means no `get:<name>` symbol exists at all, so no class
  // can define such a getter.
  const String& getter_name = String::ZoneHandle(
      Z, Symbols::LookupFromGet(thread(), call->function_name()));
  if (getter_name.IsNull()) {
    return false;
  }

  const Array& args_desc_array = Array::Handle(
      Z,
      ArgumentsDescriptor::NewBoxed(/*type_args_len=*/0, /*num_arguments=*/1));
  ArgumentsDescriptor args_desc(args_desc_array);
  target = Resolver::ResolveDynamicForReceiverClass(
      receiver_class, getter_name, args_desc, /*allow_add=*/false);
  // A method extractor would mean `m` is a real method (the getter is just
  // its tear-off), so this is an ordinary call, not a call through a getter.
  if (target.raw() == Function::null() || target.IsMethodExtractor()) {
    return false;
  }

  // We found a getter with the same name as the method this
  // call tries to invoke. This implies call through getter
  // because methods can't override getters. Build
  // o.get:m().call(...) sequence and replace o.m(...) invocation.

  const intptr_t receiver_idx = call->type_args_len() > 0 ? 1 : 0;

  // First call: invoke the getter on the original receiver.
  InputsArray* get_arguments = new (Z) InputsArray(Z, 1);
  get_arguments->Add(call->ArgumentValueAt(receiver_idx)->CopyWithType(Z));
  InstanceCallInstr* invoke_get = new (Z) InstanceCallInstr(
      call->token_pos(), getter_name, Token::kGET, get_arguments,
      /*type_args_len=*/0,
      /*argument_names=*/Object::empty_array(),
      /*checked_argument_count=*/1,
      thread()->compiler_state().GetNextDeoptId());

  // Arguments to the .call() are the same as arguments to the
  // original call (including type arguments), but receiver
  // is replaced with the result of the get.
  InputsArray* call_arguments = new (Z) InputsArray(Z, call->ArgumentCount());
  if (call->type_args_len() > 0) {
    call_arguments->Add(call->ArgumentValueAt(0)->CopyWithType(Z));
  }
  call_arguments->Add(new (Z) Value(invoke_get));
  for (intptr_t i = receiver_idx + 1; i < call->ArgumentCount(); i++) {
    call_arguments->Add(call->ArgumentValueAt(i)->CopyWithType(Z));
  }

  InstanceCallInstr* invoke_call = new (Z) InstanceCallInstr(
      call->token_pos(), Symbols::Call(), Token::kILLEGAL, call_arguments,
      call->type_args_len(), call->argument_names(),
      /*checked_argument_count=*/1,
      thread()->compiler_state().GetNextDeoptId());

  // Create environment and insert 'invoke_get'.
  // The environment copy drops the original call's pushed arguments and
  // pushes the getter's own arguments instead.
  Environment* get_env =
      call->env()->DeepCopy(Z, call->env()->Length() - call->ArgumentCount());
  for (intptr_t i = 0, n = invoke_get->ArgumentCount(); i < n; i++) {
    get_env->PushValue(new (Z) Value(invoke_get->ArgumentAt(i)));
  }
  InsertBefore(call, invoke_get, get_env, FlowGraph::kValue);

  // Replace original call with .call(...) invocation.
  call->ReplaceWith(invoke_call, current_iterator());

  // ReplaceWith moved environment from 'call' to 'invoke_call'.
  // Update receiver argument in the environment.
  Environment* invoke_env = invoke_call->env();
  invoke_env
      ->ValueAt(invoke_env->Length() - invoke_call->ArgumentCount() +
                receiver_idx)
      ->BindToEnvironment(invoke_get);

  // AOT compiler expects all calls to have an ICData.
  EnsureICData(Z, flow_graph()->function(), invoke_get);
  EnsureICData(Z, flow_graph()->function(), invoke_call);

  // Specialize newly inserted calls.
  TryCreateICData(invoke_get);
  VisitInstanceCall(invoke_get);
  TryCreateICData(invoke_call);
  VisitInstanceCall(invoke_call);

  // Success.
  return true;
}
1158 | |
1159 | void AotCallSpecializer::VisitPolymorphicInstanceCall( |
1160 | PolymorphicInstanceCallInstr* call) { |
1161 | const intptr_t receiver_idx = call->type_args_len() > 0 ? 1 : 0; |
1162 | const intptr_t receiver_cid = |
1163 | call->ArgumentValueAt(receiver_idx)->Type()->ToCid(); |
1164 | if (receiver_cid != kDynamicCid) { |
1165 | const Class& receiver_class = |
1166 | Class::Handle(Z, isolate()->class_table()->At(receiver_cid)); |
1167 | const Function& function = |
1168 | Function::ZoneHandle(Z, call->ResolveForReceiverClass(receiver_class)); |
1169 | if (!function.IsNull()) { |
1170 | // Only one target. Replace by static call. |
1171 | StaticCallInstr* new_call = |
1172 | StaticCallInstr::FromCall(Z, call, function, call->CallCount()); |
1173 | call->ReplaceWith(new_call, current_iterator()); |
1174 | } |
1175 | } |
1176 | } |
1177 | |
// Tries to replace `left is type` with a class-id range check when all
// classes implementing |type| occupy a contiguous cid range. Emits either a
// single cid comparison (range of one) or a call to the recognized
// _classRangeCheck helper. Returns true if the call was replaced.
bool AotCallSpecializer::TryReplaceInstanceOfWithRangeCheck(
    InstanceCallInstr* call,
    const AbstractType& type) {
  if (precompiler_ == NULL) {
    // Loading not complete, can't do CHA yet.
    return false;
  }

  HierarchyInfo* hi = thread()->hierarchy_info();
  if (hi == NULL) {
    return false;
  }

  intptr_t lower_limit, upper_limit;
  if (!hi->InstanceOfHasClassRange(type, &lower_limit, &upper_limit)) {
    return false;
  }

  Definition* left = call->ArgumentAt(0);

  // left.instanceof(type) =>
  //     _classRangeCheck(left.cid, lower_limit, upper_limit)
  LoadClassIdInstr* left_cid = new (Z) LoadClassIdInstr(new (Z) Value(left));
  InsertBefore(call, left_cid, NULL, FlowGraph::kValue);
  ConstantInstr* lower_cid =
      flow_graph()->GetConstant(Smi::Handle(Z, Smi::New(lower_limit)));

  if (lower_limit == upper_limit) {
    // Degenerate range of a single class: a strict cid equality suffices.
    StrictCompareInstr* check_cid = new (Z)
        StrictCompareInstr(call->token_pos(), Token::kEQ_STRICT,
                           new (Z) Value(left_cid), new (Z) Value(lower_cid),
                           /* number_check = */ false, DeoptId::kNone);
    ReplaceCall(call, check_cid);
    return true;
  }

  ConstantInstr* upper_cid =
      flow_graph()->GetConstant(Smi::Handle(Z, Smi::New(upper_limit)));

  InputsArray* args = new (Z) InputsArray(Z, 3);
  args->Add(new (Z) Value(left_cid));
  args->Add(new (Z) Value(lower_cid));
  args->Add(new (Z) Value(upper_cid));

  const Library& dart_internal = Library::Handle(Z, Library::InternalLibrary());
  const String& target_name = Symbols::_classRangeCheck();
  const Function& target = Function::ZoneHandle(
      Z, dart_internal.LookupFunctionAllowPrivate(target_name));
  ASSERT(!target.IsNull());
  ASSERT(target.IsRecognized());
  ASSERT(FlowGraphInliner::FunctionHasPreferInlinePragma(target));

  const intptr_t kTypeArgsLen = 0;
  StaticCallInstr* new_call = new (Z) StaticCallInstr(
      call->token_pos(), target, kTypeArgsLen,
      Object::null_array(),  // argument_names
      args, call->deopt_id(), call->CallCount(), ICData::kOptimized);
  // Rebuild the environment for the new call: keep the original frame state
  // minus the instanceof arguments, then push the helper's arguments. The
  // original environment must be detached before ReplaceCall so the copy can
  // be attached to the replacement afterwards.
  Environment* copy =
      call->env()->DeepCopy(Z, call->env()->Length() - call->ArgumentCount());
  for (intptr_t i = 0; i < args->length(); ++i) {
    copy->PushValue(new (Z) Value(new_call->ArgumentAt(i)));
  }
  call->RemoveEnvironment();
  ReplaceCall(call, new_call);
  copy->DeepCopyTo(Z, new_call);
  return true;
}
1245 | |
1246 | void AotCallSpecializer::ReplaceInstanceCallsWithDispatchTableCalls() { |
1247 | ASSERT(current_iterator_ == nullptr); |
1248 | for (BlockIterator block_it = flow_graph()->reverse_postorder_iterator(); |
1249 | !block_it.Done(); block_it.Advance()) { |
1250 | ForwardInstructionIterator it(block_it.Current()); |
1251 | current_iterator_ = ⁢ |
1252 | for (; !it.Done(); it.Advance()) { |
1253 | Instruction* instr = it.Current(); |
1254 | if (auto call = instr->AsInstanceCall()) { |
1255 | TryReplaceWithDispatchTableCall(call); |
1256 | } else if (auto call = instr->AsPolymorphicInstanceCall()) { |
1257 | TryReplaceWithDispatchTableCall(call); |
1258 | } |
1259 | } |
1260 | current_iterator_ = nullptr; |
1261 | } |
1262 | } |
1263 | |
1264 | const Function& AotCallSpecializer::InterfaceTargetForTableDispatch( |
1265 | InstanceCallBaseInstr* call) { |
1266 | const Function& interface_target = call->interface_target(); |
1267 | if (!interface_target.IsNull()) { |
1268 | return interface_target; |
1269 | } |
1270 | |
1271 | // Dynamic call or tearoff. |
1272 | const Function& tearoff_interface_target = call->tearoff_interface_target(); |
1273 | if (!tearoff_interface_target.IsNull()) { |
1274 | // Tearoff. |
1275 | return Function::ZoneHandle( |
1276 | Z, tearoff_interface_target.GetMethodExtractor(call->function_name())); |
1277 | } |
1278 | |
1279 | // Dynamic call. |
1280 | return Function::null_function(); |
1281 | } |
1282 | |
// Replaces |call| with a dispatch-table call, indexed by the receiver's
// class id, when a suitable interface target and table selector exist.
void AotCallSpecializer::TryReplaceWithDispatchTableCall(
    InstanceCallBaseInstr* call) {
  const Function& interface_target = InterfaceTargetForTableDispatch(call);
  if (interface_target.IsNull()) {
    // Dynamic call.
    return;
  }

  Value* receiver = call->ArgumentValueAt(call->FirstArgIndex());
  const compiler::TableSelector* selector =
      precompiler_->selector_map()->GetSelector(interface_target);

  if (selector == nullptr) {
    // Target functions were removed by tree shaking. This call is dead code,
    // or the receiver is always null.
#if defined(DEBUG)
    // In debug builds, preserve the null-receiver throw and trap if this
    // supposedly dead call is ever actually executed.
    AddCheckNull(receiver->CopyWithType(Z), call->function_name(),
                 DeoptId::kNone, call->env(), call);
    StopInstr* stop = new (Z) StopInstr("Dead instance call executed." );
    InsertBefore(call, stop, call->env(), FlowGraph::kEffect);
#endif
    return;
  }

  // Load the receiver's class id to index the dispatch table with.
  const bool receiver_can_be_smi =
      call->CanReceiverBeSmiBasedOnInterfaceTarget(zone());
  auto load_cid = new (Z) LoadClassIdInstr(receiver->CopyWithType(Z), kUntagged,
                                           receiver_can_be_smi);
  InsertBefore(call, load_cid, call->env(), FlowGraph::kValue);

  auto dispatch_table_call = DispatchTableCallInstr::FromCall(
      Z, call, new (Z) Value(load_cid), interface_target, selector);
  call->ReplaceWith(dispatch_table_call, current_iterator());
}
1317 | |
1318 | #endif // DART_PRECOMPILER |
1319 | |
1320 | } // namespace dart |
1321 | |