// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/frontend/base_flow_graph_builder.h"

#include "vm/compiler/ffi/call.h"
#include "vm/compiler/frontend/flow_graph_builder.h"  // For InlineExitCollector.
#include "vm/compiler/jit/compiler.h"  // For Compiler::IsBackgroundCompilation().
#include "vm/compiler/runtime_api.h"
#include "vm/growable_array.h"
#include "vm/object_store.h"

namespace dart {
namespace kernel {

#define Z (zone_)
#define I (thread_->isolate())

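// A Fragment is a linear chain of instructions with an entry and an open
// `current` end. The operators below append instructions and fragments to
// that open end. Appending to a closed fragment (one whose `current` is
// NULL) is a no-op, so code emitted after a closing instruction such as a
// Goto or Return is silently dropped.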
Fragment& Fragment::operator+=(const Fragment& other) {
  if (entry == NULL) {
    entry = other.entry;
    current = other.current;
  } else if (current != NULL && other.entry != NULL) {
    current->LinkTo(other.entry);
    current = other.current;
  }
  return *this;
}

Fragment& Fragment::operator<<=(Instruction* next) {
  if (entry == NULL) {
    entry = current = next;
  } else if (current != NULL) {
    current->LinkTo(next);
    current = next;
  }
  return *this;
}

void Fragment::Prepend(Instruction* start) {
  if (entry == NULL) {
    entry = current = start;
  } else {
    start->LinkTo(entry);
    entry = start;
  }
}

Fragment Fragment::closed() {
  ASSERT(entry != NULL);
  return Fragment(entry, NULL);
}

Fragment operator+(const Fragment& first, const Fragment& second) {
  Fragment result = first;
  result += second;
  return result;
}

Fragment operator<<(const Fragment& fragment, Instruction* next) {
  Fragment result = fragment;
  result <<= next;
  return result;
}

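// A TestFragment records the not-yet-filled successor slots of its branches.
// The helpers below patch those slots with freshly built target entries,
// inserting a join block when a side has more than one pending branch.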
TestFragment::TestFragment(Instruction* entry, BranchInstr* branch)
    : entry(entry),
      true_successor_addresses(new SuccessorAddressArray(1)),
      false_successor_addresses(new SuccessorAddressArray(1)) {
  true_successor_addresses->Add(branch->true_successor_address());
  false_successor_addresses->Add(branch->false_successor_address());
}

void TestFragment::ConnectBranchesTo(
    BaseFlowGraphBuilder* builder,
    const TestFragment::SuccessorAddressArray& branches,
    JoinEntryInstr* join) {
  ASSERT(!branches.is_empty());
  for (auto branch : branches) {
    *branch = builder->BuildTargetEntry();
    (*branch)->Goto(join);
  }
}

BlockEntryInstr* TestFragment::CreateSuccessorFor(
    BaseFlowGraphBuilder* builder,
    const TestFragment::SuccessorAddressArray& branches) {
  ASSERT(!branches.is_empty());

  if (branches.length() == 1) {
    TargetEntryInstr* target = builder->BuildTargetEntry();
    *(branches[0]) = target;
    return target;
  }

  JoinEntryInstr* join = builder->BuildJoinEntry();
  ConnectBranchesTo(builder, branches, join);
  return join;
}

BlockEntryInstr* TestFragment::CreateTrueSuccessor(
    BaseFlowGraphBuilder* builder) {
  ASSERT(true_successor_addresses != nullptr);
  return CreateSuccessorFor(builder, *true_successor_addresses);
}

BlockEntryInstr* TestFragment::CreateFalseSuccessor(
    BaseFlowGraphBuilder* builder) {
  ASSERT(false_successor_addresses != nullptr);
  return CreateSuccessorFor(builder, *false_successor_addresses);
}

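// Loads the context at the given depth, starting from the current context
// and following Context_parent links (context_depth_ - depth) times.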
Fragment BaseFlowGraphBuilder::LoadContextAt(int depth) {
  intptr_t delta = context_depth_ - depth;
  ASSERT(delta >= 0);
  Fragment instructions = LoadLocal(parsed_function_->current_context_var());
  while (delta-- > 0) {
    instructions += LoadNativeField(Slot::Context_parent());
  }
  return instructions;
}

Fragment BaseFlowGraphBuilder::StrictCompare(TokenPosition position,
                                             Token::Kind kind,
                                             bool number_check /* = false */) {
  Value* right = Pop();
  Value* left = Pop();
  StrictCompareInstr* compare = new (Z) StrictCompareInstr(
      position, kind, left, right, number_check, GetNextDeoptId());
  Push(compare);
  return Fragment(compare);
}

Fragment BaseFlowGraphBuilder::StrictCompare(Token::Kind kind,
                                             bool number_check /* = false */) {
  Value* right = Pop();
  Value* left = Pop();
  StrictCompareInstr* compare =
      new (Z) StrictCompareInstr(TokenPosition::kNoSource, kind, left, right,
                                 number_check, GetNextDeoptId());
  Push(compare);
  return Fragment(compare);
}

Fragment BaseFlowGraphBuilder::BranchIfTrue(TargetEntryInstr** then_entry,
                                            TargetEntryInstr** otherwise_entry,
                                            bool negate) {
  Fragment instructions = Constant(Bool::True());
  return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
}

Fragment BaseFlowGraphBuilder::BranchIfNull(TargetEntryInstr** then_entry,
                                            TargetEntryInstr** otherwise_entry,
                                            bool negate) {
  Fragment instructions = NullConstant();
  return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
}

Fragment BaseFlowGraphBuilder::BranchIfEqual(TargetEntryInstr** then_entry,
                                             TargetEntryInstr** otherwise_entry,
                                             bool negate) {
  Value* right_value = Pop();
  Value* left_value = Pop();
  StrictCompareInstr* compare = new (Z) StrictCompareInstr(
      TokenPosition::kNoSource, negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
      left_value, right_value, false, GetNextDeoptId());
  BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
  *then_entry = *branch->true_successor_address() = BuildTargetEntry();
  *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
  return Fragment(branch).closed();
}

Fragment BaseFlowGraphBuilder::BranchIfStrictEqual(
    TargetEntryInstr** then_entry,
    TargetEntryInstr** otherwise_entry) {
  Value* rhs = Pop();
  Value* lhs = Pop();
  StrictCompareInstr* compare =
      new (Z) StrictCompareInstr(TokenPosition::kNoSource, Token::kEQ_STRICT,
                                 lhs, rhs, false, GetNextDeoptId());
  BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
  *then_entry = *branch->true_successor_address() = BuildTargetEntry();
  *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
  return Fragment(branch).closed();
}

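// Emits the return instruction, selecting the representation from the
// function's unboxed-return metadata. When inlining, the return is also
// registered with the InlineExitCollector.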
Fragment BaseFlowGraphBuilder::Return(TokenPosition position,
                                      intptr_t yield_index) {
  Fragment instructions;

  Value* value = Pop();
  ASSERT(stack_ == nullptr);
  const Function& function = parsed_function_->function();
  Representation representation;
  if (function.has_unboxed_integer_return()) {
    representation = kUnboxedInt64;
  } else if (function.has_unboxed_double_return()) {
    representation = kUnboxedDouble;
  } else {
    ASSERT(!function.has_unboxed_return());
    representation = kTagged;
  }
  ReturnInstr* return_instr = new (Z) ReturnInstr(
      position, value, GetNextDeoptId(), yield_index, representation);
  if (exit_collector_ != nullptr) exit_collector_->AddExit(return_instr);

  instructions <<= return_instr;

  return instructions.closed();
}

Fragment BaseFlowGraphBuilder::CheckStackOverflow(TokenPosition position,
                                                  intptr_t stack_depth,
                                                  intptr_t loop_depth) {
  return Fragment(new (Z) CheckStackOverflowInstr(
      position, stack_depth, loop_depth, GetNextDeoptId(),
      CheckStackOverflowInstr::kOsrAndPreemption));
}

Fragment BaseFlowGraphBuilder::CheckStackOverflowInPrologue(
    TokenPosition position) {
  if (IsInlining()) {
    // If we are inlining, don't actually attach the stack check; we must
    // still create the stack check in order to allocate a deopt id.
    CheckStackOverflow(position, 0, 0);
    return Fragment();
  }
  return CheckStackOverflow(position, 0, 0);
}

Fragment BaseFlowGraphBuilder::Constant(const Object& value) {
  ASSERT(value.IsNotTemporaryScopedHandle());
  ConstantInstr* constant = new (Z) ConstantInstr(value);
  Push(constant);
  return Fragment(constant);
}

Fragment BaseFlowGraphBuilder::Goto(JoinEntryInstr* destination) {
  return Fragment(new (Z) GotoInstr(destination, GetNextDeoptId())).closed();
}

Fragment BaseFlowGraphBuilder::IntConstant(int64_t value) {
  return Fragment(
      Constant(Integer::ZoneHandle(Z, Integer::New(value, Heap::kOld))));
}

Fragment BaseFlowGraphBuilder::MemoryCopy(classid_t src_cid,
                                          classid_t dest_cid) {
  Value* length = Pop();
  Value* dest_start = Pop();
  Value* src_start = Pop();
  Value* dest = Pop();
  Value* src = Pop();
  auto copy = new (Z) MemoryCopyInstr(src, dest, src_start, dest_start, length,
                                      src_cid, dest_cid);
  return Fragment(copy);
}

Fragment BaseFlowGraphBuilder::TailCall(const Code& code) {
  Value* arg_desc = Pop();
  return Fragment(new (Z) TailCallInstr(code, arg_desc));
}

void BaseFlowGraphBuilder::InlineBailout(const char* reason) {
  if (IsInlining()) {
    parsed_function_->function().set_is_inlinable(false);
    parsed_function_->Bailout("kernel::BaseFlowGraphBuilder", reason);
  }
}

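// Builds a diamond that compares the argument descriptor's type argument
// vector length against num_type_args, runs eq_branch or neq_branch
// accordingly, and rejoins both sides at a fresh join entry.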
Fragment BaseFlowGraphBuilder::TestTypeArgsLen(Fragment eq_branch,
                                               Fragment neq_branch,
                                               intptr_t num_type_args) {
  Fragment test;

  TargetEntryInstr* eq_entry;
  TargetEntryInstr* neq_entry;

  test += LoadArgDescriptor();
  test += LoadNativeField(Slot::ArgumentsDescriptor_type_args_len());
  test += IntConstant(num_type_args);
  test += BranchIfEqual(&eq_entry, &neq_entry);

  eq_branch.Prepend(eq_entry);
  neq_branch.Prepend(neq_entry);

  JoinEntryInstr* join = BuildJoinEntry();
  eq_branch += Goto(join);
  neq_branch += Goto(join);

  return Fragment(test.entry, join);
}

Fragment BaseFlowGraphBuilder::TestDelayedTypeArgs(LocalVariable* closure,
                                                   Fragment present,
                                                   Fragment absent) {
  Fragment test;

  TargetEntryInstr* absent_entry;
  TargetEntryInstr* present_entry;

  test += LoadLocal(closure);
  test += LoadNativeField(Slot::Closure_delayed_type_arguments());
  test += Constant(Object::empty_type_arguments());
  test += BranchIfEqual(&absent_entry, &present_entry);

  present.Prepend(present_entry);
  absent.Prepend(absent_entry);

  JoinEntryInstr* join = BuildJoinEntry();
  absent += Goto(join);
  present += Goto(join);

  return Fragment(test.entry, join);
}

Fragment BaseFlowGraphBuilder::TestAnyTypeArgs(Fragment present,
                                               Fragment absent) {
  if (parsed_function_->function().IsClosureFunction()) {
    LocalVariable* closure = parsed_function_->ParameterVariable(0);

    JoinEntryInstr* complete = BuildJoinEntry();
    JoinEntryInstr* present_entry = BuildJoinEntry();

    Fragment test = TestTypeArgsLen(
        TestDelayedTypeArgs(closure, Goto(present_entry), absent),
        Goto(present_entry), 0);
    test += Goto(complete);

    Fragment(present_entry) + present + Goto(complete);

    return Fragment(test.entry, complete);
  } else {
    return TestTypeArgsLen(absent, present, 0);
  }
}

Fragment BaseFlowGraphBuilder::LoadIndexed(intptr_t index_scale) {
  Value* index = Pop();
  Value* array = Pop();
  LoadIndexedInstr* instr = new (Z) LoadIndexedInstr(
      array, index, /*index_unboxed=*/false, index_scale, kArrayCid,
      kAlignedAccess, DeoptId::kNone, TokenPosition::kNoSource);
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::LoadIndexedTypedData(classid_t class_id,
                                                    intptr_t index_scale,
                                                    bool index_unboxed) {
  // We use C behavior when dereferencing pointers: we assume alignment.
  const AlignmentType alignment = kAlignedAccess;

  Value* index = Pop();
  Value* c_pointer = Pop();
  LoadIndexedInstr* instr = new (Z)
      LoadIndexedInstr(c_pointer, index, index_unboxed, index_scale, class_id,
                       alignment, DeoptId::kNone, TokenPosition::kNoSource);
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::LoadUntagged(intptr_t offset) {
  Value* object = Pop();
  auto load = new (Z) LoadUntaggedInstr(object, offset);
  Push(load);
  return Fragment(load);
}

Fragment BaseFlowGraphBuilder::StoreUntagged(intptr_t offset) {
  Value* value = Pop();
  Value* object = Pop();
  auto store = new (Z) StoreUntaggedInstr(object, value, offset);
  return Fragment(store);
}

Fragment BaseFlowGraphBuilder::ConvertUntaggedToUnboxed(
    Representation to_representation) {
  ASSERT(to_representation == kUnboxedIntPtr ||
         to_representation == kUnboxedFfiIntPtr);
  Value* value = Pop();
  auto converted = new (Z)
      IntConverterInstr(kUntagged, to_representation, value, DeoptId::kNone);
  converted->mark_truncating();
  Push(converted);
  return Fragment(converted);
}

Fragment BaseFlowGraphBuilder::ConvertUnboxedToUntagged(
    Representation from_representation) {
  ASSERT(from_representation == kUnboxedIntPtr ||
         from_representation == kUnboxedFfiIntPtr);
  Value* value = Pop();
  auto converted = new (Z)
      IntConverterInstr(from_representation, kUntagged, value, DeoptId::kNone);
  converted->mark_truncating();
  Push(converted);
  return Fragment(converted);
}

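// Adds two word-sized integers with a truncating (wrap-around) add: a
// 64-bit op on 64-bit targets, a 32-bit op elsewhere.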
Fragment BaseFlowGraphBuilder::AddIntptrIntegers() {
  Value* right = Pop();
  Value* left = Pop();
#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_X64)
  auto add = new (Z) BinaryInt64OpInstr(
      Token::kADD, left, right, DeoptId::kNone, Instruction::kNotSpeculative);
#else
  auto add =
      new (Z) BinaryInt32OpInstr(Token::kADD, left, right, DeoptId::kNone);
#endif
  add->mark_truncating();
  Push(add);
  return Fragment(add);
}

Fragment BaseFlowGraphBuilder::UnboxSmiToIntptr() {
  Value* value = Pop();
  auto untagged = new (Z)
      UnboxIntegerInstr(kUnboxedIntPtr, UnboxIntegerInstr::kNoTruncation, value,
                        DeoptId::kNone, Instruction::kNotSpeculative);
  Push(untagged);
  return Fragment(untagged);
}

Fragment BaseFlowGraphBuilder::FloatToDouble() {
  Value* value = Pop();
  FloatToDoubleInstr* instr = new FloatToDoubleInstr(value, DeoptId::kNone);
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::DoubleToFloat() {
  Value* value = Pop();
  DoubleToFloatInstr* instr = new DoubleToFloatInstr(
      value, DeoptId::kNone, Instruction::SpeculativeMode::kNotSpeculative);
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::LoadField(const Field& field,
                                         bool calls_initializer) {
  return LoadNativeField(Slot::Get(MayCloneField(field), parsed_function_),
                         calls_initializer);
}

Fragment BaseFlowGraphBuilder::LoadNativeField(const Slot& native_field,
                                               bool calls_initializer) {
  LoadFieldInstr* load = new (Z) LoadFieldInstr(
      Pop(), native_field, TokenPosition::kNoSource, calls_initializer,
      calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
  Push(load);
  return Fragment(load);
}

Fragment BaseFlowGraphBuilder::LoadLocal(LocalVariable* variable) {
  ASSERT(!variable->is_captured());
  LoadLocalInstr* load =
      new (Z) LoadLocalInstr(*variable, TokenPosition::kNoSource);
  Push(load);
  return Fragment(load);
}

Fragment BaseFlowGraphBuilder::NullConstant() {
  return Constant(Instance::ZoneHandle(Z, Instance::null()));
}

Fragment BaseFlowGraphBuilder::GuardFieldLength(const Field& field,
                                                intptr_t deopt_id) {
  return Fragment(new (Z) GuardFieldLengthInstr(Pop(), field, deopt_id));
}

Fragment BaseFlowGraphBuilder::GuardFieldClass(const Field& field,
                                               intptr_t deopt_id) {
  return Fragment(new (Z) GuardFieldClassInstr(Pop(), field, deopt_id));
}

const Field& BaseFlowGraphBuilder::MayCloneField(const Field& field) {
  if ((Compiler::IsBackgroundCompilation() ||
       FLAG_force_clone_compiler_objects) &&
      field.IsOriginal()) {
    return Field::ZoneHandle(Z, field.CloneFromOriginal());
  } else {
    ASSERT(field.IsZoneHandle());
    return field;
  }
}

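// Note: both StoreInstanceField overloads below downgrade the barrier to
// kNoStoreBarrier when the stored value binds to a constant, since such a
// store never needs a generational write barrier.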
Fragment BaseFlowGraphBuilder::StoreInstanceField(
    TokenPosition position,
    const Slot& field,
    StoreInstanceFieldInstr::Kind
        kind /* = StoreInstanceFieldInstr::Kind::kOther */,
    StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */) {
  Value* value = Pop();
  if (value->BindsToConstant()) {
    emit_store_barrier = kNoStoreBarrier;
  }
  StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
      field, Pop(), value, emit_store_barrier, position, kind);
  return Fragment(store);
}

Fragment BaseFlowGraphBuilder::StoreInstanceField(
    const Field& field,
    StoreInstanceFieldInstr::Kind
        kind /* = StoreInstanceFieldInstr::Kind::kOther */,
    StoreBarrierType emit_store_barrier) {
  Value* value = Pop();
  if (value->BindsToConstant()) {
    emit_store_barrier = kNoStoreBarrier;
  }

  StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
      MayCloneField(field), Pop(), value, emit_store_barrier,
      TokenPosition::kNoSource, parsed_function_, kind);

  return Fragment(store);
}

Fragment BaseFlowGraphBuilder::StoreInstanceFieldGuarded(
    const Field& field,
    StoreInstanceFieldInstr::Kind
        kind /* = StoreInstanceFieldInstr::Kind::kOther */) {
  Fragment instructions;
  const Field& field_clone = MayCloneField(field);
  if (I->use_field_guards()) {
    LocalVariable* store_expression = MakeTemporary();
    instructions += LoadLocal(store_expression);
    instructions += GuardFieldClass(field_clone, GetNextDeoptId());

    // The field length guard can be omitted if it is not needed. However, it
    // is possible that we were tracking the list length previously and
    // generated length guards in the past. We need to generate the same IL to
    // keep deopt ids stable, but we can discard the generated IL fragment if
    // the length guard is not needed.
    Fragment length_guard;
    length_guard += LoadLocal(store_expression);
    length_guard += GuardFieldLength(field_clone, GetNextDeoptId());

    if (field_clone.needs_length_check()) {
      instructions += length_guard;
    }

    // If we are tracking exactness of the static type of the field, then emit
    // the appropriate guard.
    if (field_clone.static_type_exactness_state().IsTracking()) {
      instructions += LoadLocal(store_expression);
      instructions <<=
          new (Z) GuardFieldTypeInstr(Pop(), field_clone, GetNextDeoptId());
    }
  }
  instructions += StoreInstanceField(field_clone, kind);
  return instructions;
}

Fragment BaseFlowGraphBuilder::LoadStaticField(const Field& field,
                                               bool calls_initializer) {
  LoadStaticFieldInstr* load = new (Z) LoadStaticFieldInstr(
      field, TokenPosition::kNoSource, calls_initializer,
      calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
  Push(load);
  return Fragment(load);
}

Fragment BaseFlowGraphBuilder::RedefinitionWithType(const AbstractType& type) {
  auto redefinition = new (Z) RedefinitionInstr(Pop());
  redefinition->set_constrained_type(
      new (Z) CompileType(CompileType::FromAbstractType(type)));
  Push(redefinition);
  return Fragment(redefinition);
}

Fragment BaseFlowGraphBuilder::ReachabilityFence() {
  Fragment instructions;
  instructions <<= new (Z) ReachabilityFenceInstr(Pop());
  return instructions;
}

Fragment BaseFlowGraphBuilder::Utf8Scan() {
  Value* table = Pop();
  Value* end = Pop();
  Value* start = Pop();
  Value* bytes = Pop();
  Value* decoder = Pop();
  const Field& scan_flags_field =
      compiler::LookupConvertUtf8DecoderScanFlagsField();
  auto scan = new (Z) Utf8ScanInstr(
      decoder, bytes, start, end, table,
      Slot::Get(MayCloneField(scan_flags_field), parsed_function_));
  Push(scan);
  return Fragment(scan);
}

Fragment BaseFlowGraphBuilder::StoreStaticField(TokenPosition position,
                                                const Field& field) {
  return Fragment(
      new (Z) StoreStaticFieldInstr(MayCloneField(field), Pop(), position));
}

Fragment BaseFlowGraphBuilder::StoreIndexed(classid_t class_id) {
  Value* value = Pop();
  Value* index = Pop();
  const StoreBarrierType emit_store_barrier =
      value->BindsToConstant() ? kNoStoreBarrier : kEmitStoreBarrier;
  StoreIndexedInstr* store = new (Z) StoreIndexedInstr(
      Pop(),  // Array.
      index, value, emit_store_barrier, /*index_unboxed=*/false,
      compiler::target::Instance::ElementSizeFor(class_id), class_id,
      kAlignedAccess, DeoptId::kNone, TokenPosition::kNoSource);
  return Fragment(store);
}

Fragment BaseFlowGraphBuilder::StoreIndexedTypedData(classid_t class_id,
                                                     intptr_t index_scale,
                                                     bool index_unboxed) {
  // We use C behavior when dereferencing pointers: we assume alignment.
  const AlignmentType alignment = kAlignedAccess;

  Value* value = Pop();
  Value* index = Pop();
  Value* c_pointer = Pop();
  StoreIndexedInstr* instr = new (Z) StoreIndexedInstr(
      c_pointer, index, value, kNoStoreBarrier, index_unboxed, index_scale,
      class_id, alignment, DeoptId::kNone, TokenPosition::kNoSource,
      Instruction::SpeculativeMode::kNotSpeculative);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::StoreLocal(TokenPosition position,
                                          LocalVariable* variable) {
  if (variable->is_captured()) {
    Fragment instructions;
    LocalVariable* value = MakeTemporary();
    instructions += LoadContextAt(variable->owner()->context_level());
    instructions += LoadLocal(value);
    instructions += StoreInstanceField(
        position, Slot::GetContextVariableSlotFor(thread_, *variable));
    return instructions;
  }
  return StoreLocalRaw(position, variable);
}

Fragment BaseFlowGraphBuilder::StoreLocalRaw(TokenPosition position,
                                             LocalVariable* variable) {
  ASSERT(!variable->is_captured());
  Value* value = Pop();
  StoreLocalInstr* store = new (Z) StoreLocalInstr(*variable, value, position);
  Fragment instructions(store);
  Push(store);
  return instructions;
}

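// Creates a named temporary (":tN") that aliases the value currently on top
// of the expression stack, so later code can refer to it via LoadLocal.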
LocalVariable* BaseFlowGraphBuilder::MakeTemporary() {
  char name[64];
  intptr_t index = stack_->definition()->temp_index();
  Utils::SNPrint(name, 64, ":t%" Pd, index);
  const String& symbol_name =
      String::ZoneHandle(Z, Symbols::New(thread_, name));
  LocalVariable* variable =
      new (Z) LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
                            symbol_name, Object::dynamic_type());
  // Set the index relative to the base of the expression stack including
  // outgoing arguments.
  variable->set_index(
      VariableIndex(-parsed_function_->num_stack_locals() - index));

  // The value on top of the stack has uses as if it were a local variable.
  // Mark all definitions on the stack as used so that their temp indices
  // will not be cleared (which would cause them to never be materialized in
  // the expression stack and would skew the stack depth).
  for (Value* item = stack_; item != nullptr; item = item->next_use()) {
    item->definition()->set_ssa_temp_index(0);
  }

  return variable;
}

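// The simulated expression stack is a singly linked list of Values chained
// through their next_use fields, with stack_ pointing at the top.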
void BaseFlowGraphBuilder::SetTempIndex(Definition* definition) {
  definition->set_temp_index(
      stack_ == NULL ? 0 : stack_->definition()->temp_index() + 1);
}

void BaseFlowGraphBuilder::Push(Definition* definition) {
  SetTempIndex(definition);
  Value::AddToList(new (Z) Value(definition), &stack_);
}

Definition* BaseFlowGraphBuilder::Peek(intptr_t depth) {
  Value* head = stack_;
  for (intptr_t i = 0; i < depth; ++i) {
    ASSERT(head != nullptr);
    head = head->next_use();
  }
  ASSERT(head != nullptr);
  return head->definition();
}

Value* BaseFlowGraphBuilder::Pop() {
  ASSERT(stack_ != NULL);
  Value* value = stack_;
  stack_ = value->next_use();
  if (stack_ != NULL) stack_->set_previous_use(NULL);

  value->set_next_use(NULL);
  value->set_previous_use(NULL);
  value->definition()->ClearSSATempIndex();
  return value;
}

Fragment BaseFlowGraphBuilder::Drop() {
  ASSERT(stack_ != NULL);
  Fragment instructions;
  Definition* definition = stack_->definition();
  // The SSA renaming implementation doesn't like [LoadLocal]s without a
  // temp index.
  if (definition->HasSSATemp() || definition->IsLoadLocal()) {
    instructions <<= new (Z) DropTempsInstr(1, NULL);
  } else {
    definition->ClearTempIndex();
  }

  Pop();
  return instructions;
}

Fragment BaseFlowGraphBuilder::DropTempsPreserveTop(
    intptr_t num_temps_to_drop) {
  Value* top = Pop();

  for (intptr_t i = 0; i < num_temps_to_drop; ++i) {
    Pop();
  }

  DropTempsInstr* drop_temps = new (Z) DropTempsInstr(num_temps_to_drop, top);
  Push(drop_temps);

  return Fragment(drop_temps);
}

Fragment BaseFlowGraphBuilder::MakeTemp() {
  MakeTempInstr* make_temp = new (Z) MakeTempInstr(Z);
  Push(make_temp);
  return Fragment(make_temp);
}

TargetEntryInstr* BaseFlowGraphBuilder::BuildTargetEntry() {
  return new (Z) TargetEntryInstr(AllocateBlockId(), CurrentTryIndex(),
                                  GetNextDeoptId(), GetStackDepth());
}

FunctionEntryInstr* BaseFlowGraphBuilder::BuildFunctionEntry(
    GraphEntryInstr* graph_entry) {
  return new (Z) FunctionEntryInstr(graph_entry, AllocateBlockId(),
                                    CurrentTryIndex(), GetNextDeoptId());
}

JoinEntryInstr* BaseFlowGraphBuilder::BuildJoinEntry(intptr_t try_index) {
  return new (Z) JoinEntryInstr(AllocateBlockId(), try_index, GetNextDeoptId(),
                                GetStackDepth());
}

JoinEntryInstr* BaseFlowGraphBuilder::BuildJoinEntry() {
  return new (Z) JoinEntryInstr(AllocateBlockId(), CurrentTryIndex(),
                                GetNextDeoptId(), GetStackDepth());
}

IndirectEntryInstr* BaseFlowGraphBuilder::BuildIndirectEntry(
    intptr_t indirect_id,
    intptr_t try_index) {
  return new (Z) IndirectEntryInstr(AllocateBlockId(), indirect_id, try_index,
                                    GetNextDeoptId());
}

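// Pops `count` arguments off the expression stack and returns them in
// evaluation order: the most recently pushed value becomes the last entry.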
InputsArray* BaseFlowGraphBuilder::GetArguments(int count) {
  InputsArray* arguments = new (Z) ZoneGrowableArray<Value*>(Z, count);
  arguments->SetLength(count);
  for (intptr_t i = count - 1; i >= 0; --i) {
    arguments->data()[i] = Pop();
  }
  return arguments;
}

Fragment BaseFlowGraphBuilder::SmiRelationalOp(Token::Kind kind) {
  Value* right = Pop();
  Value* left = Pop();
  RelationalOpInstr* instr = new (Z) RelationalOpInstr(
      TokenPosition::kNoSource, kind, left, right, kSmiCid, GetNextDeoptId());
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::SmiBinaryOp(Token::Kind kind,
                                           bool is_truncating) {
  Value* right = Pop();
  Value* left = Pop();
  BinarySmiOpInstr* instr =
      new (Z) BinarySmiOpInstr(kind, left, right, GetNextDeoptId());
  if (is_truncating) {
    instr->mark_truncating();
  }
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::BinaryIntegerOp(Token::Kind kind,
                                               Representation representation,
                                               bool is_truncating) {
  ASSERT(representation == kUnboxedInt32 || representation == kUnboxedUint32 ||
         representation == kUnboxedInt64);
  Value* right = Pop();
  Value* left = Pop();
  BinaryIntegerOpInstr* instr;
  switch (representation) {
    case kUnboxedInt32:
      instr = new (Z) BinaryInt32OpInstr(kind, left, right, GetNextDeoptId());
      break;
    case kUnboxedUint32:
      instr = new (Z) BinaryUint32OpInstr(kind, left, right, GetNextDeoptId());
      break;
    case kUnboxedInt64:
      instr = new (Z) BinaryInt64OpInstr(kind, left, right, GetNextDeoptId());
      break;
    default:
      UNREACHABLE();
  }
  if (is_truncating) {
    instr->mark_truncating();
  }
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::LoadFpRelativeSlot(
    intptr_t offset,
    CompileType result_type,
    Representation representation) {
  LoadIndexedUnsafeInstr* instr = new (Z)
      LoadIndexedUnsafeInstr(Pop(), offset, result_type, representation);
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::StoreFpRelativeSlot(intptr_t offset) {
  Value* value = Pop();
  Value* index = Pop();
  StoreIndexedUnsafeInstr* instr =
      new (Z) StoreIndexedUnsafeInstr(index, value, offset);
  return Fragment(instr);
}

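// Builds a shared join block that tail-calls the closure noSuchMethod stub,
// passing along the current argument descriptor.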
JoinEntryInstr* BaseFlowGraphBuilder::BuildThrowNoSuchMethod() {
  JoinEntryInstr* nsm = BuildJoinEntry();

  Fragment failing(nsm);
  const Code& nsm_handler = Code::ZoneHandle(
      Z, I->object_store()->call_closure_no_such_method_stub());
  failing += LoadArgDescriptor();
  failing += TailCall(nsm_handler);

  return nsm;
}

Fragment BaseFlowGraphBuilder::AssertBool(TokenPosition position) {
  Value* value = Pop();
  AssertBooleanInstr* instr =
      new (Z) AssertBooleanInstr(position, value, GetNextDeoptId());
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::BooleanNegate() {
  BooleanNegateInstr* negate = new (Z) BooleanNegateInstr(Pop());
  Push(negate);
  return Fragment(negate);
}

Fragment BaseFlowGraphBuilder::AllocateContext(
    const ZoneGrowableArray<const Slot*>& context_slots) {
  AllocateContextInstr* allocate =
      new (Z) AllocateContextInstr(TokenPosition::kNoSource, context_slots);
  Push(allocate);
  return Fragment(allocate);
}

Fragment BaseFlowGraphBuilder::AllocateClosure(
    TokenPosition position,
    const Function& closure_function) {
  const Class& cls = Class::ZoneHandle(Z, I->object_store()->closure_class());
  AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(position, cls);
  allocate->set_closure_function(closure_function);
  Push(allocate);
  return Fragment(allocate);
}

Fragment BaseFlowGraphBuilder::CreateArray() {
  Value* element_count = Pop();
  CreateArrayInstr* array =
      new (Z) CreateArrayInstr(TokenPosition::kNoSource,
                               Pop(),  // Element type.
                               element_count, GetNextDeoptId());
  Push(array);
  return Fragment(array);
}

Fragment BaseFlowGraphBuilder::InstantiateType(const AbstractType& type) {
  Value* function_type_args = Pop();
  Value* instantiator_type_args = Pop();
  InstantiateTypeInstr* instr = new (Z) InstantiateTypeInstr(
      TokenPosition::kNoSource, type, instantiator_type_args,
      function_type_args, GetNextDeoptId());
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::InstantiateTypeArguments(
    const TypeArguments& type_arguments) {
  Value* function_type_args = Pop();
  Value* instantiator_type_args = Pop();
  const Class& instantiator_class = Class::ZoneHandle(Z, function_.Owner());
  InstantiateTypeArgumentsInstr* instr = new (Z) InstantiateTypeArgumentsInstr(
      TokenPosition::kNoSource, type_arguments, instantiator_class, function_,
      instantiator_type_args, function_type_args, GetNextDeoptId());
  Push(instr);
  return Fragment(instr);
}

Fragment BaseFlowGraphBuilder::LoadClassId() {
  LoadClassIdInstr* load = new (Z) LoadClassIdInstr(Pop());
  Push(load);
  return Fragment(load);
}

Fragment BaseFlowGraphBuilder::AllocateObject(TokenPosition position,
                                              const Class& klass,
                                              intptr_t argument_count) {
  ASSERT((argument_count == 0) || (argument_count == 1));
  Value* type_arguments = (argument_count > 0) ? Pop() : nullptr;
  AllocateObjectInstr* allocate =
      new (Z) AllocateObjectInstr(position, klass, type_arguments);
  Push(allocate);
  return Fragment(allocate);
}

Fragment BaseFlowGraphBuilder::Box(Representation from) {
  BoxInstr* box = BoxInstr::Create(from, Pop());
  Push(box);
  return Fragment(box);
}

Fragment BaseFlowGraphBuilder::BuildFfiAsFunctionInternalCall(
    const TypeArguments& signatures) {
  ASSERT(signatures.IsInstantiated());
  ASSERT(signatures.Length() == 2);

  const AbstractType& dart_type = AbstractType::Handle(signatures.TypeAt(0));
  const AbstractType& native_type = AbstractType::Handle(signatures.TypeAt(1));

  ASSERT(dart_type.IsFunctionType() && native_type.IsFunctionType());
  const Function& target =
      Function::ZoneHandle(compiler::ffi::TrampolineFunction(
          Function::Handle(Z, Type::Cast(dart_type).signature()),
          Function::Handle(Z, Type::Cast(native_type).signature())));

  Fragment code;
  // Store the pointer in the context; we cannot load the untagged address
  // here, as these can be unoptimized call sites.
  LocalVariable* pointer = MakeTemporary();

  auto& context_slots = CompilerState::Current().GetDummyContextSlots(
      /*context_id=*/0, /*num_variables=*/1);
  code += AllocateContext(context_slots);
  LocalVariable* context = MakeTemporary();

  code += LoadLocal(context);
  code += LoadLocal(pointer);
  code += StoreInstanceField(TokenPosition::kNoSource, *context_slots[0]);

  code += AllocateClosure(TokenPosition::kNoSource, target);
  LocalVariable* closure = MakeTemporary();

  code += LoadLocal(closure);
  code += LoadLocal(context);
  code += StoreInstanceField(TokenPosition::kNoSource, Slot::Closure_context(),
                             StoreInstanceFieldInstr::Kind::kInitializing);

  code += LoadLocal(closure);
  code += Constant(target);
  code += StoreInstanceField(TokenPosition::kNoSource, Slot::Closure_function(),
                             StoreInstanceFieldInstr::Kind::kInitializing);

  // Drop address and context.
  code += DropTempsPreserveTop(2);

  return code;
}

Fragment BaseFlowGraphBuilder::DebugStepCheck(TokenPosition position) {
#ifdef PRODUCT
  return Fragment();
#else
  return Fragment(new (Z) DebugStepCheckInstr(
      position, PcDescriptorsLayout::kRuntimeCall, GetNextDeoptId()));
#endif
}

Fragment BaseFlowGraphBuilder::CheckNull(TokenPosition position,
                                         LocalVariable* receiver,
                                         const String& function_name,
                                         bool clear_the_temp /* = true */) {
  Fragment instructions = LoadLocal(receiver);

  CheckNullInstr* check_null = new (Z)
      CheckNullInstr(Pop(), function_name, GetNextDeoptId(), position,
                     function_name.IsNull() ? CheckNullInstr::kCastError
                                            : CheckNullInstr::kNoSuchMethod);

  // Does not use the redefinition, so there is no `Push(check_null)`.
  instructions <<= check_null;

  if (clear_the_temp) {
    // Null out the receiver to make sure it is not saved into the frame
    // before doing the call.
    instructions += NullConstant();
    instructions += StoreLocal(TokenPosition::kNoSource, receiver);
    instructions += Drop();
  }

  return instructions;
}

Fragment BaseFlowGraphBuilder::CheckNullOptimized(TokenPosition position,
                                                  const String& function_name) {
  Value* value = Pop();
  CheckNullInstr* check_null =
      new (Z) CheckNullInstr(value, function_name, GetNextDeoptId(), position);
  Push(check_null);  // Use the redefinition.
  return Fragment(check_null);
}

void BaseFlowGraphBuilder::RecordUncheckedEntryPoint(
    GraphEntryInstr* graph_entry,
    FunctionEntryInstr* unchecked_entry) {
  // Closures always check all arguments on their checked entry-point; most
  // call-sites are unchecked, and closures are inlined less often, so it is
  // very beneficial to build multiple entry-points for them. Regular methods,
  // however, have fewer checks to begin with since they have dynamic
  // invocation forwarders, so in AOT we implement a more conservative
  // time-space tradeoff by only building the unchecked entry-point when
  // inlining. We should reconsider this heuristic if we identify non-inlined
  // type-checks in hotspots of new benchmarks.
  if (!IsInlining() && (parsed_function_->function().IsClosureFunction() ||
                        !CompilerState::Current().is_aot())) {
    graph_entry->set_unchecked_entry(unchecked_entry);
  } else if (InliningUncheckedEntry()) {
    graph_entry->set_normal_entry(unchecked_entry);
  }
}

Fragment BaseFlowGraphBuilder::BuildEntryPointsIntrospection() {
  if (!FLAG_enable_testing_pragmas) return Drop();

  auto& function = Function::Handle(Z, parsed_function_->function().raw());

  if (function.IsImplicitClosureFunction()) {
    const auto& parent = Function::Handle(Z, function.parent_function());
    const auto& func_name = String::Handle(Z, parent.name());
    const auto& owner = Class::Handle(Z, parent.Owner());
    function = owner.LookupFunction(func_name);
  }

  Object& options = Object::Handle(Z);
  if (!Library::FindPragma(thread_, /*only_core=*/false, function,
                           Symbols::vm_trace_entrypoints(), &options) ||
      options.IsNull() || !options.IsClosure()) {
    return Drop();
  }
  auto& closure = Closure::ZoneHandle(Z, Closure::Cast(options).raw());
  LocalVariable* entry_point_num = MakeTemporary();

  auto& function_name = String::ZoneHandle(
      Z, String::New(function.ToLibNamePrefixedQualifiedCString(), Heap::kOld));
  if (parsed_function_->function().IsImplicitClosureFunction()) {
    function_name = String::Concat(
        function_name, String::Handle(Z, String::New("#tearoff", Heap::kNew)),
        Heap::kOld);
  }
  if (!function_name.IsCanonical()) {
    function_name = Symbols::New(thread_, function_name);
  }

  Fragment call_hook;
  call_hook += Constant(closure);
  call_hook += Constant(function_name);
  call_hook += LoadLocal(entry_point_num);
  call_hook += Constant(Function::ZoneHandle(Z, closure.function()));
  call_hook += ClosureCall(TokenPosition::kNoSource,
                           /*type_args_len=*/0, /*argument_count=*/3,
                           /*argument_names=*/Array::ZoneHandle(Z));
  call_hook += Drop();  // Result of closure call.
  call_hook += Drop();  // Entry point number.
  return call_hook;
}

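// The total number of inputs popped for a closure call includes the closure
// itself (the final +1) and, when type_args_len > 0, a leading type argument
// vector.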
Fragment BaseFlowGraphBuilder::ClosureCall(TokenPosition position,
                                           intptr_t type_args_len,
                                           intptr_t argument_count,
                                           const Array& argument_names,
                                           bool is_statically_checked) {
  const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0) + 1;
  InputsArray* arguments = GetArguments(total_count);
  ClosureCallInstr* call = new (Z) ClosureCallInstr(
      arguments, type_args_len, argument_names, position, GetNextDeoptId(),
      is_statically_checked ? Code::EntryKind::kUnchecked
                            : Code::EntryKind::kNormal);
  Push(call);
  return Fragment(call);
}

Fragment BaseFlowGraphBuilder::StringInterpolate(TokenPosition position) {
  Value* array = Pop();
  StringInterpolateInstr* interpolate =
      new (Z) StringInterpolateInstr(array, position, GetNextDeoptId());
  Push(interpolate);
  return Fragment(interpolate);
}

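// context_level_array_ holds (deopt id, context level) pairs ordered by
// deopt id; this rewrites the context level recorded for the given deopt id
// to the current context depth.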
void BaseFlowGraphBuilder::reset_context_depth_for_deopt_id(intptr_t deopt_id) {
  if (is_recording_context_levels()) {
    for (intptr_t i = 0, n = context_level_array_->length(); i < n; i += 2) {
      if (context_level_array_->At(i) == deopt_id) {
        (*context_level_array_)[i + 1] = context_depth_;
        return;
      }
      ASSERT(context_level_array_->At(i) < deopt_id);
    }
  }
}

Fragment BaseFlowGraphBuilder::AssertAssignable(
    TokenPosition position,
    const String& dst_name,
    AssertAssignableInstr::Kind kind) {
  Value* function_type_args = Pop();
  Value* instantiator_type_args = Pop();
  Value* dst_type = Pop();
  Value* value = Pop();

  AssertAssignableInstr* instr = new (Z) AssertAssignableInstr(
      position, value, dst_type, instantiator_type_args, function_type_args,
      dst_name, GetNextDeoptId(), kind);
  Push(instr);

  return Fragment(instr);
}

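// Copies the compile-time constant values of parameters (where known) into
// their raw parameter variables.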
Fragment BaseFlowGraphBuilder::InitConstantParameters() {
  Fragment instructions;
  const intptr_t parameter_count = parsed_function_->function().NumParameters();
  for (intptr_t i = 0; i < parameter_count; ++i) {
    LocalVariable* raw_parameter = parsed_function_->RawParameterVariable(i);
    const Object* param_value = raw_parameter->parameter_value();
    if (param_value != nullptr) {
      instructions += Constant(*param_value);
      instructions += StoreLocalRaw(TokenPosition::kNoSource, raw_parameter);
      instructions += Drop();
    }
  }
  return instructions;
}

}  // namespace kernel
}  // namespace dart