// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_
#define RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_

#if defined(DART_PRECOMPILED_RUNTIME)
#error "AOT runtime should not use compiler sources (including header files)"
#endif  // defined(DART_PRECOMPILED_RUNTIME)

#include <initializer_list>

#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/il.h"
#include "vm/object.h"

namespace dart {

class InlineExitCollector;

namespace kernel {

class BaseFlowGraphBuilder;
class TryCatchBlock;

class Fragment {
 public:
  Instruction* entry = nullptr;
  Instruction* current = nullptr;

  Fragment() {}

  explicit Fragment(Instruction* instruction)
      : entry(instruction), current(instruction) {}

  Fragment(Instruction* entry, Instruction* current)
      : entry(entry), current(current) {}

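  // A fragment is open if instructions can still be appended at its end:
  // it is either empty or its 'current' instruction is set.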
  bool is_open() const { return entry == nullptr || current != nullptr; }
  bool is_closed() const { return !is_open(); }

  bool is_empty() const { return entry == nullptr && current == nullptr; }

  void Prepend(Instruction* start);

  Fragment& operator+=(const Fragment& other);
  Fragment& operator<<=(Instruction* next);

  Fragment closed();

 private:
  DISALLOW_ALLOCATION();
};

Fragment operator+(const Fragment& first, const Fragment& second);
Fragment operator<<(const Fragment& fragment, Instruction* next);
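
// A typical usage sketch (illustrative only; 'builder' stands for any
// BaseFlowGraphBuilder and 'var' for a LocalVariable assumed for the
// example):
//
//   Fragment body;
//   body += builder.LoadLocal(var);  // append a whole fragment
//   body += builder.Drop();
//   if (body.is_open()) { ... }      // instructions can still be appended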

// IL fragment that performs some sort of test (comparison) and
// has a single entry and multiple true and false exits.
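//
// Illustrative sketch (assumes a BaseFlowGraphBuilder* 'builder' and a
// TestFragment 'test' built from a comparison; the names are for the
// example only):
//
//   BlockEntryInstr* true_block = test.CreateTrueSuccessor(builder);
//   BlockEntryInstr* false_block = test.CreateFalseSuccessor(builder);
//   // or connect all true exits of 'test' to an existing join:
//   test.IfTrueGoto(builder, join);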
class TestFragment {
 public:
  BlockEntryInstr* CreateTrueSuccessor(BaseFlowGraphBuilder* builder);
  BlockEntryInstr* CreateFalseSuccessor(BaseFlowGraphBuilder* builder);

  void IfTrueGoto(BaseFlowGraphBuilder* builder, JoinEntryInstr* join) {
    ConnectBranchesTo(builder, *true_successor_addresses, join);
  }

  // If 'negate' is true, returns the negated fragment, i.e. this fragment
  // with its true and false successors flipped. Otherwise returns this
  // fragment unchanged.
  TestFragment Negate(bool negate) {
    if (negate) {
      return TestFragment(entry, false_successor_addresses,
                          true_successor_addresses);
    } else {
      return *this;
    }
  }

  typedef ZoneGrowableArray<TargetEntryInstr**> SuccessorAddressArray;

  // Create an empty fragment.
  TestFragment() {}

  // Create a fragment with the given entry and true/false exits.
  TestFragment(Instruction* entry,
               SuccessorAddressArray* true_successor_addresses,
               SuccessorAddressArray* false_successor_addresses)
      : entry(entry),
        true_successor_addresses(true_successor_addresses),
        false_successor_addresses(false_successor_addresses) {}

  // Create a fragment with the given entry and a single branch as an exit.
  TestFragment(Instruction* entry, BranchInstr* branch);

  void ConnectBranchesTo(BaseFlowGraphBuilder* builder,
                         const TestFragment::SuccessorAddressArray& branches,
                         JoinEntryInstr* join);

  BlockEntryInstr* CreateSuccessorFor(
      BaseFlowGraphBuilder* builder,
      const TestFragment::SuccessorAddressArray& branches);

  Instruction* entry = nullptr;
  SuccessorAddressArray* true_successor_addresses = nullptr;
  SuccessorAddressArray* false_successor_addresses = nullptr;
};

// Indicates which form of the unchecked entrypoint we are compiling.
//
// kNone:
//
//   There is no unchecked entrypoint: the unchecked entry is set to NULL in
//   the 'GraphEntryInstr'.
//
// kSeparate:
//
//   The normal and unchecked entrypoint each point to their own versions of
//   the prologue, containing exactly those checks which need to be performed
//   on either side. Both sides jump directly to the body after performing
//   their prologue.
//
// kSharedWithVariable:
//
//   A temporary variable is allocated and initialized to 0 on normal entry
//   and 2 on unchecked entry. Code which should be omitted on the unchecked
//   entrypoint is made conditional on this variable being equal to 0.
//
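// For kSharedWithVariable, the prologue is conceptually (an illustrative
// pseudo-IL sketch, not the exact instructions that are emitted):
//
//   NormalEntry:    entry_point := 0; goto Prologue
//   UncheckedEntry: entry_point := 2; goto Prologue
//   Prologue:
//     if (entry_point == 0) <perform the checks omitted on unchecked entry>
//     goto Body
//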
enum class UncheckedEntryPointStyle {
  kNone = 0,
  kSeparate = 1,
  kSharedWithVariable = 2,
};

class BaseFlowGraphBuilder {
 public:
  BaseFlowGraphBuilder(
      const ParsedFunction* parsed_function,
      intptr_t last_used_block_id,
      intptr_t osr_id = DeoptId::kNone,
      ZoneGrowableArray<intptr_t>* context_level_array = nullptr,
      InlineExitCollector* exit_collector = nullptr,
      bool inlining_unchecked_entry = false)
      : parsed_function_(parsed_function),
        function_(parsed_function_->function()),
        thread_(Thread::Current()),
        zone_(thread_->zone()),
        osr_id_(osr_id),
        context_level_array_(context_level_array),
        context_depth_(0),
        last_used_block_id_(last_used_block_id),
        current_try_index_(kInvalidTryIndex),
        next_used_try_index_(0),
        stack_(nullptr),
        exit_collector_(exit_collector),
        inlining_unchecked_entry_(inlining_unchecked_entry) {}

  Fragment LoadField(const Field& field, bool calls_initializer);
  Fragment LoadNativeField(const Slot& native_field,
                           bool calls_initializer = false);
  Fragment LoadIndexed(intptr_t index_scale);
  // Takes a [class_id] valid for StoreIndexed.
  Fragment LoadIndexedTypedData(classid_t class_id,
                                intptr_t index_scale,
                                bool index_unboxed);

  Fragment LoadUntagged(intptr_t offset);
  Fragment StoreUntagged(intptr_t offset);
  Fragment ConvertUntaggedToUnboxed(Representation to);
  Fragment ConvertUnboxedToUntagged(Representation from);
  Fragment UnboxSmiToIntptr();
  Fragment FloatToDouble();
  Fragment DoubleToFloat();

  Fragment AddIntptrIntegers();

  void SetTempIndex(Definition* definition);

  Fragment LoadLocal(LocalVariable* variable);
  Fragment StoreLocal(TokenPosition position, LocalVariable* variable);
  Fragment StoreLocalRaw(TokenPosition position, LocalVariable* variable);
  Fragment LoadContextAt(int depth);
  Fragment GuardFieldLength(const Field& field, intptr_t deopt_id);
  Fragment GuardFieldClass(const Field& field, intptr_t deopt_id);
  const Field& MayCloneField(const Field& field);
  Fragment StoreInstanceField(
      TokenPosition position,
      const Slot& field,
      StoreInstanceFieldInstr::Kind kind =
          StoreInstanceFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier);
  Fragment StoreInstanceField(
      const Field& field,
      StoreInstanceFieldInstr::Kind kind =
          StoreInstanceFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier);
  Fragment StoreInstanceFieldGuarded(const Field& field,
                                     StoreInstanceFieldInstr::Kind kind =
                                         StoreInstanceFieldInstr::Kind::kOther);
  Fragment LoadStaticField(const Field& field, bool calls_initializer);
  Fragment RedefinitionWithType(const AbstractType& type);
  Fragment ReachabilityFence();
  Fragment StoreStaticField(TokenPosition position, const Field& field);
  Fragment StoreIndexed(classid_t class_id);
  // Takes a [class_id] valid for StoreIndexed.
  Fragment StoreIndexedTypedData(classid_t class_id,
                                 intptr_t index_scale,
                                 bool index_unboxed);

  // Sign-extends kUnboxedInt32 and zero-extends kUnboxedUint32.
  Fragment Box(Representation from);

  void Push(Definition* definition);
  Definition* Peek(intptr_t depth = 0);
  Value* Pop();
  Fragment Drop();
  // Drops the given number of temps from the stack but preserves the top of
  // the stack.
  Fragment DropTempsPreserveTop(intptr_t num_temps_to_drop);
  Fragment MakeTemp();

  // Create a pseudo-local variable for a location on the expression stack.
  // Note: SSA construction currently does not support inserting Phi functions
  // for expression stack locations - only real local variables are supported.
  // This means that you can't use MakeTemporary in a way that would require
  // a Phi in SSA form. For example, the code below will be miscompiled or
  // will crash a debug VM with an assertion when building SSA for the
  // optimizing compiler:
  //
  //     t = MakeTemporary()
  //     Branch B1 or B2
  //     B1:
  //       StoreLocal(t, v0)
  //       goto B3
  //     B2:
  //       StoreLocal(t, v1)
  //       goto B3
  //     B3:
  //       LoadLocal(t)
  //
  LocalVariable* MakeTemporary();

  InputsArray* GetArguments(int count);

  TargetEntryInstr* BuildTargetEntry();
  FunctionEntryInstr* BuildFunctionEntry(GraphEntryInstr* graph_entry);
  JoinEntryInstr* BuildJoinEntry();
  JoinEntryInstr* BuildJoinEntry(intptr_t try_index);
  IndirectEntryInstr* BuildIndirectEntry(intptr_t indirect_id,
                                         intptr_t try_index);

  Fragment StrictCompare(TokenPosition position,
                         Token::Kind kind,
                         bool number_check = false);
  Fragment StrictCompare(Token::Kind kind, bool number_check = false);
  Fragment Goto(JoinEntryInstr* destination);
  Fragment IntConstant(int64_t value);
  Fragment Constant(const Object& value);
  Fragment NullConstant();
  Fragment SmiRelationalOp(Token::Kind kind);
  Fragment SmiBinaryOp(Token::Kind op, bool is_truncating = false);
  Fragment BinaryIntegerOp(Token::Kind op,
                           Representation representation,
                           bool is_truncating = false);
  Fragment LoadFpRelativeSlot(intptr_t offset,
                              CompileType result_type,
                              Representation representation = kTagged);
  Fragment StoreFpRelativeSlot(intptr_t offset);
  Fragment BranchIfTrue(TargetEntryInstr** then_entry,
                        TargetEntryInstr** otherwise_entry,
                        bool negate = false);
  Fragment BranchIfNull(TargetEntryInstr** then_entry,
                        TargetEntryInstr** otherwise_entry,
                        bool negate = false);
  Fragment BranchIfEqual(TargetEntryInstr** then_entry,
                         TargetEntryInstr** otherwise_entry,
                         bool negate = false);
  Fragment BranchIfStrictEqual(TargetEntryInstr** then_entry,
                               TargetEntryInstr** otherwise_entry);
  Fragment Return(
      TokenPosition position,
      intptr_t yield_index = PcDescriptorsLayout::kInvalidYieldIndex);
  Fragment CheckStackOverflow(TokenPosition position,
                              intptr_t stack_depth,
                              intptr_t loop_depth);
  Fragment CheckStackOverflowInPrologue(TokenPosition position);
  Fragment MemoryCopy(classid_t src_cid, classid_t dest_cid);
  Fragment TailCall(const Code& code);
  Fragment Utf8Scan();

  intptr_t GetNextDeoptId() {
    intptr_t deopt_id = thread_->compiler_state().GetNextDeoptId();
    if (context_level_array_ != nullptr) {
      intptr_t level = context_depth_;
      context_level_array_->Add(deopt_id);
      context_level_array_->Add(level);
    }
    return deopt_id;
  }

  intptr_t AllocateTryIndex() { return next_used_try_index_++; }
  intptr_t CurrentTryIndex() const { return current_try_index_; }
  void SetCurrentTryIndex(intptr_t try_index) {
    current_try_index_ = try_index;
  }

  bool IsCompiledForOsr() { return osr_id_ != DeoptId::kNone; }

  bool IsInlining() const { return exit_collector_ != nullptr; }

  void InlineBailout(const char* reason);

  Fragment LoadArgDescriptor() {
    ASSERT(parsed_function_->has_arg_desc_var());
    return LoadLocal(parsed_function_->arg_desc_var());
  }

  Fragment TestTypeArgsLen(Fragment eq_branch,
                           Fragment neq_branch,
                           intptr_t num_type_args);
  Fragment TestDelayedTypeArgs(LocalVariable* closure,
                               Fragment present,
                               Fragment absent);
  Fragment TestAnyTypeArgs(Fragment present, Fragment absent);

  JoinEntryInstr* BuildThrowNoSuchMethod();

  Fragment AssertBool(TokenPosition position);
  Fragment BooleanNegate();
  Fragment AllocateContext(const ZoneGrowableArray<const Slot*>& scope);
  Fragment AllocateClosure(TokenPosition position,
                           const Function& closure_function);
  Fragment CreateArray();
  Fragment InstantiateType(const AbstractType& type);
  Fragment InstantiateTypeArguments(const TypeArguments& type_arguments);
  Fragment LoadClassId();

  // Returns true if we are building a graph for inlining of a call site that
  // enters the function through the unchecked entry.
  bool InliningUncheckedEntry() const { return inlining_unchecked_entry_; }

  // Returns depth of expression stack.
  intptr_t GetStackDepth() const {
    return stack_ == nullptr ? 0 : stack_->definition()->temp_index() + 1;
  }

  // Builds the graph for an invocation of '_asFunctionInternal'.
  //
  // 'signatures' contains the pair [<dart signature>, <native signature>].
  Fragment BuildFfiAsFunctionInternalCall(const TypeArguments& signatures);

  Fragment AllocateObject(TokenPosition position,
                          const Class& klass,
                          intptr_t argument_count);

  Fragment DebugStepCheck(TokenPosition position);

  // Loads 'receiver' and checks it for null. Throws NoSuchMethod if it is
  // null. 'function_name' is the selector being called (reported in the
  // NoSuchMethod message).
  // Sets 'receiver' to 'null' after the check if 'clear_the_temp' is true.
  // Note that this does _not_ use the result of the CheckNullInstr, so it does
  // not create a data dependency and might break with code motion.
  Fragment CheckNull(TokenPosition position,
                     LocalVariable* receiver,
                     const String& function_name,
                     bool clear_the_temp = true);

  // Pops the top of the stack, checks it for null, and pushes the result on
  // the stack to create a data dependency.
  // 'function_name' is the selector being called (reported in the
  // NoSuchMethod message).
  // Note that the result can currently only be used in optimized code, because
  // optimized code uses FlowGraph::RemoveRedefinitions to remove the
  // redefinitions, while unoptimized code does not.
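  //
  // Illustrative contrast with CheckNull above (a simplified sketch, not
  // the exact IL that is emitted):
  //
  //   CheckNull:          CheckNull(v)        // later uses reload 'receiver'
  //                                           // and do not depend on the check
  //   CheckNullOptimized: v' <- CheckNull(v)  // later uses consume v', which
  //                                           // creates a data dependency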
  Fragment CheckNullOptimized(TokenPosition position,
                              const String& function_name);

  // Records extra unchecked entry point 'unchecked_entry' in 'graph_entry'.
  void RecordUncheckedEntryPoint(GraphEntryInstr* graph_entry,
                                 FunctionEntryInstr* unchecked_entry);

  // Pops the index of the current entry-point off the stack. If there is any
  // entrypoint-tracing hook registered in a pragma for the function, it is
  // called with the name of the current function and the current entry-point
  // index.
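  //
  // Illustrative Dart-side registration of such a hook (a sketch; we assume
  // the VM testing pragma 'vm:testing.unsafe.trace-entrypoints-fn' here):
  //
  //   @pragma("vm:testing.unsafe.trace-entrypoints-fn", tracer)
  //   void traced(/* ... */) { /* ... */ }
  //
  //   void tracer(String function, int entryPointId) { /* ... */ }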
  Fragment BuildEntryPointsIntrospection();

  // Builds a closure call with the given number of arguments. The target
  // closure function is taken from the top of the stack.
  // PushArgument instructions should already have been added for the
  // arguments.
  Fragment ClosureCall(TokenPosition position,
                       intptr_t type_args_len,
                       intptr_t argument_count,
                       const Array& argument_names,
                       bool use_unchecked_entry = false);

  // Builds a StringInterpolate instruction, equivalent to a call to
  // _StringBase._interpolate.
  Fragment StringInterpolate(TokenPosition position);

  // Pops function type arguments, instantiator type arguments, dst_type, and
  // value; and type checks value against the type arguments.
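  //
  // Expected stack on entry (derived from the pop order above; the top of
  // the stack is listed last):
  //
  //   value
  //   dst_type
  //   instantiator type arguments
  //   function type arguments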
  Fragment AssertAssignable(
      TokenPosition position,
      const String& dst_name,
      AssertAssignableInstr::Kind kind = AssertAssignableInstr::kUnknown);

  // Returns true if we're currently recording deopt_id -> context level
  // mapping.
  bool is_recording_context_levels() const {
    return context_level_array_ != nullptr;
  }

  // Sets current context level. It will be recorded for all subsequent
  // deopt ids (until it is adjusted again).
  void set_context_depth(intptr_t context_level) {
    context_depth_ = context_level;
  }

  // Resets the context level for the given deopt id (which was allocated
  // earlier).
  void reset_context_depth_for_deopt_id(intptr_t deopt_id);

  // Sets raw parameter variables to inferred constant values.
  Fragment InitConstantParameters();

 protected:
  intptr_t AllocateBlockId() { return ++last_used_block_id_; }

  const ParsedFunction* parsed_function_;
  const Function& function_;
  Thread* thread_;
  Zone* zone_;
  intptr_t osr_id_;
  // Contains (deopt_id, context_level) pairs, flattened as
  // [deopt_id, context_level, deopt_id, context_level, ...].
  ZoneGrowableArray<intptr_t>* context_level_array_;
  intptr_t context_depth_;
  intptr_t last_used_block_id_;

  intptr_t current_try_index_;
  intptr_t next_used_try_index_;

  Value* stack_;
  InlineExitCollector* exit_collector_;

  const bool inlining_unchecked_entry_;

  friend class StreamingFlowGraphBuilder;
  friend class BytecodeFlowGraphBuilder;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseFlowGraphBuilder);
};

}  // namespace kernel
}  // namespace dart

#endif  // RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_