// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/frontend/flow_graph_builder.h"

#include "vm/compiler/backend/branch_optimizer.h"
#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/object.h"
#include "vm/zone.h"

namespace dart {

// Quick access to the locally defined zone() method.
#define Z (zone())
// TODO(srdjan): Allow compiler to add constants as they are encountered in
// the compilation.
const double kCommonDoubleConstants[] = {
    -1.0, -0.5, -0.1, 0.0, 0.1, 0.5, 1.0, 2.0, 4.0, 5.0, 10.0, 20.0, 30.0,
    64.0, 255.0, NAN,
    // From dart:math
    2.718281828459045, 2.302585092994046, 0.6931471805599453,
    1.4426950408889634, 0.4342944819032518, 3.1415926535897932,
    0.7071067811865476, 1.4142135623730951};

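// Returns the address of the matching table entry, or 0 if |value| is not in
// the table. The comparison is by bit pattern rather than operator== so that
// NaN can match NaN (and -0.0 would not be conflated with 0.0).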
uword FindDoubleConstant(double value) {
  intptr_t len = sizeof(kCommonDoubleConstants) / sizeof(double);  // NOLINT
  for (intptr_t i = 0; i < len; i++) {
    if (Utils::DoublesBitEqual(value, kCommonDoubleConstants[i])) {
      return reinterpret_cast<uword>(&kCommonDoubleConstants[i]);
    }
  }
  return 0;
}

void InlineExitCollector::PrepareGraphs(FlowGraph* callee_graph) {
  ASSERT(callee_graph->graph_entry()->SuccessorCount() == 1);
  ASSERT(callee_graph->max_block_id() > caller_graph_->max_block_id());
  ASSERT(callee_graph->max_virtual_register_number() >
         caller_graph_->max_virtual_register_number());

  // Adjust the caller's maximum block id and current SSA temp index.
  caller_graph_->set_max_block_id(callee_graph->max_block_id());
  caller_graph_->set_current_ssa_temp_index(
      callee_graph->max_virtual_register_number());

  // Attach the outer environment to each instruction in the callee graph.
  ASSERT(call_->env() != NULL);
  ASSERT(call_->deopt_id() != DeoptId::kNone);
  const intptr_t outer_deopt_id = call_->deopt_id();
  // Scale the edge weights by the call count for the inlined function.
  double scale_factor =
      static_cast<double>(call_->CallCount()) /
      static_cast<double>(caller_graph_->graph_entry()->entry_count());
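  // For example, if this call site executed 20 times while the caller's
  // entry was reached 100 times, the callee's edge weights are scaled by
  // 20 / 100 = 0.2.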
  for (BlockIterator block_it = callee_graph->postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    BlockEntryInstr* block = block_it.Current();
    if (block->IsTargetEntry()) {
      block->AsTargetEntry()->adjust_edge_weight(scale_factor);
    }
    Instruction* instr = block;
    if (block->env() != NULL) {
      call_->env()->DeepCopyToOuter(callee_graph->zone(), block,
                                    outer_deopt_id);
    }
    for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
      instr = it.Current();
      // TODO(zerny): Avoid creating unnecessary environments. Note that some
      // optimizations need deoptimization info for non-deoptable
      // instructions, e.g., LICM on GOTOs.
      if (instr->env() != NULL) {
        call_->env()->DeepCopyToOuter(callee_graph->zone(), instr,
                                      outer_deopt_id);
      }
    }
    if (instr->IsGoto()) {
      instr->AsGoto()->adjust_edge_weight(scale_factor);
    }
  }

  RemoveUnreachableExits(callee_graph);
}

void InlineExitCollector::AddExit(ReturnInstr* exit) {
  Data data = {NULL, exit};
  exits_.Add(data);
}

void InlineExitCollector::Union(const InlineExitCollector* other) {
  // It doesn't make sense to combine different calls or calls from
  // different graphs.
  ASSERT(caller_graph_ == other->caller_graph_);
  ASSERT(call_ == other->call_);
  exits_.AddArray(other->exits_);
}

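// Comparator used when sorting the collected exits: orders the exit data by
// ascending block id of the exit block (lowest id first).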
int InlineExitCollector::LowestBlockIdFirst(const Data* a, const Data* b) {
  return (a->exit_block->block_id() - b->exit_block->block_id());
}

void InlineExitCollector::RemoveUnreachableExits(FlowGraph* callee_graph) {
  const GrowableArray<BlockEntryInstr*>& postorder = callee_graph->postorder();
  int j = 0;
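  // Compact the exits array in place, keeping only exits whose block still
  // appears in the callee graph's postorder, i.e., is still reachable.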
  for (int i = 0; i < exits_.length(); ++i) {
    BlockEntryInstr* block = exits_[i].exit_return->GetBlock();
    if ((block != NULL) && (0 <= block->postorder_number()) &&
        (block->postorder_number() < postorder.length()) &&
        (postorder[block->postorder_number()] == block)) {
      if (i != j) {
        exits_[j] = exits_[i];
      }
      j++;
    }
  }
  exits_.TruncateTo(j);
}

void InlineExitCollector::SortExits() {
  // Assign block entries here because we did not necessarily know them when
  // the return exit was added to the array.
  for (int i = 0; i < exits_.length(); ++i) {
    exits_[i].exit_block = exits_[i].exit_return->GetBlock();
  }
  exits_.Sort(LowestBlockIdFirst);
}

Definition* InlineExitCollector::JoinReturns(BlockEntryInstr** exit_block,
                                             Instruction** last_instruction,
                                             intptr_t try_index) {
  // First sort the list of exits by block id (caching return instruction
  // block entries as a side effect).
  SortExits();
  intptr_t num_exits = exits_.length();
  if (num_exits == 1) {
    ReturnAt(0)->UnuseAllInputs();
    *exit_block = ExitBlockAt(0);
    *last_instruction = LastInstructionAt(0);
    return call_->HasUses() ? ValueAt(0)->definition() : NULL;
  } else {
    ASSERT(num_exits > 1);
    // Create a join of the returns.
    intptr_t join_id = caller_graph_->max_block_id() + 1;
    caller_graph_->set_max_block_id(join_id);
    JoinEntryInstr* join = new (Z) JoinEntryInstr(
        join_id, try_index, CompilerState::Current().GetNextDeoptId());

    // The dominator set of the join is the intersection of the dominator
    // sets of all the predecessors. If we keep the dominator sets ordered
    // by height in the dominator tree, we can also get the immediate
    // dominator of the join node from the intersection.
    //
    // block_dominators is the dominator set for each block, ordered from
    // the immediate dominator to the root of the dominator tree. This is
    // the order we collect them in (adding at the end).
    //
    // join_dominators is the join's dominators ordered from the root of the
    // dominator tree to the immediate dominator. This order supports
    // removing during intersection by truncating the list.
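    //
    // For example, if two exit blocks have dominator chains [B, A] and
    // [C, A] (immediate dominator first, root A last), join_dominators
    // starts as [A, B]; intersecting with [A, C] truncates it to [A], so A
    // becomes the join's immediate dominator.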
    GrowableArray<BlockEntryInstr*> block_dominators;
    GrowableArray<BlockEntryInstr*> join_dominators;
    for (intptr_t i = 0; i < num_exits; ++i) {
      // Add the control-flow edge.
      GotoInstr* goto_instr =
          new (Z) GotoInstr(join, CompilerState::Current().GetNextDeoptId());
      goto_instr->InheritDeoptTarget(zone(), ReturnAt(i));
      LastInstructionAt(i)->LinkTo(goto_instr);
      ExitBlockAt(i)->set_last_instruction(LastInstructionAt(i)->next());
      join->predecessors_.Add(ExitBlockAt(i));

      // Collect the block's dominators.
      block_dominators.Clear();
      BlockEntryInstr* dominator = ExitBlockAt(i)->dominator();
      while (dominator != NULL) {
        block_dominators.Add(dominator);
        dominator = dominator->dominator();
      }

      if (i == 0) {
        // The initial dominator set is the first predecessor's dominator
        // set. Reverse it.
        for (intptr_t j = block_dominators.length() - 1; j >= 0; --j) {
          join_dominators.Add(block_dominators[j]);
        }
      } else {
        // Intersect the block's dominators with the join's dominators so far.
        intptr_t last = block_dominators.length() - 1;
        for (intptr_t j = 0; j < join_dominators.length(); ++j) {
          intptr_t k = last - j;  // Corresponding index in block_dominators.
          if ((k < 0) || (join_dominators[j] != block_dominators[k])) {
            // Either we exhausted the dominators for this block before
            // exhausting the current intersection, or we found a block on
            // the path from the root of the tree that is not in common.
            // In either case the intersection stays non-empty, since every
            // block shares the root of the dominator tree.
            ASSERT(j > 0);
            join_dominators.TruncateTo(j);
            break;
          }
        }
      }
    }
    // The immediate dominator of the join is the last one in the ordered
    // intersection.
    join_dominators.Last()->AddDominatedBlock(join);
    *exit_block = join;
    *last_instruction = join;

    // If the call has uses, create a phi of the returns.
    if (call_->HasUses()) {
      // Add a phi of the return values.
      PhiInstr* phi = new (Z) PhiInstr(join, num_exits);
      caller_graph_->AllocateSSAIndexes(phi);
      phi->mark_alive();
      for (intptr_t i = 0; i < num_exits; ++i) {
        ReturnAt(i)->RemoveEnvironment();
        phi->SetInputAt(i, ValueAt(i));
      }
      join->InsertPhi(phi);
      join->InheritDeoptTargetAfter(caller_graph_, call_, phi);
      return phi;
    } else {
      // In the case that the result is unused, remove the return value uses
      // from their definition's use list.
      for (intptr_t i = 0; i < num_exits; ++i) {
        ReturnAt(i)->UnuseAllInputs();
      }
      join->InheritDeoptTargetAfter(caller_graph_, call_, NULL);
      return NULL;
    }
  }
}

void InlineExitCollector::ReplaceCall(BlockEntryInstr* callee_entry) {
  ASSERT(call_->previous() != NULL);
  ASSERT(call_->next() != NULL);
  BlockEntryInstr* call_block = call_->GetBlock();

  // Insert the callee graph into the caller graph.
  BlockEntryInstr* callee_exit = NULL;
  Instruction* callee_last_instruction = NULL;

  if (exits_.length() == 0) {
    // Handle the case when there are no normal return exits from the callee
    // (i.e., the callee unconditionally throws) by inserting an artificial
    // branch (true === true). The true successor is the inlined body; the
    // false successor goes to the rest of the caller graph and is removed
    // as unreachable code by constant propagation.
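    //
    // Resulting shape (sketch):
    //
    //              call_block
    //             before_call
    //         Branch true === true
    //           /             \
    //     true_target       false_block
    //   (inlined body)    (unreachable rest
    //                      of the caller)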
    TargetEntryInstr* false_block = new (Z) TargetEntryInstr(
        caller_graph_->allocate_block_id(), call_block->try_index(),
        CompilerState::Current().GetNextDeoptId());
    false_block->InheritDeoptTargetAfter(caller_graph_, call_, NULL);
    false_block->LinkTo(call_->next());
    call_block->ReplaceAsPredecessorWith(false_block);

    ConstantInstr* true_const = caller_graph_->GetConstant(Bool::True());
    BranchInstr* branch = new (Z) BranchInstr(
        new (Z) StrictCompareInstr(TokenPosition::kNoSource, Token::kEQ_STRICT,
                                   new (Z) Value(true_const),
                                   new (Z) Value(true_const), false,
                                   CompilerState::Current().GetNextDeoptId()),
        CompilerState::Current().GetNextDeoptId());  // No number check.
    branch->InheritDeoptTarget(zone(), call_);

    auto true_target = BranchSimplifier::ToTargetEntry(zone(), callee_entry);
    callee_entry->ReplaceAsPredecessorWith(true_target);

    *branch->true_successor_address() = true_target;
    *branch->false_successor_address() = false_block;

    call_->previous()->AppendInstruction(branch);
    call_block->set_last_instruction(branch);

    // Replace uses of the return value with the sentinel constant to
    // maintain valid SSA form, even though the rest of the caller is
    // unreachable.
    call_->ReplaceUsesWith(caller_graph_->GetConstant(Object::sentinel()));

    // Update dominator tree.
    for (intptr_t i = 0, n = callee_entry->dominated_blocks().length(); i < n;
         i++) {
      BlockEntryInstr* block = callee_entry->dominated_blocks()[i];
      true_target->AddDominatedBlock(block);
    }
    for (intptr_t i = 0, n = call_block->dominated_blocks().length(); i < n;
         i++) {
      BlockEntryInstr* block = call_block->dominated_blocks()[i];
      false_block->AddDominatedBlock(block);
    }
    call_block->ClearDominatedBlocks();
    call_block->AddDominatedBlock(true_target);
    call_block->AddDominatedBlock(false_block);

  } else {
    Definition* callee_result = JoinReturns(
        &callee_exit, &callee_last_instruction, call_block->try_index());
    if (callee_result != NULL) {
      call_->ReplaceUsesWith(callee_result);
    }
    if (callee_last_instruction == callee_entry) {
      // There are no instructions in the inlined function (e.g., it might be
      // a return of a parameter or a return of a constant defined in the
      // initial definitions).
      call_->previous()->LinkTo(call_->next());
    } else {
      call_->previous()->LinkTo(callee_entry->next());
      callee_last_instruction->LinkTo(call_->next());
    }
    if (callee_exit != callee_entry) {
      // In case of control flow, locally update the predecessors, phis and
      // dominator tree.
      //
      // Pictorially, the graph structure is:
      //
      //   Bc : call_block        Bi : callee_entry
      //     before_call            inlined_head
      //     call                   ... other blocks ...
      //     after_call           Be : callee_exit
      //                            inlined_foot
      //
      // And becomes:
      //
      //   Bc : call_block
      //     before_call
      //     inlined_head
      //     ... other blocks ...
      //   Be : callee_exit
      //     inlined_foot
      //     after_call
      //
      // For successors of 'after_call', the call block (Bc) is replaced as a
      // predecessor by the callee exit (Be).
      call_block->ReplaceAsPredecessorWith(callee_exit);
      // For successors of 'inlined_head', the callee entry (Bi) is replaced
      // as a predecessor by the call block (Bc).
      callee_entry->ReplaceAsPredecessorWith(call_block);

      // The callee exit is now the immediate dominator of blocks whose
      // immediate dominator was the call block.
      ASSERT(callee_exit->dominated_blocks().is_empty());
      for (intptr_t i = 0; i < call_block->dominated_blocks().length(); ++i) {
        BlockEntryInstr* block = call_block->dominated_blocks()[i];
        callee_exit->AddDominatedBlock(block);
      }
      // The call block is now the immediate dominator of blocks whose
      // immediate dominator was the callee entry.
      call_block->ClearDominatedBlocks();
      for (intptr_t i = 0; i < callee_entry->dominated_blocks().length();
           ++i) {
        BlockEntryInstr* block = callee_entry->dominated_blocks()[i];
        call_block->AddDominatedBlock(block);
      }
    }

    // Callee entry is not in the graph anymore. Remove it from use lists.
    callee_entry->UnuseAllInputs();
  }
  // Neither the call nor the graph entry (if present) is in the graph at
  // this point. Remove them from use lists.
  if (callee_entry->PredecessorCount() > 0) {
    callee_entry->PredecessorAt(0)->AsGraphEntry()->UnuseAllInputs();
  }
  call_->UnuseAllInputs();
}

bool SimpleInstanceOfType(const AbstractType& type) {
  // Bail if the type is still uninstantiated at compile time.
  if (!type.IsInstantiated()) return false;

  // Bail if the type is a function or a Dart Function type.
  if (type.IsFunctionType() || type.IsDartFunctionType()) return false;

  ASSERT(type.HasTypeClass());
  const Class& type_class = Class::Handle(type.type_class());

  // Bail if the type has any type parameters.
  if (type_class.IsGeneric()) {
    // If the interface type we check against is generic but has all-dynamic
    // type arguments, then we can still use the _simpleInstanceOf
    // implementation (see also runtime/lib/object.cc:Object_SimpleInstanceOf).
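    // For example, a test like `x is List` checks against `List<dynamic>`,
    // which matches the rare type, so the simple check still applies; a test
    // like `x is List<int>` does not match the rare type and is rejected
    // here.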
    const auto& rare_type = AbstractType::Handle(type_class.RareType());
    // TODO(regis): Revisit the usage of TypeEquality::kSyntactical when
    // implementing strong mode.
    return rare_type.IsEquivalent(type, TypeEquality::kSyntactical);
  }

  // Finally, a simple class suitable for instance-of checking.
  return true;
}

}  // namespace dart
