1 | // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
// Graph intrinsifier: builds and compiles flow graphs for recognized methods.
5 | |
6 | #include "vm/compiler/graph_intrinsifier.h" |
7 | #include "vm/compiler/backend/block_builder.h" |
8 | #include "vm/compiler/backend/flow_graph.h" |
9 | #include "vm/compiler/backend/flow_graph_compiler.h" |
10 | #include "vm/compiler/backend/il.h" |
11 | #include "vm/compiler/backend/il_printer.h" |
12 | #include "vm/compiler/backend/inliner.h" |
13 | #include "vm/compiler/backend/linearscan.h" |
14 | #include "vm/compiler/backend/range_analysis.h" |
15 | #include "vm/compiler/compiler_pass.h" |
16 | #include "vm/compiler/intrinsifier.h" |
17 | #include "vm/compiler/jit/compiler.h" |
18 | #include "vm/cpu.h" |
19 | #include "vm/flag_list.h" |
20 | |
21 | namespace dart { |
22 | |
23 | DECLARE_FLAG(bool, print_flow_graph); |
24 | DECLARE_FLAG(bool, print_flow_graph_optimized); |
25 | |
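// Scope guard that forces [FlowGraphCompiler::is_optimizing()] to true while
// intrinsic code is emitted and restores the previous value on destruction.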
class GraphIntrinsicCodeGenScope {
 public:
  explicit GraphIntrinsicCodeGenScope(FlowGraphCompiler* compiler)
      : compiler_(compiler), old_is_optimizing_(compiler->is_optimizing()) {
    compiler_->is_optimizing_ = true;
  }
  ~GraphIntrinsicCodeGenScope() {
    compiler_->is_optimizing_ = old_is_optimizing_;
  }
35 | |
36 | private: |
37 | FlowGraphCompiler* compiler_; |
38 | bool old_is_optimizing_; |
39 | }; |
40 | |
41 | namespace compiler { |
42 | |
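// Emits native code for the given intrinsic flow graph by walking its blocks
// in reverse postorder and calling EmitNativeCode on every instruction.
// InvokeMathCFunction instructions are wrapped in the intrinsic call
// prologue/epilogue; no other calling instructions are allowed.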
43 | static void EmitCodeFor(FlowGraphCompiler* compiler, FlowGraph* graph) { |
44 | // For graph intrinsics we run the linearscan register allocator, which will |
45 | // pass opt=true for MakeLocationSummary. We therefore also have to ensure |
46 | // `compiler->is_optimizing()` is set to true during EmitNativeCode. |
  GraphIntrinsicCodeGenScope optimizing_scope(compiler);
48 | |
49 | // The FlowGraph here is constructed by the intrinsics builder methods, and |
50 | // is different from compiler->flow_graph(), the original method's flow graph. |
  compiler->assembler()->Comment("Graph intrinsic begin");
52 | for (intptr_t i = 0; i < graph->reverse_postorder().length(); i++) { |
53 | BlockEntryInstr* block = graph->reverse_postorder()[i]; |
54 | if (block->IsGraphEntry()) continue; // No code for graph entry needed. |
55 | |
56 | if (block->HasParallelMove()) { |
57 | compiler->parallel_move_resolver()->EmitNativeCode( |
58 | block->parallel_move()); |
59 | } |
60 | |
61 | for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { |
62 | Instruction* instr = it.Current(); |
63 | if (FLAG_code_comments) compiler->EmitComment(instr); |
64 | if (instr->IsParallelMove()) { |
65 | compiler->parallel_move_resolver()->EmitNativeCode( |
66 | instr->AsParallelMove()); |
67 | } else if (instr->IsInvokeMathCFunction()) { |
        ASSERT(instr->locs() != nullptr);
69 | GraphIntrinsifier::IntrinsicCallPrologue(compiler->assembler()); |
70 | instr->EmitNativeCode(compiler); |
71 | GraphIntrinsifier::IntrinsicCallEpilogue(compiler->assembler()); |
72 | } else { |
        ASSERT(instr->locs() != nullptr);
        // Calls are not supported in intrinsic code.
        ASSERT(!instr->locs()->always_calls());
76 | instr->EmitNativeCode(compiler); |
77 | } |
78 | } |
79 | } |
  compiler->assembler()->Comment("Graph intrinsic end");
81 | } |
82 | |
83 | bool GraphIntrinsifier::GraphIntrinsify(const ParsedFunction& parsed_function, |
84 | FlowGraphCompiler* compiler) { |
85 | ASSERT(!parsed_function.function().HasOptionalParameters()); |
86 | PrologueInfo prologue_info(-1, -1); |
87 | |
88 | auto graph_entry = |
89 | new GraphEntryInstr(parsed_function, Compiler::kNoOSRDeoptId); |
90 | |
91 | intptr_t block_id = 1; // 0 is GraphEntry. |
92 | graph_entry->set_normal_entry( |
93 | new FunctionEntryInstr(graph_entry, block_id, kInvalidTryIndex, |
94 | CompilerState::Current().GetNextDeoptId())); |
95 | |
96 | FlowGraph* graph = |
97 | new FlowGraph(parsed_function, graph_entry, block_id, prologue_info); |
98 | const Function& function = parsed_function.function(); |
99 | |
100 | switch (function.recognized_kind()) { |
101 | #define EMIT_CASE(class_name, function_name, enum_name, fp) \ |
102 | case MethodRecognizer::k##enum_name: \ |
103 | if (!Build_##enum_name(graph)) return false; \ |
104 | break; |
105 | |
106 | GRAPH_INTRINSICS_LIST(EMIT_CASE); |
107 | #undef EMIT_CASE |
108 | default: |
109 | if (function.IsImplicitGetterFunction()) { |
110 | if (!Build_ImplicitGetter(graph)) return false; |
111 | } else if (function.IsImplicitSetterFunction()) { |
112 | if (!Build_ImplicitSetter(graph)) return false; |
113 | } else { |
114 | return false; |
115 | } |
116 | } |
117 | |
118 | if (FLAG_support_il_printer && FLAG_print_flow_graph && |
119 | FlowGraphPrinter::ShouldPrint(function)) { |
    THR_Print("Intrinsic graph before\n");
121 | FlowGraphPrinter printer(*graph); |
122 | printer.PrintBlocks(); |
123 | } |
124 | |
125 | // Prepare for register allocation (cf. FinalizeGraph). |
126 | graph->RemoveRedefinitions(); |
127 | |
128 | // Ensure dominators are re-computed. Normally this is done during SSA |
129 | // construction (which we don't do for graph intrinsics). |
130 | GrowableArray<BitVector*> dominance_frontier; |
131 | graph->ComputeDominators(&dominance_frontier); |
132 | |
133 | CompilerPassState state(parsed_function.thread(), graph, |
134 | /*speculative_inlining_policy*/ nullptr); |
135 | CompilerPass::RunGraphIntrinsicPipeline(&state); |
136 | |
137 | if (FLAG_support_il_printer && FLAG_print_flow_graph && |
138 | FlowGraphPrinter::ShouldPrint(function)) { |
    THR_Print("Intrinsic graph after\n");
140 | FlowGraphPrinter printer(*graph); |
141 | printer.PrintBlocks(); |
142 | } |
143 | EmitCodeFor(compiler, graph); |
144 | return true; |
145 | } |
146 | |
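// Maps a box class id (double or SIMD) to its unboxed representation.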
147 | static Representation RepresentationForCid(intptr_t cid) { |
148 | switch (cid) { |
149 | case kDoubleCid: |
150 | return kUnboxedDouble; |
151 | case kFloat32x4Cid: |
152 | return kUnboxedFloat32x4; |
153 | case kInt32x4Cid: |
154 | return kUnboxedInt32x4; |
155 | case kFloat64x2Cid: |
156 | return kUnboxedFloat64x2; |
157 | default: |
158 | UNREACHABLE(); |
159 | return kNoRepresentation; |
160 | } |
161 | } |
162 | |
163 | // Notes about the graph intrinsics: |
164 | // |
165 | // IR instructions which would jump to a deoptimization sequence on failure |
166 | // instead branch to the intrinsic slow path. |
167 | // |
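// Loads the array length from [length_field] and bound-checks [index]
// against it. Returns the bound-checked index definition.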
168 | static Definition* PrepareIndexedOp(FlowGraph* flow_graph, |
169 | BlockBuilder* builder, |
170 | Definition* array, |
171 | Definition* index, |
172 | const Slot& length_field) { |
173 | Definition* length = builder->AddDefinition(new LoadFieldInstr( |
174 | new Value(array), length_field, TokenPosition::kNoSource)); |
175 | // Note that the intrinsifier must always use deopting array bound |
176 | // checks, because intrinsics currently don't support calls. |
177 | Definition* safe_index = new CheckArrayBoundInstr( |
178 | new Value(length), new Value(index), DeoptId::kNone); |
179 | builder->AddDefinition(safe_index); |
180 | return safe_index; |
181 | } |
182 | |
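// Boxes [value] into the given representation unless the function returns its
// result unboxed, in which case [value] is passed through unchanged.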
183 | static Definition* CreateBoxedResultIfNeeded(BlockBuilder* builder, |
184 | Definition* value, |
185 | Representation representation) { |
186 | const auto& function = builder->function(); |
187 | if (function.has_unboxed_return()) { |
188 | return value; |
189 | } else { |
190 | return builder->AddDefinition( |
191 | BoxInstr::Create(representation, new Value(value))); |
192 | } |
193 | } |
194 | |
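// Unboxes a tagged [value] into the function's unboxed return representation
// when the function returns its result unboxed; otherwise passes it through.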
195 | static Definition* CreateUnboxedResultIfNeeded(BlockBuilder* builder, |
196 | Definition* value) { |
197 | const auto& function = builder->function(); |
198 | if (function.has_unboxed_return() && value->representation() == kTagged) { |
199 | return builder->AddUnboxInstr(FlowGraph::ReturnRepresentationOf(function), |
200 | new Value(value), /* is_checked = */ true); |
201 | } else { |
202 | return value; |
203 | } |
204 | } |
205 | |
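// Builds the graph for an indexed getter on a receiver with class id
// [array_cid]. Roughly (illustrative IL sketch):
//
//   length     <- LoadField(array, length_field)
//   safe_index <- CheckArrayBound(length, index)
//   element    <- LoadIndexed(array, safe_index)
//   result     <- Box(element)   // and/or convert, only where needed
//   Return(result)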
206 | static bool IntrinsifyArrayGetIndexed(FlowGraph* flow_graph, |
207 | intptr_t array_cid) { |
208 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
209 | auto normal_entry = graph_entry->normal_entry(); |
210 | BlockBuilder builder(flow_graph, normal_entry); |
211 | |
212 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
213 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
214 | |
215 | index = PrepareIndexedOp(flow_graph, &builder, array, index, |
216 | Slot::GetLengthFieldForArrayCid(array_cid)); |
217 | |
218 | if (IsExternalTypedDataClassId(array_cid)) { |
219 | array = builder.AddDefinition(new LoadUntaggedInstr( |
220 | new Value(array), target::TypedDataBase::data_field_offset())); |
221 | } |
222 | |
223 | Definition* result = builder.AddDefinition(new LoadIndexedInstr( |
224 | new Value(array), new Value(index), /*index_unboxed=*/false, |
225 | /*index_scale=*/target::Instance::ElementSizeFor(array_cid), array_cid, |
226 | kAlignedAccess, DeoptId::kNone, builder.TokenPos())); |
227 | |
  // We don't perform [RangeAnalysis] for graph intrinsics. To give the
  // following boxing instruction a more precise range, we attach one to the
  // load here manually.
231 | // http://dartbug.com/36632 |
232 | const bool known_range = |
233 | array_cid == kTypedDataInt8ArrayCid || |
234 | array_cid == kTypedDataUint8ArrayCid || |
235 | array_cid == kTypedDataUint8ClampedArrayCid || |
236 | array_cid == kExternalTypedDataUint8ArrayCid || |
237 | array_cid == kExternalTypedDataUint8ClampedArrayCid || |
238 | array_cid == kTypedDataInt16ArrayCid || |
239 | array_cid == kTypedDataUint16ArrayCid || |
240 | array_cid == kTypedDataInt32ArrayCid || |
241 | array_cid == kTypedDataUint32ArrayCid || array_cid == kOneByteStringCid || |
242 | array_cid == kTwoByteStringCid; |
243 | |
244 | bool clear_environment = false; |
245 | if (known_range) { |
246 | Range range; |
247 | result->InferRange(/*range_analysis=*/nullptr, &range); |
248 | result->set_range(range); |
249 | clear_environment = range.Fits(RangeBoundary::kRangeBoundarySmi); |
250 | } |
251 | |
252 | // Box and/or convert result if necessary. |
253 | switch (array_cid) { |
254 | case kTypedDataInt32ArrayCid: |
255 | case kExternalTypedDataInt32ArrayCid: |
256 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedInt32); |
257 | break; |
258 | case kTypedDataUint32ArrayCid: |
259 | case kExternalTypedDataUint32ArrayCid: |
260 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedUint32); |
261 | break; |
262 | case kTypedDataFloat32ArrayCid: |
263 | result = builder.AddDefinition( |
264 | new FloatToDoubleInstr(new Value(result), DeoptId::kNone)); |
265 | FALL_THROUGH; |
266 | case kTypedDataFloat64ArrayCid: |
267 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedDouble); |
268 | break; |
269 | case kTypedDataFloat32x4ArrayCid: |
270 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedFloat32x4); |
271 | break; |
272 | case kTypedDataInt32x4ArrayCid: |
273 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedInt32x4); |
274 | break; |
275 | case kTypedDataFloat64x2ArrayCid: |
276 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedFloat64x2); |
277 | break; |
278 | case kArrayCid: |
279 | case kImmutableArrayCid: |
280 | // Nothing to do. |
281 | break; |
282 | case kTypedDataInt8ArrayCid: |
283 | case kTypedDataInt16ArrayCid: |
284 | case kTypedDataUint8ArrayCid: |
285 | case kTypedDataUint8ClampedArrayCid: |
286 | case kTypedDataUint16ArrayCid: |
287 | case kExternalTypedDataUint8ArrayCid: |
288 | case kExternalTypedDataUint8ClampedArrayCid: |
289 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedIntPtr); |
290 | break; |
291 | case kTypedDataInt64ArrayCid: |
292 | case kTypedDataUint64ArrayCid: |
293 | result = CreateBoxedResultIfNeeded(&builder, result, kUnboxedInt64); |
294 | break; |
295 | default: |
296 | UNREACHABLE(); |
297 | break; |
298 | } |
  if (result->IsBoxInteger() && clear_environment) {
    result->AsBoxInteger()->ClearEnv();
  }
302 | result = CreateUnboxedResultIfNeeded(&builder, result); |
303 | builder.AddReturn(new Value(result)); |
304 | return true; |
305 | } |
306 | |
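// Builds the graph for an indexed setter on a typed data receiver with class
// id [array_cid]: bound-check the index, check/unbox the value according to
// the element type, store without a write barrier, and return null.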
307 | static bool IntrinsifyArraySetIndexed(FlowGraph* flow_graph, |
308 | intptr_t array_cid) { |
309 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
310 | auto normal_entry = graph_entry->normal_entry(); |
311 | BlockBuilder builder(flow_graph, normal_entry); |
312 | |
313 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
314 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
315 | Definition* value = builder.AddParameter(2, /*with_frame=*/false); |
316 | |
317 | index = PrepareIndexedOp(flow_graph, &builder, array, index, |
318 | Slot::GetLengthFieldForArrayCid(array_cid)); |
319 | |
320 | // Value check/conversion. |
321 | switch (array_cid) { |
322 | case kTypedDataUint8ClampedArrayCid: |
323 | case kExternalTypedDataUint8ClampedArrayCid: |
324 | #if defined(TARGET_ARCH_IS_32_BIT) |
      // On 32-bit architectures the clamping operation needs the exact
      // value, so we check for a Smi here. On 64-bit architectures
      // kUnboxedIntPtr maps to kUnboxedInt64, and all other situations get
      // away with truncating even non-smi values.
329 | builder.AddInstruction(new CheckSmiInstr(new Value(value), DeoptId::kNone, |
330 | builder.TokenPos())); |
331 | FALL_THROUGH; |
332 | #endif |
333 | case kTypedDataInt8ArrayCid: |
334 | case kTypedDataInt16ArrayCid: |
335 | case kTypedDataUint8ArrayCid: |
336 | case kTypedDataUint16ArrayCid: |
337 | case kExternalTypedDataUint8ArrayCid: |
338 | value = builder.AddUnboxInstr(kUnboxedIntPtr, new Value(value), |
339 | /* is_checked = */ false); |
340 | value->AsUnboxInteger()->mark_truncating(); |
341 | break; |
342 | case kTypedDataInt32ArrayCid: |
343 | case kExternalTypedDataInt32ArrayCid: |
      // Use the same truncating unbox instruction for int32 and uint32.
345 | FALL_THROUGH; |
346 | case kTypedDataUint32ArrayCid: |
347 | case kExternalTypedDataUint32ArrayCid: |
348 | // Supports smi and mint, slow-case for bigints. |
349 | value = builder.AddUnboxInstr(kUnboxedUint32, new Value(value), |
350 | /* is_checked = */ false); |
351 | break; |
352 | case kTypedDataInt64ArrayCid: |
353 | case kTypedDataUint64ArrayCid: |
354 | value = builder.AddUnboxInstr(kUnboxedInt64, new Value(value), |
355 | /* is_checked = */ false); |
356 | break; |
357 | |
358 | case kTypedDataFloat32ArrayCid: |
359 | case kTypedDataFloat64ArrayCid: |
360 | case kTypedDataFloat32x4ArrayCid: |
361 | case kTypedDataInt32x4ArrayCid: |
362 | case kTypedDataFloat64x2ArrayCid: { |
363 | intptr_t value_check_cid = kDoubleCid; |
364 | Representation rep = kUnboxedDouble; |
365 | switch (array_cid) { |
366 | case kTypedDataFloat32x4ArrayCid: |
367 | value_check_cid = kFloat32x4Cid; |
368 | rep = kUnboxedFloat32x4; |
369 | break; |
370 | case kTypedDataInt32x4ArrayCid: |
371 | value_check_cid = kInt32x4Cid; |
372 | rep = kUnboxedInt32x4; |
373 | break; |
374 | case kTypedDataFloat64x2ArrayCid: |
375 | value_check_cid = kFloat64x2Cid; |
376 | rep = kUnboxedFloat64x2; |
377 | break; |
378 | default: |
379 | // Float32/Float64 case already handled. |
380 | break; |
381 | } |
382 | Zone* zone = flow_graph->zone(); |
383 | Cids* value_check = Cids::CreateMonomorphic(zone, value_check_cid); |
384 | builder.AddInstruction(new CheckClassInstr( |
385 | new Value(value), DeoptId::kNone, *value_check, builder.TokenPos())); |
386 | value = builder.AddUnboxInstr(rep, new Value(value), |
387 | /* is_checked = */ true); |
388 | if (array_cid == kTypedDataFloat32ArrayCid) { |
389 | value = builder.AddDefinition( |
390 | new DoubleToFloatInstr(new Value(value), DeoptId::kNone)); |
391 | } |
392 | break; |
393 | } |
394 | default: |
395 | UNREACHABLE(); |
396 | } |
397 | |
398 | if (IsExternalTypedDataClassId(array_cid)) { |
399 | array = builder.AddDefinition(new LoadUntaggedInstr( |
400 | new Value(array), target::TypedDataBase::data_field_offset())); |
401 | } |
  // No store barrier is needed: these are all typed data stores, which hold
  // raw (non-pointer) values.
  ASSERT(IsExternalTypedDataClassId(array_cid) ||
         IsTypedDataClassId(array_cid));
405 | builder.AddInstruction(new StoreIndexedInstr( |
406 | new Value(array), new Value(index), new Value(value), kNoStoreBarrier, |
407 | /*index_unboxed=*/false, |
408 | /*index_scale=*/target::Instance::ElementSizeFor(array_cid), array_cid, |
409 | kAlignedAccess, DeoptId::kNone, builder.TokenPos())); |
410 | // Return null. |
411 | Definition* null_def = builder.AddNullDefinition(); |
412 | builder.AddReturn(new Value(null_def)); |
413 | return true; |
414 | } |
415 | |
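// The macros below generate the per-class Build_<Kind>GetIndexed and
// Build_<Kind>SetIndexed entry points. For example,
// DEFINE_ARRAY_GETTER_INTRINSIC(Int8Array) expands (roughly) to:
//
//   bool GraphIntrinsifier::Build_Int8ArrayGetIndexed(FlowGraph* flow_graph) {
//     return IntrinsifyArrayGetIndexed(
//         flow_graph, MethodRecognizer::MethodKindToReceiverCid(
//                         MethodRecognizer::kInt8ArrayGetIndexed));
//   }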
416 | #define DEFINE_ARRAY_GETTER_INTRINSIC(enum_name) \ |
417 | bool GraphIntrinsifier::Build_##enum_name##GetIndexed( \ |
418 | FlowGraph* flow_graph) { \ |
419 | return IntrinsifyArrayGetIndexed( \ |
420 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
421 | MethodRecognizer::k##enum_name##GetIndexed)); \ |
422 | } |
423 | |
424 | #define DEFINE_ARRAY_SETTER_INTRINSIC(enum_name) \ |
425 | bool GraphIntrinsifier::Build_##enum_name##SetIndexed( \ |
426 | FlowGraph* flow_graph) { \ |
427 | return IntrinsifyArraySetIndexed( \ |
428 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
429 | MethodRecognizer::k##enum_name##SetIndexed)); \ |
430 | } |
431 | |
432 | DEFINE_ARRAY_GETTER_INTRINSIC(ObjectArray) |
433 | DEFINE_ARRAY_GETTER_INTRINSIC(ImmutableArray) |
434 | |
435 | #define DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(enum_name) \ |
436 | DEFINE_ARRAY_GETTER_INTRINSIC(enum_name) \ |
437 | DEFINE_ARRAY_SETTER_INTRINSIC(enum_name) |
438 | |
439 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Int8Array) |
440 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Uint8Array) |
441 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(ExternalUint8Array) |
442 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Uint8ClampedArray) |
443 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(ExternalUint8ClampedArray) |
444 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Int16Array) |
445 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Uint16Array) |
446 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Int32Array) |
447 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Uint32Array) |
448 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Int64Array) |
449 | DEFINE_ARRAY_GETTER_SETTER_INTRINSICS(Uint64Array) |
450 | |
451 | #undef DEFINE_ARRAY_GETTER_SETTER_INTRINSICS |
452 | #undef DEFINE_ARRAY_GETTER_INTRINSIC |
453 | #undef DEFINE_ARRAY_SETTER_INTRINSIC |
454 | |
455 | #define DEFINE_FLOAT_ARRAY_GETTER_INTRINSIC(enum_name) \ |
456 | bool GraphIntrinsifier::Build_##enum_name##GetIndexed( \ |
457 | FlowGraph* flow_graph) { \ |
458 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) { \ |
459 | return false; \ |
460 | } \ |
461 | return IntrinsifyArrayGetIndexed( \ |
462 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
463 | MethodRecognizer::k##enum_name##GetIndexed)); \ |
464 | } |
465 | |
466 | #define DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC(enum_name) \ |
467 | bool GraphIntrinsifier::Build_##enum_name##SetIndexed( \ |
468 | FlowGraph* flow_graph) { \ |
469 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) { \ |
470 | return false; \ |
471 | } \ |
472 | return IntrinsifyArraySetIndexed( \ |
473 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
474 | MethodRecognizer::k##enum_name##SetIndexed)); \ |
475 | } |
476 | |
477 | #define DEFINE_FLOAT_ARRAY_GETTER_SETTER_INTRINSICS(enum_name) \ |
478 | DEFINE_FLOAT_ARRAY_GETTER_INTRINSIC(enum_name) \ |
479 | DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC(enum_name) |
480 | |
481 | DEFINE_FLOAT_ARRAY_GETTER_SETTER_INTRINSICS(Float64Array) |
482 | DEFINE_FLOAT_ARRAY_GETTER_SETTER_INTRINSICS(Float32Array) |
483 | |
484 | #undef DEFINE_FLOAT_ARRAY_GETTER_SETTER_INTRINSICS |
485 | #undef DEFINE_FLOAT_ARRAY_GETTER_INTRINSIC |
486 | #undef DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC |
487 | |
488 | #define DEFINE_SIMD_ARRAY_GETTER_INTRINSIC(enum_name) \ |
489 | bool GraphIntrinsifier::Build_##enum_name##GetIndexed( \ |
490 | FlowGraph* flow_graph) { \ |
491 | if (!FlowGraphCompiler::SupportsUnboxedSimd128()) { \ |
492 | return false; \ |
493 | } \ |
494 | return IntrinsifyArrayGetIndexed( \ |
495 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
496 | MethodRecognizer::k##enum_name##GetIndexed)); \ |
497 | } |
498 | |
499 | #define DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(enum_name) \ |
500 | bool GraphIntrinsifier::Build_##enum_name##SetIndexed( \ |
501 | FlowGraph* flow_graph) { \ |
502 | if (!FlowGraphCompiler::SupportsUnboxedSimd128()) { \ |
503 | return false; \ |
504 | } \ |
505 | return IntrinsifyArraySetIndexed( \ |
506 | flow_graph, MethodRecognizer::MethodKindToReceiverCid( \ |
507 | MethodRecognizer::k##enum_name##SetIndexed)); \ |
508 | } |
509 | |
510 | #define DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS(enum_name) \ |
511 | DEFINE_SIMD_ARRAY_GETTER_INTRINSIC(enum_name) \ |
512 | DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(enum_name) |
513 | |
514 | DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS(Float32x4Array) |
515 | DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS(Int32x4Array) |
516 | DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS(Float64x2Array) |
517 | |
518 | #undef DEFINE_SIMD_ARRAY_GETTER_SETTER_INTRINSICS |
519 | #undef DEFINE_SIMD_ARRAY_GETTER_INTRINSIC |
520 | #undef DEFINE_SIMD_ARRAY_SETTER_INTRINSIC |
521 | |
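// Builds the graph for the String codeUnitAt intrinsics on a string with
// class id [cid]: bound-check the index, load the code unit (through the
// external data pointer for external strings), and box the result if needed.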
522 | static bool BuildCodeUnitAt(FlowGraph* flow_graph, intptr_t cid) { |
523 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
524 | auto normal_entry = graph_entry->normal_entry(); |
525 | BlockBuilder builder(flow_graph, normal_entry); |
526 | |
527 | Definition* str = builder.AddParameter(0, /*with_frame=*/false); |
528 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
529 | |
530 | index = |
531 | PrepareIndexedOp(flow_graph, &builder, str, index, Slot::String_length()); |
532 | |
  // For external strings, load the external data pointer first.
534 | if (cid == kExternalOneByteStringCid) { |
535 | str = builder.AddDefinition(new LoadUntaggedInstr( |
536 | new Value(str), target::ExternalOneByteString::external_data_offset())); |
537 | } else if (cid == kExternalTwoByteStringCid) { |
538 | str = builder.AddDefinition(new LoadUntaggedInstr( |
539 | new Value(str), target::ExternalTwoByteString::external_data_offset())); |
540 | } |
541 | |
542 | Definition* load = builder.AddDefinition(new LoadIndexedInstr( |
543 | new Value(str), new Value(index), /*index_unboxed=*/false, |
544 | target::Instance::ElementSizeFor(cid), cid, kAlignedAccess, |
545 | DeoptId::kNone, builder.TokenPos())); |
546 | |
  // We don't perform [RangeAnalysis] for graph intrinsics. To give the
  // following boxing instruction a more precise range, we attach one to the
  // load here manually.
550 | // http://dartbug.com/36632 |
551 | Range range; |
552 | load->InferRange(/*range_analysis=*/nullptr, &range); |
553 | load->set_range(range); |
554 | |
555 | Definition* result = |
556 | CreateBoxedResultIfNeeded(&builder, load, kUnboxedIntPtr); |
557 | |
558 | if (result->IsBoxInteger()) { |
559 | result->AsBoxInteger()->ClearEnv(); |
560 | } |
561 | |
562 | builder.AddReturn(new Value(result)); |
563 | return true; |
564 | } |
565 | |
566 | bool GraphIntrinsifier::Build_OneByteStringCodeUnitAt(FlowGraph* flow_graph) { |
567 | return BuildCodeUnitAt(flow_graph, kOneByteStringCid); |
568 | } |
569 | |
570 | bool GraphIntrinsifier::Build_TwoByteStringCodeUnitAt(FlowGraph* flow_graph) { |
571 | return BuildCodeUnitAt(flow_graph, kTwoByteStringCid); |
572 | } |
573 | |
574 | bool GraphIntrinsifier::Build_ExternalOneByteStringCodeUnitAt( |
575 | FlowGraph* flow_graph) { |
576 | return BuildCodeUnitAt(flow_graph, kExternalOneByteStringCid); |
577 | } |
578 | |
579 | bool GraphIntrinsifier::Build_ExternalTwoByteStringCodeUnitAt( |
580 | FlowGraph* flow_graph) { |
581 | return BuildCodeUnitAt(flow_graph, kExternalTwoByteStringCid); |
582 | } |
583 | |
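// Builds the graph for a binary SIMD operator: class-check the right operand,
// unbox both operands, apply the SimdOp and box the result if needed.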
584 | static bool BuildSimdOp(FlowGraph* flow_graph, intptr_t cid, Token::Kind kind) { |
585 | if (!FlowGraphCompiler::SupportsUnboxedSimd128()) return false; |
586 | |
587 | const Representation rep = RepresentationForCid(cid); |
588 | |
589 | Zone* zone = flow_graph->zone(); |
590 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
591 | auto normal_entry = graph_entry->normal_entry(); |
592 | BlockBuilder builder(flow_graph, normal_entry); |
593 | |
594 | Definition* left = builder.AddParameter(0, /*with_frame=*/false); |
595 | Definition* right = builder.AddParameter(1, /*with_frame=*/false); |
596 | |
597 | Cids* value_check = Cids::CreateMonomorphic(zone, cid); |
  // Check the argument class. The receiver (left) is known to match [cid].
599 | builder.AddInstruction(new CheckClassInstr(new Value(right), DeoptId::kNone, |
600 | *value_check, builder.TokenPos())); |
601 | Definition* left_simd = builder.AddUnboxInstr(rep, new Value(left), |
602 | /* is_checked = */ true); |
603 | |
604 | Definition* right_simd = builder.AddUnboxInstr(rep, new Value(right), |
605 | /* is_checked = */ true); |
606 | |
607 | Definition* unboxed_result = builder.AddDefinition(SimdOpInstr::Create( |
608 | SimdOpInstr::KindForOperator(cid, kind), new Value(left_simd), |
609 | new Value(right_simd), DeoptId::kNone)); |
610 | Definition* result = CreateBoxedResultIfNeeded(&builder, unboxed_result, rep); |
611 | |
612 | builder.AddReturn(new Value(result)); |
613 | return true; |
614 | } |
615 | |
616 | bool GraphIntrinsifier::Build_Float32x4Mul(FlowGraph* flow_graph) { |
617 | return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kMUL); |
618 | } |
619 | |
620 | bool GraphIntrinsifier::Build_Float32x4Div(FlowGraph* flow_graph) { |
621 | return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kDIV); |
622 | } |
623 | |
624 | bool GraphIntrinsifier::Build_Float32x4Sub(FlowGraph* flow_graph) { |
625 | return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kSUB); |
626 | } |
627 | |
628 | bool GraphIntrinsifier::Build_Float32x4Add(FlowGraph* flow_graph) { |
629 | return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kADD); |
630 | } |
631 | |
632 | bool GraphIntrinsifier::Build_Float64x2Mul(FlowGraph* flow_graph) { |
633 | return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kMUL); |
634 | } |
635 | |
636 | bool GraphIntrinsifier::Build_Float64x2Div(FlowGraph* flow_graph) { |
637 | return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kDIV); |
638 | } |
639 | |
640 | bool GraphIntrinsifier::Build_Float64x2Sub(FlowGraph* flow_graph) { |
641 | return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kSUB); |
642 | } |
643 | |
644 | bool GraphIntrinsifier::Build_Float64x2Add(FlowGraph* flow_graph) { |
645 | return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kADD); |
646 | } |
647 | |
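// Builds the graph for the Float32x4 shuffle getters (x, y, z, w): unbox the
// receiver if necessary, extract the lane via a SimdOp, and box the double
// result if needed.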
648 | static bool BuildFloat32x4Shuffle(FlowGraph* flow_graph, |
649 | MethodRecognizer::Kind kind) { |
650 | if (!FlowGraphCompiler::SupportsUnboxedDoubles() || |
651 | !FlowGraphCompiler::SupportsUnboxedSimd128()) { |
652 | return false; |
653 | } |
654 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
655 | auto normal_entry = graph_entry->normal_entry(); |
656 | BlockBuilder builder(flow_graph, normal_entry); |
657 | |
658 | Definition* receiver = builder.AddParameter(0, /*with_frame=*/false); |
659 | |
660 | const auto& function = flow_graph->function(); |
661 | Definition* unboxed_receiver = |
662 | !function.is_unboxed_parameter_at(0) |
663 | ? builder.AddUnboxInstr(kUnboxedFloat32x4, new Value(receiver), |
664 | /* is_checked = */ true) |
665 | : receiver; |
666 | |
667 | Definition* unboxed_result = builder.AddDefinition( |
668 | SimdOpInstr::Create(kind, new Value(unboxed_receiver), DeoptId::kNone)); |
669 | |
670 | Definition* result = |
671 | CreateBoxedResultIfNeeded(&builder, unboxed_result, kUnboxedDouble); |
672 | |
673 | builder.AddReturn(new Value(result)); |
674 | return true; |
675 | } |
676 | |
677 | bool GraphIntrinsifier::Build_Float32x4ShuffleX(FlowGraph* flow_graph) { |
678 | return BuildFloat32x4Shuffle(flow_graph, |
679 | MethodRecognizer::kFloat32x4ShuffleX); |
680 | } |
681 | |
682 | bool GraphIntrinsifier::Build_Float32x4ShuffleY(FlowGraph* flow_graph) { |
683 | return BuildFloat32x4Shuffle(flow_graph, |
684 | MethodRecognizer::kFloat32x4ShuffleY); |
685 | } |
686 | |
687 | bool GraphIntrinsifier::Build_Float32x4ShuffleZ(FlowGraph* flow_graph) { |
688 | return BuildFloat32x4Shuffle(flow_graph, |
689 | MethodRecognizer::kFloat32x4ShuffleZ); |
690 | } |
691 | |
692 | bool GraphIntrinsifier::Build_Float32x4ShuffleW(FlowGraph* flow_graph) { |
693 | return BuildFloat32x4Shuffle(flow_graph, |
694 | MethodRecognizer::kFloat32x4ShuffleW); |
695 | } |
696 | |
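// Builds the graph for a trivial getter that returns the given [field] of the
// receiver. Used by the various length getters below.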
697 | static bool BuildLoadField(FlowGraph* flow_graph, const Slot& field) { |
698 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
699 | auto normal_entry = graph_entry->normal_entry(); |
700 | BlockBuilder builder(flow_graph, normal_entry); |
701 | |
702 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
703 | |
704 | Definition* length = builder.AddDefinition( |
705 | new LoadFieldInstr(new Value(array), field, builder.TokenPos())); |
706 | |
707 | length = CreateUnboxedResultIfNeeded(&builder, length); |
708 | builder.AddReturn(new Value(length)); |
709 | return true; |
710 | } |
711 | |
712 | bool GraphIntrinsifier::Build_ObjectArrayLength(FlowGraph* flow_graph) { |
713 | return BuildLoadField(flow_graph, Slot::Array_length()); |
714 | } |
715 | |
716 | bool GraphIntrinsifier::Build_ImmutableArrayLength(FlowGraph* flow_graph) { |
717 | return BuildLoadField(flow_graph, Slot::Array_length()); |
718 | } |
719 | |
720 | bool GraphIntrinsifier::Build_GrowableArrayLength(FlowGraph* flow_graph) { |
721 | return BuildLoadField(flow_graph, Slot::GrowableObjectArray_length()); |
722 | } |
723 | |
724 | bool GraphIntrinsifier::Build_StringBaseLength(FlowGraph* flow_graph) { |
725 | return BuildLoadField(flow_graph, Slot::String_length()); |
726 | } |
727 | |
728 | bool GraphIntrinsifier::Build_TypedListLength(FlowGraph* flow_graph) { |
729 | return BuildLoadField(flow_graph, Slot::TypedDataBase_length()); |
730 | } |
731 | |
732 | bool GraphIntrinsifier::Build_TypedListViewLength(FlowGraph* flow_graph) { |
733 | return BuildLoadField(flow_graph, Slot::TypedDataBase_length()); |
734 | } |
735 | |
736 | bool GraphIntrinsifier::Build_ByteDataViewLength(FlowGraph* flow_graph) { |
737 | return BuildLoadField(flow_graph, Slot::TypedDataBase_length()); |
738 | } |
739 | |
740 | bool GraphIntrinsifier::Build_GrowableArrayCapacity(FlowGraph* flow_graph) { |
741 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
742 | auto normal_entry = graph_entry->normal_entry(); |
743 | BlockBuilder builder(flow_graph, normal_entry); |
744 | |
745 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
746 | |
747 | Definition* backing_store = builder.AddDefinition(new LoadFieldInstr( |
748 | new Value(array), Slot::GrowableObjectArray_data(), builder.TokenPos())); |
749 | Definition* capacity = builder.AddDefinition(new LoadFieldInstr( |
750 | new Value(backing_store), Slot::Array_length(), builder.TokenPos())); |
751 | capacity = CreateUnboxedResultIfNeeded(&builder, capacity); |
752 | builder.AddReturn(new Value(capacity)); |
753 | return true; |
754 | } |
755 | |
756 | bool GraphIntrinsifier::Build_GrowableArrayGetIndexed(FlowGraph* flow_graph) { |
757 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
758 | auto normal_entry = graph_entry->normal_entry(); |
759 | BlockBuilder builder(flow_graph, normal_entry); |
760 | |
761 | Definition* growable_array = builder.AddParameter(0, /*with_frame=*/false); |
762 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
763 | |
764 | index = PrepareIndexedOp(flow_graph, &builder, growable_array, index, |
765 | Slot::GrowableObjectArray_length()); |
766 | |
767 | Definition* backing_store = builder.AddDefinition( |
768 | new LoadFieldInstr(new Value(growable_array), |
769 | Slot::GrowableObjectArray_data(), builder.TokenPos())); |
770 | Definition* result = builder.AddDefinition(new LoadIndexedInstr( |
771 | new Value(backing_store), new Value(index), /*index_unboxed=*/false, |
772 | /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid, |
773 | kAlignedAccess, DeoptId::kNone, builder.TokenPos())); |
774 | result = CreateUnboxedResultIfNeeded(&builder, result); |
775 | builder.AddReturn(new Value(result)); |
776 | return true; |
777 | } |
778 | |
779 | bool GraphIntrinsifier::Build_ObjectArraySetIndexedUnchecked( |
780 | FlowGraph* flow_graph) { |
781 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
782 | auto normal_entry = graph_entry->normal_entry(); |
783 | BlockBuilder builder(flow_graph, normal_entry); |
784 | |
785 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
786 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
787 | Definition* value = builder.AddParameter(2, /*with_frame=*/false); |
788 | |
789 | index = PrepareIndexedOp(flow_graph, &builder, array, index, |
790 | Slot::Array_length()); |
791 | |
792 | builder.AddInstruction(new StoreIndexedInstr( |
793 | new Value(array), new Value(index), new Value(value), kEmitStoreBarrier, |
794 | /*index_unboxed=*/false, |
795 | /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid, |
796 | kAlignedAccess, DeoptId::kNone, builder.TokenPos())); |
797 | // Return null. |
798 | Definition* null_def = builder.AddNullDefinition(); |
799 | builder.AddReturn(new Value(null_def)); |
800 | return true; |
801 | } |
802 | |
803 | bool GraphIntrinsifier::Build_GrowableArraySetIndexedUnchecked( |
804 | FlowGraph* flow_graph) { |
805 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
806 | auto normal_entry = graph_entry->normal_entry(); |
807 | BlockBuilder builder(flow_graph, normal_entry); |
808 | |
809 | Definition* array = builder.AddParameter(0, /*with_frame=*/false); |
810 | Definition* index = builder.AddParameter(1, /*with_frame=*/false); |
811 | Definition* value = builder.AddParameter(2, /*with_frame=*/false); |
812 | |
813 | index = PrepareIndexedOp(flow_graph, &builder, array, index, |
814 | Slot::GrowableObjectArray_length()); |
815 | |
816 | Definition* backing_store = builder.AddDefinition(new LoadFieldInstr( |
817 | new Value(array), Slot::GrowableObjectArray_data(), builder.TokenPos())); |
818 | |
819 | builder.AddInstruction(new StoreIndexedInstr( |
820 | new Value(backing_store), new Value(index), new Value(value), |
821 | kEmitStoreBarrier, /*index_unboxed=*/false, |
822 | /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid, |
823 | kAlignedAccess, DeoptId::kNone, builder.TokenPos())); |
824 | // Return null. |
825 | Definition* null_def = builder.AddNullDefinition(); |
826 | builder.AddReturn(new Value(null_def)); |
827 | return true; |
828 | } |
829 | |
830 | bool GraphIntrinsifier::Build_GrowableArraySetData(FlowGraph* flow_graph) { |
831 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
832 | auto normal_entry = graph_entry->normal_entry(); |
833 | BlockBuilder builder(flow_graph, normal_entry); |
834 | |
835 | Definition* growable_array = builder.AddParameter(0, /*with_frame=*/false); |
836 | Definition* data = builder.AddParameter(1, /*with_frame=*/false); |
837 | Zone* zone = flow_graph->zone(); |
838 | |
839 | Cids* value_check = Cids::CreateMonomorphic(zone, kArrayCid); |
840 | builder.AddInstruction(new CheckClassInstr(new Value(data), DeoptId::kNone, |
841 | *value_check, builder.TokenPos())); |
842 | |
843 | builder.AddInstruction(new StoreInstanceFieldInstr( |
844 | Slot::GrowableObjectArray_data(), new Value(growable_array), |
845 | new Value(data), kEmitStoreBarrier, builder.TokenPos())); |
846 | // Return null. |
847 | Definition* null_def = builder.AddNullDefinition(); |
848 | builder.AddReturn(new Value(null_def)); |
849 | return true; |
850 | } |
851 | |
852 | bool GraphIntrinsifier::Build_GrowableArraySetLength(FlowGraph* flow_graph) { |
853 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
854 | auto normal_entry = graph_entry->normal_entry(); |
855 | BlockBuilder builder(flow_graph, normal_entry); |
856 | |
857 | Definition* growable_array = builder.AddParameter(0, /*with_frame=*/false); |
858 | Definition* length = builder.AddParameter(1, /*with_frame=*/false); |
859 | |
860 | builder.AddInstruction( |
861 | new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.TokenPos())); |
862 | builder.AddInstruction(new StoreInstanceFieldInstr( |
863 | Slot::GrowableObjectArray_length(), new Value(growable_array), |
864 | new Value(length), kNoStoreBarrier, builder.TokenPos())); |
865 | Definition* null_def = builder.AddNullDefinition(); |
866 | builder.AddReturn(new Value(null_def)); |
867 | return true; |
868 | } |
869 | |
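// Returns parameter [index] as an unboxed double: passes unboxed doubles
// through, converts unboxed integers (64-bit targets only), and unboxes
// tagged values. Returns nullptr if no supported conversion exists.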
870 | static Definition* ConvertOrUnboxDoubleParameter(BlockBuilder* builder, |
871 | Definition* value, |
872 | intptr_t index, |
873 | bool is_checked) { |
874 | const auto& function = builder->function(); |
875 | if (function.is_unboxed_double_parameter_at(index)) { |
876 | return value; |
877 | } else if (function.is_unboxed_integer_parameter_at(index)) { |
878 | if (compiler::target::kWordSize == 4) { |
      // Int64ToDoubleInstr is not implemented on 32-bit platforms.
880 | return nullptr; |
881 | } |
882 | auto to_double = new Int64ToDoubleInstr(new Value(value), DeoptId::kNone); |
883 | return builder->AddDefinition(to_double); |
884 | } else { |
885 | ASSERT(!function.is_unboxed_parameter_at(index)); |
886 | return builder->AddUnboxInstr(kUnboxedDouble, value, is_checked); |
887 | } |
888 | } |
889 | |
890 | bool GraphIntrinsifier::Build_DoubleFlipSignBit(FlowGraph* flow_graph) { |
891 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) { |
892 | return false; |
893 | } |
894 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
895 | auto normal_entry = graph_entry->normal_entry(); |
896 | BlockBuilder builder(flow_graph, normal_entry); |
897 | |
898 | Definition* receiver = builder.AddParameter(0, /*with_frame=*/false); |
899 | Definition* unboxed_value = ConvertOrUnboxDoubleParameter( |
900 | &builder, receiver, 0, /* is_checked = */ true); |
901 | if (unboxed_value == nullptr) { |
902 | return false; |
903 | } |
904 | Definition* unboxed_result = builder.AddDefinition(new UnaryDoubleOpInstr( |
905 | Token::kNEGATE, new Value(unboxed_value), DeoptId::kNone)); |
906 | Definition* result = |
907 | CreateBoxedResultIfNeeded(&builder, unboxed_result, kUnboxedDouble); |
908 | builder.AddReturn(new Value(result)); |
909 | return true; |
910 | } |
911 | |
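// Builds the graph for the math intrinsics below (kMathSin, kMathAtan2,
// kDoubleMod, ...): convert each parameter to an unboxed double, emit an
// InvokeMathCFunction with the given recognizer kind, and box the double
// result if needed.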
912 | static bool BuildInvokeMathCFunction(BlockBuilder* builder, |
913 | MethodRecognizer::Kind kind, |
914 | FlowGraph* flow_graph, |
915 | intptr_t num_parameters = 1) { |
916 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) { |
917 | return false; |
918 | } |
919 | ZoneGrowableArray<Value*>* args = |
920 | new ZoneGrowableArray<Value*>(num_parameters); |
921 | |
922 | for (intptr_t i = 0; i < num_parameters; i++) { |
923 | Definition* value = builder->AddParameter(i, /*with_frame=*/false); |
924 | Definition* unboxed_value = ConvertOrUnboxDoubleParameter( |
925 | builder, value, i, /* is_checked = */ false); |
926 | if (unboxed_value == nullptr) { |
927 | return false; |
928 | } |
929 | args->Add(new Value(unboxed_value)); |
930 | } |
931 | |
932 | Definition* unboxed_result = |
933 | builder->AddDefinition(new InvokeMathCFunctionInstr( |
934 | args, DeoptId::kNone, kind, builder->TokenPos())); |
935 | Definition* result = |
936 | CreateBoxedResultIfNeeded(builder, unboxed_result, kUnboxedDouble); |
937 | builder->AddReturn(new Value(result)); |
938 | |
939 | return true; |
940 | } |
941 | |
942 | bool GraphIntrinsifier::Build_MathSin(FlowGraph* flow_graph) { |
943 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
944 | |
945 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
946 | auto normal_entry = graph_entry->normal_entry(); |
947 | BlockBuilder builder(flow_graph, normal_entry); |
948 | |
949 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathSin, |
950 | flow_graph); |
951 | } |
952 | |
953 | bool GraphIntrinsifier::Build_MathCos(FlowGraph* flow_graph) { |
954 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
955 | |
956 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
957 | auto normal_entry = graph_entry->normal_entry(); |
958 | BlockBuilder builder(flow_graph, normal_entry); |
959 | |
960 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathCos, |
961 | flow_graph); |
962 | } |
963 | |
964 | bool GraphIntrinsifier::Build_MathTan(FlowGraph* flow_graph) { |
965 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
966 | |
967 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
968 | auto normal_entry = graph_entry->normal_entry(); |
969 | BlockBuilder builder(flow_graph, normal_entry); |
970 | |
971 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathTan, |
972 | flow_graph); |
973 | } |
974 | |
975 | bool GraphIntrinsifier::Build_MathAsin(FlowGraph* flow_graph) { |
976 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
977 | |
978 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
979 | auto normal_entry = graph_entry->normal_entry(); |
980 | BlockBuilder builder(flow_graph, normal_entry); |
981 | |
982 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAsin, |
983 | flow_graph); |
984 | } |
985 | |
986 | bool GraphIntrinsifier::Build_MathAcos(FlowGraph* flow_graph) { |
987 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
988 | |
989 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
990 | auto normal_entry = graph_entry->normal_entry(); |
991 | BlockBuilder builder(flow_graph, normal_entry); |
992 | |
993 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAcos, |
994 | flow_graph); |
995 | } |
996 | |
997 | bool GraphIntrinsifier::Build_MathAtan(FlowGraph* flow_graph) { |
998 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
999 | |
1000 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1001 | auto normal_entry = graph_entry->normal_entry(); |
1002 | BlockBuilder builder(flow_graph, normal_entry); |
1003 | |
1004 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAtan, |
1005 | flow_graph); |
1006 | } |
1007 | |
1008 | bool GraphIntrinsifier::Build_MathAtan2(FlowGraph* flow_graph) { |
1009 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1010 | |
1011 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1012 | auto normal_entry = graph_entry->normal_entry(); |
1013 | BlockBuilder builder(flow_graph, normal_entry); |
1014 | |
1015 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kMathAtan2, |
1016 | flow_graph, |
1017 | /* num_parameters = */ 2); |
1018 | } |
1019 | |
1020 | bool GraphIntrinsifier::Build_DoubleMod(FlowGraph* flow_graph) { |
1021 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1022 | |
1023 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1024 | auto normal_entry = graph_entry->normal_entry(); |
1025 | BlockBuilder builder(flow_graph, normal_entry); |
1026 | |
1027 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleMod, |
1028 | flow_graph, |
1029 | /* num_parameters = */ 2); |
1030 | } |
1031 | |
1032 | bool GraphIntrinsifier::Build_DoubleCeil(FlowGraph* flow_graph) { |
1033 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1034 | // TODO(johnmccutchan): On X86 this intrinsic can be written in a different |
1035 | // way. |
1036 | if (TargetCPUFeatures::double_truncate_round_supported()) return false; |
1037 | |
1038 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1039 | auto normal_entry = graph_entry->normal_entry(); |
1040 | BlockBuilder builder(flow_graph, normal_entry); |
1041 | |
1042 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleCeil, |
1043 | flow_graph); |
1044 | } |
1045 | |
1046 | bool GraphIntrinsifier::Build_DoubleFloor(FlowGraph* flow_graph) { |
1047 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1048 | // TODO(johnmccutchan): On X86 this intrinsic can be written in a different |
1049 | // way. |
1050 | if (TargetCPUFeatures::double_truncate_round_supported()) return false; |
1051 | |
1052 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1053 | auto normal_entry = graph_entry->normal_entry(); |
1054 | BlockBuilder builder(flow_graph, normal_entry); |
1055 | |
1056 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleFloor, |
1057 | flow_graph); |
1058 | } |
1059 | |
1060 | bool GraphIntrinsifier::Build_DoubleTruncate(FlowGraph* flow_graph) { |
1061 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1062 | // TODO(johnmccutchan): On X86 this intrinsic can be written in a different |
1063 | // way. |
1064 | if (TargetCPUFeatures::double_truncate_round_supported()) return false; |
1065 | |
1066 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1067 | auto normal_entry = graph_entry->normal_entry(); |
1068 | BlockBuilder builder(flow_graph, normal_entry); |
1069 | |
1070 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleTruncate, |
1071 | flow_graph); |
1072 | } |
1073 | |
1074 | bool GraphIntrinsifier::Build_DoubleRound(FlowGraph* flow_graph) { |
1075 | if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false; |
1076 | |
1077 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1078 | auto normal_entry = graph_entry->normal_entry(); |
1079 | BlockBuilder builder(flow_graph, normal_entry); |
1080 | |
1081 | return BuildInvokeMathCFunction(&builder, MethodRecognizer::kDoubleRound, |
1082 | flow_graph); |
1083 | } |
1084 | |
1085 | bool GraphIntrinsifier::Build_ImplicitGetter(FlowGraph* flow_graph) { |
1086 | // This code will only be invoked if our assumptions have been met (see |
  // [Intrinsifier::CanIntrinsifyFieldAccessor]).
1088 | auto zone = flow_graph->zone(); |
1089 | const auto& function = flow_graph->function(); |
1090 | ASSERT(Intrinsifier::CanIntrinsifyFieldAccessor(function)); |
1091 | |
1092 | auto& field = Field::Handle(zone, function.accessor_field()); |
1093 | if (Field::ShouldCloneFields()) { |
1094 | field = field.CloneFromOriginal(); |
1095 | } |
1096 | ASSERT(field.is_instance() && !field.is_late() && !field.needs_load_guard()); |
1097 | |
1098 | const auto& slot = Slot::Get(field, &flow_graph->parsed_function()); |
1099 | |
1100 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1101 | auto normal_entry = graph_entry->normal_entry(); |
1102 | BlockBuilder builder(flow_graph, normal_entry); |
1103 | |
1104 | auto receiver = builder.AddParameter(0, /*with_frame=*/false); |
1105 | auto field_value = builder.AddDefinition(new (zone) LoadFieldInstr( |
1106 | new (zone) Value(receiver), slot, builder.TokenPos())); |
1107 | builder.AddReturn(new (zone) Value(field_value)); |
1108 | return true; |
1109 | } |
1110 | |
1111 | bool GraphIntrinsifier::Build_ImplicitSetter(FlowGraph* flow_graph) { |
1112 | // This code will only be invoked if our assumptions have been met (see |
  // [Intrinsifier::CanIntrinsifyFieldAccessor]).
1114 | auto zone = flow_graph->zone(); |
1115 | const auto& function = flow_graph->function(); |
1116 | ASSERT(Intrinsifier::CanIntrinsifyFieldAccessor(function)); |
1117 | |
1118 | auto& field = Field::Handle(zone, function.accessor_field()); |
1119 | if (Field::ShouldCloneFields()) { |
1120 | field = field.CloneFromOriginal(); |
1121 | } |
1122 | ASSERT(field.is_instance() && !field.is_final()); |
1123 | ASSERT(!function.HasUnboxedParameters() || |
1124 | FlowGraphCompiler::IsUnboxedField(field)); |
1125 | |
1126 | const auto& slot = Slot::Get(field, &flow_graph->parsed_function()); |
1127 | |
1128 | const auto barrier_mode = FlowGraphCompiler::IsUnboxedField(field) |
1129 | ? kNoStoreBarrier |
1130 | : kEmitStoreBarrier; |
1131 | |
1132 | flow_graph->CreateCommonConstants(); |
1133 | GraphEntryInstr* graph_entry = flow_graph->graph_entry(); |
1134 | auto normal_entry = graph_entry->normal_entry(); |
1135 | BlockBuilder builder(flow_graph, normal_entry); |
1136 | |
1137 | auto receiver = builder.AddParameter(0, /*with_frame=*/false); |
1138 | auto value = builder.AddParameter(1, /*with_frame=*/false); |
1139 | |
1140 | if (!function.HasUnboxedParameters() && |
1141 | FlowGraphCompiler::IsUnboxedField(field)) { |
    // We do not support storing to possibly guarded fields in graph
    // intrinsics in JIT mode.
1144 | ASSERT(FLAG_precompiled_mode); |
1145 | |
1146 | Representation representation = kNoRepresentation; |
1147 | switch (field.guarded_cid()) { |
1148 | case kDoubleCid: |
1149 | representation = kUnboxedDouble; |
1150 | break; |
1151 | case kFloat32x4Cid: |
1152 | representation = kUnboxedFloat32x4; |
1153 | break; |
1154 | case kFloat64x2Cid: |
1155 | representation = kUnboxedFloat64x2; |
1156 | break; |
1157 | default: |
1158 | ASSERT(field.is_non_nullable_integer()); |
1159 | representation = kUnboxedInt64; |
1160 | break; |
1161 | } |
1162 | value = builder.AddUnboxInstr(representation, new Value(value), |
1163 | /*is_checked=*/true); |
1164 | } |
1165 | |
1166 | builder.AddInstruction(new (zone) StoreInstanceFieldInstr( |
1167 | slot, new (zone) Value(receiver), new (zone) Value(value), barrier_mode, |
1168 | builder.TokenPos())); |
1169 | |
1170 | builder.AddReturn(new (zone) Value(flow_graph->constant_null())); |
1171 | return true; |
1172 | } |
1173 | |
1174 | } // namespace compiler |
1175 | } // namespace dart |
1176 | |