1 | // Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/type_testing_stubs.h" |
6 | #include "vm/compiler/assembler/disassembler.h" |
7 | #include "vm/object_store.h" |
8 | #include "vm/stub_code.h" |
9 | #include "vm/timeline.h" |
10 | |
11 | #if !defined(DART_PRECOMPILED_RUNTIME) |
12 | #include "vm/compiler/backend/flow_graph_compiler.h" |
13 | #include "vm/compiler/backend/il_printer.h" |
14 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
15 | |
16 | #define __ assembler-> |
17 | |
18 | namespace dart { |
19 | |
20 | DECLARE_FLAG(bool, disassemble_stubs); |
21 | |
// Eagerly allocates the reusable VM handles this namer assigns to while
// stringifying types, so StringifyType() does not need to allocate fresh
// handles on every call.
TypeTestingStubNamer::TypeTestingStubNamer()
    : lib_(Library::Handle()),
      klass_(Class::Handle()),
      type_(AbstractType::Handle()),
      type_arguments_(TypeArguments::Handle()),
      string_(String::Handle()) {}
28 | |
29 | const char* TypeTestingStubNamer::StubNameForType( |
30 | const AbstractType& type) const { |
31 | Zone* Z = Thread::Current()->zone(); |
32 | return OS::SCreate(Z, "TypeTestingStub_%s" , StringifyType(type)); |
33 | } |
34 | |
35 | const char* TypeTestingStubNamer::StringifyType( |
36 | const AbstractType& type) const { |
37 | NoSafepointScope no_safepoint; |
38 | Zone* Z = Thread::Current()->zone(); |
39 | if (type.IsType() && !type.IsFunctionType()) { |
40 | const intptr_t cid = Type::Cast(type).type_class_id(); |
41 | ClassTable* class_table = Isolate::Current()->class_table(); |
42 | klass_ = class_table->At(cid); |
43 | ASSERT(!klass_.IsNull()); |
44 | |
45 | const char* curl = "" ; |
46 | lib_ = klass_.library(); |
47 | if (!lib_.IsNull()) { |
48 | string_ = lib_.url(); |
49 | curl = OS::SCreate(Z, "%s_" , string_.ToCString()); |
50 | } else { |
51 | static intptr_t counter = 0; |
52 | curl = OS::SCreate(Z, "nolib%" Pd "_" , counter++); |
53 | } |
54 | |
55 | const char* concatenated = AssemblerSafeName( |
56 | OS::SCreate(Z, "%s_%s" , curl, klass_.ScrubbedNameCString())); |
57 | |
58 | const intptr_t type_parameters = klass_.NumTypeParameters(); |
59 | if (type.arguments() != TypeArguments::null() && type_parameters > 0) { |
60 | type_arguments_ = type.arguments(); |
61 | ASSERT(type_arguments_.Length() >= type_parameters); |
62 | const intptr_t length = type_arguments_.Length(); |
63 | for (intptr_t i = 0; i < type_parameters; ++i) { |
64 | type_ = type_arguments_.TypeAt(length - type_parameters + i); |
65 | concatenated = |
66 | OS::SCreate(Z, "%s__%s" , concatenated, StringifyType(type_)); |
67 | } |
68 | } |
69 | return concatenated; |
70 | } else if (type.IsTypeParameter()) { |
71 | string_ = TypeParameter::Cast(type).name(); |
72 | return AssemblerSafeName(OS::SCreate(Z, "%s" , string_.ToCString())); |
73 | } else { |
74 | return AssemblerSafeName(OS::SCreate(Z, "%s" , type.ToCString())); |
75 | } |
76 | } |
77 | |
78 | const char* TypeTestingStubNamer::AssemblerSafeName(char* cname) { |
79 | char* cursor = cname; |
80 | while (*cursor != '\0') { |
81 | char c = *cursor; |
82 | if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || |
83 | (c >= '0' && c <= '9') || (c == '_'))) { |
84 | *cursor = '_'; |
85 | } |
86 | cursor++; |
87 | } |
88 | return cname; |
89 | } |
90 | |
// Returns the default (non-specialized) type testing stub for [type].
//
// If [lazy_specialize] is true (and we are not in precompiled mode), a
// lazy-specialization stub is returned for class types and type parameters,
// which will build an optimized stub on first use.
CodePtr TypeTestingStubGenerator::DefaultCodeForType(
    const AbstractType& type,
    bool lazy_specialize /* = true */) {
  if (type.IsTypeRef()) {
    // TypeRefs (recursive types) always get a default stub; which one depends
    // on the isolate's null safety mode.
    return Isolate::Current()->null_safety()
               ? StubCode::DefaultTypeTest().raw()
               : StubCode::DefaultNullableTypeTest().raw();
  }

  // During bootstrapping we have no access to stubs yet, so we'll just return
  // `null` and patch these later in `Object::FinishInit()`.
  if (!StubCode::HasBeenInitialized()) {
    ASSERT(type.IsType());
    const classid_t cid = type.type_class_id();
    ASSERT(cid == kDynamicCid || cid == kVoidCid);
    return Code::null();
  }

  if (type.IsTopTypeForSubtyping()) {
    return StubCode::TopTypeTypeTest().raw();
  }

  if (type.IsType() || type.IsTypeParameter()) {
    // Lazy specialization is only available in JIT mode.
    const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize;
    const bool nullable = Instance::NullIsAssignableTo(type);
    if (should_specialize) {
      return nullable ? StubCode::LazySpecializeNullableTypeTest().raw()
                      : StubCode::LazySpecializeTypeTest().raw();
    } else {
      return nullable ? StubCode::DefaultNullableTypeTest().raw()
                      : StubCode::DefaultTypeTest().raw();
    }
  }

  return StubCode::UnreachableTypeTest().raw();
}
127 | |
128 | #if !defined(DART_PRECOMPILED_RUNTIME) |
129 | void TypeTestingStubGenerator::SpecializeStubFor(Thread* thread, |
130 | const AbstractType& type) { |
131 | HierarchyInfo hi(thread); |
132 | TypeTestingStubGenerator generator; |
133 | const Code& code = |
134 | Code::Handle(thread->zone(), generator.OptimizedCodeForType(type)); |
135 | type.SetTypeTestingStub(code); |
136 | } |
137 | #endif |
138 | |
// Caches the current isolate's object store for use while building stubs.
TypeTestingStubGenerator::TypeTestingStubGenerator()
    : object_store_(Isolate::Current()->object_store()) {}
141 | |
// Returns the most specialized type testing stub available for [type].
//
// On IA32 no specialized stubs are supported, so this always falls through
// to the default stub. On other architectures, canonical class types get a
// freshly built optimized stub when possible (compiler only); everything
// else falls back to the appropriate default stub without lazy
// specialization.
CodePtr TypeTestingStubGenerator::OptimizedCodeForType(
    const AbstractType& type) {
#if !defined(TARGET_ARCH_IA32)
  ASSERT(StubCode::HasBeenInitialized());

  if (type.IsTypeRef()) {
    return TypeTestingStubGenerator::DefaultCodeForType(
        type, /*lazy_specialize=*/false);
  }

  if (type.IsTopTypeForSubtyping()) {
    return StubCode::TopTypeTypeTest().raw();
  }

  if (type.IsCanonical()) {
    if (type.IsType()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      // BuildCodeForType() returns null if no optimized stub can be built
      // (e.g. the type is not amenable to cid-range checks).
      const Code& code = Code::Handle(
          TypeTestingStubGenerator::BuildCodeForType(Type::Cast(type)));
      if (!code.IsNull()) {
        return code.raw();
      }

      // Fall back to default.
#else
      // In the precompiled runtime we cannot lazily create new optimized type
      // testing stubs, so if we cannot find one, we'll just return the default
      // one.
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    }
  }
#endif  // !defined(TARGET_ARCH_IA32)
  return TypeTestingStubGenerator::DefaultCodeForType(
      type, /*lazy_specialize=*/false);
}
177 | |
178 | #if !defined(TARGET_ARCH_IA32) |
179 | #if !defined(DART_PRECOMPILED_RUNTIME) |
180 | |
// Assembles, finalizes and returns an optimized type testing stub for [type],
// or Code::null() if [type] cannot be handled with (generic) subtype
// cid-range checks.
//
// Requires the current thread to carry a HierarchyInfo.
CodePtr TypeTestingStubGenerator::BuildCodeForType(const Type& type) {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  HierarchyInfo* hi = thread->hierarchy_info();
  ASSERT(hi != NULL);

  // Only types checkable via (generic) cid-range checks get a custom stub.
  if (!hi->CanUseSubtypeRangeCheckFor(type) &&
      !hi->CanUseGenericSubtypeRangeCheckFor(type)) {
    return Code::null();
  }

  const Class& type_class = Class::Handle(type.type_class());
  ASSERT(!type_class.IsNull());

  // In bare-instructions AOT mode the slow path is reached via a pc-relative
  // call to the shared slow stub; otherwise it stays null.
  auto& slow_tts_stub = Code::ZoneHandle(zone);
  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    slow_tts_stub = thread->isolate()->object_store()->slow_tts_stub();
  }

  // To use the already-defined __ Macro !
  compiler::Assembler assembler(nullptr);
  compiler::UnresolvedPcRelativeCalls unresolved_calls;
  BuildOptimizedTypeTestStub(&assembler, &unresolved_calls, slow_tts_stub, hi,
                             type, type_class);

  // Record any pc-relative calls so the relocator can resolve them later.
  const auto& static_calls_table =
      Array::Handle(zone, compiler::StubCodeCompiler::BuildStaticCallsTable(
                              zone, &unresolved_calls));

  const char* name = namer_.StubNameForType(type);
  const auto pool_attachment = FLAG_use_bare_instructions
                                   ? Code::PoolAttachment::kNotAttachPool
                                   : Code::PoolAttachment::kAttachPool;

  Code& code = Code::Handle(thread->zone());
  auto install_code_fun = [&]() {
    code = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
                              /*optimized=*/false, /*stats=*/nullptr);
    if (!static_calls_table.IsNull()) {
      code.set_static_calls_target_table(static_calls_table);
    }
  };

  // We have to ensure no mutators are running, because:
  //
  //   a) We allocate an instructions object, which might cause us to
  //      temporarily flip page protections from (RX -> RW -> RX).
  //
  thread->isolate_group()->RunWithStoppedMutators(install_code_fun,
                                                  /*use_force_growth=*/true);

  Code::NotifyCodeObservers(name, code, /*optimized=*/false);

  code.set_owner(type);
#ifndef PRODUCT
  // Optionally dump the generated stub (and its object pool) for debugging.
  if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
    LogBlock lb;
    THR_Print("Code for stub '%s' (type = %s): {\n", name, type.ToCString());
    DisassembleToStdout formatter;
    code.Disassemble(&formatter);
    THR_Print("}\n");
    const ObjectPool& object_pool = ObjectPool::Handle(code.object_pool());
    if (!object_pool.IsNull()) {
      object_pool.DebugPrint();
    }
  }
#endif  // !PRODUCT

  return code.raw();
}
251 | |
// Emits the fast-path checks of an optimized type testing stub: a few
// special-cased types (int, Function, Object), then a subtype or generic
// subclass cid-range check, and finally a fast path for `null` if null is
// assignable to [type]. Paths that succeed return from the stub; failing
// paths fall through to the caller-emitted slow path.
void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const Type& type,
    const Class& type_class) {
  // These are handled via the TopTypeTypeTestStub!
  ASSERT(!type.IsTopTypeForSubtyping());

  // Fast case for 'int'.
  if (type.IsIntType()) {
    compiler::Label non_smi_value;
    __ BranchIfNotSmi(TypeTestABI::kInstanceReg, &non_smi_value);
    __ Ret();
    __ Bind(&non_smi_value);
  } else if (type.IsDartFunctionType()) {
    // Fast case for closures.
    // NOTE(review): this compares kScratchReg against kClosureCid without a
    // visible load of the instance's class id into kScratchReg — presumably
    // the cid was loaded by earlier stub code; confirm the caller's register
    // contract.
    compiler::Label continue_checking;
    __ CompareImmediate(TTSInternalRegs::kScratchReg, kClosureCid);
    __ BranchIf(NOT_EQUAL, &continue_checking);
    __ Ret();
    __ Bind(&continue_checking);

  } else if (type.IsObjectType()) {
    // Non-nullable Object succeeds for everything except `null`.
    ASSERT(type.IsNonNullable() && Isolate::Current()->null_safety());
    compiler::Label continue_checking;
    __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
    __ BranchIf(EQUAL, &continue_checking);
    __ Ret();
    __ Bind(&continue_checking);

  } else {
    // TODO(kustermann): Make more fast cases, e.g. Type::Number()
    // is implemented by Smi.
  }

  // Check the cid ranges which are a subtype of [type].
  if (hi->CanUseSubtypeRangeCheckFor(type)) {
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/false,
        /*exclude_null=*/!Instance::NullIsAssignableTo(type));

    // Smis can pass the check directly only if `int` is a subtype of [type].
    const Type& int_type = Type::Handle(Type::IntType());
    const bool smi_is_ok = int_type.IsSubtypeOf(type, Heap::kNew);

    BuildOptimizedSubtypeRangeCheck(assembler, ranges, smi_is_ok);
  } else {
    ASSERT(hi->CanUseGenericSubtypeRangeCheckFor(type));

    const intptr_t num_type_parameters = type_class.NumTypeParameters();
    const intptr_t num_type_arguments = type_class.NumTypeArguments();

    const TypeArguments& tp =
        TypeArguments::Handle(type_class.type_parameters());
    ASSERT(tp.Length() == num_type_parameters);

    const TypeArguments& ta = TypeArguments::Handle(type.arguments());
    ASSERT(ta.Length() == num_type_arguments);

    BuildOptimizedSubclassRangeCheckWithTypeArguments(assembler, hi, type,
                                                      type_class, tp, ta);
  }

  if (Instance::NullIsAssignableTo(type)) {
    // Fast case for 'null'.
    compiler::Label non_null;
    __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
    __ BranchIf(NOT_EQUAL, &non_null);
    __ Ret();
    __ Bind(&non_null);
  }
}
323 | |
// Emits a class-id range check of the instance in kInstanceReg against
// [ranges]. On success the stub returns; on failure control falls through
// (past the bound cid_range_failed label) to the slow path.
//
// [smi_is_ok] decides how Smis are handled: if true their implicit cid is
// looked up and checked like any other; if false a Smi fails immediately.
void TypeTestingStubGenerator::BuildOptimizedSubtypeRangeCheck(
    compiler::Assembler* assembler,
    const CidRangeVector& ranges,
    bool smi_is_ok) {
  compiler::Label cid_range_failed, is_subtype;

  if (smi_is_ok) {
    __ LoadClassIdMayBeSmi(TTSInternalRegs::kScratchReg,
                           TypeTestABI::kInstanceReg);
  } else {
    __ BranchIfSmi(TypeTestABI::kInstanceReg, &cid_range_failed);
    __ LoadClassId(TTSInternalRegs::kScratchReg, TypeTestABI::kInstanceReg);
  }

  FlowGraphCompiler::GenerateCidRangesCheck(
      assembler, TTSInternalRegs::kScratchReg, ranges, &is_subtype,
      &cid_range_failed, true);
  __ Bind(&is_subtype);
  __ Ret();
  __ Bind(&cid_range_failed);
}
345 | |
// Emits the generic-type check: first a sub*class* cid-range check, then a
// per-type-argument comparison of the instance's type argument vector
// against [type]'s arguments. On success the stub returns; all failure
// paths fall through past the bound check_failed label to the slow path.
void TypeTestingStubGenerator::
    BuildOptimizedSubclassRangeCheckWithTypeArguments(
        compiler::Assembler* assembler,
        HierarchyInfo* hi,
        const Type& type,
        const Class& type_class,
        const TypeArguments& tp,
        const TypeArguments& ta) {
  // a) First we make a quick sub*class* cid-range check.
  compiler::Label check_failed;
  ASSERT(!type_class.is_implemented());
  const CidRangeVector& ranges = hi->SubclassRangesForClass(type_class);
  BuildOptimizedSubclassRangeCheck(assembler, ranges, &check_failed);
  // fall through to continue

  // b) Then we'll load the values for the type parameters.
  __ LoadField(
      TTSInternalRegs::kInstanceTypeArgumentsReg,
      compiler::FieldAddress(
          TypeTestABI::kInstanceReg,
          compiler::target::Class::TypeArgumentsFieldOffset(type_class)));

  // The kernel frontend should fill in any non-assigned type parameters on
  // construction with dynamic/Object, so we should never get the null type
  // argument vector in created instances.
  //
  // TODO(kustermann): We could consider not using "null" as type argument
  // vector representing all-dynamic to avoid this extra check (which will be
  // uncommon because most Dart code in 2.0 will be strongly typed)!
  __ CompareObject(TTSInternalRegs::kInstanceTypeArgumentsReg,
                   Object::null_object());
  // A null vector means the instance is of the "rare type" (all-dynamic);
  // succeed only if the rare type is itself a subtype of [type].
  const Type& rare_type = Type::Handle(Type::RawCast(type_class.RareType()));
  if (rare_type.IsSubtypeOf(type, Heap::kNew)) {
    compiler::Label process_done;
    __ BranchIf(NOT_EQUAL, &process_done);
    __ Ret();
    __ Bind(&process_done);
  } else {
    __ BranchIf(EQUAL, &check_failed);
  }

  // c) Then we'll check each value of the type argument.
  AbstractType& type_arg = AbstractType::Handle();

  // Only the trailing [num_type_parameters] entries of the flattened vector
  // belong to [type_class] itself; superclass arguments need no check here.
  const intptr_t num_type_parameters = type_class.NumTypeParameters();
  const intptr_t num_type_arguments = type_class.NumTypeArguments();
  for (intptr_t i = 0; i < num_type_parameters; ++i) {
    const intptr_t type_param_value_offset_i =
        num_type_arguments - num_type_parameters + i;

    type_arg = ta.TypeAt(type_param_value_offset_i);
    ASSERT(type_arg.IsTypeParameter() ||
           hi->CanUseSubtypeRangeCheckFor(type_arg));

    BuildOptimizedTypeArgumentValueCheck(
        assembler, hi, type_arg, type_param_value_offset_i, &check_failed);
  }
  __ Ret();

  // If anything fails.
  __ Bind(&check_failed);
}
408 | |
// Emits a class-id range check against [ranges], branching to [check_failed]
// on mismatch and falling through on success. Unlike the subtype variant,
// success does not return from the stub — further checks follow.
void TypeTestingStubGenerator::BuildOptimizedSubclassRangeCheck(
    compiler::Assembler* assembler,
    const CidRangeVector& ranges,
    compiler::Label* check_failed) {
  __ LoadClassIdMayBeSmi(TTSInternalRegs::kScratchReg,
                         TypeTestABI::kInstanceReg);

  compiler::Label is_subtype;
  FlowGraphCompiler::GenerateCidRangesCheck(
      assembler, TTSInternalRegs::kScratchReg, ranges, &is_subtype,
      check_failed, true);
  __ Bind(&is_subtype);
}
422 | |
423 | // Generate code to verify that instance's type argument is a subtype of |
424 | // 'type_arg'. |
// Generate code to verify that instance's type argument is a subtype of
// 'type_arg'.
//
// The instance's type argument vector is expected in
// kInstanceTypeArgumentsReg; the value at index [type_param_value_offset_i]
// is checked. Branches to [check_failed] on mismatch; falls through on
// success.
void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const AbstractType& type_arg,
    intptr_t type_param_value_offset_i,
    compiler::Label* check_failed) {
  // Top types accept anything — no code needed.
  if (type_arg.IsTopTypeForSubtyping()) {
    return;
  }

  // If the upper bound is a type parameter and its value is "dynamic"
  // we always succeed.
  compiler::Label is_dynamic;
  if (type_arg.IsTypeParameter()) {
    // Compare the instantiated value of the parameter (loaded from the
    // instantiator or function type argument vector) against the instance's
    // type argument; only an exact (pointer-equal) match succeeds here.
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    const Register kTypeArgumentsReg =
        type_param.IsClassTypeParameter()
            ? TypeTestABI::kInstantiatorTypeArgumentsReg
            : TypeTestABI::kFunctionTypeArgumentsReg;

    // A null vector represents all-dynamic — accept immediately.
    __ CompareObject(kTypeArgumentsReg, Object::null_object());
    __ BranchIf(EQUAL, &is_dynamic);

    __ LoadField(
        TTSInternalRegs::kScratchReg,
        compiler::FieldAddress(kTypeArgumentsReg,
                               compiler::target::TypeArguments::type_at_offset(
                                   type_param.index())));
    __ CompareWithFieldValue(
        TTSInternalRegs::kScratchReg,
        compiler::FieldAddress(TTSInternalRegs::kInstanceTypeArgumentsReg,
                               compiler::target::TypeArguments::type_at_offset(
                                   type_param_value_offset_i)));
    __ BranchIf(NOT_EQUAL, check_failed);
  } else {
    // A concrete type argument: check the instance's argument class id
    // against the cid ranges of [type_arg]'s subtypes.
    const Class& type_class = Class::Handle(type_arg.type_class());
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/true,
        /*exclude_null=*/!Instance::NullIsAssignableTo(type_arg));

    __ LoadField(
        TTSInternalRegs::kScratchReg,
        compiler::FieldAddress(TTSInternalRegs::kInstanceTypeArgumentsReg,
                               compiler::target::TypeArguments::type_at_offset(
                                   type_param_value_offset_i)));
    __ LoadField(
        TTSInternalRegs::kScratchReg,
        compiler::FieldAddress(TTSInternalRegs::kScratchReg,
                               compiler::target::Type::type_class_id_offset()));

    compiler::Label is_subtype;
    // The stored class id is a Smi — untag before the range check.
    __ SmiUntag(TTSInternalRegs::kScratchReg);
    FlowGraphCompiler::GenerateCidRangesCheck(
        assembler, TTSInternalRegs::kScratchReg, ranges, &is_subtype,
        check_failed, true);
    __ Bind(&is_subtype);

    // Weak NNBD mode uses LEGACY_SUBTYPE which ignores nullability.
    // We don't need to check nullability of LHS for nullable and legacy RHS
    // ("Right Legacy", "Right Nullable" rules).
    if (Isolate::Current()->null_safety() && !type_arg.IsNullable() &&
        !type_arg.IsLegacy()) {
      // Nullable type is not a subtype of non-nullable type.
      // TODO(dartbug.com/40736): Allocate a register for instance type argument
      // and avoid reloading it.
      __ LoadField(TTSInternalRegs::kScratchReg,
                   compiler::FieldAddress(
                       TTSInternalRegs::kInstanceTypeArgumentsReg,
                       compiler::target::TypeArguments::type_at_offset(
                           type_param_value_offset_i)));
      __ CompareTypeNullabilityWith(TTSInternalRegs::kScratchReg,
                                    compiler::target::Nullability::kNullable);
      __ BranchIf(EQUAL, check_failed);
    }
  }

  __ Bind(&is_dynamic);
}
504 | |
// Records in [type_usage_info] which type argument vectors can reach an
// instance creation of [klass], based on the IL definition that produces
// the vector. Used by the AOT compiler to precompute type usage.
void RegisterTypeArgumentsUse(const Function& function,
                              TypeUsageInfo* type_usage_info,
                              const Class& klass,
                              Definition* type_arguments) {
  // The [type_arguments] can, in the general case, be any kind of [Definition]
  // but generally (in order of expected frequency)
  //
  // Case a)
  //   type_arguments <- Constant(#null)
  //   type_arguments <- Constant(#TypeArguments: [ ... ])
  //
  // Case b)
  //   type_arguments <- InstantiateTypeArguments(
  //       <type-expr-with-parameters>, ita, fta)
  //
  // Case c)
  //   type_arguments <- LoadField(vx)
  //   type_arguments <- LoadField(vx T{_ABC})
  //   type_arguments <- LoadField(vx T{Type: class: '_ABC'})
  //
  // Case d, e)
  //   type_arguments <- LoadIndexedUnsafe(rbp[vx + 16]))
  //   type_arguments <- Parameter(0)

  if (ConstantInstr* constant = type_arguments->AsConstant()) {
    // Case a): the vector is a compile-time constant (possibly null).
    const Object& object = constant->value();
    ASSERT(object.IsNull() || object.IsTypeArguments());
    const TypeArguments& type_arguments =
        TypeArguments::Handle(TypeArguments::RawCast(object.raw()));
    type_usage_info->UseTypeArgumentsInInstanceCreation(klass, type_arguments);
  } else if (InstantiateTypeArgumentsInstr* instantiate =
                 type_arguments->AsInstantiateTypeArguments()) {
    // Case b): record the uninstantiated vector being instantiated.
    const TypeArguments& ta = instantiate->type_arguments();
    ASSERT(!ta.IsNull());
    type_usage_info->UseTypeArgumentsInInstanceCreation(klass, ta);
  } else if (LoadFieldInstr* load_field = type_arguments->AsLoadField()) {
    // Case c): the vector is loaded from another instance.
    Definition* instance = load_field->instance()->definition();
    intptr_t cid = instance->Type()->ToNullableCid();
    if (cid == kDynamicCid) {
      // This is an approximation: If we only know the type, but not the cid, we
      // might have a this-dispatch where we know it's either this class or any
      // subclass.
      // We try to strengthen this assumption further down by checking the
      // offset of the type argument vector, but generally speaking this could
      // be a false-positive, which is still ok!
      const AbstractType& type = *instance->Type()->ToAbstractType();
      if (type.IsType()) {
        const Class& type_class = Class::Handle(type.type_class());
        if (type_class.NumTypeArguments() >= klass.NumTypeArguments()) {
          cid = type_class.id();
        }
      }
    }
    if (cid != kDynamicCid) {
      const Class& instance_klass =
          Class::Handle(Isolate::Current()->class_table()->At(cid));
      if (load_field->slot().IsTypeArguments() && instance_klass.IsGeneric() &&
          compiler::target::Class::TypeArgumentsFieldOffset(instance_klass) ==
              load_field->slot().offset_in_bytes()) {
        // This is a subset of Case c) above, namely forwarding the type
        // argument vector.
        //
        // We use the declaration type arguments for the instance creation,
        // which is a non-instantiated, expanded, type arguments vector.
        const Type& declaration_type =
            Type::Handle(instance_klass.DeclarationType());
        TypeArguments& declaration_type_args =
            TypeArguments::Handle(declaration_type.arguments());
        type_usage_info->UseTypeArgumentsInInstanceCreation(
            klass, declaration_type_args);
      }
    }
  } else if (type_arguments->IsParameter() ||
             type_arguments->IsLoadIndexedUnsafe()) {
    // Cases d), e): the vector is forwarded from the caller.
    // This happens in constructors with non-optional/optional parameters
    // where we forward the type argument vector to object allocation.
    //
    // Theoretically this could be a false-positive, which is still ok, but
    // practically it's guaranteed that this is a forward of a type argument
    // vector passed in by the caller.
    if (function.IsFactory()) {
      const Class& enclosing_class = Class::Handle(function.Owner());
      const Type& declaration_type =
          Type::Handle(enclosing_class.DeclarationType());
      TypeArguments& declaration_type_args =
          TypeArguments::Handle(declaration_type.arguments());
      type_usage_info->UseTypeArgumentsInInstanceCreation(
          klass, declaration_type_args);
    }
  } else {
    // It can also be a phi node where the inputs are any of the above,
    // or it could be the result of _prependTypeArguments call.
    ASSERT(type_arguments->IsPhi() || type_arguments->IsStaticCall());
  }
}
600 | |
601 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
602 | |
603 | #else // !defined(TARGET_ARCH_IA32) |
604 | |
605 | #if !defined(DART_PRECOMPILED_RUNTIME) |
// IA32 placeholder: type usage tracking is an AOT-only feature and AOT does
// not target IA32, so this must never be called.
void RegisterTypeArgumentsUse(const Function& function,
                              TypeUsageInfo* type_usage_info,
                              const Class& klass,
                              Definition* type_arguments) {
  // We only have a [TypeUsageInfo] object available during AOT compilation.
  UNREACHABLE();
}
613 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
614 | |
615 | #endif // !defined(TARGET_ARCH_IA32) |
616 | |
617 | #undef __ |
618 | |
// Instantiates [type_arguments] (a vector for [klass]) element-wise via
// InstantiateType() and returns the canonicalized result. The returned
// reference is backed by a scoped handle from this instantiator's pool.
const TypeArguments& TypeArgumentInstantiator::InstantiateTypeArguments(
    const Class& klass,
    const TypeArguments& type_arguments) {
  const intptr_t len = klass.NumTypeArguments();
  ScopedHandle<TypeArguments> instantiated_type_arguments(
      &type_arguments_handles_);
  *instantiated_type_arguments = TypeArguments::New(len);
  for (intptr_t i = 0; i < len; ++i) {
    type_ = type_arguments.TypeAt(i);
    type_ = InstantiateType(type_);
    instantiated_type_arguments->SetTypeAt(i, type_);
    // Each element must already be canonical (possibly behind a TypeRef).
    ASSERT(type_.IsCanonical() ||
           (type_.IsTypeRef() &&
            AbstractType::Handle(TypeRef::Cast(type_).type()).IsCanonical()));
  }
  *instantiated_type_arguments =
      instantiated_type_arguments->Canonicalize(NULL);
  return *instantiated_type_arguments;
}
638 | |
// Instantiates [type] against instantiator_type_arguments_.
//
// Class type parameters are replaced by the corresponding instantiator
// argument (or `dynamic` if the vector is null); class types have their
// argument vectors recursively instantiated and are re-canonicalized.
// Function types and TypeRefs (recursive types) are not supported.
AbstractTypePtr TypeArgumentInstantiator::InstantiateType(
    const AbstractType& type) {
  if (type.IsTypeParameter()) {
    const TypeParameter& parameter = TypeParameter::Cast(type);
    ASSERT(parameter.IsClassTypeParameter());
    ASSERT(parameter.IsFinalized());
    // A null instantiator vector represents all-dynamic.
    if (instantiator_type_arguments_.IsNull()) {
      return Type::DynamicType();
    }
    AbstractType& result = AbstractType::Handle(
        instantiator_type_arguments_.TypeAt(parameter.index()));
    // Combine the parameter's declared nullability with the substituted
    // type's nullability and normalize FutureOr types.
    result = result.SetInstantiatedNullability(TypeParameter::Cast(type),
                                               Heap::kOld);
    return result.NormalizeFutureOrType(Heap::kOld);
  } else if (type.IsFunctionType()) {
    // No support for function types yet.
    UNREACHABLE();
    return nullptr;
  } else if (type.IsTypeRef()) {
    // No support for recursive types.
    UNREACHABLE();
    return nullptr;
  } else if (type.IsType()) {
    // Already-instantiated (or argument-less) types pass through unchanged.
    if (type.IsInstantiated() || type.arguments() == TypeArguments::null()) {
      return type.raw();
    }

    const Type& from = Type::Cast(type);
    klass_ = from.type_class();

    // Build a fresh Type with a recursively instantiated argument vector.
    ScopedHandle<Type> to(&type_handles_);
    ScopedHandle<TypeArguments> to_type_arguments(&type_arguments_handles_);

    *to_type_arguments = TypeArguments::null();
    *to = Type::New(klass_, *to_type_arguments, type.token_pos());

    *to_type_arguments = from.arguments();
    to->set_arguments(InstantiateTypeArguments(klass_, *to_type_arguments));
    to->SetIsFinalized();
    *to ^= to->Canonicalize(NULL);

    return to->raw();
  }
  UNREACHABLE();
  return NULL;
}
685 | |
// Allocates one [TypeArgumentsSet] per class id (to record type argument
// vectors used in instance creations) and registers this object as the
// thread's current type usage info.
TypeUsageInfo::TypeUsageInfo(Thread* thread)
    : ThreadStackResource(thread),
      zone_(thread->zone()),
      finder_(zone_),
      assert_assignable_types_(),
      instance_creation_arguments_(
          new TypeArgumentsSet[thread->isolate()->class_table()->NumCids()]),
      klass_(Class::Handle(zone_)) {
  thread->set_type_usage_info(this);
}
696 | |
697 | TypeUsageInfo::~TypeUsageInfo() { |
698 | thread()->set_type_usage_info(NULL); |
699 | delete[] instance_creation_arguments_; |
700 | } |
701 | |
702 | void TypeUsageInfo::UseTypeInAssertAssignable(const AbstractType& type) { |
703 | if (!assert_assignable_types_.HasKey(&type)) { |
704 | AddTypeToSet(&assert_assignable_types_, &type); |
705 | } |
706 | } |
707 | |
// Records that instances of [klass] are created with type argument vector
// [ta]. The vector is registered for [klass] and every generic superclass.
// Only null or canonical vectors are recorded.
void TypeUsageInfo::UseTypeArgumentsInInstanceCreation(
    const Class& klass,
    const TypeArguments& ta) {
  if (ta.IsNull() || ta.IsCanonical()) {
    // The Dart VM performs an optimization where it re-uses type argument
    // vectors if the use-site needs a prefix of an already-existent type
    // arguments vector.
    //
    // For example:
    //
    //    class Foo<K, V> {
    //      foo() => new Bar<K>();
    //    }
    //
    // So the length of the type arguments vector can be longer than the number
    // of type arguments the class expects.
    ASSERT(ta.IsNull() || klass.NumTypeArguments() <= ta.Length());

    // If this is a non-instantiated [TypeArguments] object, then it refers to
    // type parameters. We need to ensure the type parameters in [ta] only
    // refer to type parameters in the class.
    if (!ta.IsNull() && !ta.IsInstantiated() &&
        finder_.FindClass(ta).IsNull()) {
      return;
    }

    // Register the vector with [klass] and all generic superclasses so that
    // propagation later sees it at every level of the hierarchy.
    klass_ = klass.raw();
    while (klass_.NumTypeArguments() > 0) {
      const intptr_t cid = klass_.id();
      TypeArgumentsSet& set = instance_creation_arguments_[cid];
      if (!set.HasKey(&ta)) {
        set.Insert(&TypeArguments::ZoneHandle(zone_, ta.raw()));
      }
      klass_ = klass_.SuperClass();
    }
  }
}
745 | |
// Finalizes the collected raw usage data into the information the AOT
// compiler consumes: propagates instantiated type argument vectors through
// the class hierarchy, then closes the set of types tested against under
// the type parameters they can flow into.
void TypeUsageInfo::BuildTypeUsageInformation() {
  ClassTable* class_table = thread()->isolate()->class_table();
  const intptr_t cid_count = class_table->NumCids();

  // Step 1) Propagate instantiated type argument vectors.
  PropagateTypeArguments(class_table, cid_count);

  // Step 2) Collect the type parameters we're interested in.
  TypeParameterSet parameters_tested_against;
  CollectTypeParametersUsedInAssertAssignable(&parameters_tested_against);

  // Step 3) Add all types which flow into a type parameter we test against to
  // the set of types tested against.
  UpdateAssertAssignableTypes(class_table, cid_count,
                              &parameters_tested_against);
}
762 | |
763 | void TypeUsageInfo::PropagateTypeArguments(ClassTable* class_table, |
764 | intptr_t cid_count) { |
765 | // See comment in .h file for what this method does. |
766 | |
767 | Class& klass = Class::Handle(zone_); |
768 | TypeArguments& temp_type_arguments = TypeArguments::Handle(zone_); |
769 | |
770 | // We cannot modify a set while we are iterating over it, so we delay the |
771 | // addition to the set to the point when iteration has finished and use this |
772 | // list as temporary storage. |
773 | GrowableObjectArray& delayed_type_argument_set = |
774 | GrowableObjectArray::Handle(zone_, GrowableObjectArray::New()); |
775 | |
776 | TypeArgumentInstantiator instantiator(zone_); |
777 | |
778 | const intptr_t kPropgationRounds = 2; |
779 | for (intptr_t round = 0; round < kPropgationRounds; ++round) { |
780 | for (intptr_t cid = 0; cid < cid_count; ++cid) { |
781 | if (!class_table->IsValidIndex(cid) || |
782 | !class_table->HasValidClassAt(cid)) { |
783 | continue; |
784 | } |
785 | |
786 | klass = class_table->At(cid); |
787 | bool null_in_delayed_type_argument_set = false; |
788 | delayed_type_argument_set.SetLength(0); |
789 | |
790 | auto it = instance_creation_arguments_[cid].GetIterator(); |
791 | for (const TypeArguments** type_arguments = it.Next(); |
792 | type_arguments != nullptr; type_arguments = it.Next()) { |
793 | // We have a "type allocation" with "klass<type_arguments[0:N]>". |
794 | if (!(*type_arguments)->IsNull() && |
795 | !(*type_arguments)->IsInstantiated()) { |
796 | const Class& enclosing_class = finder_.FindClass(**type_arguments); |
797 | if (!klass.IsNull()) { |
798 | // We know that "klass<type_arguments[0:N]>" happens inside |
799 | // [enclosing_class]. |
800 | if (enclosing_class.raw() != klass.raw()) { |
801 | // Now we try to instantiate [type_arguments] with all the known |
802 | // instantiator type argument vectors of the [enclosing_class]. |
803 | const intptr_t enclosing_class_cid = enclosing_class.id(); |
804 | TypeArgumentsSet& instantiator_set = |
805 | instance_creation_arguments_[enclosing_class_cid]; |
806 | auto it2 = instantiator_set.GetIterator(); |
807 | for (const TypeArguments** instantiator_type_arguments = |
808 | it2.Next(); |
809 | instantiator_type_arguments != nullptr; |
810 | instantiator_type_arguments = it2.Next()) { |
811 | // We have also a "type allocation" with |
812 | // "enclosing_class<instantiator_type_arguments[0:M]>". |
813 | if ((*instantiator_type_arguments)->IsNull() || |
814 | (*instantiator_type_arguments)->IsInstantiated()) { |
815 | temp_type_arguments = instantiator.Instantiate( |
816 | klass, **type_arguments, **instantiator_type_arguments); |
817 | if (temp_type_arguments.IsNull() && |
818 | !null_in_delayed_type_argument_set) { |
819 | null_in_delayed_type_argument_set = true; |
820 | delayed_type_argument_set.Add(temp_type_arguments); |
821 | } else { |
822 | delayed_type_argument_set.Add(temp_type_arguments); |
823 | } |
824 | } |
825 | } |
826 | } |
827 | } |
828 | } |
829 | } |
830 | |
831 | // Now we add the [delayed_type_argument_set] elements to the set of |
832 | // instantiator type arguments of [klass] (and its superclasses). |
833 | if (delayed_type_argument_set.Length() > 0) { |
834 | while (klass.NumTypeArguments() > 0) { |
835 | TypeArgumentsSet& type_argument_set = |
836 | instance_creation_arguments_[klass.id()]; |
837 | const intptr_t len = delayed_type_argument_set.Length(); |
838 | for (intptr_t i = 0; i < len; ++i) { |
839 | temp_type_arguments = |
840 | TypeArguments::RawCast(delayed_type_argument_set.At(i)); |
841 | if (!type_argument_set.HasKey(&temp_type_arguments)) { |
842 | type_argument_set.Insert( |
843 | &TypeArguments::ZoneHandle(zone_, temp_type_arguments.raw())); |
844 | } |
845 | } |
846 | klass = klass.SuperClass(); |
847 | } |
848 | } |
849 | } |
850 | } |
851 | } |
852 | |
853 | void TypeUsageInfo::CollectTypeParametersUsedInAssertAssignable( |
854 | TypeParameterSet* set) { |
855 | TypeParameter& param = TypeParameter::Handle(zone_); |
856 | auto it = assert_assignable_types_.GetIterator(); |
857 | for (const AbstractType** type = it.Next(); type != nullptr; |
858 | type = it.Next()) { |
859 | AddToSetIfParameter(set, *type, ¶m); |
860 | } |
861 | } |
862 | |
863 | void TypeUsageInfo::UpdateAssertAssignableTypes( |
864 | ClassTable* class_table, |
865 | intptr_t cid_count, |
866 | TypeParameterSet* parameters_tested_against) { |
867 | Class& klass = Class::Handle(zone_); |
868 | TypeParameter& param = TypeParameter::Handle(zone_); |
869 | TypeArguments& params = TypeArguments::Handle(zone_); |
870 | AbstractType& type = AbstractType::Handle(zone_); |
871 | |
872 | // Because Object/dynamic are common values for type parameters, we add them |
873 | // eagerly and avoid doing it down inside the loop. |
874 | type = Type::DynamicType(); |
875 | UseTypeInAssertAssignable(type); |
876 | type = Type::ObjectType(); // TODO(regis): Add nullable Object? |
877 | UseTypeInAssertAssignable(type); |
878 | |
879 | for (intptr_t cid = 0; cid < cid_count; ++cid) { |
880 | if (!class_table->IsValidIndex(cid) || !class_table->HasValidClassAt(cid)) { |
881 | continue; |
882 | } |
883 | klass = class_table->At(cid); |
884 | if (klass.NumTypeArguments() <= 0) { |
885 | continue; |
886 | } |
887 | |
888 | const intptr_t num_parameters = klass.NumTypeParameters(); |
889 | params = klass.type_parameters(); |
890 | for (intptr_t i = 0; i < num_parameters; ++i) { |
891 | param ^= params.TypeAt(i); |
892 | if (parameters_tested_against->HasKey(¶m)) { |
893 | TypeArgumentsSet& ta_set = instance_creation_arguments_[cid]; |
894 | auto it = ta_set.GetIterator(); |
895 | for (const TypeArguments** ta = it.Next(); ta != nullptr; |
896 | ta = it.Next()) { |
897 | // We only add instantiated types to the set (and dynamic/Object were |
898 | // already handled above). |
899 | if (!(*ta)->IsNull()) { |
900 | type = (*ta)->TypeAt(i); |
901 | if (type.IsInstantiated()) { |
902 | UseTypeInAssertAssignable(type); |
903 | } |
904 | } |
905 | } |
906 | } |
907 | } |
908 | } |
909 | } |
910 | |
911 | void TypeUsageInfo::AddToSetIfParameter(TypeParameterSet* set, |
912 | const AbstractType* type, |
913 | TypeParameter* param) { |
914 | if (type->IsTypeParameter()) { |
915 | *param ^= type->raw(); |
916 | if (!param->IsNull() && !set->HasKey(param)) { |
917 | set->Insert(&TypeParameter::Handle(zone_, param->raw())); |
918 | } |
919 | } |
920 | } |
921 | |
922 | void TypeUsageInfo::AddTypeToSet(TypeSet* set, const AbstractType* type) { |
923 | if (!set->HasKey(type)) { |
924 | set->Insert(&AbstractType::ZoneHandle(zone_, type->raw())); |
925 | } |
926 | } |
927 | |
928 | bool TypeUsageInfo::IsUsedInTypeTest(const AbstractType& type) { |
929 | const AbstractType* dereferenced_type = &type; |
930 | if (type.IsTypeRef()) { |
931 | dereferenced_type = &AbstractType::Handle(TypeRef::Cast(type).type()); |
932 | } |
933 | if (dereferenced_type->IsFinalized()) { |
934 | return assert_assignable_types_.HasKey(dereferenced_type); |
935 | } |
936 | return false; |
937 | } |
938 | |
939 | #if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME) |
940 | |
941 | void DeoptimizeTypeTestingStubs() { |
942 | class CollectTypes : public ObjectVisitor { |
943 | public: |
944 | CollectTypes(GrowableArray<AbstractType*>* types, Zone* zone) |
945 | : types_(types), object_(Object::Handle(zone)), zone_(zone) {} |
946 | |
947 | void VisitObject(ObjectPtr object) { |
948 | if (object->IsPseudoObject()) { |
949 | // Cannot even be wrapped in handles. |
950 | return; |
951 | } |
952 | object_ = object; |
953 | if (object_.IsAbstractType()) { |
954 | types_->Add( |
955 | &AbstractType::Handle(zone_, AbstractType::RawCast(object))); |
956 | } |
957 | } |
958 | |
959 | private: |
960 | GrowableArray<AbstractType*>* types_; |
961 | Object& object_; |
962 | Zone* zone_; |
963 | }; |
964 | |
965 | Thread* thread = Thread::Current(); |
966 | TIMELINE_DURATION(thread, Isolate, "DeoptimizeTypeTestingStubs" ); |
967 | HANDLESCOPE(thread); |
968 | Zone* zone = thread->zone(); |
969 | GrowableArray<AbstractType*> types; |
970 | { |
971 | HeapIterationScope iter(thread); |
972 | CollectTypes visitor(&types, zone); |
973 | iter.IterateObjects(&visitor); |
974 | } |
975 | |
976 | TypeTestingStubGenerator generator; |
977 | Code& code = Code::Handle(zone); |
978 | for (intptr_t i = 0; i < types.length(); i++) { |
979 | code = generator.DefaultCodeForType(*types[i]); |
980 | types[i]->SetTypeTestingStub(code); |
981 | } |
982 | } |
983 | |
984 | #endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME) |
985 | |
986 | } // namespace dart |
987 | |