1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/object.h"
6
7#include <memory>
8
9#include "include/dart_api.h"
10#include "platform/assert.h"
11#include "platform/text_buffer.h"
12#include "platform/unaligned.h"
13#include "platform/unicode.h"
14#include "vm/bit_vector.h"
15#include "vm/bootstrap.h"
16#include "vm/class_finalizer.h"
17#include "vm/code_comments.h"
18#include "vm/code_descriptors.h"
19#include "vm/code_observers.h"
20#include "vm/compiler/assembler/disassembler.h"
21#include "vm/compiler/assembler/disassembler_kbc.h"
22#include "vm/compiler/jit/compiler.h"
23#include "vm/cpu.h"
24#include "vm/dart.h"
25#include "vm/dart_api_state.h"
26#include "vm/dart_entry.h"
27#include "vm/datastream.h"
28#include "vm/debugger.h"
29#include "vm/deopt_instructions.h"
30#include "vm/double_conversion.h"
31#include "vm/elf.h"
32#include "vm/exceptions.h"
33#include "vm/growable_array.h"
34#include "vm/hash.h"
35#include "vm/hash_table.h"
36#include "vm/heap/become.h"
37#include "vm/heap/heap.h"
38#include "vm/heap/weak_code.h"
39#include "vm/image_snapshot.h"
40#include "vm/isolate_reload.h"
41#include "vm/kernel.h"
42#include "vm/kernel_binary.h"
43#include "vm/kernel_isolate.h"
44#include "vm/kernel_loader.h"
45#include "vm/native_symbol.h"
46#include "vm/object_store.h"
47#include "vm/parser.h"
48#include "vm/profiler.h"
49#include "vm/resolver.h"
50#include "vm/reusable_handles.h"
51#include "vm/runtime_entry.h"
52#include "vm/scopes.h"
53#include "vm/stack_frame.h"
54#include "vm/stub_code.h"
55#include "vm/symbols.h"
56#include "vm/tags.h"
57#include "vm/thread_registry.h"
58#include "vm/timeline.h"
59#include "vm/type_table.h"
60#include "vm/type_testing_stubs.h"
61#include "vm/zone_text_buffer.h"
62
63#if !defined(DART_PRECOMPILED_RUNTIME)
64#include "vm/compiler/aot/precompiler.h"
65#include "vm/compiler/assembler/assembler.h"
66#include "vm/compiler/backend/code_statistics.h"
67#include "vm/compiler/compiler_state.h"
68#include "vm/compiler/frontend/bytecode_fingerprints.h"
69#include "vm/compiler/frontend/bytecode_reader.h"
70#include "vm/compiler/frontend/kernel_fingerprints.h"
71#include "vm/compiler/frontend/kernel_translation_helper.h"
72#include "vm/compiler/intrinsifier.h"
73#endif // !defined(DART_PRECOMPILED_RUNTIME)
74
75namespace dart {
76
DEFINE_FLAG(int,
            huge_method_cutoff_in_code_size,
            200000,
            "Huge method cutoff in unoptimized code size (in bytes).");
DEFINE_FLAG(
    bool,
    show_internal_names,
    false,
    "Show names of internal classes (e.g. \"OneByteString\") in error messages "
    "instead of showing the corresponding interface names (e.g. \"String\"). "
    "Also show legacy nullability in type names.");
DEFINE_FLAG(bool, use_lib_cache, false, "Use library name cache");
DEFINE_FLAG(bool, use_exp_cache, false, "Use library exported name cache");

DEFINE_FLAG(bool,
            remove_script_timestamps_for_test,
            false,
            "Remove script timestamps to allow for deterministic testing.");

// Flags defined in other compilation units but consulted in this file.
DECLARE_FLAG(bool, dual_map_code);
DECLARE_FLAG(bool, intrinsify);
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, trace_deoptimization_verbose);
DECLARE_FLAG(bool, trace_reload);
DECLARE_FLAG(bool, write_protect_code);
DECLARE_FLAG(bool, precompiled_mode);
DECLARE_FLAG(int, max_polymorphic_checks);

// Prefixes the VM uses when mangling accessor/initializer names
// (e.g. "get:foo" is the getter for field "foo"), with their lengths
// precomputed for scanning code.
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
static const char* const kInitPrefix = "init:";
static const intptr_t kInitPrefixLength = strlen(kInitPrefix);
111
// A cache of VM heap allocated preinitialized empty ic data entry arrays.
ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount];
// A VM heap allocated preinitialized empty subtype entry array.
ArrayPtr SubtypeTestCache::cached_array_;

// C++ vtable pointers for the predefined class ids, captured from fake
// handles in Object::InitVtables and indexed by class id.
cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {};

// These are initialized to a value that will force a illegal memory access if
// they are being used.
#if defined(RAW_NULL)
#error RAW_NULL should not be defined.
#endif
#define RAW_NULL static_cast<uword>(kHeapObjectTag)

// Evaluates 'error' once and returns it from the enclosing function if it is
// a real error (i.e. not Error::null()).
#define CHECK_ERROR(error)                                                     \
  {                                                                            \
    ErrorPtr err = (error);                                                    \
    if (err != Error::null()) {                                                \
      return err;                                                              \
    }                                                                          \
  }

// Storage for the shared read-only handles; they are allocated and assigned
// in Object::Init.
#define DEFINE_SHARED_READONLY_HANDLE(Type, name)                              \
  Type* Object::name##_ = nullptr;
SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE)
#undef DEFINE_SHARED_READONLY_HANDLE
138
// Raw pointers to the canonical VM-isolate objects and VM-internal classes.
// All start out as RAW_NULL (a tagged non-pointer) so that any use before
// Object::Init / Object::InitNullAndBool faults immediately.
ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL);
BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL);
BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL);
ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::signature_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::redirection_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::bytecode_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::dyncalltypecheck_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::monomorphicsmiablecall_class_ =
    static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::weak_serialization_reference_class_ =
    static_cast<ClassPtr>(RAW_NULL);

// Load factor used for MegamorphicCache tables.
const double MegamorphicCache::kLoadFactor = 0.50;
186
187static void AppendSubString(BaseTextBuffer* buffer,
188 const char* name,
189 intptr_t start_pos,
190 intptr_t len) {
191 buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]);
192}
193
194// Remove private keys, but retain getter/setter/constructor/mixin manglings.
195StringPtr String::RemovePrivateKey(const String& name) {
196 ASSERT(name.IsOneByteString());
197 GrowableArray<uint8_t> without_key(name.Length());
198 intptr_t i = 0;
199 while (i < name.Length()) {
200 while (i < name.Length()) {
201 uint8_t c = name.CharAt(i++);
202 if (c == '@') break;
203 without_key.Add(c);
204 }
205 while (i < name.Length()) {
206 uint8_t c = name.CharAt(i);
207 if ((c < '0') || (c > '9')) break;
208 i++;
209 }
210 }
211
212 return String::FromLatin1(without_key.data(), without_key.length());
213}
214
215// Takes a vm internal name and makes it suitable for external user.
216//
217// Examples:
218//
219// Internal getter and setter prefixes are changed:
220//
221// get:foo -> foo
222// set:foo -> foo=
223//
224// Private name mangling is removed, possibly multiple times:
225//
226// _ReceivePortImpl@709387912 -> _ReceivePortImpl
227// _ReceivePortImpl@709387912._internal@709387912 ->
228// _ReceivePortImpl._internal
229// _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F
230//
231// The trailing . on the default constructor name is dropped:
232//
233// List. -> List
234//
235// And so forth:
236//
237// get:foo@6328321 -> foo
238// _MyClass@6328321. -> _MyClass
239// _MyClass@6328321.named -> _MyClass.named
240//
241// For extension methods the following demangling is done
242// ext|func -> ext.func (instance extension method)
243// ext|get#prop -> ext.prop (instance extension getter)
244// ext|set#prop -> ext.prop= (instance extension setter)
245// ext|sfunc -> ext.sfunc (static extension method)
246// get:ext|sprop -> ext.sprop (static extension getter)
247// set:ext|sprop -> ext.sprop= (static extension setter)
248//
249const char* String::ScrubName(const String& name, bool is_extension) {
250 Thread* thread = Thread::Current();
251 NoSafepointScope no_safepoint(thread);
252 Zone* zone = thread->zone();
253 ZoneTextBuffer printer(zone);
254
255#if !defined(DART_PRECOMPILED_RUNTIME)
256 if (name.Equals(Symbols::TopLevel())) {
257 // Name of invisible top-level class.
258 return "";
259 }
260#endif // !defined(DART_PRECOMPILED_RUNTIME)
261
262 const char* cname = name.ToCString();
263 ASSERT(strlen(cname) == static_cast<size_t>(name.Length()));
264 const intptr_t name_len = name.Length();
265 // First remove all private name mangling and if 'is_extension' is true
266 // substitute the first '|' character with '.'.
267 intptr_t start_pos = 0;
268 intptr_t sum_segment_len = 0;
269 for (intptr_t i = 0; i < name_len; i++) {
270 if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') &&
271 (cname[i + 1] <= '9')) {
272 // Append the current segment to the unmangled name.
273 const intptr_t segment_len = i - start_pos;
274 sum_segment_len += segment_len;
275 AppendSubString(&printer, cname, start_pos, segment_len);
276 // Advance until past the name mangling. The private keys are only
277 // numbers so we skip until the first non-number.
278 i++; // Skip the '@'.
279 while ((i < name.Length()) && (name.CharAt(i) >= '0') &&
280 (name.CharAt(i) <= '9')) {
281 i++;
282 }
283 start_pos = i;
284 i--; // Account for for-loop increment.
285 } else if (is_extension && cname[i] == '|') {
286 // Append the current segment to the unmangled name.
287 const intptr_t segment_len = i - start_pos;
288 AppendSubString(&printer, cname, start_pos, segment_len);
289 // Append the '.' character (replaces '|' with '.').
290 AppendSubString(&printer, ".", 0, 1);
291 start_pos = i + 1;
292 // Account for length of segments added so far.
293 sum_segment_len += (segment_len + 1);
294 }
295 }
296
297 const char* unmangled_name = NULL;
298 if (start_pos == 0) {
299 // No name unmangling needed, reuse the name that was passed in.
300 unmangled_name = cname;
301 sum_segment_len = name_len;
302 } else if (name.Length() != start_pos) {
303 // Append the last segment.
304 const intptr_t segment_len = name.Length() - start_pos;
305 sum_segment_len += segment_len;
306 AppendSubString(&printer, cname, start_pos, segment_len);
307 }
308 if (unmangled_name == NULL) {
309 // Merge unmangled_segments.
310 unmangled_name = printer.buffer();
311 }
312
313 printer.Clear();
314 intptr_t start = 0;
315 intptr_t final_len = 0;
316 intptr_t len = sum_segment_len;
317 bool is_setter = false;
318 if (is_extension) {
319 // First scan till we see the '.' character.
320 for (intptr_t i = 0; i < len; i++) {
321 if (unmangled_name[i] == '.') {
322 intptr_t slen = i + 1;
323 intptr_t plen = slen - start;
324 AppendSubString(&printer, unmangled_name, start, plen);
325 final_len = plen;
326 unmangled_name += slen;
327 len -= slen;
328 break;
329 } else if (unmangled_name[i] == ':') {
330 if (start != 0) {
331 // Reset and break.
332 start = 0;
333 is_setter = false;
334 break;
335 }
336 if (unmangled_name[0] == 's') {
337 is_setter = true;
338 }
339 start = i + 1;
340 }
341 }
342 }
343 intptr_t dot_pos = -1; // Position of '.' in the name, if any.
344 start = 0;
345 for (intptr_t i = start; i < len; i++) {
346 if (unmangled_name[i] == ':' ||
347 (is_extension && unmangled_name[i] == '#')) {
348 if (start != 0) {
349 // Reset and break.
350 start = 0;
351 dot_pos = -1;
352 break;
353 }
354 ASSERT(start == 0); // Only one : is possible in getters or setters.
355 if (unmangled_name[0] == 's') {
356 ASSERT(!is_setter);
357 is_setter = true;
358 }
359 start = i + 1;
360 } else if (unmangled_name[i] == '.') {
361 if (dot_pos != -1) {
362 // Reset and break.
363 start = 0;
364 dot_pos = -1;
365 break;
366 }
367 ASSERT(dot_pos == -1); // Only one dot is supported.
368 dot_pos = i;
369 }
370 }
371
372 if (!is_extension && (start == 0) && (dot_pos == -1)) {
373 // This unmangled_name is fine as it is.
374 return unmangled_name;
375 }
376
377 // Drop the trailing dot if needed.
378 intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len;
379
380 intptr_t substr_len = end - start;
381 final_len += substr_len;
382 AppendSubString(&printer, unmangled_name, start, substr_len);
383 if (is_setter) {
384 const char* equals = Symbols::Equals().ToCString();
385 const intptr_t equals_len = strlen(equals);
386 AppendSubString(&printer, equals, 0, equals_len);
387 final_len += equals_len;
388 }
389
390 return printer.buffer();
391}
392
// Like ScrubName, but keeps the private library key ('@<digits>') in the
// result. In precompiled (AOT) runtimes the name is returned unchanged.
StringPtr String::ScrubNameRetainPrivate(const String& name,
                                         bool is_extension) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  intptr_t len = name.Length();
  intptr_t start = 0;
  intptr_t at_pos = -1;  // Position of '@' in the name, if any.
  bool is_setter = false;

  String& result = String::Handle();

  // If extension strip out the leading prefix e.g" ext|func would strip out
  // 'ext|'.
  if (is_extension) {
    // First scan till we see the '|' character.
    for (intptr_t i = 0; i < len; i++) {
      if (name.CharAt(i) == '|') {
        // Keep the extension prefix, replacing '|' with '.'.
        result = String::SubString(name, start, (i - start));
        result = String::Concat(result, Symbols::Dot());
        start = i + 1;
        break;
      } else if (name.CharAt(i) == ':') {
        if (start != 0) {
          // Reset and break.
          start = 0;
          is_setter = false;
          break;
        }
        if (name.CharAt(0) == 's') {
          // "set:" prefix marks a setter.
          is_setter = true;
        }
        start = i + 1;
      }
    }
  }

  // Scan the remainder for getter/setter prefixes (':'), the extension
  // marker ('#'), and the private key position ('@').
  for (intptr_t i = start; i < len; i++) {
    if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) {
      // Only one : is possible in getters or setters.
      ASSERT(is_extension || start == 0);
      if (name.CharAt(start) == 's') {
        is_setter = true;
      }
      start = i + 1;
    } else if (name.CharAt(i) == '@') {
      // Setters should have only one @ so we know where to put the =.
      ASSERT(!is_setter || (at_pos == -1));
      at_pos = i;
    }
  }

  if (start == 0) {
    // This unmangled_name is fine as it is.
    return name.raw();
  }

  if (is_extension) {
    const String& fname =
        String::Handle(String::SubString(name, start, (len - start)));
    result = String::Concat(result, fname);
  } else {
    result = String::SubString(name, start, (len - start));
  }

  if (is_setter) {
    // Setters need to end with '='.
    if (at_pos == -1) {
      return String::Concat(result, Symbols::Equals());
    } else {
      // 'at_pos' indexes into 'name', while 'result' no longer contains the
      // stripped prefix; the '- 4' compensates for the 4-character "set:".
      // NOTE(review): assumes exactly a 4-character prefix was stripped —
      // verify this holds on the extension-method path as well.
      const String& pre_at =
          String::Handle(String::SubString(result, 0, at_pos - 4));
      const String& post_at =
          String::Handle(String::SubString(name, at_pos, len - at_pos));
      result = String::Concat(pre_at, Symbols::Equals());
      result = String::Concat(result, post_at);
    }
  }

  return result.raw();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  // Only reached in DART_PRECOMPILED_RUNTIME builds (the block above is
  // compiled out); in JIT builds this line is unreachable.
  return name.raw();  // In AOT, return argument unchanged.
}
474
// Returns true for characters that need a single-character backslash escape
// when printed inside a string literal: '"', '$', '\\' and the standard
// control-character escapes.
template <typename type>
static bool IsSpecialCharacter(type value) {
  switch (value) {
    case '"':
    case '\n':
    case '\f':
    case '\b':
    case '\t':
    case '\v':
    case '\r':
    case '\\':
    case '$':
      return true;
    default:
      return false;
  }
}
481
// Returns true for ASCII control characters: code points 0..31 and DEL (127).
static inline bool IsAsciiNonprintable(int32_t c) {
  if (c == 127) {
    return true;
  }
  return (c >= 0) && (c <= 31);
}
485
486static int32_t EscapeOverhead(int32_t c) {
487 if (IsSpecialCharacter(c)) {
488 return 1; // 1 additional byte for the backslash.
489 } else if (IsAsciiNonprintable(c)) {
490 return 3; // 3 additional bytes to encode c as \x00.
491 }
492 return 0;
493}
494
495template <typename type>
496static type SpecialCharacter(type value) {
497 if (value == '"') {
498 return '"';
499 } else if (value == '\n') {
500 return 'n';
501 } else if (value == '\f') {
502 return 'f';
503 } else if (value == '\b') {
504 return 'b';
505 } else if (value == '\t') {
506 return 't';
507 } else if (value == '\v') {
508 return 'v';
509 } else if (value == '\r') {
510 return 'r';
511 } else if (value == '\\') {
512 return '\\';
513 } else if (value == '$') {
514 return '$';
515 }
516 UNREACHABLE();
517 return '\0';
518}
519
// Creates a Bytecode object for one of the VM's internal bytecode stubs
// identified by 'opcode'. The instructions are statically allocated (fetched
// via GetVMInternalBytecodeInstructions) and the object carries empty pc
// descriptors, exception handlers and object pool.
static BytecodePtr CreateVMInternalBytecode(KernelBytecode::Opcode opcode) {
  const KBCInstr* instructions = nullptr;
  intptr_t instructions_size = 0;

  KernelBytecode::GetVMInternalBytecodeInstructions(opcode, &instructions,
                                                    &instructions_size);

  // NOTE(review): the '-1' third argument presumably means "no instructions
  // offset" — confirm against Bytecode::New's declaration.
  const auto& bytecode = Bytecode::Handle(
      Bytecode::New(reinterpret_cast<uword>(instructions), instructions_size,
                    -1, Object::empty_object_pool()));
  bytecode.set_pc_descriptors(Object::empty_descriptors());
  bytecode.set_exception_handlers(Object::empty_exception_handlers());
  return bytecode.raw();
}
534
// Allocates and initializes the canonical null, true and false objects in the
// VM isolate's heap. Must run before any other object setup: 'null_' has to
// be the very first allocation because InitializeObject relies on it to
// clear newly allocated objects.
void Object::InitNullAndBool(Isolate* isolate) {
  // Should only be run by the vm isolate.
  ASSERT(isolate == Dart::vm_isolate());

  // TODO(iposva): NoSafepointScope needs to be added here.
  ASSERT(class_class() == null_);

  Heap* heap = isolate->heap();

  // Allocate and initialize the null instance.
  // 'null_' must be the first object allocated as it is used in allocation to
  // clear the object.
  {
    uword address = heap->Allocate(Instance::InstanceSize(), Heap::kOld);
    null_ = static_cast<InstancePtr>(address + kHeapObjectTag);
    // The call below is using 'null_' to initialize itself.
    InitializeObject(address, kNullCid, Instance::InstanceSize());
  }

  // Allocate and initialize the bool instances.
  // These must be allocated such that at kBoolValueBitPosition, the address
  // of true is 0 and the address of false is 1, and their addresses are
  // otherwise identical.
  {
    // Allocate a dummy bool object to give true the desired alignment.
    // The dummy is never stored anywhere; it only pads the heap so that the
    // next allocation (true) lands on the required address.
    uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
    InitializeObject(address, kBoolCid, Bool::InstanceSize());
    static_cast<BoolPtr>(address + kHeapObjectTag)->ptr()->value_ = false;
  }
  {
    // Allocate true.
    uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
    true_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject(address, kBoolCid, Bool::InstanceSize());
    true_->ptr()->value_ = true;
    true_->ptr()->SetCanonical();
  }
  {
    // Allocate false.
    uword address = heap->Allocate(Bool::InstanceSize(), Heap::kOld);
    false_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject(address, kBoolCid, Bool::InstanceSize());
    false_->ptr()->value_ = false;
    false_->ptr()->SetCanonical();
  }

  // Check that the objects have been allocated at appropriate addresses.
  // These layout invariants (fixed offsets from null, and true/false
  // differing only in kBoolValueMask) are asserted here so later code can
  // rely on simple address arithmetic.
  ASSERT(static_cast<uword>(true_) ==
         static_cast<uword>(null_) + kTrueOffsetFromNull);
  ASSERT(static_cast<uword>(false_) ==
         static_cast<uword>(null_) + kFalseOffsetFromNull);
  ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0);
  ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0);
  ASSERT(static_cast<uword>(false_) ==
         (static_cast<uword>(true_) | kBoolValueMask));
}
591
// Records the C++ vtable pointer for every predefined class id by
// constructing a stack-allocated fake handle of the corresponding handle
// class and copying its vtable into builtin_vtables_ (indexed by class id).
void Object::InitVtables() {
  {
    Object fake_handle;
    builtin_vtables_[kObjectCid] = fake_handle.vtable();
  }

  // Classes with a dedicated handle class of the same name.
#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    clazz fake_handle;                                                         \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable();                    \
  }
  CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY(INIT_VTABLE)
#undef INIT_VTABLE

  // All array class ids share the Array handle vtable.
#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    Array fake_handle;                                                         \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable();                    \
  }
  CLASS_LIST_ARRAYS(INIT_VTABLE)
#undef INIT_VTABLE

  // All string class ids share the String handle vtable.
#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    String fake_handle;                                                        \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable();                    \
  }
  CLASS_LIST_STRINGS(INIT_VTABLE)
#undef INIT_VTABLE

  {
    Instance fake_handle;
    builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable();
  }

  // FFI type-marker classes use the generic Instance handle vtable.
#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    Instance fake_handle;                                                      \
    builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable();                 \
  }
  CLASS_LIST_FFI_TYPE_MARKER(INIT_VTABLE)
#undef INIT_VTABLE

  {
    Instance fake_handle;
    builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable();
  }

  {
    Pointer fake_handle;
    builtin_vtables_[kFfiPointerCid] = fake_handle.vtable();
  }

  {
    DynamicLibrary fake_handle;
    builtin_vtables_[kFfiDynamicLibraryCid] = fake_handle.vtable();
  }

#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    Instance fake_handle;                                                      \
    builtin_vtables_[k##clazz##Cid] = fake_handle.vtable();                    \
  }
  CLASS_LIST_WASM(INIT_VTABLE)
#undef INIT_VTABLE

  // Each typed-data kind has internal, view and external variants, each with
  // its own handle class.
#define INIT_VTABLE(clazz)                                                     \
  {                                                                            \
    TypedData fake_internal_handle;                                            \
    builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable();  \
    TypedDataView fake_view_handle;                                            \
    builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable();  \
    ExternalTypedData fake_external_handle;                                    \
    builtin_vtables_[kExternalTypedData##clazz##Cid] =                         \
        fake_external_handle.vtable();                                         \
  }
  CLASS_LIST_TYPED_DATA(INIT_VTABLE)
#undef INIT_VTABLE

  {
    TypedDataView fake_handle;
    builtin_vtables_[kByteDataViewCid] = fake_handle.vtable();
  }

  // Remaining class ids that are represented by plain Instance handles.
  {
    Instance fake_handle;
    builtin_vtables_[kByteBufferCid] = fake_handle.vtable();
    builtin_vtables_[kNullCid] = fake_handle.vtable();
    builtin_vtables_[kDynamicCid] = fake_handle.vtable();
    builtin_vtables_[kVoidCid] = fake_handle.vtable();
    builtin_vtables_[kNeverCid] = fake_handle.vtable();
  }
}
685
686void Object::Init(Isolate* isolate) {
687 // Should only be run by the vm isolate.
688 ASSERT(isolate == Dart::vm_isolate());
689
690 InitVtables();
691
692 Heap* heap = isolate->heap();
693
694// Allocate the read only object handles here.
695#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \
696 name##_ = Type::ReadOnlyHandle();
697 SHARED_READONLY_HANDLES_LIST(INITIALIZE_SHARED_READONLY_HANDLE)
698#undef INITIALIZE_SHARED_READONLY_HANDLE
699
700 *null_object_ = Object::null();
701 *null_array_ = Array::null();
702 *null_string_ = String::null();
703 *null_instance_ = Instance::null();
704 *null_function_ = Function::null();
705 *null_type_arguments_ = TypeArguments::null();
706 *empty_type_arguments_ = TypeArguments::null();
707 *null_abstract_type_ = AbstractType::null();
708 *null_compressed_stack_maps_ = CompressedStackMaps::null();
709 *bool_true_ = true_;
710 *bool_false_ = false_;
711
712 // Initialize the empty and zero array handles to null_ in order to be able to
713 // check if the empty and zero arrays were allocated (RAW_NULL is not
714 // available).
715 *empty_array_ = Array::null();
716 *zero_array_ = Array::null();
717
718 Class& cls = Class::Handle();
719
720 // Allocate and initialize the class class.
721 {
722 intptr_t size = Class::InstanceSize();
723 uword address = heap->Allocate(size, Heap::kOld);
724 class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag);
725 InitializeObject(address, Class::kClassId, size);
726
727 Class fake;
728 // Initialization from Class::New<Class>.
729 // Directly set raw_ to break a circular dependency: SetRaw will attempt
730 // to lookup class class in the class table where it is not registered yet.
731 cls.raw_ = class_class_;
732 ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
733 cls.set_instance_size(
734 Class::InstanceSize(),
735 compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize()));
736 const intptr_t host_next_field_offset = Class::NextFieldOffset();
737 const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset();
738 cls.set_next_field_offset(host_next_field_offset, target_next_field_offset);
739 cls.set_id(Class::kClassId);
740 cls.set_state_bits(0);
741 cls.set_is_allocate_finalized();
742 cls.set_is_declaration_loaded();
743 cls.set_is_type_finalized();
744 cls.set_type_arguments_field_offset_in_words(Class::kNoTypeArguments,
745 RTN::Class::kNoTypeArguments);
746 cls.set_num_type_arguments(0);
747 cls.set_num_native_fields(0);
748 cls.InitEmptyFields();
749 isolate->class_table()->Register(cls);
750 }
751
752 // Allocate and initialize the null class.
753 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate);
754 cls.set_num_type_arguments(0);
755 isolate->object_store()->set_null_class(cls);
756
757 // Allocate and initialize Never class.
758 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate);
759 cls.set_num_type_arguments(0);
760 cls.set_is_allocate_finalized();
761 cls.set_is_declaration_loaded();
762 cls.set_is_type_finalized();
763 isolate->object_store()->set_never_class(cls);
764
765 // Allocate and initialize the free list element class.
766 cls =
767 Class::New<FreeListElement::FakeInstance,
768 RTN::FreeListElement::FakeInstance>(kFreeListElement, isolate);
769 cls.set_num_type_arguments(0);
770 cls.set_is_allocate_finalized();
771 cls.set_is_declaration_loaded();
772 cls.set_is_type_finalized();
773
774 // Allocate and initialize the forwarding corpse class.
775 cls = Class::New<ForwardingCorpse::FakeInstance,
776 RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
777 isolate);
778 cls.set_num_type_arguments(0);
779 cls.set_is_allocate_finalized();
780 cls.set_is_declaration_loaded();
781 cls.set_is_type_finalized();
782
783 // Allocate and initialize the sentinel values.
784 {
785 *sentinel_ ^=
786 Object::Allocate(kNeverCid, Instance::InstanceSize(), Heap::kOld);
787
788 *transition_sentinel_ ^=
789 Object::Allocate(kNeverCid, Instance::InstanceSize(), Heap::kOld);
790 }
791
792 // Allocate and initialize optimizing compiler constants.
793 {
794 *unknown_constant_ ^=
795 Object::Allocate(kNeverCid, Instance::InstanceSize(), Heap::kOld);
796 *non_constant_ ^=
797 Object::Allocate(kNeverCid, Instance::InstanceSize(), Heap::kOld);
798 }
799
800 // Allocate the remaining VM internal classes.
801 cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate);
802 type_arguments_class_ = cls.raw();
803
804 cls = Class::New<PatchClass, RTN::PatchClass>(isolate);
805 patch_class_class_ = cls.raw();
806
807 cls = Class::New<Function, RTN::Function>(isolate);
808 function_class_ = cls.raw();
809
810 cls = Class::New<ClosureData, RTN::ClosureData>(isolate);
811 closure_data_class_ = cls.raw();
812
813 cls = Class::New<SignatureData, RTN::SignatureData>(isolate);
814 signature_data_class_ = cls.raw();
815
816 cls = Class::New<RedirectionData, RTN::RedirectionData>(isolate);
817 redirection_data_class_ = cls.raw();
818
819 cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate);
820 ffi_trampoline_data_class_ = cls.raw();
821
822 cls = Class::New<Field, RTN::Field>(isolate);
823 field_class_ = cls.raw();
824
825 cls = Class::New<Script, RTN::Script>(isolate);
826 script_class_ = cls.raw();
827
828 cls = Class::New<Library, RTN::Library>(isolate);
829 library_class_ = cls.raw();
830
831 cls = Class::New<Namespace, RTN::Namespace>(isolate);
832 namespace_class_ = cls.raw();
833
834 cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate);
835 kernel_program_info_class_ = cls.raw();
836
837 cls = Class::New<Code, RTN::Code>(isolate);
838 code_class_ = cls.raw();
839
840 cls = Class::New<Bytecode, RTN::Bytecode>(isolate);
841 bytecode_class_ = cls.raw();
842
843 cls = Class::New<Instructions, RTN::Instructions>(isolate);
844 instructions_class_ = cls.raw();
845
846 cls = Class::New<InstructionsSection, RTN::InstructionsSection>(isolate);
847 instructions_section_class_ = cls.raw();
848
849 cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate);
850 object_pool_class_ = cls.raw();
851
852 cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate);
853 pc_descriptors_class_ = cls.raw();
854
855 cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate);
856 code_source_map_class_ = cls.raw();
857
858 cls = Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate);
859 compressed_stackmaps_class_ = cls.raw();
860
861 cls = Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate);
862 var_descriptors_class_ = cls.raw();
863
864 cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate);
865 exception_handlers_class_ = cls.raw();
866
867 cls = Class::New<Context, RTN::Context>(isolate);
868 context_class_ = cls.raw();
869
870 cls = Class::New<ContextScope, RTN::ContextScope>(isolate);
871 context_scope_class_ = cls.raw();
872
873 cls = Class::New<ParameterTypeCheck, RTN::ParameterTypeCheck>(isolate);
874 dyncalltypecheck_class_ = cls.raw();
875
876 cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate);
877 singletargetcache_class_ = cls.raw();
878
879 cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate);
880 unlinkedcall_class_ = cls.raw();
881
882 cls =
883 Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(isolate);
884 monomorphicsmiablecall_class_ = cls.raw();
885
886 cls = Class::New<ICData, RTN::ICData>(isolate);
887 icdata_class_ = cls.raw();
888
889 cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate);
890 megamorphic_cache_class_ = cls.raw();
891
892 cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate);
893 subtypetestcache_class_ = cls.raw();
894
895 cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate);
896 loadingunit_class_ = cls.raw();
897
898 cls = Class::New<ApiError, RTN::ApiError>(isolate);
899 api_error_class_ = cls.raw();
900
901 cls = Class::New<LanguageError, RTN::LanguageError>(isolate);
902 language_error_class_ = cls.raw();
903
904 cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate);
905 unhandled_exception_class_ = cls.raw();
906
907 cls = Class::New<UnwindError, RTN::UnwindError>(isolate);
908 unwind_error_class_ = cls.raw();
909
910 cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
911 isolate);
912 weak_serialization_reference_class_ = cls.raw();
913
914 ASSERT(class_class() != null_);
915
916 // Pre-allocate classes in the vm isolate so that we can for example create a
917 // symbol table and populate it with some frequently used strings as symbols.
918 cls = Class::New<Array, RTN::Array>(isolate);
919 isolate->object_store()->set_array_class(cls);
920 cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
921 RTN::Array::type_arguments_offset());
922 cls.set_num_type_arguments(1);
923 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate);
924 isolate->object_store()->set_immutable_array_class(cls);
925 cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
926 RTN::Array::type_arguments_offset());
927 cls.set_num_type_arguments(1);
928 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate);
929 isolate->object_store()->set_growable_object_array_class(cls);
930 cls.set_type_arguments_field_offset(
931 GrowableObjectArray::type_arguments_offset(),
932 RTN::GrowableObjectArray::type_arguments_offset());
933 cls.set_num_type_arguments(1);
934 cls = Class::NewStringClass(kOneByteStringCid, isolate);
935 isolate->object_store()->set_one_byte_string_class(cls);
936 cls = Class::NewStringClass(kTwoByteStringCid, isolate);
937 isolate->object_store()->set_two_byte_string_class(cls);
938 cls = Class::New<Mint, RTN::Mint>(isolate);
939 isolate->object_store()->set_mint_class(cls);
940 cls = Class::New<Double, RTN::Double>(isolate);
941 isolate->object_store()->set_double_class(cls);
942
943 // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
944 // need it when reading in the token stream of bootstrap classes in the VM
945 // isolate.
946 Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid, isolate);
947
948 // Needed for object pools of VM isolate stubs.
949 Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate);
950
951 // Allocate and initialize the empty_array instance.
952 {
953 uword address = heap->Allocate(Array::InstanceSize(0), Heap::kOld);
954 InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(0));
955 Array::initializeHandle(empty_array_,
956 static_cast<ArrayPtr>(address + kHeapObjectTag));
957 empty_array_->StoreSmi(&empty_array_->raw_ptr()->length_, Smi::New(0));
958 empty_array_->SetCanonical();
959 }
960
961 Smi& smi = Smi::Handle();
962 // Allocate and initialize the zero_array instance.
963 {
964 uword address = heap->Allocate(Array::InstanceSize(1), Heap::kOld);
965 InitializeObject(address, kImmutableArrayCid, Array::InstanceSize(1));
966 Array::initializeHandle(zero_array_,
967 static_cast<ArrayPtr>(address + kHeapObjectTag));
968 zero_array_->StoreSmi(&zero_array_->raw_ptr()->length_, Smi::New(1));
969 smi = Smi::New(0);
970 zero_array_->SetAt(0, smi);
971 zero_array_->SetCanonical();
972 }
973
974 // Allocate and initialize the canonical empty context scope object.
975 {
976 uword address = heap->Allocate(ContextScope::InstanceSize(0), Heap::kOld);
977 InitializeObject(address, kContextScopeCid, ContextScope::InstanceSize(0));
978 ContextScope::initializeHandle(
979 empty_context_scope_,
980 static_cast<ContextScopePtr>(address + kHeapObjectTag));
981 empty_context_scope_->StoreNonPointer(
982 &empty_context_scope_->raw_ptr()->num_variables_, 0);
983 empty_context_scope_->StoreNonPointer(
984 &empty_context_scope_->raw_ptr()->is_implicit_, true);
985 empty_context_scope_->SetCanonical();
986 }
987
988 // Allocate and initialize the canonical empty object pool object.
989 {
990 uword address = heap->Allocate(ObjectPool::InstanceSize(0), Heap::kOld);
991 InitializeObject(address, kObjectPoolCid, ObjectPool::InstanceSize(0));
992 ObjectPool::initializeHandle(
993 empty_object_pool_,
994 static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
995 empty_object_pool_->StoreNonPointer(&empty_object_pool_->raw_ptr()->length_,
996 0);
997 empty_object_pool_->SetCanonical();
998 }
999
1000 // Allocate and initialize the empty_descriptors instance.
1001 {
1002 uword address = heap->Allocate(PcDescriptors::InstanceSize(0), Heap::kOld);
1003 InitializeObject(address, kPcDescriptorsCid,
1004 PcDescriptors::InstanceSize(0));
1005 PcDescriptors::initializeHandle(
1006 empty_descriptors_,
1007 static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
1008 empty_descriptors_->StoreNonPointer(&empty_descriptors_->raw_ptr()->length_,
1009 0);
1010 empty_descriptors_->SetCanonical();
1011 }
1012
1013 // Allocate and initialize the canonical empty variable descriptor object.
1014 {
1015 uword address =
1016 heap->Allocate(LocalVarDescriptors::InstanceSize(0), Heap::kOld);
1017 InitializeObject(address, kLocalVarDescriptorsCid,
1018 LocalVarDescriptors::InstanceSize(0));
1019 LocalVarDescriptors::initializeHandle(
1020 empty_var_descriptors_,
1021 static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
1022 empty_var_descriptors_->StoreNonPointer(
1023 &empty_var_descriptors_->raw_ptr()->num_entries_, 0);
1024 empty_var_descriptors_->SetCanonical();
1025 }
1026
1027 // Allocate and initialize the canonical empty exception handler info object.
1028 // The vast majority of all functions do not contain an exception handler
1029 // and can share this canonical descriptor.
1030 {
1031 uword address =
1032 heap->Allocate(ExceptionHandlers::InstanceSize(0), Heap::kOld);
1033 InitializeObject(address, kExceptionHandlersCid,
1034 ExceptionHandlers::InstanceSize(0));
1035 ExceptionHandlers::initializeHandle(
1036 empty_exception_handlers_,
1037 static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
1038 empty_exception_handlers_->StoreNonPointer(
1039 &empty_exception_handlers_->raw_ptr()->num_entries_, 0);
1040 empty_exception_handlers_->SetCanonical();
1041 }
1042
1043 // Allocate and initialize the canonical empty type arguments object.
1044 {
1045 uword address = heap->Allocate(TypeArguments::InstanceSize(0), Heap::kOld);
1046 InitializeObject(address, kTypeArgumentsCid,
1047 TypeArguments::InstanceSize(0));
1048 TypeArguments::initializeHandle(
1049 empty_type_arguments_,
1050 static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
1051 empty_type_arguments_->StoreSmi(&empty_type_arguments_->raw_ptr()->length_,
1052 Smi::New(0));
1053 empty_type_arguments_->StoreSmi(&empty_type_arguments_->raw_ptr()->hash_,
1054 Smi::New(0));
1055 empty_type_arguments_->ComputeHash();
1056 empty_type_arguments_->SetCanonical();
1057 }
1058
1059 // The VM isolate snapshot object table is initialized to an empty array
1060 // as we do not have any VM isolate snapshot at this time.
1061 *vm_isolate_snapshot_object_table_ = Object::empty_array().raw();
1062
1063 cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate);
1064 cls.set_is_abstract();
1065 cls.set_num_type_arguments(0);
1066 cls.set_is_allocate_finalized();
1067 cls.set_is_declaration_loaded();
1068 cls.set_is_type_finalized();
1069 dynamic_class_ = cls.raw();
1070
1071 cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate);
1072 cls.set_num_type_arguments(0);
1073 cls.set_is_allocate_finalized();
1074 cls.set_is_declaration_loaded();
1075 cls.set_is_type_finalized();
1076 void_class_ = cls.raw();
1077
1078 cls = Class::New<Type, RTN::Type>(isolate);
1079 cls.set_is_allocate_finalized();
1080 cls.set_is_declaration_loaded();
1081 cls.set_is_type_finalized();
1082
1083 cls = dynamic_class_;
1084 *dynamic_type_ = Type::New(cls, Object::null_type_arguments(),
1085 TokenPosition::kNoSource, Nullability::kNullable);
1086 dynamic_type_->SetIsFinalized();
1087 dynamic_type_->ComputeHash();
1088 dynamic_type_->SetCanonical();
1089
1090 cls = void_class_;
1091 *void_type_ = Type::New(cls, Object::null_type_arguments(),
1092 TokenPosition::kNoSource, Nullability::kNullable);
1093 void_type_->SetIsFinalized();
1094 void_type_->ComputeHash();
1095 void_type_->SetCanonical();
1096
1097 // Since TypeArguments objects are passed as function arguments, make them
1098 // behave as Dart instances, although they are just VM objects.
1099 // Note that we cannot set the super type to ObjectType, which does not live
1100 // in the vm isolate. See special handling in Class::SuperClass().
1101 cls = type_arguments_class_;
1102 cls.set_interfaces(Object::empty_array());
1103 cls.SetFields(Object::empty_array());
1104 cls.SetFunctions(Object::empty_array());
1105
1106 cls = Class::New<Bool, RTN::Bool>(isolate);
1107 isolate->object_store()->set_bool_class(cls);
1108
1109 *smi_illegal_cid_ = Smi::New(kIllegalCid);
1110 *smi_zero_ = Smi::New(0);
1111
1112 String& error_str = String::Handle();
1113 error_str = String::New(
1114 "Internal Dart data pointers have been acquired, please release them "
1115 "using Dart_TypedDataReleaseData.",
1116 Heap::kOld);
1117 *typed_data_acquire_error_ = ApiError::New(error_str, Heap::kOld);
1118 error_str = String::New("SnapshotWriter Error", Heap::kOld);
1119 *snapshot_writer_error_ =
1120 LanguageError::New(error_str, Report::kError, Heap::kOld);
1121 error_str = String::New("Branch offset overflow", Heap::kOld);
1122 *branch_offset_error_ =
1123 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1124 error_str = String::New("Speculative inlining failed", Heap::kOld);
1125 *speculative_inlining_error_ =
1126 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1127 error_str = String::New("Background Compilation Failed", Heap::kOld);
1128 *background_compilation_error_ =
1129 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1130 error_str = String::New("Out of memory", Heap::kOld);
1131 *out_of_memory_error_ =
1132 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1133
1134 // Allocate the parameter arrays for method extractor types and names.
1135 *extractor_parameter_types_ = Array::New(1, Heap::kOld);
1136 extractor_parameter_types_->SetAt(0, Object::dynamic_type());
1137 *extractor_parameter_names_ = Array::New(1, Heap::kOld);
1138 // Fill in extractor_parameter_names_ later, after symbols are initialized
1139 // (in Object::FinalizeVMIsolate). extractor_parameter_names_ object
1140 // needs to be created earlier as VM isolate snapshot reader references it
1141 // before Object::FinalizeVMIsolate.
1142
1143 *implicit_getter_bytecode_ =
1144 CreateVMInternalBytecode(KernelBytecode::kVMInternal_ImplicitGetter);
1145
1146 *implicit_setter_bytecode_ =
1147 CreateVMInternalBytecode(KernelBytecode::kVMInternal_ImplicitSetter);
1148
1149 *implicit_static_getter_bytecode_ = CreateVMInternalBytecode(
1150 KernelBytecode::kVMInternal_ImplicitStaticGetter);
1151
1152 *method_extractor_bytecode_ =
1153 CreateVMInternalBytecode(KernelBytecode::kVMInternal_MethodExtractor);
1154
1155 *invoke_closure_bytecode_ =
1156 CreateVMInternalBytecode(KernelBytecode::kVMInternal_InvokeClosure);
1157
1158 *invoke_field_bytecode_ =
1159 CreateVMInternalBytecode(KernelBytecode::kVMInternal_InvokeField);
1160
1161 *nsm_dispatcher_bytecode_ = CreateVMInternalBytecode(
1162 KernelBytecode::kVMInternal_NoSuchMethodDispatcher);
1163
1164 *dynamic_invocation_forwarder_bytecode_ = CreateVMInternalBytecode(
1165 KernelBytecode::kVMInternal_ForwardDynamicInvocation);
1166
1167 // Some thread fields need to be reinitialized as null constants have not been
1168 // initialized until now.
1169 Thread* thr = Thread::Current();
1170 ASSERT(thr != NULL);
1171 thr->ClearStickyError();
1172 thr->clear_pending_functions();
1173
1174 ASSERT(!null_object_->IsSmi());
1175 ASSERT(!null_array_->IsSmi());
1176 ASSERT(null_array_->IsArray());
1177 ASSERT(!null_string_->IsSmi());
1178 ASSERT(null_string_->IsString());
1179 ASSERT(!null_instance_->IsSmi());
1180 ASSERT(null_instance_->IsInstance());
1181 ASSERT(!null_function_->IsSmi());
1182 ASSERT(null_function_->IsFunction());
1183 ASSERT(!null_type_arguments_->IsSmi());
1184 ASSERT(null_type_arguments_->IsTypeArguments());
1185 ASSERT(!null_compressed_stack_maps_->IsSmi());
1186 ASSERT(null_compressed_stack_maps_->IsCompressedStackMaps());
1187 ASSERT(!empty_array_->IsSmi());
1188 ASSERT(empty_array_->IsArray());
1189 ASSERT(!zero_array_->IsSmi());
1190 ASSERT(zero_array_->IsArray());
1191 ASSERT(!empty_context_scope_->IsSmi());
1192 ASSERT(empty_context_scope_->IsContextScope());
1193 ASSERT(!empty_descriptors_->IsSmi());
1194 ASSERT(empty_descriptors_->IsPcDescriptors());
1195 ASSERT(!empty_var_descriptors_->IsSmi());
1196 ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
1197 ASSERT(!empty_exception_handlers_->IsSmi());
1198 ASSERT(empty_exception_handlers_->IsExceptionHandlers());
1199 ASSERT(!sentinel_->IsSmi());
1200 ASSERT(sentinel_->IsInstance());
1201 ASSERT(!transition_sentinel_->IsSmi());
1202 ASSERT(transition_sentinel_->IsInstance());
1203 ASSERT(!unknown_constant_->IsSmi());
1204 ASSERT(unknown_constant_->IsInstance());
1205 ASSERT(!non_constant_->IsSmi());
1206 ASSERT(non_constant_->IsInstance());
1207 ASSERT(!bool_true_->IsSmi());
1208 ASSERT(bool_true_->IsBool());
1209 ASSERT(!bool_false_->IsSmi());
1210 ASSERT(bool_false_->IsBool());
1211 ASSERT(smi_illegal_cid_->IsSmi());
1212 ASSERT(smi_zero_->IsSmi());
1213 ASSERT(!typed_data_acquire_error_->IsSmi());
1214 ASSERT(typed_data_acquire_error_->IsApiError());
1215 ASSERT(!snapshot_writer_error_->IsSmi());
1216 ASSERT(snapshot_writer_error_->IsLanguageError());
1217 ASSERT(!branch_offset_error_->IsSmi());
1218 ASSERT(branch_offset_error_->IsLanguageError());
1219 ASSERT(!speculative_inlining_error_->IsSmi());
1220 ASSERT(speculative_inlining_error_->IsLanguageError());
1221 ASSERT(!background_compilation_error_->IsSmi());
1222 ASSERT(background_compilation_error_->IsLanguageError());
1223 ASSERT(!out_of_memory_error_->IsSmi());
1224 ASSERT(out_of_memory_error_->IsLanguageError());
1225 ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
1226 ASSERT(vm_isolate_snapshot_object_table_->IsArray());
1227 ASSERT(!extractor_parameter_types_->IsSmi());
1228 ASSERT(extractor_parameter_types_->IsArray());
1229 ASSERT(!extractor_parameter_names_->IsSmi());
1230 ASSERT(extractor_parameter_names_->IsArray());
1231 ASSERT(!implicit_getter_bytecode_->IsSmi());
1232 ASSERT(implicit_getter_bytecode_->IsBytecode());
1233 ASSERT(!implicit_setter_bytecode_->IsSmi());
1234 ASSERT(implicit_setter_bytecode_->IsBytecode());
1235 ASSERT(!implicit_static_getter_bytecode_->IsSmi());
1236 ASSERT(implicit_static_getter_bytecode_->IsBytecode());
1237 ASSERT(!method_extractor_bytecode_->IsSmi());
1238 ASSERT(method_extractor_bytecode_->IsBytecode());
1239 ASSERT(!invoke_closure_bytecode_->IsSmi());
1240 ASSERT(invoke_closure_bytecode_->IsBytecode());
1241 ASSERT(!invoke_field_bytecode_->IsSmi());
1242 ASSERT(invoke_field_bytecode_->IsBytecode());
1243 ASSERT(!nsm_dispatcher_bytecode_->IsSmi());
1244 ASSERT(nsm_dispatcher_bytecode_->IsBytecode());
1245 ASSERT(!dynamic_invocation_forwarder_bytecode_->IsSmi());
1246 ASSERT(dynamic_invocation_forwarder_bytecode_->IsBytecode());
1247}
1248
1249void Object::FinishInit(Isolate* isolate) {
1250 // The type testing stubs we initialize in AbstractType objects for the
1251 // canonical type of kDynamicCid/kVoidCid need to be set in this
1252 // method, which is called after StubCode::InitOnce().
1253 Code& code = Code::Handle();
1254
1255 code = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_);
1256 dynamic_type_->SetTypeTestingStub(code);
1257
1258 code = TypeTestingStubGenerator::DefaultCodeForType(*void_type_);
1259 void_type_->SetTypeTestingStub(code);
1260}
1261
// Resets every cached singleton raw pointer (null/true/false and all of the
// VM-internal classes) back to RAW_NULL, undoing the assignments made during
// VM-isolate object initialization so no stale raw pointers remain afterward.
void Object::Cleanup() {
  null_ = static_cast<ObjectPtr>(RAW_NULL);
  true_ = static_cast<BoolPtr>(RAW_NULL);
  false_ = static_cast<BoolPtr>(RAW_NULL);
  class_class_ = static_cast<ClassPtr>(RAW_NULL);
  dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
  void_class_ = static_cast<ClassPtr>(RAW_NULL);
  type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
  patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
  function_class_ = static_cast<ClassPtr>(RAW_NULL);
  closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  signature_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  redirection_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
  field_class_ = static_cast<ClassPtr>(RAW_NULL);
  script_class_ = static_cast<ClassPtr>(RAW_NULL);
  library_class_ = static_cast<ClassPtr>(RAW_NULL);
  namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
  kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
  code_class_ = static_cast<ClassPtr>(RAW_NULL);
  bytecode_class_ = static_cast<ClassPtr>(RAW_NULL);
  instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
  instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
  object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
  pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
  code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
  compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
  var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
  exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
  context_class_ = static_cast<ClassPtr>(RAW_NULL);
  context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
  dyncalltypecheck_class_ = static_cast<ClassPtr>(RAW_NULL);
  singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
  unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
  monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL);
  icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
  megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
  subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
  loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
  api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
  language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
  unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
  unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
}
1306
1307// An object visitor which will mark all visited objects. This is used to
1308// premark all objects in the vm_isolate_ heap. Also precalculates hash
1309// codes so that we can get the identity hash code of objects in the read-
1310// only VM isolate.
1311class FinalizeVMIsolateVisitor : public ObjectVisitor {
1312 public:
1313 FinalizeVMIsolateVisitor()
1314#if defined(HASH_IN_OBJECT_HEADER)
1315 : counter_(1337)
1316#endif
1317 {
1318 }
1319
1320 void VisitObject(ObjectPtr obj) {
1321 // Free list elements should never be marked.
1322 ASSERT(!obj->ptr()->IsMarked());
1323 // No forwarding corpses in the VM isolate.
1324 ASSERT(!obj->IsForwardingCorpse());
1325 if (!obj->IsFreeListElement()) {
1326 obj->ptr()->SetMarkBitUnsynchronized();
1327 Object::FinalizeReadOnlyObject(obj);
1328#if defined(HASH_IN_OBJECT_HEADER)
1329 // These objects end up in the read-only VM isolate which is shared
1330 // between isolates, so we have to prepopulate them with identity hash
1331 // codes, since we can't add hash codes later.
1332 if (Object::GetCachedHash(obj) == 0) {
1333 // Some classes have identity hash codes that depend on their contents,
1334 // not per object.
1335 ASSERT(!obj->IsStringInstance());
1336 if (!obj->IsMint() && !obj->IsDouble() && !obj->IsRawNull() &&
1337 !obj->IsBool()) {
1338 counter_ += 2011; // The year Dart was announced and a prime.
1339 counter_ &= 0x3fffffff;
1340 if (counter_ == 0) counter_++;
1341 Object::SetCachedHash(obj, counter_);
1342 }
1343 }
1344#endif
1345 }
1346 }
1347
1348 private:
1349#if defined(HASH_IN_OBJECT_HEADER)
1350 int32_t counter_;
1351#endif
1352};
1353
// Assigns Symbols::name() as the name of the VM singleton class returned by
// class_name##_class().
#define SET_CLASS_NAME(class_name, name)                                       \
  cls = class_name##_class();                                                  \
  cls.set_name(Symbols::name());

// Finalizes the VM isolate heap: gives every VM singleton and pre-allocated
// class its name (symbols are only available by now) and premarks all old
// objects so the shared read-only heap is never re-marked by the GC.
void Object::FinalizeVMIsolate(Isolate* isolate) {
  // Should only be run by the vm isolate.
  ASSERT(isolate == Dart::vm_isolate());

  // Finish initialization of extractor_parameter_names_, which was started
  // during VM-isolate object initialization; slot 0 could not be filled in
  // until the symbol table existed.
  extractor_parameter_names_->SetAt(0, Symbols::This());

  // Set up names for all VM singleton classes.
  Class& cls = Class::Handle();

  SET_CLASS_NAME(class, Class);
  SET_CLASS_NAME(dynamic, Dynamic);
  SET_CLASS_NAME(void, Void);
  SET_CLASS_NAME(type_arguments, TypeArguments);
  SET_CLASS_NAME(patch_class, PatchClass);
  SET_CLASS_NAME(function, Function);
  SET_CLASS_NAME(closure_data, ClosureData);
  SET_CLASS_NAME(signature_data, SignatureData);
  SET_CLASS_NAME(redirection_data, RedirectionData);
  SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData);
  SET_CLASS_NAME(field, Field);
  SET_CLASS_NAME(script, Script);
  SET_CLASS_NAME(library, LibraryClass);
  SET_CLASS_NAME(namespace, Namespace);
  SET_CLASS_NAME(kernel_program_info, KernelProgramInfo);
  SET_CLASS_NAME(code, Code);
  SET_CLASS_NAME(bytecode, Bytecode);
  SET_CLASS_NAME(instructions, Instructions);
  SET_CLASS_NAME(instructions_section, InstructionsSection);
  SET_CLASS_NAME(object_pool, ObjectPool);
  SET_CLASS_NAME(code_source_map, CodeSourceMap);
  SET_CLASS_NAME(pc_descriptors, PcDescriptors);
  SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps);
  SET_CLASS_NAME(var_descriptors, LocalVarDescriptors);
  SET_CLASS_NAME(exception_handlers, ExceptionHandlers);
  SET_CLASS_NAME(context, Context);
  SET_CLASS_NAME(context_scope, ContextScope);
  SET_CLASS_NAME(dyncalltypecheck, ParameterTypeCheck);
  SET_CLASS_NAME(singletargetcache, SingleTargetCache);
  SET_CLASS_NAME(unlinkedcall, UnlinkedCall);
  SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall);
  SET_CLASS_NAME(icdata, ICData);
  SET_CLASS_NAME(megamorphic_cache, MegamorphicCache);
  SET_CLASS_NAME(subtypetestcache, SubtypeTestCache);
  SET_CLASS_NAME(loadingunit, LoadingUnit);
  SET_CLASS_NAME(api_error, ApiError);
  SET_CLASS_NAME(language_error, LanguageError);
  SET_CLASS_NAME(unhandled_exception, UnhandledException);
  SET_CLASS_NAME(unwind_error, UnwindError);

  // Set up names for classes which are also pre-allocated in the vm isolate.
  cls = isolate->object_store()->array_class();
  cls.set_name(Symbols::_List());
  cls = isolate->object_store()->one_byte_string_class();
  cls.set_name(Symbols::OneByteString());
  cls = isolate->object_store()->never_class();
  cls.set_name(Symbols::Never());

  // Set up names for the pseudo-classes for free list elements and forwarding
  // corpses. Mainly this makes VM debugging easier.
  cls = isolate->class_table()->At(kFreeListElement);
  cls.set_name(Symbols::FreeListElement());
  cls = isolate->class_table()->At(kForwardingCorpse);
  cls.set_name(Symbols::ForwardingCorpse());

  {
    ASSERT(isolate == Dart::vm_isolate());
    Thread* thread = Thread::Current();
    // Temporarily make the VM isolate heap writable while premarking.
    WritableVMIsolateScope scope(thread);
    HeapIterationScope iteration(thread);
    FinalizeVMIsolateVisitor premarker;
    ASSERT(isolate->heap()->UsedInWords(Heap::kNew) == 0);
    iteration.IterateOldObjectsNoImagePages(&premarker);
    // Make the VM isolate read-only again after setting all objects as marked.
    // Note objects in image pages are already pre-marked.
  }
}
1436
1437void Object::FinalizeReadOnlyObject(ObjectPtr object) {
1438 NoSafepointScope no_safepoint;
1439 intptr_t cid = object->GetClassId();
1440 if (cid == kOneByteStringCid) {
1441 OneByteStringPtr str = static_cast<OneByteStringPtr>(object);
1442 if (String::GetCachedHash(str) == 0) {
1443 intptr_t hash = String::Hash(str);
1444 String::SetCachedHash(str, hash);
1445 }
1446 intptr_t size = OneByteString::UnroundedSize(str);
1447 ASSERT(size <= str->ptr()->HeapSize());
1448 memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(str) + size), 0,
1449 str->ptr()->HeapSize() - size);
1450 } else if (cid == kTwoByteStringCid) {
1451 TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object);
1452 if (String::GetCachedHash(str) == 0) {
1453 intptr_t hash = String::Hash(str);
1454 String::SetCachedHash(str, hash);
1455 }
1456 ASSERT(String::GetCachedHash(str) != 0);
1457 intptr_t size = TwoByteString::UnroundedSize(str);
1458 ASSERT(size <= str->ptr()->HeapSize());
1459 memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(str) + size), 0,
1460 str->ptr()->HeapSize() - size);
1461 } else if (cid == kExternalOneByteStringCid) {
1462 ExternalOneByteStringPtr str =
1463 static_cast<ExternalOneByteStringPtr>(object);
1464 if (String::GetCachedHash(str) == 0) {
1465 intptr_t hash = String::Hash(str);
1466 String::SetCachedHash(str, hash);
1467 }
1468 } else if (cid == kExternalTwoByteStringCid) {
1469 ExternalTwoByteStringPtr str =
1470 static_cast<ExternalTwoByteStringPtr>(object);
1471 if (String::GetCachedHash(str) == 0) {
1472 intptr_t hash = String::Hash(str);
1473 String::SetCachedHash(str, hash);
1474 }
1475 } else if (cid == kCodeSourceMapCid) {
1476 CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
1477 intptr_t size = CodeSourceMap::UnroundedSize(map);
1478 ASSERT(size <= map->ptr()->HeapSize());
1479 memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(map) + size), 0,
1480 map->ptr()->HeapSize() - size);
1481 } else if (cid == kCompressedStackMapsCid) {
1482 CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object);
1483 intptr_t size = CompressedStackMaps::UnroundedSize(maps);
1484 ASSERT(size <= maps->ptr()->HeapSize());
1485 memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(maps) + size), 0,
1486 maps->ptr()->HeapSize() - size);
1487 } else if (cid == kPcDescriptorsCid) {
1488 PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
1489 intptr_t size = PcDescriptors::UnroundedSize(desc);
1490 ASSERT(size <= desc->ptr()->HeapSize());
1491 memset(reinterpret_cast<void*>(ObjectLayout::ToAddr(desc) + size), 0,
1492 desc->ptr()->HeapSize() - size);
1493 }
1494}
1495
// Replaces the VM isolate snapshot object table (initialized to the empty
// array during object initialization). Must run on the VM isolate itself.
void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
  ASSERT(Isolate::Current() == Dart::vm_isolate());
  *vm_isolate_snapshot_object_table_ = table.raw();
}
1500
// Make unused space in an object whose type has been transformed safe
// for traversing during GC.
// The leftover tail is overwritten with the header of a filler object:
// a TypedDataInt8Array when the leftover space is large enough to hold one,
// otherwise a plain Instance header (exactly one object's worth of space).
void Object::MakeUnusedSpaceTraversable(const Object& obj,
                                        intptr_t original_size,
                                        intptr_t used_size) {
  // Raw pointers are manipulated directly below, so no safepoint may occur.
  ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
  ASSERT(!obj.IsNull());
  ASSERT(original_size >= used_size);
  if (original_size > used_size) {
    intptr_t leftover_size = original_size - used_size;

    // The filler object starts immediately after the used portion of |obj|.
    uword addr = ObjectLayout::ToAddr(obj.raw()) + used_size;
    if (leftover_size >= TypedData::InstanceSize(0)) {
      // Update the leftover space as a TypedDataInt8Array object.
      TypedDataPtr raw =
          static_cast<TypedDataPtr>(ObjectLayout::FromAddr(addr));
      // Build a complete header word: class id, size, and the GC bits
      // matching the generation (old/new) of the enclosing object.
      uword new_tags =
          ObjectLayout::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
      new_tags = ObjectLayout::SizeTag::update(leftover_size, new_tags);
      const bool is_old = obj.raw()->IsOldObject();
      new_tags = ObjectLayout::OldBit::update(is_old, new_tags);
      new_tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, new_tags);
      new_tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, new_tags);
      new_tags = ObjectLayout::NewBit::update(!is_old, new_tags);
      // On architectures with a relaxed memory model, the concurrent marker may
      // observe the write of the filler object's header before observing the
      // new array length, and so treat it as a pointer. Ensure it is a Smi so
      // the marker won't dereference it.
      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
      uint32_t tags = raw->ptr()->tags_;
      uint32_t old_tags;
      // TODO(iposva): Investigate whether CompareAndSwapWord is necessary.
      // NOTE(review): old_tags is never refreshed on CAS failure, so retries
      // only cover spurious WeakCAS failures, not concurrent tag mutation --
      // confirm no other thread can update these tags here.
      do {
        old_tags = tags;
        // We can't use obj.CompareAndSwapTags here because we don't have a
        // handle for the new object.
      } while (!raw->ptr()->tags_.WeakCAS(old_tags, new_tags));

      // Set the array length so the filler covers exactly the leftover bytes.
      intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
      ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
      raw->ptr()->StoreSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
      raw->ptr()->RecomputeDataField();
    } else {
      // Update the leftover space as a basic object.
      ASSERT(leftover_size == Object::InstanceSize());
      ObjectPtr raw = static_cast<ObjectPtr>(ObjectLayout::FromAddr(addr));
      uword new_tags = ObjectLayout::ClassIdTag::update(kInstanceCid, 0);
      new_tags = ObjectLayout::SizeTag::update(leftover_size, new_tags);
      const bool is_old = obj.raw()->IsOldObject();
      new_tags = ObjectLayout::OldBit::update(is_old, new_tags);
      new_tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, new_tags);
      new_tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, new_tags);
      new_tags = ObjectLayout::NewBit::update(!is_old, new_tags);
      // On architectures with a relaxed memory model, the concurrent marker may
      // observe the write of the filler object's header before observing the
      // new array length, and so treat it as a pointer. Ensure it is a Smi so
      // the marker won't dereference it.
      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
      uint32_t tags = raw->ptr()->tags_;
      uint32_t old_tags;
      // TODO(iposva): Investigate whether CompareAndSwapWord is necessary.
      do {
        old_tags = tags;
        // We can't use obj.CompareAndSwapTags here because we don't have a
        // handle for the new object.
      } while (!raw->ptr()->tags_.WeakCAS(old_tags, new_tags));
    }
  }
}
1572
1573void Object::VerifyBuiltinVtables() {
1574#if defined(DEBUG)
1575 ASSERT(builtin_vtables_[kIllegalCid] == 0);
1576 ASSERT(builtin_vtables_[kFreeListElement] == 0);
1577 ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
1578 ClassTable* table = Isolate::Current()->class_table();
1579 for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) {
1580 if (table->HasValidClassAt(cid)) {
1581 ASSERT(builtin_vtables_[cid] != 0);
1582 }
1583 }
1584#endif
1585}
1586
1587void Object::RegisterClass(const Class& cls,
1588 const String& name,
1589 const Library& lib) {
1590 ASSERT(name.Length() > 0);
1591 ASSERT(name.CharAt(0) != '_');
1592 cls.set_name(name);
1593 lib.AddClass(cls);
1594}
1595
1596void Object::RegisterPrivateClass(const Class& cls,
1597 const String& public_class_name,
1598 const Library& lib) {
1599 ASSERT(public_class_name.Length() > 0);
1600 ASSERT(public_class_name.CharAt(0) == '_');
1601 String& str = String::Handle();
1602 str = lib.PrivateName(public_class_name);
1603 cls.set_name(str);
1604 lib.AddClass(cls);
1605}
1606
1607// Initialize a new isolate from source or from a snapshot.
1608//
1609// There are three possibilities:
1610// 1. Running a Kernel binary. This function will bootstrap from the KERNEL
1611// file.
1612// 2. There is no vm snapshot. This function will bootstrap from source.
1613// 3. There is a vm snapshot. The caller should initialize from the snapshot.
1614//
1615// A non-NULL kernel argument indicates (1).
1616// A NULL kernel indicates (2) or (3).
1617ErrorPtr Object::Init(Isolate* isolate,
1618 const uint8_t* kernel_buffer,
1619 intptr_t kernel_buffer_size) {
1620 Thread* thread = Thread::Current();
1621 Zone* zone = thread->zone();
1622 ASSERT(isolate == thread->isolate());
1623 TIMELINE_DURATION(thread, Isolate, "Object::Init");
1624
1625#if defined(DART_PRECOMPILED_RUNTIME)
1626 const bool bootstrapping = false;
1627#else
1628 const bool is_kernel = (kernel_buffer != NULL);
1629 const bool bootstrapping =
1630 (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel;
1631#endif // defined(DART_PRECOMPILED_RUNTIME).
1632
1633 if (bootstrapping) {
1634#if !defined(DART_PRECOMPILED_RUNTIME)
1635 // Object::Init version when we are bootstrapping from source or from a
1636 // Kernel binary.
1637 // This will initialize isolate group object_store, shared by all isolates
1638 // running in the isolate group.
1639 ObjectStore* object_store = isolate->object_store();
1640
1641 Class& cls = Class::Handle(zone);
1642 Type& type = Type::Handle(zone);
1643 Array& array = Array::Handle(zone);
1644 Library& lib = Library::Handle(zone);
1645 TypeArguments& type_args = TypeArguments::Handle(zone);
1646
1647 // All RawArray fields will be initialized to an empty array, therefore
1648 // initialize array class first.
1649 cls = Class::New<Array, RTN::Array>(isolate);
1650 ASSERT(object_store->array_class() == Class::null());
1651 object_store->set_array_class(cls);
1652
1653 // VM classes that are parameterized (Array, ImmutableArray,
1654 // GrowableObjectArray, and LinkedHashMap) are also pre-finalized, so
1655 // CalculateFieldOffsets() is not called, so we need to set the offset of
1656 // their type_arguments_ field, which is explicitly declared in their
1657 // respective Raw* classes.
1658 cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
1659 RTN::Array::type_arguments_offset());
1660 cls.set_num_type_arguments(1);
1661
1662 // Set up the growable object array class (Has to be done after the array
1663 // class is setup as one of its field is an array object).
1664 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate);
1665 object_store->set_growable_object_array_class(cls);
1666 cls.set_type_arguments_field_offset(
1667 GrowableObjectArray::type_arguments_offset(),
1668 RTN::GrowableObjectArray::type_arguments_offset());
1669 cls.set_num_type_arguments(1);
1670
1671 // Initialize hash set for canonical types.
1672 const intptr_t kInitialCanonicalTypeSize = 16;
1673 array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize,
1674 Heap::kOld);
1675 object_store->set_canonical_types(array);
1676
1677 // Initialize hash set for canonical type parameters.
1678 const intptr_t kInitialCanonicalTypeParameterSize = 4;
1679 array = HashTables::New<CanonicalTypeParameterSet>(
1680 kInitialCanonicalTypeParameterSize, Heap::kOld);
1681 object_store->set_canonical_type_parameters(array);
1682
1683 // Initialize hash set for canonical_type_arguments_.
1684 const intptr_t kInitialCanonicalTypeArgumentsSize = 4;
1685 array = HashTables::New<CanonicalTypeArgumentsSet>(
1686 kInitialCanonicalTypeArgumentsSize, Heap::kOld);
1687 object_store->set_canonical_type_arguments(array);
1688
1689 // Setup type class early in the process.
1690 const Class& type_cls =
1691 Class::Handle(zone, Class::New<Type, RTN::Type>(isolate));
1692 const Class& type_ref_cls =
1693 Class::Handle(zone, Class::New<TypeRef, RTN::TypeRef>(isolate));
1694 const Class& type_parameter_cls = Class::Handle(
1695 zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate));
1696 const Class& library_prefix_cls = Class::Handle(
1697 zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate));
1698
1699 // Pre-allocate the OneByteString class needed by the symbol table.
1700 cls = Class::NewStringClass(kOneByteStringCid, isolate);
1701 object_store->set_one_byte_string_class(cls);
1702
1703 // Pre-allocate the TwoByteString class needed by the symbol table.
1704 cls = Class::NewStringClass(kTwoByteStringCid, isolate);
1705 object_store->set_two_byte_string_class(cls);
1706
1707 // Setup the symbol table for the symbols created in the isolate.
1708 Symbols::SetupSymbolTable(isolate);
1709
1710 // Set up the libraries array before initializing the core library.
1711 const GrowableObjectArray& libraries =
1712 GrowableObjectArray::Handle(zone, GrowableObjectArray::New(Heap::kOld));
1713 object_store->set_libraries(libraries);
1714
1715 // Pre-register the core library.
1716 Library::InitCoreLibrary(isolate);
1717
1718 // Basic infrastructure has been setup, initialize the class dictionary.
1719 const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
1720 ASSERT(!core_lib.IsNull());
1721
1722 const GrowableObjectArray& pending_classes =
1723 GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
1724 object_store->set_pending_classes(pending_classes);
1725
1726 // Now that the symbol table is initialized and that the core dictionary as
1727 // well as the core implementation dictionary have been setup, preallocate
1728 // remaining classes and register them by name in the dictionaries.
1729 String& name = String::Handle(zone);
1730 cls = object_store->array_class(); // Was allocated above.
1731 RegisterPrivateClass(cls, Symbols::_List(), core_lib);
1732 pending_classes.Add(cls);
1733 // We cannot use NewNonParameterizedType(), because Array is
1734 // parameterized. Warning: class _List has not been patched yet. Its
1735 // declared number of type parameters is still 0. It will become 1 after
1736 // patching. The array type allocated below represents the raw type _List
1737 // and not _List<E> as we could expect. Use with caution.
1738 type =
1739 Type::New(Class::Handle(zone, cls.raw()), TypeArguments::Handle(zone),
1740 TokenPosition::kNoSource, Nullability::kNonNullable);
1741 type.SetIsFinalized();
1742 type ^= type.Canonicalize();
1743 object_store->set_array_type(type);
1744 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
1745 object_store->set_legacy_array_type(type);
1746 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
1747 object_store->set_non_nullable_array_type(type);
1748
1749 cls = object_store->growable_object_array_class(); // Was allocated above.
1750 RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib);
1751 pending_classes.Add(cls);
1752
1753 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate);
1754 object_store->set_immutable_array_class(cls);
1755 cls.set_type_arguments_field_offset(Array::type_arguments_offset(),
1756 RTN::Array::type_arguments_offset());
1757 cls.set_num_type_arguments(1);
1758 ASSERT(object_store->immutable_array_class() !=
1759 object_store->array_class());
1760 cls.set_is_prefinalized();
1761 RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib);
1762 pending_classes.Add(cls);
1763
1764 cls = object_store->one_byte_string_class(); // Was allocated above.
1765 RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib);
1766 pending_classes.Add(cls);
1767
1768 cls = object_store->two_byte_string_class(); // Was allocated above.
1769 RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib);
1770 pending_classes.Add(cls);
1771
1772 cls = Class::NewStringClass(kExternalOneByteStringCid, isolate);
1773 object_store->set_external_one_byte_string_class(cls);
1774 RegisterPrivateClass(cls, Symbols::ExternalOneByteString(), core_lib);
1775 pending_classes.Add(cls);
1776
1777 cls = Class::NewStringClass(kExternalTwoByteStringCid, isolate);
1778 object_store->set_external_two_byte_string_class(cls);
1779 RegisterPrivateClass(cls, Symbols::ExternalTwoByteString(), core_lib);
1780 pending_classes.Add(cls);
1781
1782 // Pre-register the isolate library so the native class implementations can
1783 // be hooked up before compiling it.
1784 Library& isolate_lib = Library::Handle(
1785 zone, Library::LookupLibrary(thread, Symbols::DartIsolate()));
1786 if (isolate_lib.IsNull()) {
1787 isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true);
1788 isolate_lib.SetLoadRequested();
1789 isolate_lib.Register(thread);
1790 }
1791 object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
1792 ASSERT(!isolate_lib.IsNull());
1793 ASSERT(isolate_lib.raw() == Library::IsolateLibrary());
1794
1795 cls = Class::New<Capability, RTN::Capability>(isolate);
1796 RegisterPrivateClass(cls, Symbols::_CapabilityImpl(), isolate_lib);
1797 pending_classes.Add(cls);
1798
1799 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate);
1800 RegisterPrivateClass(cls, Symbols::_RawReceivePortImpl(), isolate_lib);
1801 pending_classes.Add(cls);
1802
1803 cls = Class::New<SendPort, RTN::SendPort>(isolate);
1804 RegisterPrivateClass(cls, Symbols::_SendPortImpl(), isolate_lib);
1805 pending_classes.Add(cls);
1806
1807 cls =
1808 Class::New<TransferableTypedData, RTN::TransferableTypedData>(isolate);
1809 RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(),
1810 isolate_lib);
1811 pending_classes.Add(cls);
1812
1813 const Class& stacktrace_cls =
1814 Class::Handle(zone, Class::New<StackTrace, RTN::StackTrace>(isolate));
1815 RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib);
1816 pending_classes.Add(stacktrace_cls);
1817 // Super type set below, after Object is allocated.
1818
1819 cls = Class::New<RegExp, RTN::RegExp>(isolate);
1820 RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib);
1821 pending_classes.Add(cls);
1822
1823 // Initialize the base interfaces used by the core VM classes.
1824
1825 // Allocate and initialize the pre-allocated classes in the core library.
1826 // The script and token index of these pre-allocated classes is set up in
1827 // the parser when the corelib script is compiled (see
1828 // Parser::ParseClassDefinition).
1829 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate);
1830 object_store->set_object_class(cls);
1831 cls.set_name(Symbols::Object());
1832 cls.set_num_type_arguments(0);
1833 cls.set_is_prefinalized();
1834 cls.set_is_const();
1835 core_lib.AddClass(cls);
1836 pending_classes.Add(cls);
1837 type = Type::NewNonParameterizedType(cls);
1838 object_store->set_object_type(type);
1839 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
1840 object_store->set_legacy_object_type(type);
1841 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
1842 object_store->set_non_nullable_object_type(type);
1843 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
1844 object_store->set_nullable_object_type(type);
1845
1846 cls = Class::New<Bool, RTN::Bool>(isolate);
1847 object_store->set_bool_class(cls);
1848 RegisterClass(cls, Symbols::Bool(), core_lib);
1849 pending_classes.Add(cls);
1850
1851 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate);
1852 object_store->set_null_class(cls);
1853 cls.set_num_type_arguments(0);
1854 cls.set_is_prefinalized();
1855 RegisterClass(cls, Symbols::Null(), core_lib);
1856 pending_classes.Add(cls);
1857
1858 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate);
1859 cls.set_num_type_arguments(0);
1860 cls.set_is_allocate_finalized();
1861 cls.set_is_declaration_loaded();
1862 cls.set_is_type_finalized();
1863 cls.set_name(Symbols::Never());
1864 object_store->set_never_class(cls);
1865
1866 ASSERT(!library_prefix_cls.IsNull());
1867 RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(),
1868 core_lib);
1869 pending_classes.Add(library_prefix_cls);
1870
1871 RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib);
1872 pending_classes.Add(type_cls);
1873
1874 RegisterPrivateClass(type_ref_cls, Symbols::_TypeRef(), core_lib);
1875 pending_classes.Add(type_ref_cls);
1876
1877 RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(),
1878 core_lib);
1879 pending_classes.Add(type_parameter_cls);
1880
1881 cls = Class::New<Integer, RTN::Integer>(isolate);
1882 object_store->set_integer_implementation_class(cls);
1883 RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib);
1884 pending_classes.Add(cls);
1885
1886 cls = Class::New<Smi, RTN::Smi>(isolate);
1887 object_store->set_smi_class(cls);
1888 RegisterPrivateClass(cls, Symbols::_Smi(), core_lib);
1889 pending_classes.Add(cls);
1890
1891 cls = Class::New<Mint, RTN::Mint>(isolate);
1892 object_store->set_mint_class(cls);
1893 RegisterPrivateClass(cls, Symbols::_Mint(), core_lib);
1894 pending_classes.Add(cls);
1895
1896 cls = Class::New<Double, RTN::Double>(isolate);
1897 object_store->set_double_class(cls);
1898 RegisterPrivateClass(cls, Symbols::_Double(), core_lib);
1899 pending_classes.Add(cls);
1900
1901 // Class that represents the Dart class _Closure and C++ class Closure.
1902 cls = Class::New<Closure, RTN::Closure>(isolate);
1903 object_store->set_closure_class(cls);
1904 RegisterPrivateClass(cls, Symbols::_Closure(), core_lib);
1905 pending_classes.Add(cls);
1906
1907 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate);
1908 object_store->set_weak_property_class(cls);
1909 RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib);
1910
1911// Pre-register the mirrors library so we can place the vm class
1912// MirrorReference there rather than the core library.
1913#if !defined(DART_PRECOMPILED_RUNTIME)
1914 lib = Library::LookupLibrary(thread, Symbols::DartMirrors());
1915 if (lib.IsNull()) {
1916 lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true);
1917 lib.SetLoadRequested();
1918 lib.Register(thread);
1919 }
1920 object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
1921 ASSERT(!lib.IsNull());
1922 ASSERT(lib.raw() == Library::MirrorsLibrary());
1923
1924 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate);
1925 RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);
1926#endif
1927
1928 // Pre-register the collection library so we can place the vm class
1929 // LinkedHashMap there rather than the core library.
1930 lib = Library::LookupLibrary(thread, Symbols::DartCollection());
1931 if (lib.IsNull()) {
1932 lib = Library::NewLibraryHelper(Symbols::DartCollection(), true);
1933 lib.SetLoadRequested();
1934 lib.Register(thread);
1935 }
1936
1937 object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
1938 ASSERT(!lib.IsNull());
1939 ASSERT(lib.raw() == Library::CollectionLibrary());
1940 cls = Class::New<LinkedHashMap, RTN::LinkedHashMap>(isolate);
1941 object_store->set_linked_hash_map_class(cls);
1942 cls.set_type_arguments_field_offset(
1943 LinkedHashMap::type_arguments_offset(),
1944 RTN::LinkedHashMap::type_arguments_offset());
1945 cls.set_num_type_arguments(2);
1946 RegisterPrivateClass(cls, Symbols::_LinkedHashMap(), lib);
1947 pending_classes.Add(cls);
1948
1949 // Pre-register the async library so we can place the vm class
1950 // FutureOr there rather than the core library.
1951 lib = Library::LookupLibrary(thread, Symbols::DartAsync());
1952 if (lib.IsNull()) {
1953 lib = Library::NewLibraryHelper(Symbols::DartAsync(), true);
1954 lib.SetLoadRequested();
1955 lib.Register(thread);
1956 }
1957 object_store->set_bootstrap_library(ObjectStore::kAsync, lib);
1958 ASSERT(!lib.IsNull());
1959 ASSERT(lib.raw() == Library::AsyncLibrary());
1960 cls = Class::New<FutureOr, RTN::FutureOr>(isolate);
1961 cls.set_type_arguments_field_offset(FutureOr::type_arguments_offset(),
1962 RTN::FutureOr::type_arguments_offset());
1963 cls.set_num_type_arguments(1);
1964 RegisterClass(cls, Symbols::FutureOr(), lib);
1965 pending_classes.Add(cls);
1966
1967 // Pre-register the developer library so we can place the vm class
1968 // UserTag there rather than the core library.
1969 lib = Library::LookupLibrary(thread, Symbols::DartDeveloper());
1970 if (lib.IsNull()) {
1971 lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true);
1972 lib.SetLoadRequested();
1973 lib.Register(thread);
1974 }
1975 object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
1976 ASSERT(!lib.IsNull());
1977 ASSERT(lib.raw() == Library::DeveloperLibrary());
1978 cls = Class::New<UserTag, RTN::UserTag>(isolate);
1979 RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
1980 pending_classes.Add(cls);
1981
1982 // Setup some default native field classes which can be extended for
1983 // specifying native fields in dart classes.
1984 Library::InitNativeWrappersLibrary(isolate, is_kernel);
1985 ASSERT(object_store->native_wrappers_library() != Library::null());
1986
1987 // Pre-register the typed_data library so the native class implementations
1988 // can be hooked up before compiling it.
1989 lib = Library::LookupLibrary(thread, Symbols::DartTypedData());
1990 if (lib.IsNull()) {
1991 lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true);
1992 lib.SetLoadRequested();
1993 lib.Register(thread);
1994 }
1995 object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
1996 ASSERT(!lib.IsNull());
1997 ASSERT(lib.raw() == Library::TypedDataLibrary());
1998#define REGISTER_TYPED_DATA_CLASS(clazz) \
1999 cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate); \
2000 RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);
2001
2002 DART_CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
2003#undef REGISTER_TYPED_DATA_CLASS
2004#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2005 cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate); \
2006 RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \
2007 pending_classes.Add(cls);
2008
2009 CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
2010
2011 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate);
2012 RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib);
2013 pending_classes.Add(cls);
2014
2015#undef REGISTER_TYPED_DATA_VIEW_CLASS
2016#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2017 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2018 isolate); \
2019 RegisterPrivateClass(cls, Symbols::_External##clazz(), lib);
2020
2021 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate,
2022 /*register_class=*/false);
2023 cls.set_instance_size(0, 0);
2024 cls.set_next_field_offset(-kWordSize, -compiler::target::kWordSize);
2025 isolate->class_table()->Register(cls);
2026 RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib);
2027 pending_classes.Add(cls);
2028
2029 CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
2030#undef REGISTER_EXT_TYPED_DATA_CLASS
2031 // Register Float32x4, Int32x4, and Float64x2 in the object store.
2032 cls = Class::New<Float32x4, RTN::Float32x4>(isolate);
2033 RegisterPrivateClass(cls, Symbols::_Float32x4(), lib);
2034 pending_classes.Add(cls);
2035 object_store->set_float32x4_class(cls);
2036
2037 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2038 /*register_class=*/true,
2039 /*is_abstract=*/true);
2040 RegisterClass(cls, Symbols::Float32x4(), lib);
2041 cls.set_num_type_arguments(0);
2042 cls.set_is_prefinalized();
2043 type = Type::NewNonParameterizedType(cls);
2044 object_store->set_float32x4_type(type);
2045
2046 cls = Class::New<Int32x4, RTN::Int32x4>(isolate);
2047 RegisterPrivateClass(cls, Symbols::_Int32x4(), lib);
2048 pending_classes.Add(cls);
2049 object_store->set_int32x4_class(cls);
2050
2051 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2052 /*register_class=*/true,
2053 /*is_abstract=*/true);
2054 RegisterClass(cls, Symbols::Int32x4(), lib);
2055 cls.set_num_type_arguments(0);
2056 cls.set_is_prefinalized();
2057 type = Type::NewNonParameterizedType(cls);
2058 object_store->set_int32x4_type(type);
2059
2060 cls = Class::New<Float64x2, RTN::Float64x2>(isolate);
2061 RegisterPrivateClass(cls, Symbols::_Float64x2(), lib);
2062 pending_classes.Add(cls);
2063 object_store->set_float64x2_class(cls);
2064
2065 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2066 /*register_class=*/true,
2067 /*is_abstract=*/true);
2068 RegisterClass(cls, Symbols::Float64x2(), lib);
2069 cls.set_num_type_arguments(0);
2070 cls.set_is_prefinalized();
2071 type = Type::NewNonParameterizedType(cls);
2072 object_store->set_float64x2_type(type);
2073
2074 // Set the super type of class StackTrace to Object type so that the
2075 // 'toString' method is implemented.
2076 type = object_store->object_type();
2077 stacktrace_cls.set_super_type(type);
2078
2079 // Abstract class that represents the Dart class Type.
2080 // Note that this class is implemented by Dart class _AbstractType.
2081 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2082 /*register_class=*/true,
2083 /*is_abstract=*/true);
2084 cls.set_num_type_arguments(0);
2085 cls.set_is_prefinalized();
2086 RegisterClass(cls, Symbols::Type(), core_lib);
2087 pending_classes.Add(cls);
2088 type = Type::NewNonParameterizedType(cls);
2089 object_store->set_type_type(type);
2090
2091 // Abstract class that represents the Dart class Function.
2092 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2093 /*register_class=*/true,
2094 /*is_abstract=*/true);
2095 cls.set_num_type_arguments(0);
2096 cls.set_is_prefinalized();
2097 RegisterClass(cls, Symbols::Function(), core_lib);
2098 pending_classes.Add(cls);
2099 type = Type::NewNonParameterizedType(cls);
2100 object_store->set_function_type(type);
2101 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2102 object_store->set_legacy_function_type(type);
2103 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2104 object_store->set_non_nullable_function_type(type);
2105
2106 cls = Class::New<Number, RTN::Number>(isolate);
2107 RegisterClass(cls, Symbols::Number(), core_lib);
2108 pending_classes.Add(cls);
2109 type = Type::NewNonParameterizedType(cls);
2110 object_store->set_number_type(type);
2111 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2112 object_store->set_legacy_number_type(type);
2113 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2114 object_store->set_non_nullable_number_type(type);
2115
2116 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2117 /*register_class=*/true,
2118 /*is_abstract=*/true);
2119 RegisterClass(cls, Symbols::Int(), core_lib);
2120 cls.set_num_type_arguments(0);
2121 cls.set_is_prefinalized();
2122 pending_classes.Add(cls);
2123 type = Type::NewNonParameterizedType(cls);
2124 object_store->set_int_type(type);
2125 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2126 object_store->set_legacy_int_type(type);
2127 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2128 object_store->set_non_nullable_int_type(type);
2129 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2130 object_store->set_nullable_int_type(type);
2131
2132 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2133 /*register_class=*/true,
2134 /*is_abstract=*/true);
2135 RegisterClass(cls, Symbols::Double(), core_lib);
2136 cls.set_num_type_arguments(0);
2137 cls.set_is_prefinalized();
2138 pending_classes.Add(cls);
2139 type = Type::NewNonParameterizedType(cls);
2140 object_store->set_double_type(type);
2141 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2142 object_store->set_legacy_double_type(type);
2143 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2144 object_store->set_non_nullable_double_type(type);
2145 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2146 object_store->set_nullable_double_type(type);
2147
2148 name = Symbols::_String().raw();
2149 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate,
2150 /*register_class=*/true,
2151 /*is_abstract=*/true);
2152 RegisterClass(cls, name, core_lib);
2153 cls.set_num_type_arguments(0);
2154 cls.set_is_prefinalized();
2155 pending_classes.Add(cls);
2156 type = Type::NewNonParameterizedType(cls);
2157 object_store->set_string_type(type);
2158 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2159 object_store->set_legacy_string_type(type);
2160 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2161 object_store->set_non_nullable_string_type(type);
2162
2163 cls = object_store->bool_class();
2164 type = Type::NewNonParameterizedType(cls);
2165 object_store->set_bool_type(type);
2166 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2167 object_store->set_legacy_bool_type(type);
2168 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2169 object_store->set_non_nullable_bool_type(type);
2170
2171 cls = object_store->smi_class();
2172 type = Type::NewNonParameterizedType(cls);
2173 object_store->set_smi_type(type);
2174 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2175 object_store->set_legacy_smi_type(type);
2176 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2177 object_store->set_non_nullable_smi_type(type);
2178
2179 cls = object_store->mint_class();
2180 type = Type::NewNonParameterizedType(cls);
2181 object_store->set_mint_type(type);
2182 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2183 object_store->set_legacy_mint_type(type);
2184 type = type.ToNullability(Nullability::kNonNullable, Heap::kOld);
2185 object_store->set_non_nullable_mint_type(type);
2186
2187 // The classes 'void' and 'dynamic' are phony classes to make type checking
2188 // more regular; they live in the VM isolate. The class 'void' is not
2189 // registered in the class dictionary because its name is a reserved word.
2190 // The class 'dynamic' is registered in the class dictionary because its
2191 // name is a built-in identifier (this is wrong). The corresponding types
2192 // are stored in the object store.
2193 cls = object_store->null_class();
2194 type = Type::New(cls, Object::null_type_arguments(),
2195 TokenPosition::kNoSource, Nullability::kNullable);
2196 type.SetIsFinalized();
2197 type ^= type.Canonicalize();
2198 object_store->set_null_type(type);
2199 ASSERT(type.IsNullable());
2200
2201 // Consider removing when/if Null becomes an ordinary class.
2202 type = object_store->object_type();
2203 cls.set_super_type(type);
2204
2205 cls = object_store->never_class();
2206 type = Type::New(cls, Object::null_type_arguments(),
2207 TokenPosition::kNoSource, Nullability::kNonNullable);
2208 type.SetIsFinalized();
2209 type ^= type.Canonicalize();
2210 object_store->set_never_type(type);
2211
2212 // Create and cache commonly used type arguments <int>, <double>,
2213 // <String>, <String, dynamic> and <String, String>.
2214 type_args = TypeArguments::New(1);
2215 type = object_store->int_type();
2216 type_args.SetTypeAt(0, type);
2217 type_args = type_args.Canonicalize();
2218 object_store->set_type_argument_int(type_args);
2219 type_args = TypeArguments::New(1);
2220 type = object_store->legacy_int_type();
2221 type_args.SetTypeAt(0, type);
2222 type_args = type_args.Canonicalize();
2223 object_store->set_type_argument_legacy_int(type_args);
2224 type_args = TypeArguments::New(1);
2225 type = object_store->non_nullable_int_type();
2226 type_args.SetTypeAt(0, type);
2227 type_args = type_args.Canonicalize();
2228 object_store->set_type_argument_non_nullable_int(type_args);
2229
2230 type_args = TypeArguments::New(1);
2231 type = object_store->double_type();
2232 type_args.SetTypeAt(0, type);
2233 type_args = type_args.Canonicalize();
2234 object_store->set_type_argument_double(type_args);
2235 type_args = TypeArguments::New(1);
2236 type = object_store->legacy_double_type();
2237 type_args.SetTypeAt(0, type);
2238 type_args = type_args.Canonicalize();
2239 object_store->set_type_argument_legacy_double(type_args);
2240 type_args = TypeArguments::New(1);
2241 type = object_store->non_nullable_double_type();
2242 type_args.SetTypeAt(0, type);
2243 type_args = type_args.Canonicalize();
2244 object_store->set_type_argument_non_nullable_double(type_args);
2245
2246 type_args = TypeArguments::New(1);
2247 type = object_store->string_type();
2248 type_args.SetTypeAt(0, type);
2249 type_args = type_args.Canonicalize();
2250 object_store->set_type_argument_string(type_args);
2251 type_args = TypeArguments::New(1);
2252 type = object_store->legacy_string_type();
2253 type_args.SetTypeAt(0, type);
2254 type_args = type_args.Canonicalize();
2255 object_store->set_type_argument_legacy_string(type_args);
2256 type_args = TypeArguments::New(1);
2257 type = object_store->non_nullable_string_type();
2258 type_args.SetTypeAt(0, type);
2259 type_args = type_args.Canonicalize();
2260 object_store->set_type_argument_non_nullable_string(type_args);
2261
2262 type_args = TypeArguments::New(2);
2263 type = object_store->string_type();
2264 type_args.SetTypeAt(0, type);
2265 type_args.SetTypeAt(1, Object::dynamic_type());
2266 type_args = type_args.Canonicalize();
2267 object_store->set_type_argument_string_dynamic(type_args);
2268 type_args = TypeArguments::New(2);
2269 type = object_store->legacy_string_type();
2270 type_args.SetTypeAt(0, type);
2271 type_args.SetTypeAt(1, Object::dynamic_type());
2272 type_args = type_args.Canonicalize();
2273 object_store->set_type_argument_legacy_string_dynamic(type_args);
2274 type_args = TypeArguments::New(2);
2275 type = object_store->non_nullable_string_type();
2276 type_args.SetTypeAt(0, type);
2277 type_args.SetTypeAt(1, Object::dynamic_type());
2278 type_args = type_args.Canonicalize();
2279 object_store->set_type_argument_non_nullable_string_dynamic(type_args);
2280
2281 type_args = TypeArguments::New(2);
2282 type = object_store->string_type();
2283 type_args.SetTypeAt(0, type);
2284 type_args.SetTypeAt(1, type);
2285 type_args = type_args.Canonicalize();
2286 object_store->set_type_argument_string_string(type_args);
2287 type_args = TypeArguments::New(2);
2288 type = object_store->legacy_string_type();
2289 type_args.SetTypeAt(0, type);
2290 type_args.SetTypeAt(1, type);
2291 type_args = type_args.Canonicalize();
2292 object_store->set_type_argument_legacy_string_legacy_string(type_args);
2293 type_args = TypeArguments::New(2);
2294 type = object_store->non_nullable_string_type();
2295 type_args.SetTypeAt(0, type);
2296 type_args.SetTypeAt(1, type);
2297 type_args = type_args.Canonicalize();
2298 object_store->set_type_argument_non_nullable_string_non_nullable_string(
2299 type_args);
2300
2301 lib = Library::LookupLibrary(thread, Symbols::DartFfi());
2302 if (lib.IsNull()) {
2303 lib = Library::NewLibraryHelper(Symbols::DartFfi(), true);
2304 lib.SetLoadRequested();
2305 lib.Register(thread);
2306 }
2307 object_store->set_bootstrap_library(ObjectStore::kFfi, lib);
2308
2309 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate);
2310 cls.set_num_type_arguments(0);
2311 cls.set_is_prefinalized();
2312 pending_classes.Add(cls);
2313 object_store->set_ffi_native_type_class(cls);
2314 RegisterClass(cls, Symbols::FfiNativeType(), lib);
2315
2316#define REGISTER_FFI_TYPE_MARKER(clazz) \
2317 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate); \
2318 cls.set_num_type_arguments(0); \
2319 cls.set_is_prefinalized(); \
2320 pending_classes.Add(cls); \
2321 RegisterClass(cls, Symbols::Ffi##clazz(), lib);
2322 CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_TYPE_MARKER);
2323#undef REGISTER_FFI_TYPE_MARKER
2324
2325 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, isolate);
2326 cls.set_type_arguments_field_offset(Pointer::type_arguments_offset(),
2327 RTN::Pointer::type_arguments_offset());
2328 cls.set_num_type_arguments(1);
2329 cls.set_is_prefinalized();
2330 pending_classes.Add(cls);
2331 RegisterClass(cls, Symbols::FfiNativeFunction(), lib);
2332
2333 cls = Class::NewPointerClass(kFfiPointerCid, isolate);
2334 object_store->set_ffi_pointer_class(cls);
2335 pending_classes.Add(cls);
2336 RegisterClass(cls, Symbols::FfiPointer(), lib);
2337
2338 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kFfiDynamicLibraryCid,
2339 isolate);
2340 cls.set_instance_size(DynamicLibrary::InstanceSize(),
2341 compiler::target::RoundedAllocationSize(
2342 RTN::DynamicLibrary::InstanceSize()));
2343 cls.set_is_prefinalized();
2344 pending_classes.Add(cls);
2345 RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib);
2346
2347 lib = Library::LookupLibrary(thread, Symbols::DartWasm());
2348 if (lib.IsNull()) {
2349 lib = Library::NewLibraryHelper(Symbols::DartWasm(), true);
2350 lib.SetLoadRequested();
2351 lib.Register(thread);
2352 }
2353 object_store->set_bootstrap_library(ObjectStore::kWasm, lib);
2354
2355#define REGISTER_WASM_TYPE(clazz) \
2356 cls = Class::New<Instance, RTN::Instance>(k##clazz##Cid, isolate); \
2357 cls.set_num_type_arguments(0); \
2358 cls.set_is_prefinalized(); \
2359 pending_classes.Add(cls); \
2360 RegisterClass(cls, Symbols::clazz(), lib);
2361 CLASS_LIST_WASM(REGISTER_WASM_TYPE);
2362#undef REGISTER_WASM_TYPE
2363
2364 // Finish the initialization by compiling the bootstrap scripts containing
2365 // the base interfaces and the implementation of the internal classes.
2366 const Error& error = Error::Handle(
2367 zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
2368 if (!error.IsNull()) {
2369 return error.raw();
2370 }
2371
2372 isolate->class_table()->CopySizesFromClassObjects();
2373
2374 ClassFinalizer::VerifyBootstrapClasses();
2375
2376 // Set up the intrinsic state of all functions (core, math and typed data).
2377 compiler::Intrinsifier::InitializeState();
2378
2379 // Set up recognized state of all functions (core, math and typed data).
2380 MethodRecognizer::InitializeState();
2381
2382 // Adds static const fields (class ids) to the class 'ClassID');
2383 lib = Library::LookupLibrary(thread, Symbols::DartInternal());
2384 ASSERT(!lib.IsNull());
2385 cls = lib.LookupClassAllowPrivate(Symbols::ClassID());
2386 ASSERT(!cls.IsNull());
2387 const bool injected = cls.InjectCIDFields();
2388 ASSERT(injected);
2389
2390 isolate->object_store()->InitKnownObjects();
2391#endif // !defined(DART_PRECOMPILED_RUNTIME)
2392 } else {
2393 // Object::Init version when we are running in a version of dart that has a
2394 // full snapshot linked in and an isolate is initialized using the full
2395 // snapshot.
2396 ObjectStore* object_store = isolate->object_store();
2397
2398 Class& cls = Class::Handle(zone);
2399
2400 // Set up empty classes in the object store, these will get initialized
2401 // correctly when we read from the snapshot. This is done to allow
2402 // bootstrapping of reading classes from the snapshot. Some classes are not
2403 // stored in the object store. Yet we still need to create their Class
2404 // object so that they get put into the class_table (as a side effect of
2405 // Class::New()).
2406 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate);
2407 object_store->set_object_class(cls);
2408
2409 cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate);
2410 cls = Class::New<Type, RTN::Type>(isolate);
2411 cls = Class::New<TypeRef, RTN::TypeRef>(isolate);
2412 cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate);
2413
2414 cls = Class::New<Array, RTN::Array>(isolate);
2415 object_store->set_array_class(cls);
2416
2417 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate);
2418 object_store->set_immutable_array_class(cls);
2419
2420 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate);
2421 object_store->set_growable_object_array_class(cls);
2422
2423 cls = Class::New<LinkedHashMap, RTN::LinkedHashMap>(isolate);
2424 object_store->set_linked_hash_map_class(cls);
2425
2426 cls = Class::New<Float32x4, RTN::Float32x4>(isolate);
2427 object_store->set_float32x4_class(cls);
2428
2429 cls = Class::New<Int32x4, RTN::Int32x4>(isolate);
2430 object_store->set_int32x4_class(cls);
2431
2432 cls = Class::New<Float64x2, RTN::Float64x2>(isolate);
2433 object_store->set_float64x2_class(cls);
2434
2435#define REGISTER_TYPED_DATA_CLASS(clazz) \
2436 cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate);
2437 CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_CLASS);
2438#undef REGISTER_TYPED_DATA_CLASS
2439#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2440 cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate);
2441 CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
2442#undef REGISTER_TYPED_DATA_VIEW_CLASS
2443 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate);
2444#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2445 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2446 isolate);
2447 CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
2448#undef REGISTER_EXT_TYPED_DATA_CLASS
2449
2450 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate);
2451 object_store->set_ffi_native_type_class(cls);
2452
2453#define REGISTER_FFI_CLASS(clazz) \
2454 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate);
2455 CLASS_LIST_FFI_TYPE_MARKER(REGISTER_FFI_CLASS);
2456#undef REGISTER_FFI_CLASS
2457
2458#define REGISTER_WASM_CLASS(clazz) \
2459 cls = Class::New<Instance, RTN::Instance>(k##clazz##Cid, isolate);
2460 CLASS_LIST_WASM(REGISTER_WASM_CLASS);
2461#undef REGISTER_WASM_CLASS
2462
2463 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid, isolate);
2464
2465 cls = Class::NewPointerClass(kFfiPointerCid, isolate);
2466 object_store->set_ffi_pointer_class(cls);
2467
2468 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kFfiDynamicLibraryCid,
2469 isolate);
2470
2471 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate,
2472 /*register_isolate=*/false);
2473 cls.set_instance_size_in_words(0, 0);
2474 isolate->class_table()->Register(cls);
2475
2476 cls = Class::New<Integer, RTN::Integer>(isolate);
2477 object_store->set_integer_implementation_class(cls);
2478
2479 cls = Class::New<Smi, RTN::Smi>(isolate);
2480 object_store->set_smi_class(cls);
2481
2482 cls = Class::New<Mint, RTN::Mint>(isolate);
2483 object_store->set_mint_class(cls);
2484
2485 cls = Class::New<Double, RTN::Double>(isolate);
2486 object_store->set_double_class(cls);
2487
2488 cls = Class::New<Closure, RTN::Closure>(isolate);
2489 object_store->set_closure_class(cls);
2490
2491 cls = Class::NewStringClass(kOneByteStringCid, isolate);
2492 object_store->set_one_byte_string_class(cls);
2493
2494 cls = Class::NewStringClass(kTwoByteStringCid, isolate);
2495 object_store->set_two_byte_string_class(cls);
2496
2497 cls = Class::NewStringClass(kExternalOneByteStringCid, isolate);
2498 object_store->set_external_one_byte_string_class(cls);
2499
2500 cls = Class::NewStringClass(kExternalTwoByteStringCid, isolate);
2501 object_store->set_external_two_byte_string_class(cls);
2502
2503 cls = Class::New<Bool, RTN::Bool>(isolate);
2504 object_store->set_bool_class(cls);
2505
2506 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate);
2507 object_store->set_null_class(cls);
2508
2509 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate);
2510 object_store->set_never_class(cls);
2511
2512 cls = Class::New<Capability, RTN::Capability>(isolate);
2513 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate);
2514 cls = Class::New<SendPort, RTN::SendPort>(isolate);
2515 cls = Class::New<StackTrace, RTN::StackTrace>(isolate);
2516 cls = Class::New<RegExp, RTN::RegExp>(isolate);
2517 cls = Class::New<Number, RTN::Number>(isolate);
2518
2519 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate);
2520 object_store->set_weak_property_class(cls);
2521
2522 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate);
2523 cls = Class::New<UserTag, RTN::UserTag>(isolate);
2524 cls = Class::New<FutureOr, RTN::FutureOr>(isolate);
2525 cls =
2526 Class::New<TransferableTypedData, RTN::TransferableTypedData>(isolate);
2527 }
2528 return Error::null();
2529}
2530
#if defined(DEBUG)
// Returns whether this handle's target object lives in the read-only VM
// isolate heap. Under --verify_handles, additionally asserts that the
// object's address is really contained in that heap, remapping through the
// writable alias when the page is write-protected code.
bool Object::InVMIsolateHeap() const {
  if (FLAG_verify_handles && raw()->ptr()->InVMIsolateHeap()) {
    Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
    uword addr = ObjectLayout::ToAddr(raw());
    if (!vm_isolate_heap->Contains(addr)) {
      // Code pages may be write-protected; the object is then only visible to
      // Contains() through the writable alias of its page.
      ASSERT(FLAG_write_protect_code);
      addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw()));
      ASSERT(vm_isolate_heap->Contains(addr));
    }
  }
  return raw()->ptr()->InVMIsolateHeap();
}
#endif  // DEBUG
2545
2546void Object::Print() const {
2547 THR_Print("%s\n", ToCString());
2548}
2549
// Base implementation: generic objects have no name for library dictionary
// lookup; subclasses that participate in dictionaries override this.
StringPtr Object::DictionaryName() const {
  return String::null();
}
2553
// Initializes the raw memory of a freshly allocated object at [address]:
// fills the body with the appropriate initial value, then writes the header
// tags (class id, size, space bits) last.
void Object::InitializeObject(uword address, intptr_t class_id, intptr_t size) {
  // Note: we skip the header word here because it confuses TSAN. TSAN records
  // an 8-byte write from this loop, but doesn't overwrite that entry with
  // the 4-byte relaxed store of the header below, then reports false data races
  // based on the record of the 8-byte write.
  uword cur = address + sizeof(ObjectLayout);
  uword end = address + size;
  if (class_id == kInstructionsCid) {
    // Instructions are filled with break instructions so that unpatched gaps
    // trap instead of executing garbage.
    compiler::target::uword initial_value = kBreakInstructionFiller;
    while (cur < end) {
      *reinterpret_cast<compiler::target::uword*>(cur) = initial_value;
      cur += compiler::target::kWordSize;
    }
  } else {
    uword initial_value;
    bool needs_init;
    if (IsTypedDataBaseClassId(class_id)) {
      initial_value = 0;
      // If the size is greater than both kNewAllocatableSize and
      // kAllocatablePageSize, the object must have been allocated to a new
      // large page, which must already have been zero initialized by the OS.
      needs_init = Heap::IsAllocatableInNewSpace(size) ||
                   Heap::IsAllocatableViaFreeLists(size);
    } else {
      // All other objects get their slots pre-filled with the null object.
      initial_value = static_cast<uword>(null_);
      needs_init = true;
    }
    if (needs_init) {
      while (cur < end) {
        *reinterpret_cast<uword*>(cur) = initial_value;
        cur += kWordSize;
      }
    } else {
      // Check that MemorySanitizer understands this is initialized.
      MSAN_CHECK_INITIALIZED(reinterpret_cast<void*>(address), size);
#if defined(DEBUG)
      // In debug mode, verify the OS really delivered zeroed memory.
      while (cur < end) {
        ASSERT(*reinterpret_cast<uword*>(cur) == initial_value);
        cur += kWordSize;
      }
#endif
    }
  }
  // Write the header last so the object is never observable with valid tags
  // but an uninitialized body.
  uint32_t tags = 0;
  ASSERT(class_id != kIllegalCid);
  tags = ObjectLayout::ClassIdTag::update(class_id, tags);
  tags = ObjectLayout::SizeTag::update(size, tags);
  const bool is_old =
      (address & kNewObjectAlignmentOffset) == kOldObjectAlignmentOffset;
  tags = ObjectLayout::OldBit::update(is_old, tags);
  tags = ObjectLayout::OldAndNotMarkedBit::update(is_old, tags);
  tags = ObjectLayout::OldAndNotRememberedBit::update(is_old, tags);
  tags = ObjectLayout::NewBit::update(!is_old, tags);
  reinterpret_cast<ObjectLayout*>(address)->tags_ = tags;
#if defined(HASH_IN_OBJECT_HEADER)
  reinterpret_cast<ObjectLayout*>(address)->hash_ = 0;
#endif
}
2612
// Debug-only sanity check for a handle: verifies that the handle's vtable
// matches the builtin vtable for the target's class id and (under
// --verify_handles) that the target address lies in a known heap.
void Object::CheckHandle() const {
#if defined(DEBUG)
  if (raw_ != Object::null()) {
    intptr_t cid = raw_->GetClassIdMayBeSmi();
    if (cid >= kNumPredefinedCids) {
      // All user-defined classes share the Instance vtable.
      cid = kInstanceCid;
    }
    ASSERT(vtable() == builtin_vtables_[cid]);
    if (FLAG_verify_handles && raw_->IsHeapObject()) {
      Heap* isolate_heap = IsolateGroup::Current()->heap();
      // Skip the containment check while scavenging: objects may be mid-move.
      if (!isolate_heap->new_space()->scavenging()) {
        Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
        uword addr = ObjectLayout::ToAddr(raw_);
        if (!isolate_heap->Contains(addr) && !vm_isolate_heap->Contains(addr)) {
          // Write-protected code pages are only visible via their writable
          // alias.
          ASSERT(FLAG_write_protect_code);
          addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw_));
          ASSERT(isolate_heap->Contains(addr) ||
                 vm_isolate_heap->Contains(addr));
        }
      }
    }
  }
#endif
}
2637
// Allocates and initializes a heap object of [cls_id] with [size] bytes in
// [space]. On allocation failure, throws/propagates an out-of-memory error
// (or aborts when there is nowhere to propagate to). Returns the tagged
// pointer to the initialized object.
ObjectPtr Object::Allocate(intptr_t cls_id, intptr_t size, Heap::Space space) {
  ASSERT(Utils::IsAligned(size, kObjectAlignment));
  Thread* thread = Thread::Current();
  ASSERT(thread->execution_state() == Thread::kThreadInVM);
  ASSERT(thread->no_safepoint_scope_depth() == 0);
  ASSERT(thread->no_callback_scope_depth() == 0);
  Heap* heap = thread->heap();

  uword address = heap->Allocate(size, space);
  if (UNLIKELY(address == 0)) {
    if (thread->top_exit_frame_info() != 0) {
      // Use the preallocated out of memory exception to avoid calling
      // into dart code or allocating any code.
      const Instance& exception =
          Instance::Handle(thread->isolate()->object_store()->out_of_memory());
      Exceptions::Throw(thread, exception);
      UNREACHABLE();
    } else if (thread->long_jump_base() != nullptr) {
      Report::LongJump(Object::out_of_memory_error());
      UNREACHABLE();
    } else {
      // Nowhere to propagate an exception to.
      OUT_OF_MEMORY();
    }
  }
#ifndef PRODUCT
  // Allocation profiling: record a sample if tracing is enabled for this cid.
  auto class_table = thread->isolate_group()->shared_class_table();
  if (class_table->TraceAllocationFor(cls_id)) {
    Profiler::SampleAllocation(thread, cls_id);
  }
#endif  // !PRODUCT
  NoSafepointScope no_safepoint;
  InitializeObject(address, cls_id, size);
  ObjectPtr raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag);
  ASSERT(cls_id == ObjectLayout::ClassIdTag::decode(raw_obj->ptr()->tags_));
  if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) {
    // Black allocation. Prevents a data race between the mutator and concurrent
    // marker on ARM and ARM64 (the marker may observe a publishing store of
    // this object before the stores that initialize its slots), and helps the
    // collection to finish sooner.
    raw_obj->ptr()->SetMarkBitUnsynchronized();
    // Setting the mark bit must not be ordered after a publishing store of this
    // object. Adding a barrier here is cheaper than making every store into the
    // heap a store-release. Compare Scavenger::ScavengePointer.
    std::atomic_thread_fence(std::memory_order_release);
    heap->old_space()->AllocateBlack(size);
  }
  return raw_obj;
}
2687
// Visitor that re-applies the generational/incremental write barrier to every
// pointer slot of an old-space object, e.g. after its body was copied in bulk
// without barriers (see Object::Clone).
class WriteBarrierUpdateVisitor : public ObjectPointerVisitor {
 public:
  explicit WriteBarrierUpdateVisitor(Thread* thread, ObjectPtr obj)
      : ObjectPointerVisitor(thread->isolate()->group()),
        thread_(thread),
        old_obj_(obj) {
    // Only old-space objects need their store-buffer/marking state fixed up.
    ASSERT(old_obj_->IsOldObject());
  }

  // Note: [from, to] is an inclusive slot range.
  void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
    if (old_obj_->IsArray()) {
      for (ObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = *slot;
        if (value->IsHeapObject()) {
          // Arrays use the slot-aware (card-marking) barrier.
          old_obj_->ptr()->CheckArrayPointerStore(slot, value, thread_);
        }
      }
    } else {
      for (ObjectPtr* slot = from; slot <= to; ++slot) {
        ObjectPtr value = *slot;
        if (value->IsHeapObject()) {
          old_obj_->ptr()->CheckHeapPointerStore(value, thread_);
        }
      }
    }
  }

 private:
  Thread* thread_;
  ObjectPtr old_obj_;  // The old-space object whose slots are being visited.

  DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor);
};
2721
// Returns whether this handle lives in the VM's read-only handle area.
bool Object::IsReadOnlyHandle() const {
  return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this));
}
2725
2726bool Object::IsNotTemporaryScopedHandle() const {
2727 return (IsZoneHandle() || IsReadOnlyHandle());
2728}
2729
// Allocates a shallow copy of [orig] in [space]: copies the body bytes
// verbatim (header excluded) and then re-applies write barriers if the clone
// landed in old space.
ObjectPtr Object::Clone(const Object& orig, Heap::Space space) {
  const Class& cls = Class::Handle(orig.clazz());
  intptr_t size = orig.raw()->ptr()->HeapSize();
  ObjectPtr raw_clone = Object::Allocate(cls.id(), size, space);
  NoSafepointScope no_safepoint;
  // Copy the body of the original into the clone.
  uword orig_addr = ObjectLayout::ToAddr(orig.raw());
  uword clone_addr = ObjectLayout::ToAddr(raw_clone);
  static const intptr_t kHeaderSizeInBytes = sizeof(ObjectLayout);
  memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
          reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
          size - kHeaderSizeInBytes);

  // Add clone to store buffer, if needed.
  if (!raw_clone->IsOldObject()) {
    // No need to remember an object in new space.
    return raw_clone;
  }
  // The bulk memmove above bypassed barriers; visit every pointer slot to
  // restore store-buffer/marking invariants.
  WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
  raw_clone->ptr()->VisitPointers(&visitor);
  return raw_clone;
}
2752
// Returns the class's internal (unscrubbed) name.
StringPtr Class::Name() const {
  return raw_ptr()->name_;
}
2756
// Returns the class's name with private-name mangling removed, as a symbol.
StringPtr Class::ScrubbedName() const {
  return Symbols::New(Thread::Current(), ScrubbedNameCString());
}
2760
// Returns the scrubbed class name as a zone-allocated C string.
const char* Class::ScrubbedNameCString() const {
  return String::ScrubName(String::Handle(Name()));
}
2764
// Returns the user-visible class name. In non-PRODUCT builds the name is
// precomputed and cached in user_name_; in PRODUCT builds it is regenerated
// on every call (the early return above is compiled out).
StringPtr Class::UserVisibleName() const {
#if !defined(PRODUCT)
  ASSERT(raw_ptr()->user_name_ != String::null());
  return raw_ptr()->user_name_;
#endif  // !defined(PRODUCT)
  // No caching in PRODUCT, regenerate.
  return Symbols::New(Thread::Current(), GenerateUserVisibleName());
}
2773
// C-string variant of UserVisibleName(): cached in non-PRODUCT builds,
// regenerated on demand in PRODUCT builds.
const char* Class::UserVisibleNameCString() const {
#if !defined(PRODUCT)
  ASSERT(raw_ptr()->user_name_ != String::null());
  return String::Handle(raw_ptr()->user_name_).ToCString();
#endif  // !defined(PRODUCT)
  return GenerateUserVisibleName();  // No caching in PRODUCT, regenerate.
}
2781
2782const char* Class::NameCString(NameVisibility name_visibility) const {
2783 switch (name_visibility) {
2784 case Object::kInternalName:
2785 return String::Handle(Name()).ToCString();
2786 case Object::kScrubbedName:
2787 return ScrubbedNameCString();
2788 case Object::kUserVisibleName:
2789 return UserVisibleNameCString();
2790 default:
2791 UNREACHABLE();
2792 return nullptr;
2793 }
2794}
2795
// For a transformed mixin application, returns the mixin class (which is the
// class of the last implemented interface); otherwise returns this class.
ClassPtr Class::Mixin() const {
  if (is_transformed_mixin_application()) {
    const Array& interfaces = Array::Handle(this->interfaces());
    const Type& mixin_type =
        Type::Handle(Type::RawCast(interfaces.At(interfaces.Length() - 1)));
    return mixin_type.type_class();
  }
  return raw();
}
2805
// A class's null-safety mode is inherited from its defining library.
NNBDMode Class::nnbd_mode() const {
  return Library::Handle(library()).nnbd_mode();
}
2809
// Returns whether this class's library was loaded from a full snapshot.
// Reads the library's flags directly from raw memory, hence the no-safepoint
// scope to keep the pointers stable.
bool Class::IsInFullSnapshot() const {
  NoSafepointScope no_safepoint;
  return LibraryLayout::InFullSnapshotBit::decode(
      raw_ptr()->library_->ptr()->flags_);
}
2815
// Returns the "rare" type of this class: its type with a null (raw) type
// argument vector. For non-generic, non-closure, non-typedef classes this is
// just the declaration type; otherwise a fresh non-nullable type is created
// and finalized.
AbstractTypePtr Class::RareType() const {
  if (!IsGeneric() && !IsClosureClass() && !IsTypedefClass()) {
    return DeclarationType();
  }
  ASSERT(is_declaration_loaded());
  const Type& type = Type::Handle(
      Type::New(*this, Object::null_type_arguments(), TokenPosition::kNoSource,
                Nullability::kNonNullable));
  return ClassFinalizer::FinalizeType(*this, type);
}
2826
// Allocates a new Class object describing instances laid out like
// [FakeObject] (host layout) / [TargetFakeObject] (cross-compilation target
// layout). Used for VM-internal classes; finalization state is chosen from
// the fake object's class id.
template <class FakeObject, class TargetFakeObject>
ClassPtr Class::New(Isolate* isolate, bool register_class) {
  ASSERT(Object::class_class() != Class::null());
  Class& result = Class::Handle();
  {
    // Scope the raw allocation so the safepoint restriction only covers the
    // untyped-to-handle transfer.
    ObjectPtr raw =
        Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  Object::VerifyBuiltinVtable<FakeObject>(FakeObject::kClassId);
  result.set_token_pos(TokenPosition::kNoSource);
  result.set_end_token_pos(TokenPosition::kNoSource);
  // Record both host and target instance sizes/offsets for cross compilation.
  result.set_instance_size(FakeObject::InstanceSize(),
                           compiler::target::RoundedAllocationSize(
                               TargetFakeObject::InstanceSize()));
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
                                                  RTN::Class::kNoTypeArguments);
  const intptr_t host_next_field_offset = FakeObject::NextFieldOffset();
  const intptr_t target_next_field_offset = TargetFakeObject::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid));
  result.set_id(FakeObject::kClassId);
  result.set_num_type_arguments(0);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  if ((FakeObject::kClassId < kInstanceCid) ||
      (FakeObject::kClassId == kTypeArgumentsCid)) {
    // VM internal classes are done. There is no finalization needed or
    // possible in this case.
    result.set_is_declaration_loaded();
    result.set_is_type_finalized();
    result.set_is_allocate_finalized();
  } else if (FakeObject::kClassId != kClosureCid) {
    // VM backed classes are almost ready: run checks and resolve class
    // references, but do not recompute size.
    result.set_is_prefinalized();
  }
  NOT_IN_PRECOMPILED(result.set_is_declared_in_bytecode(false));
  NOT_IN_PRECOMPILED(result.set_binary_declaration_offset(0));
  result.InitEmptyFields();
  if (register_class) {
    isolate->class_table()->Register(result);
  }
  return result.raw();
}
2874
// Reports a fatal compile-time error when a class's cumulative type argument
// count exceeds what the 16-bit num_type_arguments_ field can hold. Does not
// return.
static void ReportTooManyTypeArguments(const Class& cls) {
  Report::MessageF(Report::kError, Script::Handle(cls.script()),
                   cls.token_pos(), Report::AtLocation,
                   "too many type parameters declared in class '%s' or in its "
                   "super classes",
                   String::Handle(cls.Name()).ToCString());
  UNREACHABLE();
}
2883
// Stores the class's total type argument count. The raw field is 16 bits
// wide, so out-of-range values are reported as a fatal error.
void Class::set_num_type_arguments(intptr_t value) const {
  if (!Utils::IsInt(16, value)) {
    ReportTooManyTypeArguments(*this);
  }
  StoreNonPointer(&raw_ptr()->num_type_arguments_, value);
}
2890
// Sets/clears the bit recording whether this class carries a @pragma
// annotation.
void Class::set_has_pragma(bool value) const {
  set_state_bits(HasPragmaBit::update(value, raw_ptr()->state_bits_));
}
2894
// Initialize class fields of type Array with the canonical empty array, so
// readers never observe null where an array is expected. No-op during early
// bootstrap before the empty array itself exists.
void Class::InitEmptyFields() {
  if (Object::empty_array().raw() == Array::null()) {
    // The empty array has not been initialized yet.
    return;
  }
  StorePointer(&raw_ptr()->interfaces_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->constants_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->functions_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->fields_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->invocation_dispatcher_cache_,
               Object::empty_array().raw());
}
2908
// Returns (building and caching on first use) an array mapping instance
// field offsets (in words) to their Field objects, including fields
// inherited from superclasses.
ArrayPtr Class::OffsetToFieldMap(bool original_classes) const {
  if (raw_ptr()->offset_in_words_to_field_ == Array::null()) {
    // Lazily build the map; requires finalized layout so offsets are fixed.
    ASSERT(is_finalized());
    const intptr_t length = raw_ptr()->host_instance_size_in_words_;
    const Array& array = Array::Handle(Array::New(length, Heap::kOld));
    Class& cls = Class::Handle(this->raw());
    Array& fields = Array::Handle();
    Field& f = Field::Handle();
    // Walk this class and all superclasses, recording each instance field at
    // the array slot corresponding to its word offset.
    while (!cls.IsNull()) {
      fields = cls.fields();
      for (intptr_t i = 0; i < fields.Length(); ++i) {
        f ^= fields.At(i);
        if (f.is_instance()) {
          array.SetAt(f.HostOffset() >> kWordSizeLog2, f);
        }
      }
      cls = cls.SuperClass(original_classes);
    }
    StorePointer(&raw_ptr()->offset_in_words_to_field_, array.raw());
  }
  return raw_ptr()->offset_in_words_to_field_;
}
2931
2932bool Class::HasInstanceFields() const {
2933 const Array& field_array = Array::Handle(fields());
2934 Field& field = Field::Handle();
2935 for (intptr_t i = 0; i < field_array.Length(); ++i) {
2936 field ^= field_array.At(i);
2937 if (!field.is_static()) {
2938 return true;
2939 }
2940 }
2941 return false;
2942}
2943
// Lightweight lookup key wrapping a function name for hash-table probes.
// Avoids symbol allocation: symbols compare by identity, non-symbols via a
// caller-provided scratch String handle.
class FunctionName {
 public:
  FunctionName(const String& name, String* tmp_string)
      : name_(name), tmp_string_(tmp_string) {}
  // Returns whether [function]'s name equals the wrapped name.
  bool Matches(const Function& function) const {
    if (name_.IsSymbol()) {
      // Symbols are canonical, so pointer identity suffices.
      return name_.raw() == function.name();
    } else {
      *tmp_string_ = function.name();
      return name_.Equals(*tmp_string_);
    }
  }
  intptr_t Hash() const { return name_.Hash(); }

 private:
  const String& name_;
  String* tmp_string_;  // Scratch handle for non-symbol comparisons.
};
2962
// Traits for looking up Functions by name in an UnorderedHashSet.
class ClassFunctionsTraits {
 public:
  static const char* Name() { return "ClassFunctionsTraits"; }
  static bool ReportStats() { return false; }

  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(a.IsFunction() && b.IsFunction());
    // Function objects are always canonical.
    return a.raw() == b.raw();
  }
  // Called when probing with a FunctionName key.
  static bool IsMatch(const FunctionName& name, const Object& obj) {
    return name.Matches(Function::Cast(obj));
  }
  static uword Hash(const Object& key) {
    // Function names are symbols, so the raw-symbol hash is valid.
    return String::HashRawSymbol(Function::Cast(key).name());
  }
  static uword Hash(const FunctionName& name) { return name.Hash(); }
};
typedef UnorderedHashSet<ClassFunctionsTraits> ClassFunctionsSet;
2984
// Installs [value] as this class's function list. When the list is large
// enough, also (re)builds the name-lookup hash table; otherwise the table is
// cleared and lookups fall back to linear scans.
void Class::SetFunctions(const Array& value) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->functions_, value.raw());
  const intptr_t len = value.Length();
  if (len >= kFunctionLookupHashTreshold) {
    ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld));
    Function& func = Function::Handle();
    for (intptr_t i = 0; i < len; ++i) {
      func ^= value.At(i);
      // Verify that all the functions in the array have this class as owner.
      ASSERT(func.Owner() == raw());
      set.Insert(func);
    }
    StorePointer(&raw_ptr()->functions_hash_table_, set.Release().raw());
  } else {
    StorePointer(&raw_ptr()->functions_hash_table_, Array::null());
  }
}
3004
// Appends [function] to this class's function list, growing the backing
// array by one and keeping the name-lookup hash table (if any) in sync.
void Class::AddFunction(const Function& function) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  const Array& arr = Array::Handle(functions());
  const Array& new_arr =
      Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld));
  new_arr.SetAt(arr.Length(), function);
  StorePointer(&raw_ptr()->functions_, new_arr.raw());
  // Add to hash table, if any.
  const intptr_t new_len = new_arr.Length();
  if (new_len == kFunctionLookupHashTreshold) {
    // Transition to using hash table.
    SetFunctions(new_arr);
  } else if (new_len > kFunctionLookupHashTreshold) {
    ClassFunctionsSet set(raw_ptr()->functions_hash_table_);
    set.Insert(function);
    StorePointer(&raw_ptr()->functions_hash_table_, set.Release().raw());
  }
}
3023
// Removes [function] from this class's function list by resetting the list
// and re-adding every other function. O(n^2) in the worst case, but keeps
// the array and hash table consistent via the AddFunction path.
void Class::RemoveFunction(const Function& function) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  const Array& arr = Array::Handle(functions());
  StorePointer(&raw_ptr()->functions_, Object::empty_array().raw());
  StorePointer(&raw_ptr()->functions_hash_table_, Array::null());
  Function& entry = Function::Handle();
  for (intptr_t i = 0; i < arr.Length(); i++) {
    entry ^= arr.At(i);
    if (function.raw() != entry.raw()) {
      AddFunction(entry);
    }
  }
}
3037
3038FunctionPtr Class::FunctionFromIndex(intptr_t idx) const {
3039 const Array& funcs = Array::Handle(functions());
3040 if ((idx < 0) || (idx >= funcs.Length())) {
3041 return Function::null();
3042 }
3043 Function& func = Function::Handle();
3044 func ^= funcs.At(idx);
3045 ASSERT(!func.IsNull());
3046 return func.raw();
3047}
3048
// Returns the implicit closure function of the function at [idx] in this
// class's function list, or the null function when [idx] is out of range or
// that function has no implicit closure.
FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
  const Array& funcs = Array::Handle(functions());
  if ((idx < 0) || (idx >= funcs.Length())) {
    return Function::null();
  }
  Function& func = Function::Handle();
  func ^= funcs.At(idx);
  ASSERT(!func.IsNull());
  if (!func.HasImplicitClosureFunction()) {
    return Function::null();
  }
  const Function& closure_func =
      Function::Handle(func.ImplicitClosureFunction());
  ASSERT(!closure_func.IsNull());
  return closure_func.raw();
}
3065
// Returns the index in this class's function list of the function whose
// implicit closure function is [needle], or -1 if not found (or if the class
// cannot be finalized).
intptr_t Class::FindImplicitClosureFunctionIndex(const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  // Use thread-local reusable handles to avoid zone allocation in this loop.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Function& function = thread->FunctionHandle();
  funcs = functions();
  ASSERT(!funcs.IsNull());
  Function& implicit_closure = Function::Handle(thread->zone());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    implicit_closure = function.implicit_closure_function();
    if (implicit_closure.IsNull()) {
      // Skip non-implicit closure functions.
      continue;
    }
    if (needle.raw() == implicit_closure.raw()) {
      return i;
    }
  }
  // No function found.
  return -1;
}
3093
// Returns the index of [needle] within this class's invocation dispatcher
// cache, or -1 if not present (or if the class cannot be finalized).
intptr_t Class::FindInvocationDispatcherFunctionIndex(
    const Function& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  // Use thread-local reusable handles to avoid zone allocation in this loop.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  funcs = invocation_dispatcher_cache();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  for (intptr_t i = 0; i < len; i++) {
    object = funcs.At(i);
    // The invocation_dispatcher_cache is a table with some entries that
    // are functions.
    if (object.IsFunction()) {
      if (Function::Cast(object).raw() == needle.raw()) {
        return i;
      }
    }
  }
  // No function found.
  return -1;
}
3120
// Returns the entry at [idx] in the invocation dispatcher cache if it is a
// function; otherwise (non-function cache entries) returns the null function.
FunctionPtr Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const {
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  Array& dispatcher_cache = thread->ArrayHandle();
  Object& object = thread->ObjectHandle();
  dispatcher_cache = invocation_dispatcher_cache();
  object = dispatcher_cache.At(idx);
  if (!object.IsFunction()) {
    return Function::null();
  }
  return Function::Cast(object).raw();
}
3134
// Stores the signature function backing this (closure or function-type)
// class.
void Class::set_signature_function(const Function& value) const {
  ASSERT(value.IsClosureFunction() || value.IsSignatureFunction());
  StorePointer(&raw_ptr()->signature_function_, value.raw());
}
3139
// Overwrites the packed state-bit field wholesale; callers must compose the
// new value from the current bits via the *Bit::update helpers.
void Class::set_state_bits(intptr_t bits) const {
  StoreNonPointer(&raw_ptr()->state_bits_, static_cast<uint32_t>(bits));
}
3143
// Sets the library that owns this class.
void Class::set_library(const Library& value) const {
  StorePointer(&raw_ptr()->library_, value.raw());
}
3147
// Installs this class's type parameter vector. Must happen before the total
// type argument count is computed (or on classes where it cannot change).
void Class::set_type_parameters(const TypeArguments& value) const {
  ASSERT((num_type_arguments() == kUnknownNumTypeArguments) ||
         is_declared_in_bytecode() || is_prefinalized());
  StorePointer(&raw_ptr()->type_parameters_, value.raw());
}
3153
// Returns the number of type parameters declared directly on this class
// (not counting those of superclasses). Handles the bootstrap case where
// the declaration has not been loaded yet.
intptr_t Class::NumTypeParameters(Thread* thread) const {
  if (!is_declaration_loaded()) {
    ASSERT(is_prefinalized());
    const intptr_t cid = id();
    if ((cid == kArrayCid) || (cid == kImmutableArrayCid) ||
        (cid == kGrowableObjectArrayCid)) {
      return 1;  // List's type parameter may not have been parsed yet.
    }
    return 0;
  }
  if (type_parameters() == TypeArguments::null()) {
    return 0;
  }
  // Use a thread-local reusable handle to avoid zone allocation.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  type_params = type_parameters();
  return type_params.Length();
}
3172
// Computes the total length of this class's type argument vector: the
// superclass's total count plus this class's own type parameters, minus any
// overlap where a prefix of this class's type parameters is passed through
// unchanged as a suffix of the super type's arguments.
intptr_t Class::ComputeNumTypeArguments() const {
  ASSERT(is_declaration_loaded());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  const intptr_t num_type_params = NumTypeParameters();

  // With no supertype (or Object as supertype) there is nothing to inherit
  // and nothing to overlap with.
  if ((super_type() == AbstractType::null()) ||
      (super_type() == isolate->object_store()->object_type())) {
    return num_type_params;
  }

  const auto& sup_type = AbstractType::Handle(zone, super_type());
  ASSERT(sup_type.IsType());

  const auto& sup_class = Class::Handle(zone, sup_type.type_class());
  ASSERT(!sup_class.IsTypedefClass());

  const intptr_t sup_class_num_type_args = sup_class.NumTypeArguments();
  if (num_type_params == 0) {
    return sup_class_num_type_args;
  }

  const auto& sup_type_args = TypeArguments::Handle(zone, sup_type.arguments());
  if (sup_type_args.IsNull()) {
    // The super type is raw or the super class is non generic.
    // In either case, overlapping is not possible.
    return sup_class_num_type_args + num_type_params;
  }

  const intptr_t sup_type_args_length = sup_type_args.Length();
  // At this point, the super type may or may not be finalized. In either case,
  // the result of this function must remain the same.
  // The value of num_sup_type_args may increase when the super type is
  // finalized, but the last [sup_type_args_length] type arguments will not be
  // modified by finalization, only shifted to higher indices in the vector.
  // The super type may not even be resolved yet. This is not necessary, since
  // we only check for matching type parameters, which are resolved by default.
  const auto& type_params = TypeArguments::Handle(zone, type_parameters());
  // Determine the maximum overlap of a prefix of the vector consisting of the
  // type parameters of this class with a suffix of the vector consisting of the
  // type arguments of the super type of this class.
  // The number of own type arguments of this class is the number of its type
  // parameters minus the number of type arguments in the overlap.
  // Attempt to overlap the whole vector of type parameters; reduce the size
  // of the vector (keeping the first type parameter) until it fits or until
  // its size is zero.
  auto& type_param = TypeParameter::Handle(zone);
  auto& sup_type_arg = AbstractType::Handle(zone);
  for (intptr_t num_overlapping_type_args =
           (num_type_params < sup_type_args_length) ? num_type_params
                                                    : sup_type_args_length;
       num_overlapping_type_args > 0; num_overlapping_type_args--) {
    intptr_t i = 0;
    for (; i < num_overlapping_type_args; i++) {
      type_param ^= type_params.TypeAt(i);
      sup_type_arg = sup_type_args.TypeAt(sup_type_args_length -
                                          num_overlapping_type_args + i);
      if (!type_param.Equals(sup_type_arg)) break;
    }
    if (i == num_overlapping_type_args) {
      // Overlap found.
      return sup_class_num_type_args + num_type_params -
             num_overlapping_type_args;
    }
  }
  // No overlap found.
  return sup_class_num_type_args + num_type_params;
}
3242
3243intptr_t Class::NumTypeArguments() const {
3244 // Return cached value if already calculated.
3245 intptr_t num_type_args = num_type_arguments();
3246 if (num_type_args != kUnknownNumTypeArguments) {
3247 return num_type_args;
3248 }
3249
3250 num_type_args = ComputeNumTypeArguments();
3251 ASSERT(num_type_args != kUnknownNumTypeArguments);
3252 set_num_type_arguments(num_type_args);
3253 return num_type_args;
3254}
3255
3256ClassPtr Class::SuperClass(bool original_classes) const {
3257 Thread* thread = Thread::Current();
3258 Zone* zone = thread->zone();
3259 Isolate* isolate = thread->isolate();
3260 if (super_type() == AbstractType::null()) {
3261 if (id() == kTypeArgumentsCid) {
3262 // Pretend TypeArguments objects are Dart instances.
3263 return isolate->class_table()->At(kInstanceCid);
3264 }
3265 return Class::null();
3266 }
3267 const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
3268 const intptr_t type_class_id = sup_type.type_class_id();
3269 if (original_classes) {
3270 return isolate->GetClassForHeapWalkAt(type_class_id);
3271 } else {
3272 return isolate->class_table()->At(type_class_id);
3273 }
3274}
3275
// Sets the super type of this class. The value must be null or a proper
// (non-dynamic) Type.
void Class::set_super_type(const AbstractType& value) const {
  ASSERT(value.IsNull() || (value.IsType() && !value.IsDynamicType()));
  StorePointer(&raw_ptr()->super_type_, value.raw());
}
3280
// Looks up a type parameter of this class by name. Returns
// TypeParameter::null() if no type parameter with that name exists.
TypeParameterPtr Class::LookupTypeParameter(const String& type_name) const {
  ASSERT(!type_name.IsNull());
  Thread* thread = Thread::Current();
  // Use the thread's reusable handles to avoid zone allocations during this
  // hot lookup; the scopes release the handles on exit.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  REUSABLE_TYPE_PARAMETER_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  TypeParameter& type_param = thread->TypeParameterHandle();
  String& type_param_name = thread->StringHandle();

  type_params = type_parameters();
  if (!type_params.IsNull()) {
    const intptr_t num_type_params = type_params.Length();
    // Linear scan over the declared type parameters comparing names.
    for (intptr_t i = 0; i < num_type_params; i++) {
      type_param ^= type_params.TypeAt(i);
      type_param_name = type_param.name();
      if (type_param_name.Equals(type_name)) {
        return type_param.raw();
      }
    }
  }
  return TypeParameter::null();
}
3304
// Assigns host and target offsets to all instance fields of this class,
// computes the instance size, and returns the bitmap marking which words of
// an instance hold unboxed (non-pointer) data. Host and target offsets are
// tracked separately because the compilation target's word size may differ
// from the host VM's.
UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
  Array& flds = Array::Handle(fields());
  const Class& super = Class::Handle(SuperClass());
  intptr_t host_offset = 0;
  UnboxedFieldBitmap host_bitmap{};
  // Target offsets might differ if the word size are different
  intptr_t target_offset = 0;
  intptr_t host_type_args_field_offset = kNoTypeArguments;
  intptr_t target_type_args_field_offset = RTN::Class::kNoTypeArguments;
  if (super.IsNull()) {
    // Root of the hierarchy: fields start right after the instance header.
    host_offset = Instance::NextFieldOffset();
    target_offset = RTN::Instance::NextFieldOffset();
    ASSERT(host_offset > 0);
    ASSERT(target_offset > 0);
  } else {
    // Continue the layout where the superclass left off.
    ASSERT(super.is_finalized() || super.is_prefinalized());
    host_type_args_field_offset = super.host_type_arguments_field_offset();
    target_type_args_field_offset = super.target_type_arguments_field_offset();
    host_offset = super.host_next_field_offset();
    ASSERT(host_offset > 0);
    target_offset = super.target_next_field_offset();
    ASSERT(target_offset > 0);
    // We should never call CalculateFieldOffsets for native wrapper
    // classes, assert this.
    ASSERT(num_native_fields() == 0);
    set_num_native_fields(super.num_native_fields());

    if (FLAG_precompiled_mode) {
      // Start from the superclass' unboxed-field bitmap so inherited unboxed
      // fields remain marked.
      host_bitmap = Isolate::Current()
                        ->group()
                        ->shared_class_table()
                        ->GetUnboxedFieldsMapAt(super.id());
    }
  }
  // If the super class is parameterized, use the same type_arguments field,
  // otherwise, if this class is the first in the super chain to be
  // parameterized, introduce a new type_arguments field.
  if (host_type_args_field_offset == kNoTypeArguments) {
    ASSERT(target_type_args_field_offset == RTN::Class::kNoTypeArguments);
    const TypeArguments& type_params = TypeArguments::Handle(type_parameters());
    if (!type_params.IsNull()) {
      ASSERT(type_params.Length() > 0);
      // The instance needs a type_arguments field.
      host_type_args_field_offset = host_offset;
      target_type_args_field_offset = target_offset;
      host_offset += kWordSize;
      target_offset += compiler::target::kWordSize;
    }
  } else {
    ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments);
  }

  set_type_arguments_field_offset(host_type_args_field_offset,
                                  target_type_args_field_offset);
  ASSERT(host_offset > 0);
  ASSERT(target_offset > 0);
  Field& field = Field::Handle();
  const intptr_t len = flds.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    // Offset is computed only for instance fields.
    if (!field.is_static()) {
      ASSERT(field.HostOffset() == 0);
      ASSERT(field.TargetOffset() == 0);
      field.SetOffset(host_offset, target_offset);

      if (FLAG_precompiled_mode && field.is_unboxing_candidate()) {
        // Determine the unboxed representation size from the guarded class.
        intptr_t field_size;
        switch (field.guarded_cid()) {
          case kDoubleCid:
            field_size = sizeof(DoubleLayout::value_);
            break;
          case kFloat32x4Cid:
            field_size = sizeof(Float32x4Layout::value_);
            break;
          case kFloat64x2Cid:
            field_size = sizeof(Float64x2Layout::value_);
            break;
          default:
            if (field.is_non_nullable_integer()) {
              field_size = sizeof(MintLayout::value_);
            } else {
              UNREACHABLE();
              field_size = 0;
            }
            break;
        }

        const intptr_t host_num_words = field_size / kWordSize;
        const intptr_t host_next_offset = host_offset + field_size;
        const intptr_t host_next_position = host_next_offset / kWordSize;

        const intptr_t target_next_offset = target_offset + field_size;
        const intptr_t target_next_position =
            target_next_offset / compiler::target::kWordSize;

        // The bitmap has fixed length. Checks if the offset position is smaller
        // than its length. If it is not, then the field should be boxed
        if (host_next_position <= UnboxedFieldBitmap::Length() &&
            target_next_position <= UnboxedFieldBitmap::Length()) {
          for (intptr_t j = 0; j < host_num_words; j++) {
            // Activate the respective bit in the bitmap, indicating that the
            // content is not a pointer
            host_bitmap.Set(host_offset / kWordSize);
            host_offset += kWordSize;
          }

          ASSERT(host_offset == host_next_offset);
          target_offset = target_next_offset;
        } else {
          // Make the field boxed
          field.set_is_unboxing_candidate(false);
          host_offset += kWordSize;
          target_offset += compiler::target::kWordSize;
        }
      } else {
        // Boxed field: occupies a single (tagged) word.
        host_offset += kWordSize;
        target_offset += compiler::target::kWordSize;
      }
    }
  }
  set_instance_size(RoundedAllocationSize(host_offset),
                    compiler::target::RoundedAllocationSize(target_offset));
  set_next_field_offset(host_offset, target_offset);

  return host_bitmap;
}
3432
// Appends a (name, args-descriptor, dispatcher-function) entry to this
// class' invocation dispatcher cache, growing the backing array (doubling)
// when it is full.
void Class::AddInvocationDispatcher(const String& target_name,
                                    const Array& args_desc,
                                    const Function& dispatcher) const {
  auto& cache = Array::Handle(invocation_dispatcher_cache());
  InvocationDispatcherTable dispatchers(cache);
  // Find the first unused slot (marked by a null name).
  intptr_t i = 0;
  for (auto dispatcher : dispatchers) {
    if (dispatcher.Get<kInvocationDispatcherName>() == String::null()) {
      break;
    }
    i++;
  }
  if (i == dispatchers.Length()) {
    // Cache is full: grow it. The table view reads through the [cache]
    // handle, so updating the handle is sufficient for [dispatchers] below.
    const intptr_t new_len =
        cache.Length() == 0
            ? static_cast<intptr_t>(Class::kInvocationDispatcherEntrySize)
            : cache.Length() * 2;
    cache = Array::Grow(cache, new_len);
    set_invocation_dispatcher_cache(cache);
  }
  auto entry = dispatchers[i];
  entry.Set<Class::kInvocationDispatcherName>(target_name);
  entry.Set<Class::kInvocationDispatcherArgsDesc>(args_desc);
  entry.Set<Class::kInvocationDispatcherFunction>(dispatcher);
}
3458
3459FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
3460 const Array& args_desc,
3461 FunctionLayout::Kind kind,
3462 bool create_if_absent) const {
3463 ASSERT(kind == FunctionLayout::kNoSuchMethodDispatcher ||
3464 kind == FunctionLayout::kInvokeFieldDispatcher ||
3465 kind == FunctionLayout::kDynamicInvocationForwarder);
3466 auto Z = Thread::Current()->zone();
3467 auto& function = Function::Handle(Z);
3468 auto& name = String::Handle(Z);
3469 auto& desc = Array::Handle(Z);
3470 auto& cache = Array::Handle(Z, invocation_dispatcher_cache());
3471 ASSERT(!cache.IsNull());
3472
3473 InvocationDispatcherTable dispatchers(cache);
3474 for (auto dispatcher : dispatchers) {
3475 name = dispatcher.Get<Class::kInvocationDispatcherName>();
3476 if (name.IsNull()) break; // Reached last entry.
3477 if (!name.Equals(target_name)) continue;
3478 desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>();
3479 if (desc.raw() != args_desc.raw()) continue;
3480 function = dispatcher.Get<Class::kInvocationDispatcherFunction>();
3481 if (function.kind() == kind) {
3482 break; // Found match.
3483 }
3484 }
3485
3486 if (function.IsNull() && create_if_absent) {
3487 function = CreateInvocationDispatcher(target_name, args_desc, kind);
3488 AddInvocationDispatcher(target_name, args_desc, function);
3489 }
3490 return function.raw();
3491}
3492
// Creates a new invocation dispatcher function of the given [kind] for
// [target_name], with a signature derived from [args_desc]: all parameter
// types are `dynamic` and the result type is `dynamic`.
FunctionPtr Class::CreateInvocationDispatcher(const String& target_name,
                                              const Array& args_desc,
                                              FunctionLayout::Kind kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& invocation = Function::Handle(
      zone, Function::New(
                String::Handle(zone, Symbols::New(thread, target_name)), kind,
                false,  // Not static.
                false,  // Not const.
                false,  // Not abstract.
                false,  // Not external.
                false,  // Not native.
                *this, TokenPosition::kMinSource));
  ArgumentsDescriptor desc(args_desc);
  if (desc.TypeArgsLen() > 0) {
    // Make dispatcher function generic, since type arguments are passed.
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, TypeArguments::New(desc.TypeArgsLen()));
    // The presence of a type parameter array is enough to mark this dispatcher
    // as generic. To save memory, we do not copy the type parameters to the
    // array (they are not accessed), but leave it as an array of null objects.
    invocation.set_type_parameters(type_params);
  }

  invocation.set_num_fixed_parameters(desc.PositionalCount());
  invocation.SetNumOptionalParameters(desc.NamedCount(),
                                      false);  // Not positional.
  invocation.set_parameter_types(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  invocation.set_parameter_names(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  // Receiver.
  invocation.SetParameterTypeAt(0, Object::dynamic_type());
  invocation.SetParameterNameAt(0, Symbols::This());
  // Remaining positional parameters. Names are synthesized (":p1", ":p2", ...)
  // since the original call site has no names for them.
  intptr_t i = 1;
  for (; i < desc.PositionalCount(); i++) {
    invocation.SetParameterTypeAt(i, Object::dynamic_type());
    char name[64];
    Utils::SNPrint(name, 64, ":p%" Pd, i);
    invocation.SetParameterNameAt(
        i, String::Handle(zone, Symbols::New(thread, name)));
  }

  // Named parameters.
  for (; i < desc.Count(); i++) {
    invocation.SetParameterTypeAt(i, Object::dynamic_type());
    intptr_t index = i - desc.PositionalCount();
    invocation.SetParameterNameAt(i, String::Handle(zone, desc.NameAt(index)));
  }
  invocation.set_result_type(Object::dynamic_type());
  // Dispatchers are VM artifacts: hide them from the debugger, stack traces
  // and mirrors.
  invocation.set_is_debuggable(false);
  invocation.set_is_visible(false);
  invocation.set_is_reflectable(false);
  invocation.set_saved_args_desc(args_desc);

  return invocation.raw();
}
3552
// Method extractors are used to create implicit closures from methods.
// When an expression obj.M is evaluated for the first time and receiver obj
// does not have a getter called M but has a method called M then an extractor
// is created and injected as a getter (under the name get:M) into the class
// owning method M.
FunctionPtr Function::CreateMethodExtractor(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =
      Function::Handle(zone, ImplicitClosureFunction());

  // The extractor is added to the owner of the implicit closure function.
  const Class& owner = Class::Handle(zone, closure_function.Owner());
  Function& extractor = Function::Handle(
      zone,
      Function::New(String::Handle(zone, Symbols::New(thread, getter_name)),
                    FunctionLayout::kMethodExtractor,
                    false,  // Not static.
                    false,  // Not const.
                    is_abstract(),
                    false,  // Not external.
                    false,  // Not native.
                    owner, TokenPosition::kMethodExtractor));

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  extractor.set_num_fixed_parameters(kNumParameters);
  extractor.SetNumOptionalParameters(0, false);
  // All extractors share canonical parameter type/name arrays.
  extractor.set_parameter_types(Object::extractor_parameter_types());
  extractor.set_parameter_names(Object::extractor_parameter_names());
  extractor.set_result_type(Object::dynamic_type());

  extractor.InheritBinaryDeclarationFrom(*this);

  extractor.set_extracted_method_closure(closure_function);
  // Extractors are VM artifacts: hide them from the debugger and stack traces.
  extractor.set_is_debuggable(false);
  extractor.set_is_visible(false);

  owner.AddFunction(extractor);

  return extractor.raw();
}
3595
3596FunctionPtr Function::GetMethodExtractor(const String& getter_name) const {
3597 ASSERT(Field::IsGetterName(getter_name));
3598 const Function& closure_function =
3599 Function::Handle(ImplicitClosureFunction());
3600 const Class& owner = Class::Handle(closure_function.Owner());
3601 Function& result = Function::Handle(owner.LookupDynamicFunction(getter_name));
3602 if (result.IsNull()) {
3603 result = CreateMethodExtractor(getter_name);
3604 }
3605 ASSERT(result.kind() == FunctionLayout::kMethodExtractor);
3606 return result.raw();
3607}
3608
// Searches the metadata of [obj] (a Class, Function or Field) for a
// `@pragma` annotation whose name equals [pragma_name]. On success returns
// true and stores the pragma's options object into [options]. If [only_core]
// is true, objects outside the core libraries never match.
bool Library::FindPragma(Thread* T,
                         bool only_core,
                         const Object& obj,
                         const String& pragma_name,
                         Object* options) {
  auto I = T->isolate();
  auto Z = T->zone();
  auto& lib = Library::Handle(Z);

  // Cheap pre-filter: bail out early if the object carries no pragma bit.
  if (obj.IsClass()) {
    auto& klass = Class::Cast(obj);
    if (!klass.has_pragma()) return false;
    lib = klass.library();
  } else if (obj.IsFunction()) {
    auto& function = Function::Cast(obj);
    if (!function.has_pragma()) return false;
    lib = Class::Handle(Z, function.Owner()).library();
  } else if (obj.IsField()) {
    auto& field = Field::Cast(obj);
    if (!field.has_pragma()) return false;
    lib = Class::Handle(Z, field.Owner()).library();
  } else {
    UNREACHABLE();
  }

  if (only_core && !lib.IsAnyCoreLibrary()) {
    return false;
  }

  // Evaluating metadata can unwind (e.g. isolate shutdown); propagate that.
  Object& metadata_obj = Object::Handle(Z, lib.GetMetadata(obj));
  if (metadata_obj.IsUnwindError()) {
    Report::LongJump(UnwindError::Cast(metadata_obj));
  }

  // If there is a compile-time error while evaluating the metadata, we will
  // simply claim there was no @pragma annotation.
  if (metadata_obj.IsNull() || metadata_obj.IsLanguageError()) {
    return false;
  }
  ASSERT(metadata_obj.IsArray());

  auto& metadata = Array::Cast(metadata_obj);
  auto& pragma_class = Class::Handle(Z, I->object_store()->pragma_class());
  auto& pragma_name_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::name()));
  auto& pragma_options_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::options()));

  // Scan the annotations for a pragma instance with a matching name.
  auto& pragma = Object::Handle(Z);
  for (intptr_t i = 0; i < metadata.Length(); ++i) {
    pragma = metadata.At(i);
    if (pragma.clazz() != pragma_class.raw() ||
        Instance::Cast(pragma).GetField(pragma_name_field) !=
            pragma_name.raw()) {
      continue;
    }
    *options = Instance::Cast(pragma).GetField(pragma_options_field);
    return true;
  }

  return false;
}
3671
// Convenience overload: see the StringPtr overload below.
bool Function::IsDynamicInvocationForwarderName(const String& name) {
  return IsDynamicInvocationForwarderName(name.raw());
}
3675
// Returns true if [name] carries the dynamic invocation forwarder prefix.
bool Function::IsDynamicInvocationForwarderName(StringPtr name) {
  return String::StartsWith(name, Symbols::DynamicPrefix().raw());
}
3679
// Strips the dynamic invocation forwarder prefix from [name], returning the
// original member name as a symbol.
StringPtr Function::DemangleDynamicInvocationForwarderName(const String& name) {
  const intptr_t kDynamicPrefixLength = 4;  // "dyn:"
  ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength);
  return Symbols::New(Thread::Current(), name, kDynamicPrefixLength,
                      name.Length() - kDynamicPrefixLength);
}
3686
// Prepends the dynamic invocation forwarder prefix to [name], returning the
// mangled name as a symbol. Inverse of DemangleDynamicInvocationForwarderName.
StringPtr Function::CreateDynamicInvocationForwarderName(const String& name) {
  return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name);
}
3690
3691#if !defined(DART_PRECOMPILED_RUNTIME)
// Creates a dynamic invocation forwarder for this function under
// [mangled_name] by cloning the function and resetting its compilation
// state. The forwarder records this function as its forwarding target.
FunctionPtr Function::CreateDynamicInvocationForwarder(
    const String& mangled_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Start from a clone of the target so the signature matches exactly.
  Function& forwarder = Function::Handle(zone);
  forwarder ^= Object::Clone(*this, Heap::kOld);

  forwarder.reset_unboxed_parameters_and_return();

  forwarder.set_name(mangled_name);
  forwarder.set_is_native(false);
  // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized
  // kind when creating the dynamic invocation forwarder.
  forwarder.set_kind(FunctionLayout::kDynamicInvocationForwarder);
  forwarder.set_is_debuggable(false);

  // TODO(vegorov) for error reporting reasons it is better to make this
  // function visible and instead use a TailCall to invoke the target.
  // Our TailCall instruction is not ready for such usage though it
  // blocks inlining and can't take Function-s only Code objects.
  forwarder.set_is_visible(false);

  // Clear all compilation artifacts and counters inherited from the clone;
  // the forwarder must be compiled independently of the target.
  forwarder.ClearICDataArray();
  forwarder.ClearBytecode();
  forwarder.ClearCode();
  forwarder.set_usage_counter(0);
  forwarder.set_deoptimization_counter(0);
  forwarder.set_optimized_instruction_count(0);
  forwarder.set_inlining_depth(0);
  forwarder.set_optimized_call_site_count(0);

  forwarder.InheritBinaryDeclarationFrom(*this);

  // The single forwarding check targets this function.
  const Array& checks = Array::Handle(zone, Array::New(1));
  checks.SetAt(0, *this);
  forwarder.SetForwardingChecks(checks);

  return forwarder.raw();
}
3732
// Returns the dynamic invocation forwarder for this function under
// [mangled_name], consulting (and, if [allow_add], populating) the owner
// class' invocation dispatcher cache. If the function needs no forwarder,
// the function itself is cached and returned. Returns Function::null() when
// a forwarder would be required but [allow_add] is false.
FunctionPtr Function::GetDynamicInvocationForwarder(
    const String& mangled_name,
    bool allow_add /* = true */) const {
  ASSERT(IsDynamicInvocationForwarderName(mangled_name));
  auto zone = Thread::Current()->zone();
  const Class& owner = Class::Handle(zone, Owner());
  // Fast path: a forwarder was already cached on the owner class.
  Function& result = Function::Handle(
      zone,
      owner.GetInvocationDispatcher(mangled_name, Array::null_array(),
                                    FunctionLayout::kDynamicInvocationForwarder,
                                    /*create_if_absent=*/false));

  if (!result.IsNull()) {
    return result.raw();
  }

  // Check if function actually needs a dynamic invocation forwarder.
  if (!kernel::NeedsDynamicInvocationForwarder(*this)) {
    result = raw();
  } else if (allow_add) {
    result = CreateDynamicInvocationForwarder(mangled_name);
  }

  if (allow_add) {
    owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
  }

  return result.raw();
}
3762
3763#endif
3764
// Instantiates [subtype] and [supertype] in place (when not yet instantiated)
// using the given type argument vectors, then performs the subtype test.
// Note: both handles are updated to their instantiated forms as a side
// effect.
bool AbstractType::InstantiateAndTestSubtype(
    AbstractType* subtype,
    AbstractType* supertype,
    const TypeArguments& instantiator_type_args,
    const TypeArguments& function_type_args) {
  if (!subtype->IsInstantiated()) {
    *subtype = subtype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  if (!supertype->IsInstantiated()) {
    *supertype = supertype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  return subtype->IsSubtypeOf(*supertype, Heap::kOld);
}
3780
// Returns the raw array backing this class' invocation dispatcher cache.
ArrayPtr Class::invocation_dispatcher_cache() const {
  return raw_ptr()->invocation_dispatcher_cache_;
}
3784
// Installs a new backing array for this class' invocation dispatcher cache.
void Class::set_invocation_dispatcher_cache(const Array& cache) const {
  StorePointer(&raw_ptr()->invocation_dispatcher_cache_, cache.raw());
}
3788
// Finalizes this class: computes field offsets and instance size (for
// non-prefinalized classes), publishes the size to the class table, and
// marks the class as finalized. Must run on the mutator thread before all
// classes are finalized.
void Class::Finalize() const {
  auto thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  ASSERT(thread->IsMutatorThread());
  ASSERT(!isolate->all_classes_finalized());
  ASSERT(!is_finalized());
  // Prefinalized classes have a VM internal representation and no Dart fields.
  // Their instance size is precomputed and field offsets are known.
  if (!is_prefinalized()) {
    // Compute offsets of instance fields, instance size and bitmap for unboxed
    // fields.
    const auto host_bitmap = CalculateFieldOffsets();
    if (raw() == isolate->class_table()->At(id())) {
      // Sets the new size in the class table.
      isolate->class_table()->SetAt(id(), raw());
      if (FLAG_precompiled_mode && !ClassTable::IsTopLevelCid(id())) {
        isolate->group()->shared_class_table()->SetUnboxedFieldsMapAt(
            id(), host_bitmap);
      }
    }
  }

#if defined(DEBUG)
  if (is_const()) {
    // Double-check that all fields are final (CFE should guarantee that if it
    // marks the class as having a constant constructor).
    auto Z = thread->zone();
    const auto& super_class = Class::Handle(Z, SuperClass());
    ASSERT(super_class.IsNull() || super_class.is_const());
    const auto& fields = Array::Handle(Z, this->fields());
    auto& field = Field::Handle(Z);
    for (intptr_t i = 0; i < fields.Length(); ++i) {
      field ^= fields.At(i);
      ASSERT(field.is_static() || field.is_final());
    }
  }
#endif

  set_is_finalized();
}
3829
// Weak-reference wrapper around a class' dependent-code array, used to
// deoptimize or switch code that was optimized under CHA (class hierarchy
// analysis) assumptions about that class.
class CHACodeArray : public WeakCodeReferences {
 public:
  explicit CHACodeArray(const Class& cls)
      : WeakCodeReferences(Array::Handle(cls.dependent_code())), cls_(cls) {}

  // Writes the (possibly grown/compacted) array back onto the class.
  virtual void UpdateArrayTo(const Array& value) {
    // TODO(fschneider): Fails for classes in the VM isolate.
    cls_.set_dependent_code(value);
  }

  // Tracing hook invoked when dependent optimized code is deoptimized.
  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because CHA optimized (%s).\n",
                function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

  // Tracing hook invoked when a function is switched to unoptimized code.
  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching %s to unoptimized code because CHA invalid"
          " (%s)\n",
          function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

 private:
  const Class& cls_;
  DISALLOW_COPY_AND_ASSIGN(CHACodeArray);
};
3862
3863#if defined(DEBUG)
3864static bool IsMutatorOrAtSafepoint() {
3865 Thread* thread = Thread::Current();
3866 return thread->IsMutatorThread() || thread->IsAtSafepoint();
3867}
3868#endif
3869
// Registers optimized [code] as depending on CHA assumptions about this
// class, so it can be invalidated if the hierarchy changes.
void Class::RegisterCHACode(const Code& code) {
  if (FLAG_trace_cha) {
    THR_Print("RegisterCHACode '%s' depends on class '%s'\n",
              Function::Handle(code.function()).ToQualifiedCString(),
              ToCString());
  }
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(code.is_optimized());
  CHACodeArray a(*this);
  a.Register(code);
}
3881
// Disables all code optimized under CHA assumptions about this class,
// typically because [subclass] was newly added to the hierarchy (a null
// [subclass] means the invalidation is unconditional).
void Class::DisableCHAOptimizedCode(const Class& subclass) {
  ASSERT(Thread::Current()->IsMutatorThread());
  CHACodeArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    if (subclass.IsNull()) {
      THR_Print("Deopt for CHA (all)\n");
    } else {
      THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString());
    }
  }
  a.DisableCode();
}
3894
// Unconditionally disables all CHA-optimized code depending on this class.
void Class::DisableAllCHAOptimizedCode() {
  DisableCHAOptimizedCode(Class::Handle());
}
3898
// Returns true if allocation tracing is enabled for this class.
// Always false in PRODUCT builds, where allocation tracing is compiled out.
bool Class::TraceAllocation(Isolate* isolate) const {
#ifndef PRODUCT
  auto class_table = isolate->group()->shared_class_table();
  return class_table->TraceAllocationFor(id());
#else
  return false;
#endif
}
3907
// Enables or disables allocation tracing for this class. Disables the
// class' allocation stub on any state change so allocations go through the
// traced slow path. Not available in PRODUCT builds.
void Class::SetTraceAllocation(bool trace_allocation) const {
#ifndef PRODUCT
  Isolate* isolate = Isolate::Current();
  const bool changed = trace_allocation != this->TraceAllocation(isolate);
  if (changed) {
    auto class_table = isolate->group()->shared_class_table();
    class_table->SetTraceAllocationFor(id(), trace_allocation);
    // Force regeneration of the allocation stub so the new setting is
    // respected.
    DisableAllocationStub();
  }
#else
  UNREACHABLE();
#endif
}
3921
// Installs the array of code objects that depend on CHA assumptions about
// this class.
void Class::set_dependent_code(const Array& array) const {
  StorePointer(&raw_ptr()->dependent_code_, array.raw());
}
3925
// Conventions:
// * For throwing a NSM in a class klass we use its runtime type as receiver,
//   i.e., klass.RareType().
// * For throwing a NSM in a library, we just pass the null instance as
//   receiver.
//
// Invokes NoSuchMethodError._throwNew to raise a NoSuchMethodError for the
// failed invocation described by the arguments.
static ObjectPtr ThrowNoSuchMethod(const Instance& receiver,
                                   const String& function_name,
                                   const Array& arguments,
                                   const Array& argument_names,
                                   const InvocationMirror::Level level,
                                   const InvocationMirror::Kind kind) {
  // Level and kind are packed into a single Smi understood by the Dart side.
  const Smi& invocation_type =
      Smi::Handle(Smi::New(InvocationMirror::EncodeType(level, kind)));

  // Argument order and count must match NoSuchMethodError._throwNew.
  const Array& args = Array::Handle(Array::New(7));
  args.SetAt(0, receiver);
  args.SetAt(1, function_name);
  args.SetAt(2, invocation_type);
  args.SetAt(3, Object::smi_zero());  // Type arguments length.
  args.SetAt(4, Object::null_type_arguments());
  args.SetAt(5, arguments);
  args.SetAt(6, argument_names);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& NoSuchMethodError =
      Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError()));
  const Function& throwNew = Function::Handle(
      NoSuchMethodError.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
3956
3957static ObjectPtr ThrowTypeError(const TokenPosition token_pos,
3958 const Instance& src_value,
3959 const AbstractType& dst_type,
3960 const String& dst_name) {
3961 const Array& args = Array::Handle(Array::New(4));
3962 const Smi& pos = Smi::Handle(Smi::New(token_pos.value()));
3963 args.SetAt(0, pos);
3964 args.SetAt(1, src_value);
3965 args.SetAt(2, dst_type);
3966 args.SetAt(3, dst_name);
3967
3968 const Library& libcore = Library::Handle(Library::CoreLibrary());
3969 const Class& TypeError =
3970 Class::Handle(libcore.LookupClassAllowPrivate(Symbols::TypeError()));
3971 const Function& throwNew = Function::Handle(
3972 TypeError.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
3973 return DartEntry::InvokeFunction(throwNew, args);
3974}
3975
// Invokes the static getter [getter_name] on this class. Falls back to
// closurizing a regular static method of the same name. Depending on
// [throw_nsm_if_absent], a missing getter either throws NoSuchMethodError or
// returns the sentinel. [respect_reflectable] filters out non-reflectable
// getters; [check_is_entrypoint] verifies AOT entry-point pragmas.
ObjectPtr Class::InvokeGetter(const String& getter_name,
                              bool throw_nsm_if_absent,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  CHECK_ERROR(EnsureIsFinalized(thread));

  // Note static fields do not have implicit getters.
  const Field& field = Field::Handle(zone, LookupStaticField(getter_name));

  if (!field.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
  }

  // No field, or a field without a value yet: look for an explicit getter
  // function instead.
  if (field.IsNull() || field.IsUninitialized()) {
    const String& internal_getter_name =
        String::Handle(zone, Field::GetterName(getter_name));
    Function& getter =
        Function::Handle(zone, LookupStaticFunction(internal_getter_name));

    if (field.IsNull() && !getter.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(getter.VerifyCallEntryPoint());
    }

    if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
      if (getter.IsNull()) {
        getter = LookupStaticFunction(getter_name);
        if (!getter.IsNull()) {
          if (check_is_entrypoint) {
            CHECK_ERROR(getter.VerifyClosurizedEntryPoint());
          }
          if (getter.SafeToClosurize()) {
            // Looking for a getter but found a regular method: closurize it.
            const Function& closure_function =
                Function::Handle(zone, getter.ImplicitClosureFunction());
            return closure_function.ImplicitStaticClosure();
          }
        }
      }
      if (throw_nsm_if_absent) {
        return ThrowNoSuchMethod(
            AbstractType::Handle(zone, RareType()), getter_name,
            Object::null_array(), Object::null_array(),
            InvocationMirror::kStatic, InvocationMirror::kGetter);
      }
      // Fall through case: Indicate that we didn't find any function or field
      // using a special null instance. This is different from a field being
      // null. Callers make sure that this null does not leak into Dartland.
      return Object::sentinel().raw();
    }

    // Invoke the getter and return the result.
    return DartEntry::InvokeFunction(getter, Object::empty_array());
  }

  return field.StaticValue();
}
4035
// Invokes the static setter [setter_name] on this class with [value],
// preferring a real static field over an explicit setter function. Performs
// a runtime assignability check against the declared field/parameter type
// and throws a TypeError on mismatch, or NoSuchMethodError if no writable
// target exists. [respect_reflectable] filters out non-reflectable targets;
// [check_is_entrypoint] verifies AOT entry-point pragmas.
ObjectPtr Class::InvokeSetter(const String& setter_name,
                              const Instance& value,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  CHECK_ERROR(EnsureIsFinalized(thread));

  // Check for real fields and user-defined setters.
  const Field& field = Field::Handle(zone, LookupStaticField(setter_name));
  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));

  if (!field.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
  }

  AbstractType& parameter_type = AbstractType::Handle(zone);
  if (field.IsNull()) {
    // No field: try an explicit static setter function.
    const Function& setter =
        Function::Handle(zone, LookupStaticFunction(internal_setter_name));
    if (!setter.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(setter.VerifyCallEntryPoint());
    }
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
      return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                               internal_setter_name, args, Object::null_array(),
                               InvocationMirror::kStatic,
                               InvocationMirror::kSetter);
    }
    // Type-check the value against the setter's parameter type.
    parameter_type = setter.ParameterTypeAt(0);
    if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                      Object::null_type_arguments(),
                                      Object::null_type_arguments())) {
      const String& argument_name =
          String::Handle(zone, setter.ParameterNameAt(0));
      return ThrowTypeError(setter.token_pos(), value, parameter_type,
                            argument_name);
    }
    // Invoke the setter and return the result.
    return DartEntry::InvokeFunction(setter, args);
  }

  // A field exists but is not assignable (final or hidden from reflection).
  if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    return ThrowNoSuchMethod(AbstractType::Handle(zone, RareType()),
                             internal_setter_name, args, Object::null_array(),
                             InvocationMirror::kStatic,
                             InvocationMirror::kSetter);
  }

  // Type-check the value against the declared field type before storing.
  parameter_type = field.type();
  if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                    Object::null_type_arguments(),
                                    Object::null_type_arguments())) {
    const String& argument_name = String::Handle(zone, field.name());
    return ThrowTypeError(field.token_pos(), value, parameter_type,
                          argument_name);
  }
  field.SetStaticValue(value);
  return value.raw();
}
4104
4105// Creates a new array of boxed arguments suitable for invoking the callable
4106// from the original boxed arguments for a static call. Also sets the contents
4107// of the handle pointed to by [callable_args_desc_array_out] to an appropriate
4108// arguments descriptor array for the new arguments.
4109//
4110// Assumes [arg_names] are consistent with [static_args_descriptor].
4111static ArrayPtr CreateCallableArgumentsFromStatic(
4112 Zone* zone,
4113 const Instance& receiver,
4114 const Array& static_args,
4115 const Array& arg_names,
4116 const ArgumentsDescriptor& static_args_descriptor) {
4117 const intptr_t num_static_type_args = static_args_descriptor.TypeArgsLen();
4118 const intptr_t num_static_args = static_args_descriptor.Count();
4119 // Double check that the static args descriptor expects boxed arguments
4120 // and the static args descriptor is consistent with the static arguments.
4121 ASSERT_EQUAL(static_args_descriptor.Size(), num_static_args);
4122 ASSERT_EQUAL(static_args.Length(),
4123 num_static_args + (num_static_type_args > 0 ? 1 : 0));
4124 // Add an additional slot to store the callable as the receiver.
4125 const auto& callable_args =
4126 Array::Handle(zone, Array::New(static_args.Length() + 1));
4127 const intptr_t first_arg_index = static_args_descriptor.FirstArgIndex();
4128 auto& temp = Object::Handle(zone);
4129 // Copy the static args into the corresponding slots of the callable args.
4130 if (num_static_type_args > 0) {
4131 temp = static_args.At(0);
4132 callable_args.SetAt(0, temp);
4133 }
4134 for (intptr_t i = first_arg_index; i < static_args.Length(); i++) {
4135 temp = static_args.At(i);
4136 callable_args.SetAt(i + 1, temp);
4137 }
4138 // Set the receiver slot in the callable args.
4139 callable_args.SetAt(first_arg_index, receiver);
4140 return callable_args.raw();
4141}
4142
// Return the result of invoking the callable contained in the arguments.
// Performs non-covariant type checks when the callable function does not
// expect to be called dynamically.
static ObjectPtr InvokeCallableWithChecks(Zone* zone,
                                          const Array& args,
                                          const Array& args_descriptor_array) {
  // Resolve the callable in [args] to a concrete function; resolution may
  // itself produce an error, which is returned unchanged.
  auto& result = Object::Handle(
      zone, DartEntry::ResolveCallable(args, args_descriptor_array));
  if (result.IsError()) {
    return result.raw();
  }
  const auto& function =
      Function::Handle(zone, Function::RawCast(result.raw()));
  if (!function.IsNull() && !function.CanReceiveDynamicInvocation()) {
    // Let DoArgumentTypesMatch extract the appropriate instantiator
    // and function tavs from the arguments (including the callable).
    ArgumentsDescriptor call_args_descriptor(args_descriptor_array);
    result = function.DoArgumentTypesMatch(args, call_args_descriptor);
    if (result.IsError()) {
      return result.raw();
    }
  }
  // NOTE(review): [function] may be null here; InvokeCallable is presumably
  // responsible for that case — confirm against DartEntry.
  return DartEntry::InvokeCallable(function, args, args_descriptor_array);
}
4167
// Invokes the static function [function_name] on this class with positional
// and named arguments [args] / [arg_names].
//
// If no static function of that name exists, the same-named getter is
// invoked and, if it yields a value, that value is invoked as a callable.
// Otherwise a NoSuchMethodError is thrown. Argument types are checked unless
// the target can receive dynamic invocations.
ObjectPtr Class::Invoke(const String& function_name,
                        const Array& args,
                        const Array& arg_names,
                        bool respect_reflectable,
                        bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  CHECK_ERROR(EnsureIsFinalized(thread));

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  Function& function =
      Function::Handle(zone, LookupStaticFunction(function_name));

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.raw() != Object::sentinel().raw()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      // The callable becomes the receiver, so the argument count grows by 1.
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return InvokeCallableWithChecks(zone, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(zone, RareType()), function_name, args, arg_names,
        InvocationMirror::kStatic, InvocationMirror::kMethod);
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  if (!function.CanReceiveDynamicInvocation()) {
    ObjectPtr type_error = function.DoArgumentTypesMatch(
        args, args_descriptor, Object::empty_type_arguments());
    if (type_error != Error::null()) {
      return type_error;
    }
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
4232
// Forward declaration; the definition appears later in this file. Evaluates
// an expression previously compiled to kernel in [kernel_buffer].
static ObjectPtr EvaluateCompiledExpressionHelper(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const String& library_url,
    const String& klass,
    const Array& arguments,
    const TypeArguments& type_arguments);
4240
// Evaluates an expression, previously compiled to kernel in [kernel_buffer],
// in the context of this class.
//
// Returns an UnhandledException when this class is not a regular Dart class
// (a VM-internal class id, or TypeArguments).
ObjectPtr Class::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  // Only regular Dart instances support expression evaluation.
  if (id() < kInstanceCid || id() == kTypeArgumentsCid) {
    const Instance& exception = Instance::Handle(String::New(
        "Expressions can be evaluated only with regular Dart instances"));
    const Instance& stacktrace = Instance::Handle();
    return UnhandledException::New(exception, stacktrace);
  }

  return EvaluateCompiledExpressionHelper(
      kernel_buffer, type_definitions,
      String::Handle(Library::Handle(library()).url()),
      // A top-level context passes a null class name.
      IsTopLevel() ? String::Handle() : String::Handle(UserVisibleName()),
      arguments, type_arguments);
}
4260
// Ensures that this class's declaration has been loaded. Only classes
// declared in bytecode may defer declaration loading; for any other
// not-yet-loaded class this is a fatal error.
void Class::EnsureDeclarationLoaded() const {
  if (!is_declaration_loaded()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    // In the precompiled runtime all declarations must already be loaded.
    UNREACHABLE();
#else
    // Loading of class declaration can be postponed until needed
    // if class comes from bytecode.
    if (!is_declared_in_bytecode()) {
      FATAL1("Unable to use class %s which is not loaded yet.", ToCString());
    }
    kernel::BytecodeReader::LoadClassDeclaration(*this);
    ASSERT(is_declaration_loaded());
    ASSERT(is_type_finalized());
#endif
  }
}
4277
4278// Ensure that top level parsing of the class has been done.
4279ErrorPtr Class::EnsureIsFinalized(Thread* thread) const {
4280 ASSERT(!IsNull());
4281 // Finalized classes have already been parsed.
4282 if (is_finalized()) {
4283 return Error::null();
4284 }
4285 if (Compiler::IsBackgroundCompilation()) {
4286 Compiler::AbortBackgroundCompilation(DeoptId::kNone,
4287 "Class finalization while compiling");
4288 }
4289 ASSERT(thread->IsMutatorThread());
4290 ASSERT(thread != NULL);
4291 const Error& error =
4292 Error::Handle(thread->zone(), ClassFinalizer::LoadClassMembers(*this));
4293 if (!error.IsNull()) {
4294 ASSERT(thread == Thread::Current());
4295 if (thread->long_jump_base() != NULL) {
4296 Report::LongJump(error);
4297 UNREACHABLE();
4298 }
4299 }
4300 return error.raw();
4301}
4302
4303// Ensure that code outdated by finalized class is cleaned up, new instance of
4304// this class is ready to be allocated.
4305ErrorPtr Class::EnsureIsAllocateFinalized(Thread* thread) const {
4306 ASSERT(!IsNull());
4307 // Finalized classes have already been parsed.
4308 if (is_allocate_finalized()) {
4309 return Error::null();
4310 }
4311 if (Compiler::IsBackgroundCompilation()) {
4312 Compiler::AbortBackgroundCompilation(
4313 DeoptId::kNone, "Class allocate finalization while compiling");
4314 }
4315 ASSERT(thread->IsMutatorThread());
4316 ASSERT(thread != NULL);
4317 Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread));
4318 if (!error.IsNull()) {
4319 ASSERT(thread == Thread::Current());
4320 if (thread->long_jump_base() != NULL) {
4321 Report::LongJump(error);
4322 UNREACHABLE();
4323 }
4324 }
4325 error ^= ClassFinalizer::AllocateFinalizeClass(*this);
4326 return error.raw();
4327}
4328
// Installs [value] as this class's complete fields array, replacing any
// previous one. In debug mode, verifies that every element is an original
// field owned by this class.
void Class::SetFields(const Array& value) const {
  ASSERT(!value.IsNull());
#if defined(DEBUG)
  // Verify that all the fields in the array have this class as owner.
  Field& field = Field::Handle();
  intptr_t len = value.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= value.At(i);
    ASSERT(field.IsOriginal());
    ASSERT(field.Owner() == raw());
  }
#endif
  // The value of static fields is already initialized to null.
  StorePointer(&raw_ptr()->fields_, value.raw());
}
4344
4345void Class::AddField(const Field& field) const {
4346 const Array& arr = Array::Handle(fields());
4347 const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1));
4348 new_arr.SetAt(arr.Length(), field);
4349 SetFields(new_arr);
4350}
4351
4352void Class::AddFields(const GrowableArray<const Field*>& new_fields) const {
4353 const intptr_t num_new_fields = new_fields.length();
4354 if (num_new_fields == 0) return;
4355 const Array& arr = Array::Handle(fields());
4356 const intptr_t num_old_fields = arr.Length();
4357 const Array& new_arr = Array::Handle(
4358 Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld));
4359 for (intptr_t i = 0; i < num_new_fields; i++) {
4360 new_arr.SetAt(i + num_old_fields, *new_fields.At(i));
4361 }
4362 SetFields(new_arr);
4363}
4364
// If this class is the internal library's ClassID class, injects one constant
// static field per class id (named "cid<Class>", holding the Smi cid value)
// and returns true. Returns false for any other class.
bool Class::InjectCIDFields() const {
  if (library() != Library::InternalLibrary() ||
      Name() != Symbols::ClassID().raw()) {
    return false;
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Field& field = Field::Handle(zone);
  Smi& value = Smi::Handle(zone);
  String& field_name = String::Handle(zone);

  // Table of (field name, class id) pairs, generated from the class lists.
  static const struct {
    const char* const field_name;
    const intptr_t cid;
  } cid_fields[] = {
#define CLASS_LIST_WITH_NULL(V) \
  V(Null) \
  CLASS_LIST_NO_OBJECT(V)
#define ADD_SET_FIELD(clazz) {"cid" #clazz, k##clazz##Cid},
      CLASS_LIST_WITH_NULL(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#define ADD_SET_FIELD(clazz) {"cid" #clazz "View", kTypedData##clazz##ViewCid},
      CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#define ADD_SET_FIELD(clazz) {"cid" #clazz, kTypedData##clazz##Cid},
      CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#define ADD_SET_FIELD(clazz) \
  {"cidExternal" #clazz, kExternalTypedData##clazz##Cid},
      CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
#undef ADD_SET_FIELD
#undef CLASS_LIST_WITH_NULL
  };

  // Create a const, non-reflectable static int field per table entry.
  const AbstractType& field_type = Type::Handle(zone, Type::IntType());
  for (size_t i = 0; i < ARRAY_SIZE(cid_fields); i++) {
    field_name = Symbols::New(thread, cid_fields[i].field_name);
    field = Field::New(field_name, /* is_static = */ true,
                       /* is_final = */ false,
                       /* is_const = */ true,
                       /* is_reflectable = */ false,
                       /* is_late = */ false, *this, field_type,
                       TokenPosition::kMinSource, TokenPosition::kMinSource);
    value = Smi::New(cid_fields[i].cid);
    field.SetStaticValue(value, true);
    AddField(field);
  }

  return true;
}
4416
// Allocates and initializes a new Class object with class id [index].
// Shared tail of the Class::New* factories: instance size and field offsets
// are taken from the host and target fake-instance types, and all other
// state is set to defaults.
template <class FakeInstance, class TargetFakeInstance>
ClassPtr Class::NewCommon(intptr_t index) {
  ASSERT(Object::class_class() != Class::null());
  Class& result = Class::Handle();
  {
    // No safepoint may occur between the raw allocation and storing the
    // result into a handle.
    ObjectPtr raw =
        Object::Allocate(Class::kClassId, Class::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  // Here kIllegalCid means not-yet-assigned.
  Object::VerifyBuiltinVtable<FakeInstance>(index == kIllegalCid ? kInstanceCid
                                                                 : index);
  result.set_token_pos(TokenPosition::kNoSource);
  result.set_end_token_pos(TokenPosition::kNoSource);
  // Host (running VM) and target (cross-compilation) sizes are tracked
  // separately.
  const intptr_t host_instance_size = FakeInstance::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      TargetFakeInstance::InstanceSize());
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
                                                  RTN::Class::kNoTypeArguments);
  const intptr_t host_next_field_offset = FakeInstance::NextFieldOffset();
  const intptr_t target_next_field_offset =
      TargetFakeInstance::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_id(index);
  result.set_num_type_arguments(kUnknownNumTypeArguments);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  NOT_IN_PRECOMPILED(result.set_is_declared_in_bytecode(false));
  NOT_IN_PRECOMPILED(result.set_binary_declaration_offset(0));
  result.InitEmptyFields();
  return result.raw();
}
4452
// Creates a new class with class id [index], optionally marking it abstract
// and registering it in [isolate]'s class table.
template <class FakeInstance, class TargetFakeInstance>
ClassPtr Class::New(intptr_t index,
                    Isolate* isolate,
                    bool register_class,
                    bool is_abstract) {
  Class& result =
      Class::Handle(NewCommon<FakeInstance, TargetFakeInstance>(index));
  if (is_abstract) {
    result.set_is_abstract();
  }
  if (register_class) {
    isolate->class_table()->Register(result);
  }
  return result.raw();
}
4468
// Creates a new, unfinalized user-visible class named [name] in [lib],
// declared in [script] at [token_pos]. The class id is not yet assigned
// (kIllegalCid) and the instance size is left at 0 until finalization.
ClassPtr Class::New(const Library& lib,
                    const String& name,
                    const Script& script,
                    TokenPosition token_pos,
                    bool register_class) {
  Class& result =
      Class::Handle(NewCommon<Instance, RTN::Instance>(kIllegalCid));
  result.set_library(lib);
  result.set_name(name);
  result.set_script(script);
  result.set_token_pos(token_pos);

  // The size gets initialized to 0. Once the class gets finalized the class
  // finalizer will set the correct size.
  ASSERT(!result.is_finalized() && !result.is_prefinalized());
  result.set_instance_size_in_words(0, 0);

  if (register_class) {
    Isolate::Current()->RegisterClass(result);
  }
  return result.raw();
}
4491
// Creates a new instance class whose class id is not yet assigned.
ClassPtr Class::NewInstanceClass() {
  return Class::New<Instance, RTN::Instance>(kIllegalCid, Isolate::Current());
}
4495
// Creates, fully finalizes, and registers a native wrapper class named
// [name] in [library], carrying [field_count] native fields. Returns
// Class::null() when a class of that name already exists in the library.
ClassPtr Class::NewNativeWrapper(const Library& library,
                                 const String& name,
                                 int field_count) {
  Class& cls = Class::Handle(library.LookupClass(name));
  if (cls.IsNull()) {
    cls = New(library, name, Script::Handle(), TokenPosition::kNoSource);
    cls.SetFields(Object::empty_array());
    cls.SetFunctions(Object::empty_array());
    // Set super class to Object.
    cls.set_super_type(Type::Handle(Type::ObjectType()));
    // Compute instance size. First word contains a pointer to a properly
    // sized typed array once the first native field has been set.
    const intptr_t host_instance_size = sizeof(InstanceLayout) + kWordSize;
#if defined(DART_PRECOMPILER)
    const intptr_t target_instance_size =
        compiler::target::Instance::InstanceSize() +
        compiler::target::kWordSize;
#else
    const intptr_t target_instance_size =
        sizeof(InstanceLayout) + compiler::target::kWordSize;
#endif
    cls.set_instance_size(
        RoundedAllocationSize(host_instance_size),
        compiler::target::RoundedAllocationSize(target_instance_size));
    cls.set_next_field_offset(host_instance_size, target_instance_size);
    cls.set_num_native_fields(field_count);
    // Native wrapper classes skip normal finalization entirely.
    cls.set_is_allocate_finalized();
    cls.set_is_declaration_loaded();
    cls.set_is_type_finalized();
    cls.set_is_synthesized_class();
    library.AddClass(cls);
    return cls.raw();
  } else {
    return Class::null();
  }
}
4532
4533ClassPtr Class::NewStringClass(intptr_t class_id, Isolate* isolate) {
4534 intptr_t host_instance_size, target_instance_size;
4535 if (class_id == kOneByteStringCid) {
4536 host_instance_size = OneByteString::InstanceSize();
4537 target_instance_size = compiler::target::RoundedAllocationSize(
4538 RTN::OneByteString::InstanceSize());
4539 } else if (class_id == kTwoByteStringCid) {
4540 host_instance_size = TwoByteString::InstanceSize();
4541 target_instance_size = compiler::target::RoundedAllocationSize(
4542 RTN::TwoByteString::InstanceSize());
4543 } else if (class_id == kExternalOneByteStringCid) {
4544 host_instance_size = ExternalOneByteString::InstanceSize();
4545 target_instance_size = compiler::target::RoundedAllocationSize(
4546 RTN::ExternalOneByteString::InstanceSize());
4547 } else {
4548 ASSERT(class_id == kExternalTwoByteStringCid);
4549 host_instance_size = ExternalTwoByteString::InstanceSize();
4550 target_instance_size = compiler::target::RoundedAllocationSize(
4551 RTN::ExternalTwoByteString::InstanceSize());
4552 }
4553 Class& result = Class::Handle(
4554 New<String, RTN::String>(class_id, isolate, /*register_class=*/false));
4555 result.set_instance_size(host_instance_size, target_instance_size);
4556
4557 const intptr_t host_next_field_offset = String::NextFieldOffset();
4558 const intptr_t target_next_field_offset = RTN::String::NextFieldOffset();
4559 result.set_next_field_offset(host_next_field_offset,
4560 target_next_field_offset);
4561 result.set_is_prefinalized();
4562 isolate->class_table()->Register(result);
4563 return result.raw();
4564}
4565
4566ClassPtr Class::NewTypedDataClass(intptr_t class_id, Isolate* isolate) {
4567 ASSERT(IsTypedDataClassId(class_id));
4568 const intptr_t host_instance_size = TypedData::InstanceSize();
4569 const intptr_t target_instance_size =
4570 compiler::target::RoundedAllocationSize(RTN::TypedData::InstanceSize());
4571 Class& result = Class::Handle(New<TypedData, RTN::TypedData>(
4572 class_id, isolate, /*register_class=*/false));
4573 result.set_instance_size(host_instance_size, target_instance_size);
4574
4575 const intptr_t host_next_field_offset = TypedData::NextFieldOffset();
4576 const intptr_t target_next_field_offset = RTN::TypedData::NextFieldOffset();
4577 result.set_next_field_offset(host_next_field_offset,
4578 target_next_field_offset);
4579 result.set_is_prefinalized();
4580 isolate->class_table()->Register(result);
4581 return result.raw();
4582}
4583
// Creates and registers the class for the typed-data view with [class_id].
ClassPtr Class::NewTypedDataViewClass(intptr_t class_id, Isolate* isolate) {
  ASSERT(IsTypedDataViewClassId(class_id));
  const intptr_t host_instance_size = TypedDataView::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::TypedDataView::InstanceSize());
  Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
      class_id, isolate, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);

  const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::TypedDataView::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate->class_table()->Register(result);
  return result.raw();
}
4602
// Creates and registers the class for the external typed-data representation
// with [class_id].
ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id, Isolate* isolate) {
  ASSERT(IsExternalTypedDataClassId(class_id));
  const intptr_t host_instance_size = ExternalTypedData::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::ExternalTypedData::InstanceSize());
  Class& result = Class::Handle(New<ExternalTypedData, RTN::ExternalTypedData>(
      class_id, isolate, /*register_class=*/false));

  const intptr_t host_next_field_offset = ExternalTypedData::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::ExternalTypedData::NextFieldOffset();
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate->class_table()->Register(result);
  return result.raw();
}
4621
// Creates and registers the class for the ffi Pointer representation with
// [class_id]. Unlike the other typed classes above, Pointer carries a
// type-arguments field, whose offset is recorded here.
ClassPtr Class::NewPointerClass(intptr_t class_id, Isolate* isolate) {
  ASSERT(IsFfiPointerClassId(class_id));
  intptr_t host_instance_size = Pointer::InstanceSize();
  intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(RTN::Pointer::InstanceSize());
  Class& result = Class::Handle(
      New<Pointer, RTN::Pointer>(class_id, isolate, /*register_class=*/false));
  result.set_instance_size(host_instance_size, target_instance_size);
  result.set_type_arguments_field_offset(Pointer::type_arguments_offset(),
                                         RTN::Pointer::type_arguments_offset());

  const intptr_t host_next_field_offset = Pointer::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::Pointer::NextFieldOffset();

  result.set_next_field_offset(host_next_field_offset,
                               target_next_field_offset);
  result.set_is_prefinalized();
  isolate->class_table()->Register(result);
  return result.raw();
}
4642
// Sets this class's name. May only be set once, and the name must be a
// symbol. Outside PRODUCT builds, also eagerly generates the user-visible
// name when not already present.
void Class::set_name(const String& value) const {
  ASSERT(raw_ptr()->name_ == String::null());
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
#if !defined(PRODUCT)
  if (raw_ptr()->user_name_ == String::null()) {
    // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
    // lazily set user name for the other classes.
    // Generate and set user_name.
    const String& user_name = String::Handle(
        Symbols::New(Thread::Current(), GenerateUserVisibleName()));
    set_user_name(user_name);
  }
#endif  // !defined(PRODUCT)
}
4658
#if !defined(PRODUCT)
// Stores the cached user-visible name of this class.
void Class::set_user_name(const String& value) const {
  StorePointer(&raw_ptr()->user_name_, value.raw());
}
#endif  // !defined(PRODUCT)
4664
// Returns this class's name as presented to users: implementation classes
// (typed data, strings, numeric classes, VM-internal classes) are mapped to
// their public names. With --show_internal_names the raw name is returned
// unchanged.
const char* Class::GenerateUserVisibleName() const {
  if (FLAG_show_internal_names) {
    return String::Handle(Name()).ToCString();
  }
  switch (id()) {
    case kFloat32x4Cid:
      return Symbols::Float32x4().ToCString();
    case kInt32x4Cid:
      return Symbols::Int32x4().ToCString();
    // Internal and external typed-data implementation classes share their
    // public dart:typed_data list name.
    case kTypedDataInt8ArrayCid:
    case kExternalTypedDataInt8ArrayCid:
      return Symbols::Int8List().ToCString();
    case kTypedDataUint8ArrayCid:
    case kExternalTypedDataUint8ArrayCid:
      return Symbols::Uint8List().ToCString();
    case kTypedDataUint8ClampedArrayCid:
    case kExternalTypedDataUint8ClampedArrayCid:
      return Symbols::Uint8ClampedList().ToCString();
    case kTypedDataInt16ArrayCid:
    case kExternalTypedDataInt16ArrayCid:
      return Symbols::Int16List().ToCString();
    case kTypedDataUint16ArrayCid:
    case kExternalTypedDataUint16ArrayCid:
      return Symbols::Uint16List().ToCString();
    case kTypedDataInt32ArrayCid:
    case kExternalTypedDataInt32ArrayCid:
      return Symbols::Int32List().ToCString();
    case kTypedDataUint32ArrayCid:
    case kExternalTypedDataUint32ArrayCid:
      return Symbols::Uint32List().ToCString();
    case kTypedDataInt64ArrayCid:
    case kExternalTypedDataInt64ArrayCid:
      return Symbols::Int64List().ToCString();
    case kTypedDataUint64ArrayCid:
    case kExternalTypedDataUint64ArrayCid:
      return Symbols::Uint64List().ToCString();
    case kTypedDataInt32x4ArrayCid:
    case kExternalTypedDataInt32x4ArrayCid:
      return Symbols::Int32x4List().ToCString();
    case kTypedDataFloat32x4ArrayCid:
    case kExternalTypedDataFloat32x4ArrayCid:
      return Symbols::Float32x4List().ToCString();
    case kTypedDataFloat64x2ArrayCid:
    case kExternalTypedDataFloat64x2ArrayCid:
      return Symbols::Float64x2List().ToCString();
    case kTypedDataFloat32ArrayCid:
    case kExternalTypedDataFloat32ArrayCid:
      return Symbols::Float32List().ToCString();
    case kTypedDataFloat64ArrayCid:
    case kExternalTypedDataFloat64ArrayCid:
      return Symbols::Float64List().ToCString();

    case kFfiPointerCid:
      return Symbols::FfiPointer().ToCString();
    case kFfiDynamicLibraryCid:
      return Symbols::FfiDynamicLibrary().ToCString();

    // VM-internal class names are only exposed outside PRODUCT builds.
#if !defined(PRODUCT)
    case kNullCid:
      return Symbols::Null().ToCString();
    case kDynamicCid:
      return Symbols::Dynamic().ToCString();
    case kVoidCid:
      return Symbols::Void().ToCString();
    case kNeverCid:
      return Symbols::Never().ToCString();
    case kClassCid:
      return Symbols::Class().ToCString();
    case kTypeArgumentsCid:
      return Symbols::TypeArguments().ToCString();
    case kPatchClassCid:
      return Symbols::PatchClass().ToCString();
    case kFunctionCid:
      return Symbols::Function().ToCString();
    case kClosureDataCid:
      return Symbols::ClosureData().ToCString();
    case kSignatureDataCid:
      return Symbols::SignatureData().ToCString();
    case kRedirectionDataCid:
      return Symbols::RedirectionData().ToCString();
    case kFfiTrampolineDataCid:
      return Symbols::FfiTrampolineData().ToCString();
    case kFieldCid:
      return Symbols::Field().ToCString();
    case kScriptCid:
      return Symbols::Script().ToCString();
    case kLibraryCid:
      return Symbols::Library().ToCString();
    case kLibraryPrefixCid:
      return Symbols::LibraryPrefix().ToCString();
    case kNamespaceCid:
      return Symbols::Namespace().ToCString();
    case kKernelProgramInfoCid:
      return Symbols::KernelProgramInfo().ToCString();
    case kCodeCid:
      return Symbols::Code().ToCString();
    case kBytecodeCid:
      return Symbols::Bytecode().ToCString();
    case kInstructionsCid:
      return Symbols::Instructions().ToCString();
    case kInstructionsSectionCid:
      return Symbols::InstructionsSection().ToCString();
    case kObjectPoolCid:
      return Symbols::ObjectPool().ToCString();
    case kCodeSourceMapCid:
      return Symbols::CodeSourceMap().ToCString();
    case kPcDescriptorsCid:
      return Symbols::PcDescriptors().ToCString();
    case kCompressedStackMapsCid:
      return Symbols::CompressedStackMaps().ToCString();
    case kLocalVarDescriptorsCid:
      return Symbols::LocalVarDescriptors().ToCString();
    case kExceptionHandlersCid:
      return Symbols::ExceptionHandlers().ToCString();
    case kContextCid:
      return Symbols::Context().ToCString();
    case kContextScopeCid:
      return Symbols::ContextScope().ToCString();
    case kParameterTypeCheckCid:
      return Symbols::ParameterTypeCheck().ToCString();
    case kSingleTargetCacheCid:
      return Symbols::SingleTargetCache().ToCString();
    case kICDataCid:
      return Symbols::ICData().ToCString();
    case kMegamorphicCacheCid:
      return Symbols::MegamorphicCache().ToCString();
    case kSubtypeTestCacheCid:
      return Symbols::SubtypeTestCache().ToCString();
    case kLoadingUnitCid:
      return Symbols::LoadingUnit().ToCString();
    case kApiErrorCid:
      return Symbols::ApiError().ToCString();
    case kLanguageErrorCid:
      return Symbols::LanguageError().ToCString();
    case kUnhandledExceptionCid:
      return Symbols::UnhandledException().ToCString();
    case kUnwindErrorCid:
      return Symbols::UnwindError().ToCString();
    // All integer implementation classes are presented as "int".
    case kIntegerCid:
    case kSmiCid:
    case kMintCid:
      return Symbols::Int().ToCString();
    case kDoubleCid:
      return Symbols::Double().ToCString();
    // All string implementation classes are presented as "String".
    case kOneByteStringCid:
    case kTwoByteStringCid:
    case kExternalOneByteStringCid:
    case kExternalTwoByteStringCid:
      return Symbols::_String().ToCString();
    // All list implementation classes are presented as "List".
    case kArrayCid:
    case kImmutableArrayCid:
    case kGrowableObjectArrayCid:
      return Symbols::List().ToCString();
#endif  // !defined(PRODUCT)
  }
  // Default: fall back to the scrubbed class name.
  String& name = String::Handle(Name());
  name = Symbols::New(Thread::Current(), String::ScrubName(name));
  // The async library's Future implementation class is presented as Future.
  if (name.raw() == Symbols::FutureImpl().raw() &&
      library() == Library::AsyncLibrary()) {
    return Symbols::Future().ToCString();
  }
  return name.ToCString();
}
4828
// Sets the script this class was declared in.
void Class::set_script(const Script& value) const {
  StorePointer(&raw_ptr()->script_, value.raw());
}
4832
// Sets the source position where this class's declaration starts.
// Classifying (synthetic) positions are not allowed.
void Class::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
4837
// Sets the source position where this class's declaration ends.
// Classifying (synthetic) positions are not allowed.
void Class::set_end_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->end_token_pos_, token_pos);
}
4842
// Returns a fingerprint of this class's kernel source, or 0 when no
// fingerprint is available (bytecode classes and the precompiled runtime).
int32_t Class::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (is_declared_in_bytecode()) {
    return 0;  // TODO(37353): Implement or remove.
  }
  return kernel::KernelSourceFingerprintHelper::CalculateClassFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
4854
// Sets the "implemented" state bit.
void Class::set_is_implemented() const {
  set_state_bits(ImplementedBit::update(true, raw_ptr()->state_bits_));
}
4858
// Sets the "abstract" state bit.
void Class::set_is_abstract() const {
  set_state_bits(AbstractBit::update(true, raw_ptr()->state_bits_));
}
4862
// Advances the class-loading state to "declaration loaded"; may only be
// called once.
void Class::set_is_declaration_loaded() const {
  ASSERT(!is_declaration_loaded());
  set_state_bits(ClassLoadingBits::update(ClassLayout::kDeclarationLoaded,
                                          raw_ptr()->state_bits_));
}
4868
// Advances the class-loading state to "type finalized"; requires the
// declaration to be loaded first, and may only be called once.
void Class::set_is_type_finalized() const {
  ASSERT(is_declaration_loaded());
  ASSERT(!is_type_finalized());
  set_state_bits(ClassLoadingBits::update(ClassLayout::kTypeFinalized,
                                          raw_ptr()->state_bits_));
}
4875
// Sets the "synthesized class" state bit.
void Class::set_is_synthesized_class() const {
  set_state_bits(SynthesizedClassBit::update(true, raw_ptr()->state_bits_));
}
4879
// Marks this class as an enum declaration.
void Class::set_is_enum_class() const {
  set_state_bits(EnumBit::update(true, raw_ptr()->state_bits_));
}
4883
// Marks this class as having at least one const constructor.
void Class::set_is_const() const {
  set_state_bits(ConstBit::update(true, raw_ptr()->state_bits_));
}
4887
// Marks this class as the result of a mixin-application transformation.
void Class::set_is_transformed_mixin_application() const {
  set_state_bits(
      TransformedMixinApplicationBit::update(true, raw_ptr()->state_bits_));
}
4892
// Marks all fields of this class as nullable.
void Class::set_is_fields_marked_nullable() const {
  set_state_bits(FieldsMarkedNullableBit::update(true, raw_ptr()->state_bits_));
}
4896
// Records whether an instance of this class has been allocated.
void Class::set_is_allocated(bool value) const {
  set_state_bits(IsAllocatedBit::update(value, raw_ptr()->state_bits_));
}
4900
// Records whether this class has been fully loaded.
void Class::set_is_loaded(bool value) const {
  set_state_bits(IsLoadedBit::update(value, raw_ptr()->state_bits_));
}
4904
// Advances the finalization state to 'finalized'. May only happen once.
void Class::set_is_finalized() const {
  ASSERT(!is_finalized());
  set_state_bits(ClassFinalizedBits::update(ClassLayout::kFinalized,
                                            raw_ptr()->state_bits_));
}
4910
// Advances the finalization state to 'allocate finalized' (instance layout
// fixed, instances may be allocated). May only happen once.
void Class::set_is_allocate_finalized() const {
  ASSERT(!is_allocate_finalized());
  set_state_bits(ClassFinalizedBits::update(ClassLayout::kAllocateFinalized,
                                            raw_ptr()->state_bits_));
}
4916
// Marks this class as prefinalized (layout known before class finalization,
// used for VM-internal classes). Must not already be finalized.
void Class::set_is_prefinalized() const {
  ASSERT(!is_finalized());
  set_state_bits(ClassFinalizedBits::update(ClassLayout::kPreFinalized,
                                            raw_ptr()->state_bits_));
}
4922
// Sets the array of interface types this class implements. A null array is
// not allowed; use an empty array for 'no interfaces'.
void Class::set_interfaces(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->interfaces_, value.raw());
}
4927
// Registers 'implementor' as a direct implementor of this class, lazily
// allocating the backing list (in old space) on first use. May only be
// called after the class has been marked as implemented.
void Class::AddDirectImplementor(const Class& implementor,
                                 bool is_mixin) const {
  ASSERT(is_implemented());
  ASSERT(!implementor.IsNull());
  GrowableObjectArray& direct_implementors =
      GrowableObjectArray::Handle(raw_ptr()->direct_implementors_);
  if (direct_implementors.IsNull()) {
    // First implementor: allocate the list and cache it on the class.
    direct_implementors = GrowableObjectArray::New(4, Heap::kOld);
    StorePointer(&raw_ptr()->direct_implementors_, direct_implementors.raw());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  // The only exception is mixins: when mixin application is transformed,
  // mixin is added to the end of interfaces list and may be duplicated:
  // class X = A with B implements B;
  // This is rare and harmless.
  if (!is_mixin) {
    for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
      ASSERT(direct_implementors.At(i) != implementor.raw());
    }
  }
#endif
  direct_implementors.Add(implementor, Heap::kOld);
}
4952
// Drops the list of direct implementors (e.g. before recomputing it).
void Class::ClearDirectImplementors() const {
  StorePointer(&raw_ptr()->direct_implementors_, GrowableObjectArray::null());
}
4956
// Registers 'subclass' as a direct subclass of this class, lazily allocating
// the backing list (in old space) on first use. Object's subclasses are
// deliberately not tracked.
void Class::AddDirectSubclass(const Class& subclass) const {
  ASSERT(!subclass.IsNull());
  ASSERT(subclass.SuperClass() == raw());
  // Do not keep track of the direct subclasses of class Object.
  ASSERT(!IsObjectClass());
  GrowableObjectArray& direct_subclasses =
      GrowableObjectArray::Handle(raw_ptr()->direct_subclasses_);
  if (direct_subclasses.IsNull()) {
    // First subclass: allocate the list and cache it on the class.
    direct_subclasses = GrowableObjectArray::New(4, Heap::kOld);
    StorePointer(&raw_ptr()->direct_subclasses_, direct_subclasses.raw());
  }
#if defined(DEBUG)
  // Verify that the same class is not added twice.
  for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
    ASSERT(direct_subclasses.At(i) != subclass.raw());
  }
#endif
  direct_subclasses.Add(subclass, Heap::kOld);
}
4976
// Drops the list of direct subclasses (e.g. before recomputing it).
void Class::ClearDirectSubclasses() const {
  StorePointer(&raw_ptr()->direct_subclasses_, GrowableObjectArray::null());
}
4980
// Returns the backing array of this class' canonical constants table.
ArrayPtr Class::constants() const {
  return raw_ptr()->constants_;
}
4984
// Replaces the backing array of this class' canonical constants table.
void Class::set_constants(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->constants_, value.raw());
}
4989
// Caches the canonical declaration type of this class. Only set once, during
// the class' own finalization; dynamic and void use preallocated types.
void Class::set_declaration_type(const Type& value) const {
  ASSERT(id() != kDynamicCid && id() != kVoidCid);
  ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
  ASSERT((declaration_type() == Object::null()) ||
         (declaration_type() == value.raw()));  // Set during own finalization.
  // Since DeclarationType is used as the runtime type of instances of a
  // non-generic class, its nullability must be kNonNullable.
  // The exception is DeclarationType of Null which is kNullable.
  ASSERT(value.type_class_id() != kNullCid || value.IsNullable());
  ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable());
  StorePointer(&raw_ptr()->declaration_type_, value.raw());
}
5002
// Returns the canonical type representing this class parameterized with its
// own type parameters (e.g. 'List<E>'), computing and caching it on first
// request. Null, dynamic and void map to their preallocated types.
TypePtr Class::DeclarationType() const {
  ASSERT(is_declaration_loaded());
  if (IsNullClass()) {
    return Type::NullType();
  }
  if (IsDynamicClass()) {
    return Type::DynamicType();
  }
  if (IsVoidClass()) {
    return Type::VoidType();
  }
  // Fast path: already computed and cached.
  if (declaration_type() != Type::null()) {
    return declaration_type();
  }
  // For efficiency, the runtimeType intrinsic returns the type cached by
  // DeclarationType without checking its nullability. Therefore, we
  // consistently cache the kNonNullable version of the type.
  // The exception is type Null which is stored as kNullable.
  Type& type =
      Type::Handle(Type::New(*this, TypeArguments::Handle(type_parameters()),
                             token_pos(), Nullability::kNonNullable));
  type ^= ClassFinalizer::FinalizeType(*this, type);
  set_declaration_type(type);
  return type.raw();
}
5028
// Installs the allocation stub for this class. A stub may only be installed
// when none is present; it is detached via DisableAllocationStub().
void Class::set_allocation_stub(const Code& value) const {
  // Never clear the stub as it may still be a target, but will be GC-d if
  // not referenced.
  ASSERT(!value.IsNull());
  ASSERT(raw_ptr()->allocation_stub_ == Code::null());
  StorePointer(&raw_ptr()->allocation_stub_, value.raw());
}
5036
// Disables and detaches this class' allocation stub, forcing the next
// allocation site to regenerate it. No-op when no stub is installed.
void Class::DisableAllocationStub() const {
  const Code& existing_stub = Code::Handle(allocation_stub());
  if (existing_stub.IsNull()) {
    return;
  }
  ASSERT(!existing_stub.IsDisabled());
  // Change the stub so that the next caller will regenerate the stub.
  existing_stub.DisableStubCode();
  // Disassociate the existing stub from class.
  StorePointer(&raw_ptr()->allocation_stub_, Code::null());
}
5048
// Returns true if this is the class of the Dart 'Function' type.
bool Class::IsDartFunctionClass() const {
  return raw() == Type::Handle(Type::DartFunctionType()).type_class();
}
5052
// Returns true if this is dart:async's Future class, identified by name and
// library rather than via the object store (see comment below).
bool Class::IsFutureClass() const {
  // Looking up future_class in the object store would not work, because
  // this function is called during class finalization, before the object store
  // field would be initialized by InitKnownObjects().
  return (Name() == Symbols::Future().raw()) &&
         (library() == Library::AsyncLibrary());
}
5060
5061// Checks if type T0 is a subtype of type T1.
5062// Type T0 is specified by class 'cls' parameterized with 'type_arguments' and
5063// by 'nullability', and type T1 is specified by 'other' and must have a type
5064// class.
5065bool Class::IsSubtypeOf(const Class& cls,
5066 const TypeArguments& type_arguments,
5067 Nullability nullability,
5068 const AbstractType& other,
5069 Heap::Space space,
5070 TrailPtr trail) {
5071 // This function does not support Null, Never, dynamic, or void as type T0.
5072 classid_t this_cid = cls.id();
5073 ASSERT(this_cid != kNullCid && this_cid != kNeverCid &&
5074 this_cid != kDynamicCid && this_cid != kVoidCid);
5075 // Type T1 must have a type class (e.g. not a type parameter).
5076 ASSERT(other.HasTypeClass());
5077 const classid_t other_cid = other.type_class_id();
5078 if (other_cid == kDynamicCid || other_cid == kVoidCid) {
5079 return true;
5080 }
5081 Thread* thread = Thread::Current();
5082 Zone* zone = thread->zone();
5083 Isolate* isolate = thread->isolate();
5084 // Nullability of left and right hand sides is verified in strong mode only.
5085 const bool verified_nullability = !isolate->null_safety() ||
5086 nullability != Nullability::kNullable ||
5087 !other.IsNonNullable();
5088
5089 // Right Object.
5090 if (other_cid == kObjectCid) {
5091 return verified_nullability;
5092 }
5093 const Class& other_class = Class::Handle(zone, other.type_class());
5094 const TypeArguments& other_type_arguments =
5095 TypeArguments::Handle(zone, other.arguments());
5096 // Use the 'this_class' object as if it was the receiver of this method, but
5097 // instead of recursing, reset it to the super class and loop.
5098 Class& this_class = Class::Handle(zone, cls.raw());
5099 while (true) {
5100 // Apply additional subtyping rules if T0 or T1 are 'FutureOr'.
5101
5102 // Left FutureOr:
5103 // if T0 is FutureOr<S0> then:
5104 // T0 <: T1 iff Future<S0> <: T1 and S0 <: T1
5105 if (this_cid == kFutureOrCid) {
5106 // Check Future<S0> <: T1.
5107 ObjectStore* object_store = Isolate::Current()->object_store();
5108 const Class& future_class =
5109 Class::Handle(zone, object_store->future_class());
5110 ASSERT(!future_class.IsNull() && future_class.NumTypeParameters() == 1 &&
5111 this_class.NumTypeParameters() == 1);
5112 ASSERT(type_arguments.IsNull() || type_arguments.Length() >= 1);
5113 if (Class::IsSubtypeOf(future_class, type_arguments,
5114 Nullability::kNonNullable, other, space, trail)) {
5115 // Check S0 <: T1.
5116 const AbstractType& type_arg =
5117 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
5118 if (type_arg.IsSubtypeOf(other, space, trail)) {
5119 return verified_nullability;
5120 }
5121 }
5122 }
5123
5124 // Right FutureOr:
5125 // if T1 is FutureOr<S1> then:
5126 // T0 <: T1 iff any of the following hold:
5127 // either T0 <: Future<S1>
5128 // or T0 <: S1
5129 // or T0 is X0 and X0 has bound S0 and S0 <: T1 (checked elsewhere)
5130 if (other_cid == kFutureOrCid) {
5131 const AbstractType& other_type_arg =
5132 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
5133 // Check if S1 is a top type.
5134 if (other_type_arg.IsTopTypeForSubtyping()) {
5135 return true;
5136 }
5137 // Check T0 <: Future<S1> when T0 is Future<S0>.
5138 if (this_class.IsFutureClass()) {
5139 const AbstractType& type_arg =
5140 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
5141 // If T0 is Future<S0>, then T0 <: Future<S1>, iff S0 <: S1.
5142 if (type_arg.IsSubtypeOf(other_type_arg, space, trail)) {
5143 if (verified_nullability) {
5144 return true;
5145 }
5146 }
5147 }
5148 // Check T0 <: Future<S1> when T0 is FutureOr<S0> is already done.
5149 // Check T0 <: S1.
5150 if (other_type_arg.HasTypeClass() &&
5151 Class::IsSubtypeOf(this_class, type_arguments, nullability,
5152 other_type_arg, space, trail)) {
5153 return true;
5154 }
5155 }
5156
5157 // Left nullable:
5158 // if T0 is S0? then:
5159 // T0 <: T1 iff S0 <: T1 and Null <: T1
5160 if (!verified_nullability) {
5161 return false;
5162 }
5163
5164 // Check for reflexivity.
5165 if (this_class.raw() == other_class.raw()) {
5166 const intptr_t num_type_params = this_class.NumTypeParameters();
5167 if (num_type_params == 0) {
5168 return true;
5169 }
5170 const intptr_t num_type_args = this_class.NumTypeArguments();
5171 const intptr_t from_index = num_type_args - num_type_params;
5172 // Since we do not truncate the type argument vector of a subclass (see
5173 // below), we only check a subvector of the proper length.
5174 // Check for covariance.
5175 if (other_type_arguments.IsNull()) {
5176 return true;
5177 }
5178 AbstractType& type = AbstractType::Handle(zone);
5179 AbstractType& other_type = AbstractType::Handle(zone);
5180 for (intptr_t i = 0; i < num_type_params; ++i) {
5181 type = type_arguments.TypeAtNullSafe(from_index + i);
5182 other_type = other_type_arguments.TypeAt(from_index + i);
5183 ASSERT(!type.IsNull() && !other_type.IsNull());
5184 if (!type.IsSubtypeOf(other_type, space, trail)) {
5185 return false;
5186 }
5187 }
5188 return true;
5189 }
5190 // Check for 'direct super type' specified in the implements clause
5191 // and check for transitivity at the same time.
5192 Array& interfaces = Array::Handle(zone, this_class.interfaces());
5193 AbstractType& interface = AbstractType::Handle(zone);
5194 Class& interface_class = Class::Handle(zone);
5195 TypeArguments& interface_args = TypeArguments::Handle(zone);
5196 for (intptr_t i = 0; i < interfaces.Length(); i++) {
5197 interface ^= interfaces.At(i);
5198 ASSERT(interface.IsFinalized());
5199 interface_class = interface.type_class();
5200 interface_args = interface.arguments();
5201 if (!interface_args.IsNull() && !interface_args.IsInstantiated()) {
5202 // This type class implements an interface that is parameterized with
5203 // generic type(s), e.g. it implements List<T>.
5204 // The uninstantiated type T must be instantiated using the type
5205 // parameters of this type before performing the type test.
5206 // The type arguments of this type that are referred to by the type
5207 // parameters of the interface are at the end of the type vector,
5208 // after the type arguments of the super type of this type.
5209 // The index of the type parameters is adjusted upon finalization.
5210 interface_args = interface_args.InstantiateFrom(
5211 type_arguments, Object::null_type_arguments(), kNoneFree, space);
5212 }
5213 // In Dart 2, implementing Function has no meaning.
5214 // TODO(regis): Can we encounter and skip Object as well?
5215 if (interface_class.IsDartFunctionClass()) {
5216 continue;
5217 }
5218 // No need to pass the trail as cycles are not possible via interfaces.
5219 if (Class::IsSubtypeOf(interface_class, interface_args,
5220 Nullability::kNonNullable, other, space)) {
5221 return true;
5222 }
5223 }
5224 // "Recurse" up the class hierarchy until we have reached the top.
5225 this_class = this_class.SuperClass();
5226 if (this_class.IsNull()) {
5227 return false;
5228 }
5229 this_cid = this_class.id();
5230 }
5231 UNREACHABLE();
5232 return false;
5233}
5234
// Returns true if this is the synthetic top-level class of a library.
bool Class::IsTopLevel() const {
  return Name() == Symbols::TopLevel().raw();
}
5238
// Returns true if the class name is library-private (starts with '_').
bool Class::IsPrivate() const {
  return Library::IsPrivate(String::Handle(Name()));
}
5242
// Looks up a concrete instance method by name.
FunctionPtr Class::LookupDynamicFunction(const String& name) const {
  return LookupFunction(name, kInstance);
}
5246
// Looks up an instance method by name, including abstract methods.
FunctionPtr Class::LookupDynamicFunctionAllowAbstract(
    const String& name) const {
  return LookupFunction(name, kInstanceAllowAbstract);
}
5251
// Looks up an instance method by name, ignoring private name mangling.
FunctionPtr Class::LookupDynamicFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kInstance);
}
5255
// Looks up a static function by name.
FunctionPtr Class::LookupStaticFunction(const String& name) const {
  return LookupFunction(name, kStatic);
}
5259
// Looks up a static function by name, ignoring private name mangling.
FunctionPtr Class::LookupStaticFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kStatic);
}
5263
// Looks up a generative constructor by name.
FunctionPtr Class::LookupConstructor(const String& name) const {
  return LookupFunction(name, kConstructor);
}
5267
// Looks up a generative constructor by name, ignoring private name mangling.
FunctionPtr Class::LookupConstructorAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kConstructor);
}
5271
// Looks up a factory constructor by name.
FunctionPtr Class::LookupFactory(const String& name) const {
  return LookupFunction(name, kFactory);
}
5275
// Looks up a factory constructor by name, ignoring private name mangling.
FunctionPtr Class::LookupFactoryAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kFactory);
}
5279
// Looks up any kind of function by name.
FunctionPtr Class::LookupFunction(const String& name) const {
  return LookupFunction(name, kAny);
}
5283
// Looks up any kind of function by name, ignoring private name mangling.
FunctionPtr Class::LookupFunctionAllowPrivate(const String& name) const {
  return LookupFunctionAllowPrivate(name, kAny);
}
5287
5288// Returns true if 'prefix' and 'accessor_name' match 'name'.
5289static bool MatchesAccessorName(const String& name,
5290 const char* prefix,
5291 intptr_t prefix_length,
5292 const String& accessor_name) {
5293 intptr_t name_len = name.Length();
5294 intptr_t accessor_name_len = accessor_name.Length();
5295
5296 if (name_len != (accessor_name_len + prefix_length)) {
5297 return false;
5298 }
5299 for (intptr_t i = 0; i < prefix_length; i++) {
5300 if (name.CharAt(i) != prefix[i]) {
5301 return false;
5302 }
5303 }
5304 for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) {
5305 if (name.CharAt(j) != accessor_name.CharAt(i)) {
5306 return false;
5307 }
5308 }
5309 return true;
5310}
5311
5312FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) {
5313 if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) {
5314 if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
5315 return func.raw();
5316 }
5317 } else if (kind == kStatic) {
5318 if (func.IsStaticFunction()) {
5319 return func.raw();
5320 }
5321 } else if (kind == kConstructor) {
5322 if (func.IsGenerativeConstructor()) {
5323 ASSERT(!func.is_static());
5324 return func.raw();
5325 }
5326 } else if (kind == kFactory) {
5327 if (func.IsFactory()) {
5328 ASSERT(func.is_static());
5329 return func.raw();
5330 }
5331 } else if (kind == kAny) {
5332 return func.raw();
5333 }
5334 return Function::null();
5335}
5336
// Looks up a function by name in this class and returns it if it matches
// 'kind', Function::null() otherwise. Uses the cached functions hash table
// when the class has many functions, a fast raw-pointer compare when 'name'
// is a symbol, and a character-by-character compare otherwise.
FunctionPtr Class::LookupFunction(const String& name, MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  if (len >= kFunctionLookupHashTreshold) {
    // Cache functions hash table to allow multi threaded access.
    const Array& hash_table =
        Array::Handle(thread->zone(), raw_ptr()->functions_hash_table_);
    if (!hash_table.IsNull()) {
      ClassFunctionsSet set(hash_table.raw());
      REUSABLE_STRING_HANDLESCOPE(thread);
      function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
      // No mutations.
      ASSERT(set.Release().raw() == hash_table.raw());
      return function.IsNull() ? Function::null()
                               : CheckFunctionType(function, kind);
    }
  }
  if (name.IsSymbol()) {
    // Quick Symbol compare.
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      if (function.name() == name.raw()) {
        return CheckFunctionType(function, kind);
      }
    }
  } else {
    // Slow path: 'name' is not a symbol, so compare character contents.
    REUSABLE_STRING_HANDLESCOPE(thread);
    String& function_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      function ^= funcs.At(i);
      function_name = function.name();
      if (function_name.Equals(name)) {
        return CheckFunctionType(function, kind);
      }
    }
  }
  // No function found.
  return Function::null();
}
5387
// Looks up a function by name while ignoring library-private name mangling,
// returning it if it matches 'kind', Function::null() otherwise.
FunctionPtr Class::LookupFunctionAllowPrivate(const String& name,
                                              MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = functions();
  ASSERT(!funcs.IsNull());
  const intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name = function.name();
    // Private key suffixes are stripped before comparing.
    if (String::EqualsIgnoringPrivateKey(function_name, name)) {
      return CheckFunctionType(function, kind);
    }
  }
  // No function found.
  return Function::null();
}
5414
// Looks up the getter function for field/property 'name'.
FunctionPtr Class::LookupGetterFunction(const String& name) const {
  return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name);
}
5418
// Looks up the setter function for field/property 'name'.
FunctionPtr Class::LookupSetterFunction(const String& name) const {
  return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name);
}
5422
// Looks up the accessor function whose name is 'prefix' (e.g. "get:" or
// "set:") followed by 'name'. Returns Function::null() if none exists.
FunctionPtr Class::LookupAccessorFunction(const char* prefix,
                                          intptr_t prefix_length,
                                          const String& name) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& funcs = thread->ArrayHandle();
  funcs = functions();
  intptr_t len = funcs.Length();
  Function& function = thread->FunctionHandle();
  String& function_name = thread->StringHandle();
  // Linear scan over all functions of this class.
  for (intptr_t i = 0; i < len; i++) {
    function ^= funcs.At(i);
    function_name = function.name();
    if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
      return function.raw();
    }
  }

  // No function found.
  return Function::null();
}
5450
// Looks up an instance field by name.
FieldPtr Class::LookupInstanceField(const String& name) const {
  return LookupField(name, kInstance);
}
5454
// Looks up a static field by name.
FieldPtr Class::LookupStaticField(const String& name) const {
  return LookupField(name, kStatic);
}
5458
// Looks up any kind of field by name.
FieldPtr Class::LookupField(const String& name) const {
  return LookupField(name, kAny);
}
5462
// Looks up a field by name in this class, filtered by 'kind' (instance,
// static, or any). The kind filter is duplicated in both branches so the
// symbol fast path stays a raw-pointer compare.
FieldPtr Class::LookupField(const String& name, MemberKind kind) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds = fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  if (name.IsSymbol()) {
    // Use fast raw pointer string compare for symbols.
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      if (name.raw() == field.name()) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.raw();
        } else if (kind == kStatic) {
          return field.is_static() ? field.raw() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.raw();
      }
    }
  } else {
    // Slow path: 'name' is not a symbol, so compare character contents.
    String& field_name = thread->StringHandle();
    for (intptr_t i = 0; i < len; i++) {
      field ^= flds.At(i);
      field_name = field.name();
      if (name.Equals(field_name)) {
        if (kind == kInstance) {
          return field.is_static() ? Field::null() : field.raw();
        } else if (kind == kStatic) {
          return field.is_static() ? field.raw() : Field::null();
        }
        ASSERT(kind == kAny);
        return field.raw();
      }
    }
  }
  // No matching field found.
  return Field::null();
}
5509
// Looks up a field by name, ignoring library-private name mangling. When
// 'instance_only' is true, static fields are skipped.
FieldPtr Class::LookupFieldAllowPrivate(const String& name,
                                        bool instance_only) const {
  ASSERT(!IsNull());
  // Use slow string compare, ignoring privacy name mangling.
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return Field::null();
  }
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_FIELD_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& flds = thread->ArrayHandle();
  flds = fields();
  ASSERT(!flds.IsNull());
  intptr_t len = flds.Length();
  Field& field = thread->FieldHandle();
  String& field_name = thread->StringHandle();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    field_name = field.name();
    if (field.is_static() && instance_only) {
      // If we only care about instance fields, skip statics.
      continue;
    }
    if (String::EqualsIgnoringPrivateKey(field_name, name)) {
      return field.raw();
    }
  }
  // No matching field found.
  return Field::null();
}
5540
5541FieldPtr Class::LookupInstanceFieldAllowPrivate(const String& name) const {
5542 Field& field = Field::Handle(LookupFieldAllowPrivate(name, true));
5543 if (!field.IsNull() && !field.is_static()) {
5544 return field.raw();
5545 }
5546 return Field::null();
5547}
5548
5549FieldPtr Class::LookupStaticFieldAllowPrivate(const String& name) const {
5550 Field& field = Field::Handle(LookupFieldAllowPrivate(name));
5551 if (!field.IsNull() && field.is_static()) {
5552 return field.raw();
5553 }
5554 return Field::null();
5555}
5556
// Returns a zone-allocated, human-readable description of this class for
// debugging, of the form "<library> Class: <name>".
const char* Class::ToCString() const {
  NoSafepointScope no_safepoint;
  const Library& lib = Library::Handle(library());
  const char* library_name = lib.IsNull() ? "" : lib.ToCString();
  const char* class_name = String::Handle(Name()).ToCString();
  return OS::SCreate(Thread::Current()->zone(), "%s Class: %s", library_name,
                     class_name);
}
5565
// Thomas Wang, Integer Hash Functions.
// https://gist.github.com/badboy/6267743
// "64 bit to 32 bit Hash Functions"
// Mixes a 64-bit value down to a 32-bit hash. The exact shift/multiply
// sequence is part of the published function; do not alter it.
static uword Hash64To32(uint64_t v) {
  v = ~v + (v << 18);
  v = v ^ (v >> 31);
  v = v * 21;
  v = v ^ (v >> 11);
  v = v + (v << 6);
  v = v ^ (v >> 22);
  // Truncate to 32 bits before widening to uword.
  return static_cast<uint32_t>(v);
}
5578
// Hash-table key for canonical Double lookup: wraps either an existing
// Double handle (for insertion) or a bare double value (for lookup).
// Matching is bitwise so that e.g. -0.0 and 0.0 canonicalize separately.
class CanonicalDoubleKey {
 public:
  explicit CanonicalDoubleKey(const Double& key)
      : key_(&key), value_(key.value()) {}
  explicit CanonicalDoubleKey(const double value) : key_(NULL), value_(value) {}
  bool Matches(const Double& obj) const {
    return obj.BitwiseEqualsToDouble(value_);
  }
  uword Hash() const { return Hash(value_); }
  static uword Hash(double value) {
    return Hash64To32(bit_cast<uint64_t>(value));
  }

  // NULL when the key was constructed from a bare value (lookup only).
  const Double* key_;
  const double value_;

 private:
  DISALLOW_ALLOCATION();
};
5598
// Hash-table key for canonical Mint lookup: wraps either an existing Mint
// handle (for insertion) or a bare int64_t value (for lookup).
class CanonicalMintKey {
 public:
  explicit CanonicalMintKey(const Mint& key)
      : key_(&key), value_(key.value()) {}
  explicit CanonicalMintKey(const int64_t value) : key_(NULL), value_(value) {}
  bool Matches(const Mint& obj) const { return obj.value() == value_; }
  uword Hash() const { return Hash(value_); }
  static uword Hash(int64_t value) {
    return Hash64To32(bit_cast<uint64_t>(value));
  }

  // NULL when the key was constructed from a bare value (lookup only).
  const Mint* key_;
  const int64_t value_;

 private:
  DISALLOW_ALLOCATION();
};
5616
// Traits for looking up Canonical numbers based on a hash of the value.
// ObjectType is the boxed number class (Double, Mint); KeyType is the
// matching key class above.
template <typename ObjectType, typename KeyType>
class CanonicalNumberTraits {
 public:
  static const char* Name() { return "CanonicalNumberTraits"; }
  static bool ReportStats() { return false; }

  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    return a.raw() == b.raw();
  }
  static bool IsMatch(const KeyType& a, const Object& b) {
    return a.Matches(ObjectType::Cast(b));
  }
  static uword Hash(const Object& key) {
    return KeyType::Hash(ObjectType::Cast(key).value());
  }
  static uword Hash(const KeyType& key) { return key.Hash(); }
  static ObjectPtr NewKey(const KeyType& obj) {
    // Insertion requires a key built from a handle; value-only keys are for
    // lookup and can never be inserted.
    if (obj.key_ != NULL) {
      return obj.key_->raw();
    } else {
      UNIMPLEMENTED();
      return NULL;
    }
  }
};
// Hash sets used for the per-class canonical constants of Double and Mint.
typedef UnorderedHashSet<CanonicalNumberTraits<Double, CanonicalDoubleKey> >
    CanonicalDoubleSet;
typedef UnorderedHashSet<CanonicalNumberTraits<Mint, CanonicalMintKey> >
    CanonicalMintSet;
5648
// Returns an instance of Double or Double::null().
// May only be called on the dart:core 'double' class.
DoublePtr Class::LookupCanonicalDouble(Zone* zone, double value) const {
  ASSERT(this->raw() == Isolate::Current()->object_store()->double_class());
  // An empty constants array means no constant has been canonicalized yet.
  if (this->constants() == Object::empty_array().raw()) return Double::null();

  Double& canonical_value = Double::Handle(zone);
  CanonicalDoubleSet constants(zone, this->constants());
  canonical_value ^= constants.GetOrNull(CanonicalDoubleKey(value));
  // Release() hands the (possibly unchanged) backing array back to the class.
  this->set_constants(constants.Release());
  return canonical_value.raw();
}
5660
// Returns an instance of Mint or Mint::null().
// May only be called on the VM-internal Mint class.
MintPtr Class::LookupCanonicalMint(Zone* zone, int64_t value) const {
  ASSERT(this->raw() == Isolate::Current()->object_store()->mint_class());
  // An empty constants array means no constant has been canonicalized yet.
  if (this->constants() == Object::empty_array().raw()) return Mint::null();

  Mint& canonical_value = Mint::Handle(zone);
  CanonicalMintSet constants(zone, this->constants());
  canonical_value ^= constants.GetOrNull(CanonicalMintKey(value));
  // Release() hands the (possibly unchanged) backing array back to the class.
  this->set_constants(constants.Release());
  return canonical_value.raw();
}
5672
// Hash-table key for canonical Instance lookup. Strings, integers and types
// have their own canonicalization paths and are excluded here.
class CanonicalInstanceKey {
 public:
  explicit CanonicalInstanceKey(const Instance& key) : key_(key) {
    ASSERT(!(key.IsString() || key.IsInteger() || key.IsAbstractType()));
  }
  bool Matches(const Instance& obj) const {
    ASSERT(!(obj.IsString() || obj.IsInteger() || obj.IsAbstractType()));
    if (key_.CanonicalizeEquals(obj)) {
      // Anything already in the table must have been canonicalized.
      ASSERT(obj.IsCanonical());
      return true;
    }
    return false;
  }
  uword Hash() const { return key_.CanonicalizeHash(); }
  const Instance& key_;

 private:
  DISALLOW_ALLOCATION();
};
5692
// Traits for looking up Canonical Instances based on a hash of the fields.
class CanonicalInstanceTraits {
 public:
  static const char* Name() { return "CanonicalInstanceTraits"; }
  static bool ReportStats() { return false; }

  // Called when growing the table.
  static bool IsMatch(const Object& a, const Object& b) {
    ASSERT(!(a.IsString() || a.IsInteger() || a.IsAbstractType()));
    ASSERT(!(b.IsString() || b.IsInteger() || b.IsAbstractType()));
    return a.raw() == b.raw();
  }
  static bool IsMatch(const CanonicalInstanceKey& a, const Object& b) {
    return a.Matches(Instance::Cast(b));
  }
  static uword Hash(const Object& key) {
    ASSERT(!(key.IsString() || key.IsNumber() || key.IsAbstractType()));
    ASSERT(key.IsInstance());
    return Instance::Cast(key).CanonicalizeHash();
  }
  static uword Hash(const CanonicalInstanceKey& key) { return key.Hash(); }
  static ObjectPtr NewKey(const CanonicalInstanceKey& obj) {
    return obj.key_.raw();
  }
};
// Hash set used for the per-class canonical constants of general instances.
typedef UnorderedHashSet<CanonicalInstanceTraits> CanonicalInstancesSet;
5719
// Returns the already-canonicalized instance equal to 'value', or
// Instance::null() if no such constant has been inserted yet.
InstancePtr Class::LookupCanonicalInstance(Zone* zone,
                                           const Instance& value) const {
  ASSERT(this->raw() == value.clazz());
  ASSERT(is_finalized() || is_prefinalized());
  Instance& canonical_value = Instance::Handle(zone);
  // An empty constants array means no constant has been canonicalized yet.
  if (this->constants() != Object::empty_array().raw()) {
    CanonicalInstancesSet constants(zone, this->constants());
    canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
    this->set_constants(constants.Release());
  }
  return canonical_value.raw();
}
5732
5733InstancePtr Class::InsertCanonicalConstant(Zone* zone,
5734 const Instance& constant) const {
5735 ASSERT(this->raw() == constant.clazz());
5736 Instance& canonical_value = Instance::Handle(zone);
5737 if (this->constants() == Object::empty_array().raw()) {
5738 CanonicalInstancesSet constants(
5739 HashTables::New<CanonicalInstancesSet>(128, Heap::kOld));
5740 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
5741 this->set_constants(constants.Release());
5742 } else {
5743 CanonicalInstancesSet constants(Thread::Current()->zone(),
5744 this->constants());
5745 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
5746 this->set_constants(constants.Release());
5747 }
5748 return canonical_value.raw();
5749}
5750
// Inserts 'constant' into the double class' canonical constants set,
// allocating the set on first use.
void Class::InsertCanonicalDouble(Zone* zone, const Double& constant) const {
  if (this->constants() == Object::empty_array().raw()) {
    // No constants yet: allocate a fresh hash set in old space.
    this->set_constants(Array::Handle(
        zone, HashTables::New<CanonicalDoubleSet>(128, Heap::kOld)));
  }
  CanonicalDoubleSet constants(zone, this->constants());
  constants.InsertNewOrGet(CanonicalDoubleKey(constant));
  this->set_constants(constants.Release());
}
5760
// Inserts [constant] into this class' table of canonical mint constants,
// allocating the table on first use.
void Class::InsertCanonicalMint(Zone* zone, const Mint& constant) const {
  if (this->constants() == Object::empty_array().raw()) {
    // The empty array is the sentinel for "no table allocated yet".
    this->set_constants(Array::Handle(
        zone, HashTables::New<CanonicalMintSet>(128, Heap::kOld)));
  }
  CanonicalMintSet constants(zone, this->constants());
  constants.InsertNewOrGet(CanonicalMintKey(constant));
  this->set_constants(constants.Release());
}
5770
// Rebuilds this class' constants table by re-inserting every constant,
// recomputing hashes that may have changed (e.g. after a reload changed the
// shape of instances).
void Class::RehashConstants(Zone* zone) const {
  intptr_t cid = id();
  if ((cid == kMintCid) || (cid == kDoubleCid)) {
    // Constants stored as a plain list or in a hashset with a stable hashcode,
    // which only depends on the actual value of the constant.
    return;
  }

  const Array& old_constants = Array::Handle(zone, constants());
  if (old_constants.Length() == 0) return;

  // Detach the old table so InsertCanonicalConstant below starts a fresh one.
  set_constants(Object::empty_array());

  CanonicalInstancesSet set(zone, old_constants.raw());
  Instance& constant = Instance::Handle(zone);
  CanonicalInstancesSet::Iterator it(&set);
  while (it.MoveNext()) {
    constant ^= set.GetKey(it.Current());
    ASSERT(!constant.IsNull());
    // Shape changes lose the canonical bit because they may result in merging
    // constants. E.g., [x1, y1], [x1, y2] -> [x1].
    DEBUG_ASSERT(constant.IsCanonical() ||
                 Isolate::Current()->HasAttemptedReload());
    InsertCanonicalConstant(zone, constant);
  }
  set.Release();
}
5798
// Returns true if any constant of this class has, in the range of type
// arguments corresponding to this class' own type parameters, a type that is
// not legacy, void, dynamic, or Null — i.e. a type that legacy erasure would
// actually change.
bool Class::RequireLegacyErasureOfConstants(Zone* zone) const {
  const intptr_t num_type_params = NumTypeParameters();
  const intptr_t num_type_args = NumTypeArguments();
  // The class' own type parameters occupy the tail of the flattened vector.
  const intptr_t from_index = num_type_args - num_type_params;
  Instance& constant = Instance::Handle(zone);
  TypeArguments& type_arguments = TypeArguments::Handle(zone);
  AbstractType& type = AbstractType::Handle(zone);
  CanonicalInstancesSet set(zone, constants());
  CanonicalInstancesSet::Iterator it(&set);
  while (it.MoveNext()) {
    constant ^= set.GetKey(it.Current());
    ASSERT(!constant.IsNull());
    ASSERT(!constant.IsTypeArguments());
    ASSERT(!constant.IsType());
    type_arguments = constant.GetTypeArguments();
    if (type_arguments.IsNull()) {
      continue;
    }
    for (intptr_t i = 0; i < num_type_params; i++) {
      type = type_arguments.TypeAt(from_index + i);
      if (!type.IsLegacy() && !type.IsVoidType() && !type.IsDynamicType() &&
          !type.IsNullType()) {
        // Release the table before the early return to keep it attached.
        set.Release();
        return true;
      }
      // It is not possible for a legacy type to have non-legacy type
      // arguments or for a legacy function type to have non-legacy parameter
      // types, non-legacy type parameters, or required named parameters.
    }
  }
  set.Release();
  return false;
}
5832
5833intptr_t TypeArguments::ComputeNullability() const {
5834 if (IsNull()) return 0;
5835 const intptr_t num_types = Length();
5836 intptr_t result = 0;
5837 if (num_types <= kNullabilityMaxTypes) {
5838 AbstractType& type = AbstractType::Handle();
5839 for (intptr_t i = 0; i < num_types; i++) {
5840 result <<= kNullabilityBitsPerType;
5841 type = TypeAt(i);
5842 if (!type.IsNull() && !type.IsNullTypeRef()) {
5843 switch (type.nullability()) {
5844 case Nullability::kNullable:
5845 result |= kNullableBits;
5846 break;
5847 case Nullability::kNonNullable:
5848 result |= kNonNullableBits;
5849 break;
5850 case Nullability::kLegacy:
5851 result |= kLegacyBits;
5852 break;
5853 default:
5854 UNREACHABLE();
5855 }
5856 }
5857 }
5858 }
5859 set_nullability(result);
5860 return result;
5861}
5862
// Caches the nullability bit vector computed by ComputeNullability().
void TypeArguments::set_nullability(intptr_t value) const {
  StoreSmi(&raw_ptr()->nullability_, Smi::New(value));
}
5866
5867intptr_t TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const {
5868 if (IsNull()) return kAllDynamicHash;
5869 if (IsRaw(from_index, len)) return kAllDynamicHash;
5870 uint32_t result = 0;
5871 AbstractType& type = AbstractType::Handle();
5872 for (intptr_t i = 0; i < len; i++) {
5873 type = TypeAt(from_index + i);
5874 // The hash may be calculated during type finalization (for debugging
5875 // purposes only) while a type argument is still temporarily null.
5876 if (type.IsNull() || type.IsNullTypeRef()) {
5877 return 0; // Do not cache hash, since it will still change.
5878 }
5879 result = CombineHashes(result, type.Hash());
5880 }
5881 result = FinalizeHash(result, kHashBits);
5882 return result;
5883}
5884
// Computes, caches, and returns the hash of this vector. A result of 0 means
// the vector is still unfinalized and the hash is not cached.
intptr_t TypeArguments::ComputeHash() const {
  if (IsNull()) return kAllDynamicHash;
  const intptr_t num_types = Length();
  const uint32_t result = HashForRange(0, num_types);
  if (result != 0) {
    SetHash(result);
  }
  return result;
}
5894
5895TypeArgumentsPtr TypeArguments::Prepend(Zone* zone,
5896 const TypeArguments& other,
5897 intptr_t other_length,
5898 intptr_t total_length) const {
5899 if (IsNull() && other.IsNull()) {
5900 return TypeArguments::null();
5901 }
5902 const TypeArguments& result =
5903 TypeArguments::Handle(zone, TypeArguments::New(total_length, Heap::kNew));
5904 AbstractType& type = AbstractType::Handle(zone);
5905 for (intptr_t i = 0; i < other_length; i++) {
5906 type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
5907 result.SetTypeAt(i, type);
5908 }
5909 for (intptr_t i = other_length; i < total_length; i++) {
5910 type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length);
5911 result.SetTypeAt(i, type);
5912 }
5913 return result.Canonicalize();
5914}
5915
5916TypeArgumentsPtr TypeArguments::ConcatenateTypeParameters(
5917 Zone* zone,
5918 const TypeArguments& other) const {
5919 ASSERT(!IsNull() && !other.IsNull());
5920 const intptr_t this_len = Length();
5921 const intptr_t other_len = other.Length();
5922 const auto& result = TypeArguments::Handle(
5923 zone, TypeArguments::New(this_len + other_len, Heap::kNew));
5924 auto& type = AbstractType::Handle(zone);
5925 for (intptr_t i = 0; i < this_len; ++i) {
5926 type = TypeAt(i);
5927 result.SetTypeAt(i, type);
5928 }
5929 for (intptr_t i = 0; i < other_len; ++i) {
5930 type = other.TypeAt(i);
5931 result.SetTypeAt(this_len + i, type);
5932 }
5933 return result.raw();
5934}
5935
// Returns the internal (non-user-visible) name of this vector as a symbol,
// e.g. "<T, dynamic>".
StringPtr TypeArguments::Name() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintSubvectorName(0, Length(), kInternalName, &printer);
  return Symbols::New(thread, printer.buffer());
}
5942
// Returns the user-visible name of this vector as a symbol (same as Name()
// but with user-visible type names).
StringPtr TypeArguments::UserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintSubvectorName(0, Length(), kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}
5949
5950void TypeArguments::PrintSubvectorName(
5951 intptr_t from_index,
5952 intptr_t len,
5953 NameVisibility name_visibility,
5954 BaseTextBuffer* printer,
5955 NameDisambiguation name_disambiguation /* = NameDisambiguation::kNo */)
5956 const {
5957 printer->AddString("<");
5958 AbstractType& type = AbstractType::Handle();
5959 for (intptr_t i = 0; i < len; i++) {
5960 if (from_index + i < Length()) {
5961 type = TypeAt(from_index + i);
5962 if (type.IsNull()) {
5963 printer->AddString("null"); // Unfinalized vector.
5964 } else {
5965 type.PrintName(name_visibility, printer, name_disambiguation);
5966 }
5967 } else {
5968 printer->AddString("dynamic");
5969 }
5970 if (i < len - 1) {
5971 printer->AddString(", ");
5972 }
5973 }
5974 printer->AddString(">");
5975}
5976
// Returns true if the [len] type arguments starting at [from_index] are
// pairwise equivalent (according to [kind], with [trail] guarding against
// infinite recursion) to those of [other]. Unless the vectors are identical,
// both must be non-null and have the same total length.
bool TypeArguments::IsSubvectorEquivalent(const TypeArguments& other,
                                          intptr_t from_index,
                                          intptr_t len,
                                          TypeEquality kind,
                                          TrailPtr trail) const {
  if (this->raw() == other.raw()) {
    return true;
  }
  if (IsNull() || other.IsNull()) {
    return false;
  }
  const intptr_t num_types = Length();
  if (num_types != other.Length()) {
    return false;
  }
  AbstractType& type = AbstractType::Handle();
  AbstractType& other_type = AbstractType::Handle();
  for (intptr_t i = from_index; i < from_index + len; i++) {
    type = TypeAt(i);
    other_type = other.TypeAt(i);
    // Still unfinalized vectors should not be considered equivalent.
    if (type.IsNull() || !type.IsEquivalent(other_type, kind, trail)) {
      return false;
    }
  }
  return true;
}
6004
// Returns true if any type argument of this vector is recursive (or still
// null, which implies the enclosing type is recursive — see below).
bool TypeArguments::IsRecursive() const {
  if (IsNull()) return false;
  const intptr_t num_types = Length();
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    // If this type argument is null, the type parameterized with this type
    // argument is still being finalized and is definitely recursive. The null
    // type argument will be replaced by a non-null type before the type is
    // marked as finalized.
    if (type.IsNull() || type.IsRecursive()) {
      return true;
    }
  }
  return false;
}
6021
// Returns true if the [len] type arguments starting at [from_index] are all
// the dynamic type. If [raw_instantiated] is true, uninstantiated type
// parameters also count, since they are equivalent to dynamic in a raw type.
bool TypeArguments::IsDynamicTypes(bool raw_instantiated,
                                   intptr_t from_index,
                                   intptr_t len) const {
  ASSERT(Length() >= (from_index + len));
  AbstractType& type = AbstractType::Handle();
  Class& type_class = Class::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    if (type.IsNull()) {
      // Unfinalized vector: cannot be all dynamic.
      return false;
    }
    if (!type.HasTypeClass()) {
      if (raw_instantiated && type.IsTypeParameter()) {
        // An uninstantiated type parameter is equivalent to dynamic.
        continue;
      }
      return false;
    }
    type_class = type.type_class();
    if (!type_class.IsDynamicClass()) {
      return false;
    }
  }
  return true;
}
6047
// Returns true if at least one instantiation has been cached for this
// vector (the cache always ends with a sentinel entry, hence length > 1).
bool TypeArguments::HasInstantiations() const {
  const Array& prior_instantiations = Array::Handle(instantiations());
  ASSERT(prior_instantiations.Length() > 0);  // Always at least a sentinel.
  return prior_instantiations.Length() > 1;
}
6053
// Counts the cached instantiations of this vector by walking the cache array
// until the kNoInstantiator sentinel is reached.
intptr_t TypeArguments::NumInstantiations() const {
  const Array& prior_instantiations = Array::Handle(instantiations());
  ASSERT(prior_instantiations.Length() > 0);  // Always at least a sentinel.
  intptr_t num = 0;
  intptr_t i = 0;
  while (prior_instantiations.At(i) !=
         Smi::New(TypeArguments::kNoInstantiator)) {
    // Each cache entry occupies Instantiation::kSizeInWords slots.
    i += TypeArguments::Instantiation::kSizeInWords;
    num++;
  }
  return num;
}
6066
// Returns the cache array of prior instantiations of this vector.
ArrayPtr TypeArguments::instantiations() const {
  // We rely on the fact that any loads from the array are dependent loads and
  // avoid the load-acquire barrier here.
  return raw_ptr()->instantiations_;
}
6072
// Replaces the instantiations cache array; pairs with the barrier-free read
// in instantiations() above.
void TypeArguments::set_instantiations(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  ASSERT(!value.IsNull());
  StorePointer<ArrayPtr, std::memory_order_release>(&raw_ptr()->instantiations_,
                                                    value.raw());
}
6081
// Returns the number of type arguments; a null vector has length 0.
intptr_t TypeArguments::Length() const {
  if (IsNull()) {
    return 0;
  }
  return Smi::Value(raw_ptr()->length_);
}
6088
// Returns the cached nullability bit vector (see ComputeNullability());
// 0 for a null vector.
intptr_t TypeArguments::nullability() const {
  if (IsNull()) {
    return 0;
  }
  return Smi::Value(raw_ptr()->nullability_);
}
6095
// Returns the type argument at [index]; the vector must not be null.
AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const {
  ASSERT(!IsNull());
  return *TypeAddr(index);
}
6100
// Like TypeAt(), but a null vector yields the dynamic type at any index.
AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const {
  if (IsNull()) {
    // null vector represents infinite list of dynamics
    return Type::dynamic_type().raw();
  }
  ASSERT((index >= 0) && (index < Length()));
  return TypeAt(index);
}
6109
// Stores [value] at [index]; only legal on non-canonical vectors, since
// canonical vectors are immutable.
void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
  ASSERT(!IsCanonical());
  StorePointer(TypeAddr(index), value.raw());
}
6114
// Returns true if the [len] type arguments starting at [from_index] are all
// instantiated with respect to [genericity] and [num_free_fun_type_params]
// ([trail] guards against infinite recursion). Null entries are treated as
// instantiated; see the comment below for why.
bool TypeArguments::IsSubvectorInstantiated(intptr_t from_index,
                                            intptr_t len,
                                            Genericity genericity,
                                            intptr_t num_free_fun_type_params,
                                            TrailPtr trail) const {
  ASSERT(!IsNull());
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < len; i++) {
    type = TypeAt(from_index + i);
    // If this type argument T is null, the type A containing T in its flattened
    // type argument vector V is recursive and is still being finalized.
    // T is the type argument of a super type of A. T is being instantiated
    // during finalization of V, which is also the instantiator. T depends
    // solely on the type parameters of A and will be replaced by a non-null
    // type before A is marked as finalized.
    if (!type.IsNull() &&
        !type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  return true;
}
6137
// Returns true if this vector consists solely of non-nullable, non-legacy
// class type parameters whose index equals their position, i.e. instantiating
// it would simply reproduce the instantiator type argument vector.
bool TypeArguments::IsUninstantiatedIdentity() const {
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (type.IsNull()) {
      return false;  // Still unfinalized, too early to tell.
    }
    if (!type.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type);
    ASSERT(type_param.IsFinalized());
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change
    // nullability of a type, so type arguments vector containing such type
    // parameters cannot be substituted with instantiator type arguments.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      return false;
    }
  }
  return true;
  // Note that it is not necessary to verify at runtime that the instantiator
  // type vector is long enough, since this uninstantiated vector contains as
  // many different type parameters as it is long.
}
6166
6167// Return true if this uninstantiated type argument vector, once instantiated
6168// at runtime, is a prefix of the type argument vector of its instantiator.
6169// A runtime check may be required, as indicated by with_runtime_check.
bool TypeArguments::CanShareInstantiatorTypeArguments(
    const Class& instantiator_class,
    bool* with_runtime_check) const {
  ASSERT(!IsInstantiated());
  if (with_runtime_check != nullptr) {
    *with_runtime_check = false;
  }
  const intptr_t num_type_args = Length();
  const intptr_t num_instantiator_type_args =
      instantiator_class.NumTypeArguments();
  if (num_type_args > num_instantiator_type_args) {
    // This vector cannot be a prefix of a shorter vector.
    return false;
  }
  const intptr_t num_instantiator_type_params =
      instantiator_class.NumTypeParameters();
  const intptr_t first_type_param_offset =
      num_instantiator_type_args - num_instantiator_type_params;
  // At compile time, the type argument vector of the instantiator consists of
  // the type argument vector of its super type, which may refer to the type
  // parameters of the instantiator class, followed by (or overlapping partially
  // or fully with) the type parameters of the instantiator class in declaration
  // order.
  // In other words, the only variables are the type parameters of the
  // instantiator class.
  // This uninstantiated type argument vector is also expressed in terms of the
  // type parameters of the instantiator class. Therefore, in order to be a
  // prefix once instantiated at runtime, every one of its type argument must be
  // equal to the type argument of the instantiator vector at the same index.

  // As a first requirement, the last num_instantiator_type_params type
  // arguments of this type argument vector must refer to the corresponding type
  // parameters of the instantiator class.
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = first_type_param_offset; i < num_type_args; i++) {
    type_arg = TypeAt(i);
    if (!type_arg.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    ASSERT(type_param.IsFinalized());
    if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change nullability
    // of a type, so type arguments vector containing such type parameters
    // cannot be substituted with instantiator type arguments, unless we check
    // at runtime the nullability of the first 1 or 2 type arguments of the
    // instantiator.
    // Note that the presence of non-overlapping super type arguments (i.e.
    // first_type_param_offset > 0) will prevent this optimization.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
        return false;
      }
      *with_runtime_check = true;
    }
  }
  // As a second requirement, the type arguments corresponding to the super type
  // must be identical. Overlapping ones have already been checked starting at
  // first_type_param_offset.
  if (first_type_param_offset == 0) {
    return true;
  }
  AbstractType& super_type =
      AbstractType::Handle(instantiator_class.super_type());
  const TypeArguments& super_type_args =
      TypeArguments::Handle(super_type.arguments());
  if (super_type_args.IsNull()) {
    // A null super type argument vector cannot match this non-trivial vector.
    ASSERT(!IsUninstantiatedIdentity());
    return false;
  }
  AbstractType& super_type_arg = AbstractType::Handle();
  for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args);
       i++) {
    type_arg = TypeAt(i);
    super_type_arg = super_type_args.TypeAt(i);
    if (!type_arg.Equals(super_type_arg)) {
      ASSERT(!IsUninstantiatedIdentity());
      return false;
    }
  }
  return true;
}
6254
6255// Return true if this uninstantiated type argument vector, once instantiated
6256// at runtime, is a prefix of the enclosing function type arguments.
6257// A runtime check may be required, as indicated by with_runtime_check.
bool TypeArguments::CanShareFunctionTypeArguments(
    const Function& function,
    bool* with_runtime_check) const {
  ASSERT(!IsInstantiated());
  if (with_runtime_check != nullptr) {
    *with_runtime_check = false;
  }
  const intptr_t num_type_args = Length();
  const intptr_t num_parent_type_params = function.NumParentTypeParameters();
  const intptr_t num_function_type_params = function.NumTypeParameters();
  const intptr_t num_function_type_args =
      num_parent_type_params + num_function_type_params;
  if (num_type_args > num_function_type_args) {
    // This vector cannot be a prefix of a shorter vector.
    return false;
  }
  AbstractType& type_arg = AbstractType::Handle();
  for (intptr_t i = 0; i < num_type_args; i++) {
    type_arg = TypeAt(i);
    if (!type_arg.IsTypeParameter()) {
      return false;
    }
    const TypeParameter& type_param = TypeParameter::Cast(type_arg);
    ASSERT(type_param.IsFinalized());
    // Every entry must be the function type parameter at its own index.
    if ((type_param.index() != i) || !type_param.IsFunctionTypeParameter()) {
      return false;
    }
    // Instantiating nullable and legacy type parameters may change nullability
    // of a type, so type arguments vector containing such type parameters
    // cannot be substituted with the enclosing function type arguments, unless
    // we check at runtime the nullability of the first 1 or 2 type arguments of
    // the enclosing function type arguments.
    if (type_param.IsNullable() || type_param.IsLegacy()) {
      if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
        return false;
      }
      *with_runtime_check = true;
    }
  }
  return true;
}
6299
// Returns true if every type argument of this non-null vector is finalized.
bool TypeArguments::IsFinalized() const {
  ASSERT(!IsNull());
  AbstractType& type = AbstractType::Handle();
  const intptr_t num_types = Length();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    if (!type.IsFinalized()) {
      return false;
    }
  }
  return true;
}
6312
// Returns a new vector in which each uninstantiated type argument has been
// instantiated from [instantiator_type_arguments] and
// [function_type_arguments]. Returns Object::empty_type_arguments() if any
// individual instantiation fails (dead code in the optimizing compiler).
TypeArgumentsPtr TypeArguments::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    TrailPtr trail) const {
  ASSERT(!IsInstantiated(kAny, num_free_fun_type_params));
  // Fast path: an identity vector instantiates to the instantiator vector.
  if ((instantiator_type_arguments.IsNull() ||
       instantiator_type_arguments.Length() == Length()) &&
      IsUninstantiatedIdentity()) {
    return instantiator_type_arguments.raw();
  }
  const intptr_t num_types = Length();
  TypeArguments& instantiated_array =
      TypeArguments::Handle(TypeArguments::New(num_types, space));
  AbstractType& type = AbstractType::Handle();
  for (intptr_t i = 0; i < num_types; i++) {
    type = TypeAt(i);
    // If this type argument T is null, the type A containing T in its flattened
    // type argument vector V is recursive and is still being finalized.
    // T is the type argument of a super type of A. T is being instantiated
    // during finalization of V, which is also the instantiator. T depends
    // solely on the type parameters of A and will be replaced by a non-null
    // type before A is marked as finalized.
    if (!type.IsNull() &&
        !type.IsInstantiated(kAny, num_free_fun_type_params)) {
      type = type.InstantiateFrom(instantiator_type_arguments,
                                  function_type_arguments,
                                  num_free_fun_type_params, space, trail);
      // A returned null type indicates a failed instantiation in dead code that
      // must be propagated up to the caller, the optimizing compiler.
      if (type.IsNull()) {
        return Object::empty_type_arguments().raw();
      }
    }
    instantiated_array.SetTypeAt(i, type);
  }
  return instantiated_array.raw();
}
6352
// Instantiates this vector, canonicalizes the result, and memoizes the
// (instantiator, function type args) -> result triple in the instantiations
// cache. Writers are serialized by the canonicalization mutex; concurrent
// barrier-free readers are supported by publishing each new entry with a
// store-release of its instantiator slot (written last, below).
TypeArgumentsPtr TypeArguments::InstantiateAndCanonicalizeFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  SafepointMutexLocker ml(
      thread->isolate_group()->type_arguments_canonicalization_mutex());

  ASSERT(!IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsCanonical());
  // Lookup instantiators and if found, return instantiated result.
  Array& prior_instantiations = Array::Handle(zone, instantiations());
  ASSERT(!prior_instantiations.IsNull() && prior_instantiations.IsArray());
  // The instantiations cache is initialized with Object::zero_array() and is
  // therefore guaranteed to contain kNoInstantiator. No length check needed.
  ASSERT(prior_instantiations.Length() > 0);  // Always at least a sentinel.
  intptr_t index = 0;
  while (true) {
    // Cache hit: both canonical input vectors match an existing entry.
    if ((prior_instantiations.At(
             index +
             TypeArguments::Instantiation::kInstantiatorTypeArgsIndex) ==
         instantiator_type_arguments.raw()) &&
        (prior_instantiations.At(
             index + TypeArguments::Instantiation::kFunctionTypeArgsIndex) ==
         function_type_arguments.raw())) {
      return TypeArguments::RawCast(prior_instantiations.At(
          index + TypeArguments::Instantiation::kInstantiatedTypeArgsIndex));
    }
    if (prior_instantiations.At(index) ==
        Smi::New(TypeArguments::kNoInstantiator)) {
      break;
    }
    index += TypeArguments::Instantiation::kSizeInWords;
  }
  // Cache lookup failed. Instantiate the type arguments.
  TypeArguments& result = TypeArguments::Handle(zone);
  result = InstantiateFrom(instantiator_type_arguments, function_type_arguments,
                           kAllFree, Heap::kOld);
  // Canonicalize type arguments.
  result = result.Canonicalize();
  // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called
  // indirectly, so the prior_instantiations array cannot have grown.
  ASSERT(prior_instantiations.raw() == instantiations());
  // Add instantiator and function type args and result to instantiations array.
  intptr_t length = prior_instantiations.Length();
  if ((index + TypeArguments::Instantiation::kSizeInWords) >= length) {
    // TODO(regis): Should we limit the number of cached instantiations?
    // Grow the instantiations array by about 50%, but at least by 1.
    // The initial array is Object::zero_array() of length 1.
    intptr_t entries =
        (length - 1) / TypeArguments::Instantiation::kSizeInWords;
    intptr_t new_entries = entries + (entries >> 1) + 1;
    length = new_entries * TypeArguments::Instantiation::kSizeInWords + 1;
    prior_instantiations =
        Array::Grow(prior_instantiations, length, Heap::kOld);
    set_instantiations(prior_instantiations);
    ASSERT((index + TypeArguments::Instantiation::kSizeInWords) < length);
  }

  // Set sentinel marker at next position.
  prior_instantiations.SetAt(
      index + TypeArguments::Instantiation::kSizeInWords +
          TypeArguments::Instantiation::kInstantiatorTypeArgsIndex,
      Smi::Handle(zone, Smi::New(TypeArguments::kNoInstantiator)));

  prior_instantiations.SetAt(
      index + TypeArguments::Instantiation::kFunctionTypeArgsIndex,
      function_type_arguments);
  prior_instantiations.SetAt(
      index + TypeArguments::Instantiation::kInstantiatedTypeArgsIndex, result);

  // We let any concurrently running mutator thread now see the new entry by
  // using a store-release barrier.
  ASSERT(
      prior_instantiations.At(
          index + TypeArguments::Instantiation::kInstantiatorTypeArgsIndex) ==
      Smi::New(TypeArguments::kNoInstantiator));
  prior_instantiations.SetAtRelease(
      index + TypeArguments::Instantiation::kInstantiatorTypeArgsIndex,
      instantiator_type_arguments);
  return result.raw();
}
6438
// Allocates a new vector of [len] (initially null) types in [space] with
// hash and nullability cleared and an empty instantiations cache.
TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) {
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len);
  }
  TypeArguments& result = TypeArguments::Handle();
  {
    ObjectPtr raw = Object::Allocate(TypeArguments::kClassId,
                                     TypeArguments::InstanceSize(len), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    // Length must be set before we start storing into the array.
    result.SetLength(len);
    result.SetHash(0);
    result.set_nullability(0);
  }
  // The zero array should have been initialized.
  ASSERT(Object::zero_array().raw() != Array::null());
  COMPILE_ASSERT(TypeArguments::kNoInstantiator == 0);
  result.set_instantiations(Object::zero_array());
  return result.raw();
}
6461
// Returns the address of the [index]th type slot (bounds-asserted).
AbstractTypePtr const* TypeArguments::TypeAddr(intptr_t index) const {
  ASSERT((index >= 0) && (index < Length()));
  return &raw_ptr()->types()[index];
}
6466
// Sets the length field; only legal on non-canonical vectors.
void TypeArguments::SetLength(intptr_t value) const {
  ASSERT(!IsCanonical());
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  StoreSmi(&raw_ptr()->length_, Smi::New(value));
}
6473
// Returns the canonical version of this vector: canonicalizes every type
// argument, then looks the vector up in (or inserts it into) the isolate's
// canonical type arguments table. A raw (all-dynamic) vector canonicalizes
// to null. [trail] guards against infinite recursion on recursive types.
TypeArgumentsPtr TypeArguments::Canonicalize(TrailPtr trail) const {
  if (IsNull() || IsCanonical()) {
    ASSERT(IsOld());
    return this->raw();
  }
  const intptr_t num_types = Length();
  if (IsRaw(0, num_types)) {
    return TypeArguments::null();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ObjectStore* object_store = isolate->object_store();
  TypeArguments& result = TypeArguments::Handle(zone);
  {
    // First lookup under the lock, before doing any canonicalization work.
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    object_store->set_canonical_type_arguments(table.Release());
  }
  if (result.IsNull()) {
    // Canonicalize each type argument.
    AbstractType& type_arg = AbstractType::Handle(zone);
    for (intptr_t i = 0; i < num_types; i++) {
      type_arg = TypeAt(i);
      type_arg = type_arg.Canonicalize(trail);
      if (IsCanonical()) {
        // Canonicalizing this type_arg canonicalized this type.
        ASSERT(IsRecursive());
        return this->raw();
      }
      SetTypeAt(i, type_arg);
    }
    // Canonicalization of a type argument of a recursive type argument vector
    // may change the hash of the vector, so invalidate.
    if (IsRecursive()) {
      SetHash(0);
    }
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeArgumentsSet table(zone,
                                    object_store->canonical_type_arguments());
    // Since we canonicalized some type arguments above we need to lookup
    // in the table again to make sure we don't already have an equivalent
    // canonical entry.
    result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
    if (result.IsNull()) {
      // Make sure we have an old space object and add it to the table.
      if (this->IsNew()) {
        result ^= Object::Clone(*this, Heap::kOld);
      } else {
        result = this->raw();
      }
      ASSERT(result.IsOld());
      result.ComputeNullability();
      result.SetCanonical();  // Mark object as being canonical.
      // Now add this TypeArgument into the canonical list of type arguments.
      bool present = table.Insert(result);
      ASSERT(!present);
    }
    object_store->set_canonical_type_arguments(table.Release());
  }
  ASSERT(result.Equals(*this));
  ASSERT(!result.IsNull());
  ASSERT(result.IsTypeArguments());
  ASSERT(result.IsCanonical());
  return result.raw();
}
6542
6543void TypeArguments::EnumerateURIs(URIs* uris) const {
6544 if (IsNull()) {
6545 return;
6546 }
6547 Thread* thread = Thread::Current();
6548 Zone* zone = thread->zone();
6549 AbstractType& type = AbstractType::Handle(zone);
6550 const intptr_t num_types = Length();
6551 for (intptr_t i = 0; i < num_types; i++) {
6552 type = TypeAt(i);
6553 type.EnumerateURIs(uris);
6554 }
6555}
6556
6557const char* TypeArguments::ToCString() const {
6558 if (IsNull()) {
6559 return "TypeArguments: null";
6560 }
6561 Zone* zone = Thread::Current()->zone();
6562 const char* prev_cstr = OS::SCreate(zone, "TypeArguments: (H%" Px ")",
6563 Smi::Value(raw_ptr()->hash_));
6564 for (int i = 0; i < Length(); i++) {
6565 const AbstractType& type_at = AbstractType::Handle(zone, TypeAt(i));
6566 const char* type_cstr = type_at.IsNull() ? "null" : type_at.ToCString();
6567 char* chars = OS::SCreate(zone, "%s [%s]", prev_cstr, type_cstr);
6568 prev_cstr = chars;
6569 }
6570 return prev_cstr;
6571}
6572
6573const char* PatchClass::ToCString() const {
6574 const Class& cls = Class::Handle(patched_class());
6575 const char* cls_name = cls.ToCString();
6576 return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name);
6577}
6578
// Creates a PatchClass recording that [patched_class] is patched with code
// originating in [origin_class] (using that class' script).
PatchClassPtr PatchClass::New(const Class& patched_class,
                              const Class& origin_class) {
  const PatchClass& result = PatchClass::Handle(PatchClass::New());
  result.set_patched_class(patched_class);
  result.set_origin_class(origin_class);
  result.set_script(Script::Handle(origin_class.script()));
  // -1 marks "no kernel offset set".
  result.set_library_kernel_offset(-1);
  return result.raw();
}
6588
// Creates a PatchClass for [patched_class] whose code originates in [script];
// the origin class is the patched class itself.
PatchClassPtr PatchClass::New(const Class& patched_class,
                              const Script& script) {
  const PatchClass& result = PatchClass::Handle(PatchClass::New());
  result.set_patched_class(patched_class);
  result.set_origin_class(patched_class);
  result.set_script(script);
  // -1 marks "no kernel offset set".
  result.set_library_kernel_offset(-1);
  return result.raw();
}
6598
// Allocates an uninitialized PatchClass instance in old space.
PatchClassPtr PatchClass::New() {
  ASSERT(Object::patch_class_class() != Class::null());
  ObjectPtr raw = Object::Allocate(PatchClass::kClassId,
                                   PatchClass::InstanceSize(), Heap::kOld);
  return static_cast<PatchClassPtr>(raw);
}
6605
// Stores the class being patched (GC-aware pointer store).
void PatchClass::set_patched_class(const Class& value) const {
  StorePointer(&raw_ptr()->patched_class_, value.raw());
}
6609
// Stores the class the patched members originate from (GC-aware store).
void PatchClass::set_origin_class(const Class& value) const {
  StorePointer(&raw_ptr()->origin_class_, value.raw());
}
6613
// Stores the script associated with this patch class (GC-aware store).
void PatchClass::set_script(const Script& value) const {
  StorePointer(&raw_ptr()->script_, value.raw());
}
6617
// Stores the kernel data blob of the enclosing library (GC-aware store).
void PatchClass::set_library_kernel_data(const ExternalTypedData& data) const {
  StorePointer(&raw_ptr()->library_kernel_data_, data.raw());
}
6621
// Returns a hash for this function derived from its (symbol) name.
intptr_t Function::Hash() const {
  return String::HashRawSymbol(name());
}
6625
// Returns whether the debugger has a breakpoint set in this function.
// Always false in PRODUCT mode, where there is no debugger.
bool Function::HasBreakpoint() const {
#if defined(PRODUCT)
  return false;
#else
  Thread* thread = Thread::Current();
  return thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone());
#endif
}
6634
6635void Function::InstallOptimizedCode(const Code& code) const {
6636 DEBUG_ASSERT(IsMutatorOrAtSafepoint());
6637 // We may not have previous code if FLAG_precompile is set.
6638 // Hot-reload may have already disabled the current code.
6639 if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) {
6640 Code::Handle(CurrentCode()).DisableDartCode();
6641 }
6642 AttachCode(code);
6643}
6644
// Sets the active code of this function; must run on the mutator thread or
// at a safepoint (enforced in debug mode only).
void Function::SetInstructions(const Code& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  SetInstructionsSafe(value);
}
6649
// Stores the code pointer and caches its entry points in the function
// object, without asserting anything about the calling thread.
void Function::SetInstructionsSafe(const Code& value) const {
  StorePointer(&raw_ptr()->code_, value.raw());
  StoreNonPointer(&raw_ptr()->entry_point_, value.EntryPoint());
  StoreNonPointer(&raw_ptr()->unchecked_entry_point_,
                  value.UncheckedEntryPoint());
}
6656
// Makes 'value' the active code of this function and points the code back
// at this function as its owner.
void Function::AttachCode(const Code& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  // Finish setting up code before activating it.
  value.set_owner(*this);
  SetInstructions(value);
  ASSERT(Function::Handle(value.function()).IsNull() ||
         (value.function() == this->raw()));
}
6665
// Returns whether this function has real compiled code attached, i.e. its
// code_ field holds something other than the lazy-compile (or, in JIT mode,
// the interpreter-call) stub.
bool Function::HasCode() const {
  NoSafepointScope no_safepoint;
  ASSERT(raw_ptr()->code_ != Code::null());
#if defined(DART_PRECOMPILED_RUNTIME)
  return raw_ptr()->code_ != StubCode::LazyCompile().raw();
#else
  return raw_ptr()->code_ != StubCode::LazyCompile().raw() &&
         raw_ptr()->code_ != StubCode::InterpretCall().raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
6676
6677#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns whether this function may be represented by and executed from
// bytecode. Certain recognized Bigint methods are excluded when intrinsics
// are enabled (see comment below), and some synthetic function kinds are
// never allowed.
bool Function::IsBytecodeAllowed(Zone* zone) const {
  if (FLAG_intrinsify) {
    // Bigint intrinsics should not be interpreted, because their Dart version
    // is only to be used when intrinsics are disabled. Mixing an interpreted
    // Dart version with a compiled intrinsified version results in a mismatch
    // in the number of digits processed by each call.
    switch (recognized_kind()) {
      case MethodRecognizer::kBigint_lsh:
      case MethodRecognizer::kBigint_rsh:
      case MethodRecognizer::kBigint_absAdd:
      case MethodRecognizer::kBigint_absSub:
      case MethodRecognizer::kBigint_mulAdd:
      case MethodRecognizer::kBigint_sqrAdd:
      case MethodRecognizer::kBigint_estimateQuotientDigit:
      case MethodRecognizer::kMontgomery_mulMod:
        return false;
      default:
        break;
    }
  }
  switch (kind()) {
    case FunctionLayout::kDynamicInvocationForwarder:
      // Forwarders may only come from bytecode if they were declared there.
      return is_declared_in_bytecode();
    case FunctionLayout::kImplicitClosureFunction:
    case FunctionLayout::kIrregexpFunction:
    case FunctionLayout::kFfiTrampoline:
      return false;
    default:
      return true;
  }
}
6709
// Attaches 'value' as the bytecode of this function and, when interpreting
// is enabled and no compiled code exists, installs the InterpretCall stub
// as the function's entry point.
void Function::AttachBytecode(const Bytecode& value) const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(!value.IsNull());
  // Finish setting up code before activating it.
  if (!value.InVMIsolateHeap()) {
    value.set_function(*this);
  }
  StorePointer(&raw_ptr()->bytecode_, value.raw());

  // We should not have loaded the bytecode if the function had code.
  // However, we may load the bytecode to access source positions (see
  // ProcessBytecodeTokenPositionsEntry in kernel.cc).
  // In that case, do not install InterpretCall stub below.
  if (FLAG_enable_interpreter && !HasCode()) {
    // Set the code entry_point to InterpretCall stub.
    SetInstructions(StubCode::InterpretCall());
  }
}
6728#endif // !defined(DART_PRECOMPILED_RUNTIME)
6729
// Static variant of HasCode() operating on a raw FunctionPtr; see the
// instance method above for the semantics.
bool Function::HasCode(FunctionPtr function) {
  NoSafepointScope no_safepoint;
  ASSERT(function->ptr()->code_ != Code::null());
#if defined(DART_PRECOMPILED_RUNTIME)
  return function->ptr()->code_ != StubCode::LazyCompile().raw();
#else
  return function->ptr()->code_ != StubCode::LazyCompile().raw() &&
         function->ptr()->code_ != StubCode::InterpretCall().raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
6740
// Drops the unoptimized code of this function and resets its entry point
// to the interpreter-call stub (if bytecode is available and interpretation
// is enabled) or the lazy-compile stub. JIT mode only.
void Function::ClearCode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(Thread::Current()->IsMutatorThread());

  StorePointer(&raw_ptr()->unoptimized_code_, Code::null());

  if (FLAG_enable_interpreter && HasBytecode()) {
    SetInstructions(StubCode::InterpretCall());
  } else {
    SetInstructions(StubCode::LazyCompile());
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
6756
// Drops the bytecode of this function. JIT mode only.
void Function::ClearBytecode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StorePointer(&raw_ptr()->bytecode_, Bytecode::null());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
6764
// Compiles unoptimized code for this function if it does not exist yet,
// propagating any compilation error as an exception. Must not be called on
// force-optimized functions, which have no unoptimized form.
void Function::EnsureHasCompiledUnoptimizedCode() const {
  ASSERT(!ForceOptimize());
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  Zone* zone = thread->zone();

  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
}
6778
// Deoptimization support: disables the current optimized code, compiles
// unoptimized code if needed, attaches it, and registers the disabled code
// with the isolate so pending activations can still be patched.
void Function::SwitchToUnoptimizedCode() const {
  ASSERT(HasOptimizedCode());
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  const Code& current_code = Code::Handle(zone, CurrentCode());

  if (FLAG_trace_deoptimization_verbose) {
    THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n",
              ToFullyQualifiedCString(), current_code.EntryPoint());
  }
  current_code.DisableDartCode();
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  unopt_code.Enable();
  AttachCode(unopt_code);
  isolate->TrackDeoptimizedCode(current_code);
}
6803
// Disables the current optimized code and falls back to existing
// unoptimized code if present, otherwise to the interpreter-call or
// lazy-compile stub. No-op if the function has no optimized code.
// JIT mode only.
void Function::SwitchToLazyCompiledUnoptimizedCode() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  if (!HasOptimizedCode()) {
    return;
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());

  const Code& current_code = Code::Handle(zone, CurrentCode());
  TIR_Print("Disabling optimized code for %s\n", ToCString());
  current_code.DisableDartCode();

  const Code& unopt_code = Code::Handle(zone, unoptimized_code());
  if (unopt_code.IsNull()) {
    // Set the lazy compile or interpreter call stub code.
    if (FLAG_enable_interpreter && HasBytecode()) {
      TIR_Print("Switched to interpreter call stub for %s\n", ToCString());
      SetInstructions(StubCode::InterpretCall());
    } else {
      TIR_Print("Switched to lazy compile stub for %s\n", ToCString());
      SetInstructions(StubCode::LazyCompile());
    }
    return;
  }

  TIR_Print("Switched to unoptimized code for %s\n", ToCString());

  AttachCode(unopt_code);
  unopt_code.Enable();
#endif
}
6839
// Stores the unoptimized code of this function; 'value' must be null or a
// non-optimized Code object. JIT mode only.
void Function::set_unoptimized_code(const Code& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(value.IsNull() || !value.is_optimized());
  StorePointer(&raw_ptr()->unoptimized_code_, value.raw());
#endif
}
6849
// Returns the context scope of a closure function (stored in its
// ClosureData), or null for non-closure functions.
ContextScopePtr Function::context_scope() const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).context_scope();
  }
  return ContextScope::null();
}
6858
// Sets the context scope of a closure function; unreachable for any other
// function kind.
void Function::set_context_scope(const ContextScope& value) const {
  if (IsClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_context_scope(value);
    return;
  }
  UNREACHABLE();
}
6868
// Returns the cached implicit closure instance of an implicit static
// closure function, or null for any other function kind.
InstancePtr Function::implicit_static_closure() const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    return ClosureData::Cast(obj).implicit_static_closure();
  }
  return Instance::null();
}
6877
// Caches the implicit closure instance of an implicit static closure
// function; unreachable for any other function kind.
void Function::set_implicit_static_closure(const Instance& closure) const {
  if (IsImplicitStaticClosureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    ClosureData::Cast(obj).set_implicit_static_closure(closure);
    return;
  }
  UNREACHABLE();
}
6887
6888ScriptPtr Function::eval_script() const {
6889 const Object& obj = Object::Handle(raw_ptr()->data_);
6890 if (obj.IsScript()) {
6891 return Script::Cast(obj).raw();
6892 }
6893 return Script::null();
6894}
6895
// Stores the script of an eval function in the (previously empty)
// overloaded data_ field.
void Function::set_eval_script(const Script& script) const {
  ASSERT(token_pos() == TokenPosition::kMinSource);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(script);
}
6901
// Returns the closure function extracted by this method extractor.
FunctionPtr Function::extracted_method_closure() const {
  ASSERT(kind() == FunctionLayout::kMethodExtractor);
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsFunction());
  return Function::Cast(obj).raw();
}
6908
// Stores the closure function extracted by this method extractor in the
// (previously empty) overloaded data_ field.
void Function::set_extracted_method_closure(const Function& value) const {
  ASSERT(kind() == FunctionLayout::kMethodExtractor);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(value);
}
6914
// Returns the saved arguments descriptor of a noSuchMethod or invoke-field
// dispatcher.
ArrayPtr Function::saved_args_desc() const {
  ASSERT(kind() == FunctionLayout::kNoSuchMethodDispatcher ||
         kind() == FunctionLayout::kInvokeFieldDispatcher);
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsArray());
  return Array::Cast(obj).raw();
}
6922
// Stores the arguments descriptor of a noSuchMethod or invoke-field
// dispatcher in the (previously empty) overloaded data_ field.
void Function::set_saved_args_desc(const Array& value) const {
  ASSERT(kind() == FunctionLayout::kNoSuchMethodDispatcher ||
         kind() == FunctionLayout::kInvokeFieldDispatcher);
  ASSERT(raw_ptr()->data_ == Object::null());
  set_data(value);
}
6929
// Returns the field accessed by this implicit getter/setter/static-getter
// or initialized by this field initializer.
FieldPtr Function::accessor_field() const {
  ASSERT(kind() == FunctionLayout::kImplicitGetter ||
         kind() == FunctionLayout::kImplicitSetter ||
         kind() == FunctionLayout::kImplicitStaticGetter ||
         kind() == FunctionLayout::kFieldInitializer);
  return Field::RawCast(raw_ptr()->data_);
}
6937
// Stores the field backing this implicit accessor or field initializer.
void Function::set_accessor_field(const Field& value) const {
  ASSERT(kind() == FunctionLayout::kImplicitGetter ||
         kind() == FunctionLayout::kImplicitSetter ||
         kind() == FunctionLayout::kImplicitStaticGetter ||
         kind() == FunctionLayout::kFieldInitializer);
  // Top level classes may be finalized multiple times.
  ASSERT(raw_ptr()->data_ == Object::null() || raw_ptr()->data_ == value.raw());
  set_data(value);
}
6947
// Returns the enclosing function of a closure or signature function
// (stored in its ClosureData/SignatureData), or null otherwise.
FunctionPtr Function::parent_function() const {
  if (IsClosureFunction() || IsSignatureFunction()) {
    const Object& obj = Object::Handle(raw_ptr()->data_);
    ASSERT(!obj.IsNull());
    if (IsClosureFunction()) {
      return ClosureData::Cast(obj).parent_function();
    } else {
      return SignatureData::Cast(obj).parent_function();
    }
  }
  return Function::null();
}
6960
// Stores the enclosing function of this closure or signature function.
void Function::set_parent_function(const Function& value) const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsClosureFunction()) {
    ClosureData::Cast(obj).set_parent_function(value);
  } else {
    ASSERT(IsSignatureFunction());
    SignatureData::Cast(obj).set_parent_function(value);
  }
}
6971
6972// Enclosing outermost function of this local function.
6973FunctionPtr Function::GetOutermostFunction() const {
6974 FunctionPtr parent = parent_function();
6975 if (parent == Object::null()) {
6976 return raw();
6977 }
6978 Function& function = Function::Handle();
6979 do {
6980 function = parent;
6981 parent = function.parent_function();
6982 } while (parent != Object::null());
6983 return function.raw();
6984}
6985
6986bool Function::HasGenericParent() const {
6987 if (IsImplicitClosureFunction()) {
6988 // The parent function of an implicit closure function is not the enclosing
6989 // function we are asking about here.
6990 return false;
6991 }
6992 Function& parent = Function::Handle(parent_function());
6993 while (!parent.IsNull()) {
6994 if (parent.IsGeneric()) {
6995 return true;
6996 }
6997 parent = parent.parent_function();
6998 }
6999 return false;
7000}
7001
// Returns the cached implicit closure function of this function, or null
// if none exists (or this function kind cannot have one). For native
// functions the closure is stored in slot 1 of the data_ array; see the
// field layout comment above set_data().
FunctionPtr Function::implicit_closure_function() const {
  if (IsClosureFunction() || IsSignatureFunction() || IsFactory() ||
      IsDispatcherOrImplicitAccessor() || IsFieldInitializer()) {
    return Function::null();
  }
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
  if (obj.IsNull() || obj.IsScript()) {
    return Function::null();
  }
  if (obj.IsFunction()) {
    return Function::Cast(obj).raw();
  }
  ASSERT(is_native());
  ASSERT(obj.IsArray());
  const Object& res = Object::Handle(Array::Cast(obj).At(1));
  return res.IsNull() ? Function::null() : Function::Cast(res).raw();
}
7020
// Caches (or clears, when 'value' is null) the implicit closure function.
// For native functions it goes into slot 1 of the data_ array; otherwise
// it is stored directly, unless an array is already present because the
// function may later be marked native.
void Function::set_implicit_closure_function(const Function& value) const {
  ASSERT(!IsClosureFunction() && !IsSignatureFunction());
  const Object& old_data = Object::Handle(raw_ptr()->data_);
  if (is_native()) {
    ASSERT(old_data.IsArray());
    ASSERT((Array::Cast(old_data).At(1) == Object::null()) || value.IsNull());
    Array::Cast(old_data).SetAt(1, value);
  } else {
    // Maybe this function will turn into a native later on :-/
    if (old_data.IsArray()) {
      ASSERT((Array::Cast(old_data).At(1) == Object::null()) || value.IsNull());
      Array::Cast(old_data).SetAt(1, value);
    } else {
      ASSERT(old_data.IsNull() || value.IsNull());
      set_data(value);
    }
  }
}
7039
// Returns the cached signature type stored in this function's data object
// (SignatureData, ClosureData, or FfiTrampolineData, depending on kind);
// may be null if not created yet.
TypePtr Function::ExistingSignatureType() const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsSignatureFunction()) {
    return SignatureData::Cast(obj).signature_type();
  } else if (IsClosureFunction()) {
    return ClosureData::Cast(obj).signature_type();
  } else {
    ASSERT(IsFfiTrampoline());
    return FfiTrampolineData::Cast(obj).signature_type();
  }
}
7052
// Stores the C signature function of this FFI trampoline.
void Function::SetFfiCSignature(const Function& sig) const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  FfiTrampolineData::Cast(obj).set_c_signature(sig);
}
7059
// Returns the C signature function of this FFI trampoline.
FunctionPtr Function::FfiCSignature() const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return FfiTrampolineData::Cast(obj).c_signature();
}
7066
7067bool Function::FfiCSignatureContainsHandles() const {
7068 ASSERT(IsFfiTrampoline());
7069 const Function& c_signature = Function::Handle(FfiCSignature());
7070 const intptr_t num_params = c_signature.num_fixed_parameters();
7071 for (intptr_t i = 0; i < num_params; i++) {
7072 const bool is_handle =
7073 AbstractType::Handle(c_signature.ParameterTypeAt(i)).type_class_id() ==
7074 kFfiHandleCid;
7075 if (is_handle) {
7076 return true;
7077 }
7078 }
7079 return AbstractType::Handle(c_signature.result_type()).type_class_id() ==
7080 kFfiHandleCid;
7081}
7082
// Returns the callback id of this FFI trampoline.
int32_t Function::FfiCallbackId() const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return FfiTrampolineData::Cast(obj).callback_id();
}
7089
// Stores the callback id of this FFI trampoline.
void Function::SetFfiCallbackId(int32_t value) const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  FfiTrampolineData::Cast(obj).set_callback_id(value);
}
7096
// Returns the Dart target function of this FFI callback trampoline.
FunctionPtr Function::FfiCallbackTarget() const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return FfiTrampolineData::Cast(obj).callback_target();
}
7103
// Stores the Dart target function of this FFI callback trampoline.
void Function::SetFfiCallbackTarget(const Function& target) const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  FfiTrampolineData::Cast(obj).set_callback_target(target);
}
7110
// Returns the value this FFI callback returns when an exception escapes.
InstancePtr Function::FfiCallbackExceptionalReturn() const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return FfiTrampolineData::Cast(obj).callback_exceptional_return();
}
7117
// Stores the value this FFI callback returns when an exception escapes.
void Function::SetFfiCallbackExceptionalReturn(const Instance& value) const {
  ASSERT(IsFfiTrampoline());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value);
}
7124
// Returns the function type of this function with the requested
// nullability, constructing and caching a (still unfinalized) Type on
// first use. See the long comment inside for the typedef/closure-class
// scoping rules.
TypePtr Function::SignatureType(Nullability nullability) const {
  Type& type = Type::Handle(ExistingSignatureType());
  if (type.IsNull()) {
    // The function type of this function is not yet cached and needs to be
    // constructed and cached here.
    // A function type is type parameterized in the same way as the owner class
    // of its non-static signature function.
    // It is not type parameterized if its signature function is static, or if
    // none of its result type or formal parameter types are type parameterized.
    // Unless the function type is a generic typedef, the type arguments of the
    // function type are not explicitly stored in the function type as a vector
    // of type arguments.
    // The type class of a non-typedef function type is always the non-generic
    // _Closure class, whether the type is generic or not.
    // The type class of a typedef function type is always the typedef class,
    // which may be generic, in which case the type stores type arguments.
    // With the introduction of generic functions, we may reach here before the
    // function type parameters have been resolved. Therefore, we cannot yet
    // check whether the function type has an instantiated signature.
    // We can do it only when the signature has been resolved.
    // We only set the type class of the function type to the typedef class
    // if the signature of the function type is the signature of the typedef.
    // Note that a function type can have a typedef class as owner without
    // representing the typedef, as in the following example:
    // typedef F(f(int x)); where the type of f is a function type with F as
    // owner, without representing the function type of F.
    Class& scope_class = Class::Handle(Owner());
    if (!scope_class.IsTypedefClass() ||
        (scope_class.signature_function() != raw())) {
      scope_class = Isolate::Current()->object_store()->closure_class();
    }
    const TypeArguments& signature_type_arguments =
        TypeArguments::Handle(scope_class.type_parameters());
    // Return the still unfinalized signature type.
    type = Type::New(scope_class, signature_type_arguments, token_pos(),
                     nullability);
    type.set_signature(*this);
    SetSignatureType(type);
  }
  return type.ToNullability(nullability, Heap::kOld);
}
7166
// Caches 'value' as the signature type in this function's data object
// (SignatureData, ClosureData, or FfiTrampolineData, depending on kind).
void Function::SetSignatureType(const Type& value) const {
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  if (IsSignatureFunction()) {
    SignatureData::Cast(obj).set_signature_type(value);
    ASSERT(!value.IsCanonical() || (value.signature() == this->raw()));
  } else if (IsClosureFunction()) {
    ClosureData::Cast(obj).set_signature_type(value);
  } else {
    ASSERT(IsFfiTrampoline());
    FfiTrampolineData::Cast(obj).set_signature_type(value);
  }
}
7180
7181bool Function::IsRedirectingFactory() const {
7182 if (!IsFactory() || !is_redirecting()) {
7183 return false;
7184 }
7185 ASSERT(!IsClosureFunction()); // A factory cannot also be a closure.
7186 return true;
7187}
7188
// Returns the target type of this redirecting factory (from its
// RedirectionData).
TypePtr Function::RedirectionType() const {
  ASSERT(IsRedirectingFactory());
  ASSERT(!is_native());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).type();
}
7196
// Returns a static string naming the given function kind.
const char* Function::KindToCString(FunctionLayout::Kind kind) {
  return FunctionLayout::KindToCString(kind);
}
7200
// Stores the redirection target type of this factory, allocating the
// RedirectionData on first use.
void Function::SetRedirectionType(const Type& type) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_type(type);
}
7210
// Returns the identifier of the constructor this factory redirects to.
StringPtr Function::RedirectionIdentifier() const {
  ASSERT(IsRedirectingFactory());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).identifier();
}
7217
// Stores the identifier of the redirection target constructor, allocating
// the RedirectionData on first use.
void Function::SetRedirectionIdentifier(const String& identifier) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_identifier(identifier);
}
7227
// Returns the resolved target function of this redirecting factory.
FunctionPtr Function::RedirectionTarget() const {
  ASSERT(IsRedirectingFactory());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(!obj.IsNull());
  return RedirectionData::Cast(obj).target();
}
7234
// Stores the resolved redirection target function, allocating the
// RedirectionData on first use.
void Function::SetRedirectionTarget(const Function& target) const {
  ASSERT(IsFactory());
  Object& obj = Object::Handle(raw_ptr()->data_);
  if (obj.IsNull()) {
    obj = RedirectionData::New();
    set_data(obj);
  }
  RedirectionData::Cast(obj).set_target(target);
}
7244
// Returns the target function of this dynamic invocation forwarder
// (slot 0 of its checks array; see the data_ layout comment below).
FunctionPtr Function::ForwardingTarget() const {
  ASSERT(kind() == FunctionLayout::kDynamicInvocationForwarder);
  Array& checks = Array::Handle();
  checks ^= raw_ptr()->data_;
  return Function::RawCast(checks.At(0));
}
7251
// Stores the checks array of this dynamic invocation forwarder; slot 0
// must hold the target function.
void Function::SetForwardingChecks(const Array& checks) const {
  ASSERT(kind() == FunctionLayout::kDynamicInvocationForwarder);
  ASSERT(checks.Length() >= 1);
  ASSERT(Object::Handle(checks.At(0)).IsFunction());
  set_data(checks);
}
7258
7259// This field is heavily overloaded:
7260// eval function: Script expression source
7261// kernel eval function: Array[0] = Script
7262// Array[1] = Kernel data
7263// Array[2] = Kernel offset of enclosing library
7264// signature function: SignatureData
7265// method extractor: Function extracted closure function
7266// implicit getter: Field
7267// implicit setter: Field
// impl. static final getter: Field
7269// field initializer: Field
7270// noSuchMethod dispatcher: Array arguments descriptor
7271// invoke-field dispatcher: Array arguments descriptor
7272// redirecting constructor: RedirectionData
7273// closure function: ClosureData
7274// irregexp function: Array[0] = RegExp
7275// Array[1] = Smi string specialization cid
7276// native function: Array[0] = String native name
7277// Array[1] = Function implicit closure function
7278// regular function: Function for implicit closure function
7279// ffi trampoline function: FfiTrampolineData (Dart->C)
7280// dyn inv forwarder: Array[0] = Function target
7281// Array[1] = TypeArguments default type args
7282// Array[i] = ParameterTypeCheck
// Stores 'value' in the heavily overloaded data_ field; see the layout
// comment above for what each function kind keeps here.
void Function::set_data(const Object& value) const {
  StorePointer(&raw_ptr()->data_, value.raw());
}
7286
7287bool Function::IsInFactoryScope() const {
7288 if (!IsLocalFunction()) {
7289 return IsFactory();
7290 }
7291 Function& outer_function = Function::Handle(parent_function());
7292 while (outer_function.IsLocalFunction()) {
7293 outer_function = outer_function.parent_function();
7294 }
7295 return outer_function.IsFactory();
7296}
7297
// Stores the name of this function; must be a canonical Symbol.
void Function::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
}
7302
// Stores the owner of this function; only signature functions may have a
// null owner.
void Function::set_owner(const Object& value) const {
  ASSERT(!value.IsNull() || IsSignatureFunction());
  StorePointer(&raw_ptr()->owner_, value.raw());
}
7307
// Returns the RegExp of this irregexp function (slot 0 of its data pair).
RegExpPtr Function::regexp() const {
  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return RegExp::RawCast(pair.At(0));
}
7313
7314class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
// Remaining bits of that Smi: the class id of the string type this
// irregexp function is specialized for (see SetRegExpData below).
class StringSpecializationCid
    : public BitField<intptr_t, intptr_t, 1, ObjectLayout::kClassIdTagSize> {};
7317
// Returns the string class id this irregexp function is specialized for,
// decoded from slot 1 of its data pair.
intptr_t Function::string_specialization_cid() const {
  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return StringSpecializationCid::decode(Smi::Value(Smi::RawCast(pair.At(1))));
}
7323
// Returns whether this irregexp function's specialization is sticky,
// decoded from slot 1 of its data pair.
bool Function::is_sticky_specialization() const {
  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
  const Array& pair = Array::Cast(Object::Handle(raw_ptr()->data_));
  return StickySpecialization::decode(Smi::Value(Smi::RawCast(pair.At(1))));
}
7329
// Initializes the data pair of an irregexp function: slot 0 holds the
// RegExp, slot 1 a Smi packing the sticky flag and the string
// specialization cid. May only be called once (data_ must still be null).
void Function::SetRegExpData(const RegExp& regexp,
                             intptr_t string_specialization_cid,
                             bool sticky) const {
  ASSERT(kind() == FunctionLayout::kIrregexpFunction);
  ASSERT(IsStringClassId(string_specialization_cid));
  ASSERT(raw_ptr()->data_ == Object::null());
  const Array& pair = Array::Handle(Array::New(2, Heap::kOld));
  pair.SetAt(0, regexp);
  pair.SetAt(1, Smi::Handle(Smi::New(StickySpecialization::encode(sticky) |
                                     StringSpecializationCid::encode(
                                         string_specialization_cid))));
  set_data(pair);
}
7343
// Returns the native name of this native function (slot 0 of its data
// pair; see the data_ layout comment above set_data()).
StringPtr Function::native_name() const {
  ASSERT(is_native());
  const Object& obj = Object::Handle(raw_ptr()->data_);
  ASSERT(obj.IsArray());
  return String::RawCast(Array::Cast(obj).At(0));
}
7350
// Stores the native name, converting the data_ field into the
// [name, implicit closure function] pair layout used by native functions.
// Any previously cached implicit closure function is preserved in slot 1.
void Function::set_native_name(const String& value) const {
  Zone* zone = Thread::Current()->zone();
  ASSERT(is_native());

  // Due to the fact that kernel needs to read in the constant table before the
  // annotation data is available, we don't know at function creation time
  // whether the function is a native or not.
  //
  // Reading the constant table can cause a static function to get an implicit
  // closure function.
  //
  // We therefore handle both cases.
  const Object& old_data = Object::Handle(zone, raw_ptr()->data_);
  ASSERT(old_data.IsNull() ||
         (old_data.IsFunction() &&
          Function::Handle(zone, Function::RawCast(old_data.raw()))
              .IsImplicitClosureFunction()));

  const Array& pair = Array::Handle(zone, Array::New(2, Heap::kOld));
  pair.SetAt(0, value);
  pair.SetAt(1, old_data);  // will be the implicit closure function if needed.
  set_data(pair);
}
7374
// Stores the (non-null) result type of this function.
void Function::set_result_type(const AbstractType& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->result_type_, value.raw());
}
7379
// Returns the declared type of the parameter at 'index'.
AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const {
  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types_);
  return AbstractType::RawCast(parameter_types.At(index));
}
7384
// Stores the (non-null) type of the parameter at 'index'.
void Function::SetParameterTypeAt(intptr_t index,
                                  const AbstractType& value) const {
  ASSERT(!value.IsNull());
  // Method extractor parameters are shared and are in the VM heap.
  ASSERT(kind() != FunctionLayout::kMethodExtractor);
  const Array& parameter_types = Array::Handle(raw_ptr()->parameter_types_);
  parameter_types.SetAt(index, value);
}
7393
// Stores the parameter types array of this function (GC-aware store).
void Function::set_parameter_types(const Array& value) const {
  StorePointer(&raw_ptr()->parameter_types_, value.raw());
}
7397
// Returns the name of the parameter at 'index'.
StringPtr Function::ParameterNameAt(intptr_t index) const {
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  return String::RawCast(parameter_names.At(index));
}
7402
// Stores the name of the parameter at 'index'; must be a Symbol.
void Function::SetParameterNameAt(intptr_t index, const String& value) const {
  ASSERT(!value.IsNull() && value.IsSymbol());
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  parameter_names.SetAt(index, value);
}
7408
// Stores the parameter names array (names plus packed "required" flag
// slots; see NameArrayLengthIncludingFlags below).
void Function::set_parameter_names(const Array& value) const {
  StorePointer(&raw_ptr()->parameter_names_, value.raw());
}
7412
7413intptr_t Function::NameArrayLengthIncludingFlags(intptr_t num_parameters) {
7414 return num_parameters +
7415 (num_parameters + compiler::target::kNumParameterFlagsPerElement - 1) /
7416 compiler::target::kNumParameterFlagsPerElement;
7417}
7418
// Maps the named parameter at 'index' to the slot in the parameter-names
// array holding its packed "required" flag, and writes the bit mask for it
// into '*flag_mask'. 'index' must refer to a named (non-fixed) parameter.
intptr_t Function::GetRequiredFlagIndex(intptr_t index,
                                        intptr_t* flag_mask) const {
  ASSERT(index >= num_fixed_parameters());
  index -= num_fixed_parameters();
  *flag_mask = 1 << (static_cast<uintptr_t>(index) %
                     compiler::target::kNumParameterFlagsPerElement);
  return NumParameters() +
         index / compiler::target::kNumParameterFlagsPerElement;
}
7428
// Returns whether the named parameter at 'index' is marked required.
// Positional parameters, and parameters whose flag slot was truncated away
// or left null, report false.
bool Function::IsRequiredAt(intptr_t index) const {
  if (index < num_fixed_parameters() + NumOptionalPositionalParameters()) {
    return false;
  }
  intptr_t flag_mask;
  const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  if (flag_index >= parameter_names.Length()) {
    // Trailing all-clear flag slots are truncated off (see
    // TruncateUnusedParameterFlags), so a missing slot means not required.
    return false;
  }
  ObjectPtr element = parameter_names.At(flag_index);
  if (element == Object::null()) {
    return false;
  }
  const intptr_t flag = Smi::Value(Smi::RawCast(element));
  return (flag & flag_mask) != 0;
}
7446
// Marks the named parameter at 'index' as required by setting its bit in
// the packed flag slot, initializing the slot if it is still null.
void Function::SetIsRequiredAt(intptr_t index) const {
  intptr_t flag_mask;
  const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  ASSERT(flag_index < parameter_names.Length());
  intptr_t flag;
  ObjectPtr element = parameter_names.At(flag_index);
  if (element == Object::null()) {
    flag = 0;
  } else {
    flag = Smi::Value(Smi::RawCast(element));
  }
  parameter_names.SetAt(flag_index, Object::Handle(Smi::New(flag | flag_mask)));
}
7461
// Shrinks the parameter-names array by dropping trailing flag slots that
// are still null (i.e. no required bit was ever set in them).
void Function::TruncateUnusedParameterFlags() const {
  // Truncate the parameter names array to remove unused flags from the end.
  const Array& parameter_names = Array::Handle(raw_ptr()->parameter_names_);
  const intptr_t num_params = NumParameters();
  intptr_t last_required_flag = parameter_names.Length() - 1;
  for (; last_required_flag >= num_params; --last_required_flag) {
    if (parameter_names.At(last_required_flag) != Object::null()) {
      break;
    }
  }
  parameter_names.Truncate(last_required_flag + 1);
}
7474
7475void Function::set_type_parameters(const TypeArguments& value) const {
7476 StorePointer(&raw_ptr()->type_parameters_, value.raw());
7477}
7478
// Returns the number of type parameters declared directly on this function
// (not including type parameters of enclosing functions).
intptr_t Function::NumTypeParameters(Thread* thread) const {
  // A null type parameter vector represents a non-generic function.
  if (type_parameters() == TypeArguments::null()) {
    return 0;
  }
  // Use a reusable handle to avoid allocating a fresh handle on this path.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  type_params = type_parameters();
  // We require null to represent a non-generic function.
  ASSERT(type_params.Length() != 0);
  return type_params.Length();
}
7490
7491intptr_t Function::NumParentTypeParameters() const {
7492 if (IsImplicitClosureFunction()) {
7493 return 0;
7494 }
7495 Thread* thread = Thread::Current();
7496 Function& parent = Function::Handle(parent_function());
7497 intptr_t num_parent_type_params = 0;
7498 while (!parent.IsNull()) {
7499 num_parent_type_params += parent.NumTypeParameters(thread);
7500 if (parent.IsImplicitClosureFunction()) break;
7501 parent = parent.parent_function();
7502 }
7503 return num_parent_type_params;
7504}
7505
7506void Function::PrintSignatureTypes() const {
7507 Function& sig_fun = Function::Handle(raw());
7508 Type& sig_type = Type::Handle();
7509 while (!sig_fun.IsNull()) {
7510 sig_type = sig_fun.SignatureType();
7511 THR_Print("%s%s\n",
7512 sig_fun.IsImplicitClosureFunction() ? "implicit closure: " : "",
7513 sig_type.ToCString());
7514 sig_fun = sig_fun.parent_function();
7515 }
7516}
7517
// Looks up a type parameter by name, starting with this function and walking
// up the chain of parent functions. Returns the matching type parameter, or
// null if none is in scope. If 'function_level' is non-NULL, it is
// decremented once for each parent function crossed during the search.
TypeParameterPtr Function::LookupTypeParameter(const String& type_name,
                                               intptr_t* function_level) const {
  ASSERT(!type_name.IsNull());
  Thread* thread = Thread::Current();
  // Reusable handles avoid per-call handle allocation on this lookup path.
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  REUSABLE_TYPE_PARAMETER_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  TypeArguments& type_params = thread->TypeArgumentsHandle();
  TypeParameter& type_param = thread->TypeParameterHandle();
  String& type_param_name = thread->StringHandle();
  Function& function = thread->FunctionHandle();

  function = this->raw();
  while (!function.IsNull()) {
    // Search the current function's own type parameters by name.
    type_params = function.type_parameters();
    if (!type_params.IsNull()) {
      const intptr_t num_type_params = type_params.Length();
      for (intptr_t i = 0; i < num_type_params; i++) {
        type_param ^= type_params.TypeAt(i);
        type_param_name = type_param.name();
        if (type_param_name.Equals(type_name)) {
          return type_param.raw();
        }
      }
    }
    if (function.IsImplicitClosureFunction()) {
      // The parent function is not the enclosing function, but the closurized
      // function with identical type parameters.
      break;
    }
    function = function.parent_function();
    if (function_level != NULL) {
      (*function_level)--;
    }
  }
  return TypeParameter::null();
}
7556
7557void Function::set_kind(FunctionLayout::Kind value) const {
7558 set_kind_tag(KindBits::update(value, raw_ptr()->kind_tag_));
7559}
7560
7561void Function::set_modifier(FunctionLayout::AsyncModifier value) const {
7562 set_kind_tag(ModifierBits::update(value, raw_ptr()->kind_tag_));
7563}
7564
7565void Function::set_recognized_kind(MethodRecognizer::Kind value) const {
7566 // Prevent multiple settings of kind.
7567 ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
7568 set_kind_tag(RecognizedBits::update(value, raw_ptr()->kind_tag_));
7569}
7570
// Sets the source token position of this function.
void Function::set_token_pos(TokenPosition token_pos) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Token positions are not stored in the precompiled runtime.
  UNREACHABLE();
#else
  // Classifying (synthetic) token positions are only valid for method
  // extractors.
  ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
#endif
}
7579
7580void Function::set_kind_tag(uint32_t value) const {
7581 StoreNonPointer(&raw_ptr()->kind_tag_, static_cast<uint32_t>(value));
7582}
7583
7584void Function::set_packed_fields(uint32_t packed_fields) const {
7585 StoreNonPointer(&raw_ptr()->packed_fields_, packed_fields);
7586}
7587
7588void Function::set_num_fixed_parameters(intptr_t value) const {
7589 ASSERT(value >= 0);
7590 ASSERT(Utils::IsUint(FunctionLayout::kMaxFixedParametersBits, value));
7591 const uint32_t* original = &raw_ptr()->packed_fields_;
7592 StoreNonPointer(original, FunctionLayout::PackedNumFixedParameters::update(
7593 value, *original));
7594}
7595
7596void Function::SetNumOptionalParameters(intptr_t value,
7597 bool are_optional_positional) const {
7598 ASSERT(Utils::IsUint(FunctionLayout::kMaxOptionalParametersBits, value));
7599 uint32_t packed_fields = raw_ptr()->packed_fields_;
7600 packed_fields = FunctionLayout::PackedHasNamedOptionalParameters::update(
7601 !are_optional_positional, packed_fields);
7602 packed_fields =
7603 FunctionLayout::PackedNumOptionalParameters::update(value, packed_fields);
7604 set_packed_fields(packed_fields);
7605}
7606
// Returns whether this function is a candidate for optimized compilation.
bool Function::IsOptimizable() const {
  if (FLAG_precompiled_mode) {
    // In AOT mode, every function goes through the optimizing pipeline.
    return true;
  }
  if (ForceOptimize()) return true;
  if (is_native()) {
    // Native methods don't need to be optimized.
    return false;
  }
  // Reject functions whose source span exceeds the huge-method cutoff.
  const intptr_t function_length = end_token_pos().Pos() - token_pos().Pos();
  if (is_optimizable() && (script() != Script::null()) &&
      (function_length < FLAG_huge_method_cutoff_in_tokens)) {
    // Additional check needed for implicit getters.
    return (unoptimized_code() == Object::null()) ||
           (Code::Handle(unoptimized_code()).Size() <
            FLAG_huge_method_cutoff_in_code_size);
  }
  return false;
}
7626
7627void Function::SetIsOptimizable(bool value) const {
7628 ASSERT(!is_native());
7629 set_is_optimizable(value);
7630 if (!value) {
7631 set_is_inlinable(false);
7632 set_usage_counter(INT32_MIN);
7633 }
7634}
7635
#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns whether the compiler may inline this function into a caller.
bool Function::CanBeInlined() const {
  // Our force-optimized functions cannot deoptimize to an unoptimized frame.
  // If the instructions of the force-optimized function body get moved via
  // code motion, we might attempt do deoptimize a frame where the force-
  // optimized function has only partially finished. Since force-optimized
  // functions cannot deoptimize to unoptimized frames we prevent them from
  // being inlined (for now).
  if (ForceOptimize()) {
    if (IsFfiTrampoline()) {
      // The CallSiteInliner::InlineCall asserts in PrepareGraphs that
      // GraphEntryInstr::SuccessorCount() == 1, but FFI trampoline has two
      // entries (a normal and a catch entry).
      return false;
    }
    // AOT has no deoptimization, so inlining force-optimized code is safe.
    return CompilerState::Current().is_aot();
  }

#if !defined(PRODUCT)
  // Inlining a function with a breakpoint would hide it from the debugger.
  Thread* thread = Thread::Current();
  if (thread->isolate()->debugger()->HasBreakpoint(*this, thread->zone())) {
    return false;
  }
#endif  // !defined(PRODUCT)

  return is_inlinable() && !is_external() && !is_generated_body();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
7664
7665intptr_t Function::NumParameters() const {
7666 return num_fixed_parameters() + NumOptionalParameters();
7667}
7668
7669intptr_t Function::NumImplicitParameters() const {
7670 const FunctionLayout::Kind k = kind();
7671 if (k == FunctionLayout::kConstructor) {
7672 // Type arguments for factory; instance for generative constructor.
7673 return 1;
7674 }
7675 if ((k == FunctionLayout::kClosureFunction) ||
7676 (k == FunctionLayout::kImplicitClosureFunction) ||
7677 (k == FunctionLayout::kSignatureFunction) ||
7678 (k == FunctionLayout::kFfiTrampoline)) {
7679 return 1; // Closure object.
7680 }
7681 if (!is_static()) {
7682 // Closure functions defined inside instance (i.e. non-static) functions are
7683 // marked as non-static, but they do not have a receiver.
7684 // Closures are handled above.
7685 ASSERT((k != FunctionLayout::kClosureFunction) &&
7686 (k != FunctionLayout::kImplicitClosureFunction) &&
7687 (k != FunctionLayout::kSignatureFunction));
7688 return 1; // Receiver.
7689 }
7690 return 0; // No implicit parameters.
7691}
7692
// Checks that the given counts of type arguments, total arguments, and named
// arguments are compatible with this function's signature. On failure,
// returns false and, if 'error_message' is non-NULL, fills it with a
// human-readable description of the mismatch.
bool Function::AreValidArgumentCounts(intptr_t num_type_arguments,
                                      intptr_t num_arguments,
                                      intptr_t num_named_arguments,
                                      String* error_message) const {
  // A type argument count of zero means "none provided", which is always
  // acceptable; otherwise it must match the declared count exactly.
  if ((num_type_arguments != 0) &&
      (num_type_arguments != NumTypeParameters())) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " type arguments passed, but %" Pd " expected",
                     num_type_arguments, NumTypeParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many type arguments.
  }
  if (num_named_arguments > NumOptionalNamedParameters()) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " named passed, at most %" Pd " expected",
                     num_named_arguments, NumOptionalNamedParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many named arguments.
  }
  // Positional arguments must fit between the fixed-parameter count (lower
  // bound) and fixed plus optional positional (upper bound).
  const intptr_t num_pos_args = num_arguments - num_named_arguments;
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params;
  if (num_pos_args > num_pos_params) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at most " : "",
                     num_pos_params - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many fixed and/or positional arguments.
  }
  if (num_pos_args < num_fixed_parameters()) {
    if (error_message != NULL) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at least " : "",
                     num_fixed_parameters() - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too few fixed and/or positional arguments.
  }
  return true;
}
7765
7766bool Function::AreValidArguments(intptr_t num_type_arguments,
7767 intptr_t num_arguments,
7768 const Array& argument_names,
7769 String* error_message) const {
7770 const Array& args_desc_array = Array::Handle(ArgumentsDescriptor::NewBoxed(
7771 num_type_arguments, num_arguments, argument_names, Heap::kNew));
7772 ArgumentsDescriptor args_desc(args_desc_array);
7773 return AreValidArguments(args_desc, error_message);
7774}
7775
// Checks whether the arguments described by 'args_desc' can legally be passed
// to this function: the counts must be valid, every named argument must match
// a declared named parameter and, under null safety, every required named
// parameter must be supplied. On failure, returns false and, if
// 'error_message' is non-null, fills it with a description of the problem.
bool Function::AreValidArguments(const ArgumentsDescriptor& args_desc,
                                 String* error_message) const {
  const intptr_t num_type_arguments = args_desc.TypeArgsLen();
  const intptr_t num_arguments = args_desc.Count();
  const intptr_t num_named_arguments = args_desc.NamedCount();

  if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
                              num_named_arguments, error_message)) {
    return false;
  }
  // Verify that all argument names are valid parameter names.
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);
  const intptr_t num_positional_args = num_arguments - num_named_arguments;
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_named_arguments; i++) {
    argument_name = args_desc.NameAt(i);
    ASSERT(argument_name.IsSymbol());
    bool found = false;
    // Named parameters are stored after all positional parameters.
    for (intptr_t j = num_positional_args; j < num_parameters; j++) {
      parameter_name = ParameterNameAt(j);
      ASSERT(parameter_name.IsSymbol());
      if (argument_name.Equals(parameter_name)) {
        found = true;
        break;
      }
    }
    if (!found) {
      if (error_message != nullptr) {
        const intptr_t kMessageBufferSize = 64;
        char message_buffer[kMessageBufferSize];
        Utils::SNPrint(message_buffer, kMessageBufferSize,
                       "no optional formal parameter named '%s'",
                       argument_name.ToCString());
        *error_message = String::New(message_buffer);
      }
      return false;
    }
  }
  if (isolate->null_safety()) {
    // Verify that all required named parameters are filled.
    for (intptr_t j = num_parameters - NumOptionalNamedParameters();
         j < num_parameters; j++) {
      if (IsRequiredAt(j)) {
        parameter_name = ParameterNameAt(j);
        ASSERT(parameter_name.IsSymbol());
        bool found = false;
        for (intptr_t i = 0; i < num_named_arguments; i++) {
          argument_name = args_desc.NameAt(i);
          ASSERT(argument_name.IsSymbol());
          if (argument_name.Equals(parameter_name)) {
            found = true;
            break;
          }
        }
        if (!found) {
          if (error_message != nullptr) {
            const intptr_t kMessageBufferSize = 64;
            char message_buffer[kMessageBufferSize];
            Utils::SNPrint(message_buffer, kMessageBufferSize,
                           "missing required named parameter '%s'",
                           parameter_name.ToCString());
            *error_message = String::New(message_buffer);
          }
          return false;
        }
      }
    }
  }
  return true;
}
7850
// Checks each supplied function type argument is a subtype of the corresponding
// bound. Also takes the number of type arguments to skip over because they
// belong to parent functions and are not included in the type parameters.
// Returns null if all checks succeed, otherwise returns a non-null Error for
// one of the failures.
static ObjectPtr TypeArgumentsAreBoundSubtypes(
    Zone* zone,
    const TokenPosition& token_pos,
    const TypeArguments& type_parameters,
    intptr_t num_parent_type_args,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) {
  ASSERT(!type_parameters.IsNull());
  ASSERT(!function_type_arguments.IsNull());
  const intptr_t kNumTypeArgs = function_type_arguments.Length();
  ASSERT_EQUAL(num_parent_type_args + type_parameters.Length(), kNumTypeArgs);

  // Don't bother allocating handles, there's nothing to check.
  if (kNumTypeArgs - num_parent_type_args == 0) return Error::null();

  auto& type = AbstractType::Handle(zone);
  auto& bound = AbstractType::Handle(zone);
  auto& name = String::Handle(zone);
  // Only the local (non-parent) type parameters are checked here.
  for (intptr_t i = num_parent_type_args; i < kNumTypeArgs; i++) {
    type = type_parameters.TypeAt(i - num_parent_type_args);
    ASSERT(type.IsTypeParameter());
    const auto& parameter = TypeParameter::Cast(type);
    bound = parameter.bound();
    name = parameter.name();
    // Only perform non-covariant checks where the bound is not the top type.
    if (parameter.IsGenericCovariantImpl() || bound.IsTopTypeForSubtyping()) {
      continue;
    }
    if (!AbstractType::InstantiateAndTestSubtype(&type, &bound,
                                                 instantiator_type_arguments,
                                                 function_type_arguments)) {
      return Error::RawCast(ThrowTypeError(token_pos, type, bound, name));
    }
  }

  return Error::null();
}
7893
// Returns a TypeArguments object where, for each type parameter local to this
// function, the entry in the TypeArguments is an instantiated version of its
// bound. In the instantiated bound, any local function type parameter
// references are replaced with the corresponding bound if that bound can be
// fully instantiated without local function type parameters, otherwise dynamic.
static TypeArgumentsPtr InstantiateTypeParametersToBounds(
    Zone* zone,
    const TokenPosition& token_pos,
    const TypeArguments& type_parameters,
    const TypeArguments& instantiator_type_args,
    intptr_t num_parent_type_args,
    const TypeArguments& parent_type_args) {
  ASSERT(!type_parameters.IsNull());
  const intptr_t kNumCurrentTypeArgs = type_parameters.Length();
  const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + num_parent_type_args;
  auto& function_type_args = TypeArguments::Handle(zone);

  // Tracks whether a second pass is needed (see below).
  bool all_bounds_instantiated = true;

  // Create a type argument vector large enough for the parents' and current
  // type arguments.
  function_type_args = TypeArguments::New(kNumTypeArgs);
  auto& type = AbstractType::Handle(zone);
  auto& bound = AbstractType::Handle(zone);
  // First copy over the parent type args (or the dynamic type if null).
  for (intptr_t i = 0; i < num_parent_type_args; i++) {
    type = parent_type_args.IsNull() ? Type::DynamicType()
                                     : parent_type_args.TypeAt(i);
    function_type_args.SetTypeAt(i, type);
  }
  // Now try fully instantiating the bounds of each parameter using only
  // the instantiator and parent function type arguments. If possible, keep the
  // instantiated bound as the entry. Otherwise, just set that entry to dynamic.
  for (intptr_t i = num_parent_type_args; i < kNumTypeArgs; i++) {
    type = type_parameters.TypeAt(i - num_parent_type_args);
    const auto& param = TypeParameter::Cast(type);
    bound = param.bound();
    // Only instantiate up to the parent type parameters.
    if (!bound.IsInstantiated(kAny, num_parent_type_args)) {
      bound = bound.InstantiateFrom(instantiator_type_args, function_type_args,
                                    num_parent_type_args, Heap::kNew);
    }
    if (!bound.IsInstantiated()) {
      // There are local type variables used in this bound.
      bound = Type::DynamicType();
      all_bounds_instantiated = false;
    }
    function_type_args.SetTypeAt(i, bound);
  }

  // If all the bounds were instantiated in the first pass, then there can't
  // be any self or mutual recursion, so skip the bounds subtype check.
  if (all_bounds_instantiated) return function_type_args.raw();

  // Do another pass, using the set of TypeArguments we just created. If a given
  // bound was instantiated in the last pass, just copy it over. (We don't need
  // to iterate to a fixed point, since there should be no self or mutual
  // recursion in the bounds.)
  const auto& first_round =
      TypeArguments::Handle(zone, function_type_args.raw());
  function_type_args = TypeArguments::New(kNumTypeArgs);
  // Again, copy over the parent type arguments first.
  for (intptr_t i = 0; i < num_parent_type_args; i++) {
    type = first_round.TypeAt(i);
    function_type_args.SetTypeAt(i, type);
  }
  for (intptr_t i = num_parent_type_args; i < kNumTypeArgs; i++) {
    type = type_parameters.TypeAt(i - num_parent_type_args);
    const auto& param = TypeParameter::Cast(type);
    bound = first_round.TypeAt(i);
    // The dynamic type is never a bound, even when implicit, so it also marks
    // bounds that were not already fully instantiated.
    if (bound.raw() == Type::DynamicType()) {
      bound = param.bound();
      bound = bound.InstantiateFrom(instantiator_type_args, first_round,
                                    kAllFree, Heap::kNew);
    }
    function_type_args.SetTypeAt(i, bound);
  }

  return function_type_args.raw();
}
7976
// Retrieves the function type arguments, if any. This could be explicitly
// passed type from the arguments array, delayed type arguments in closures,
// or instantiated bounds for the type parameters if no other source for
// function type arguments are found.
static TypeArgumentsPtr RetrieveFunctionTypeArguments(
    Thread* thread,
    Zone* zone,
    const Function& function,
    const Instance& receiver,
    const TypeArguments& instantiator_type_args,
    const TypeArguments& type_params,
    const Array& args,
    const ArgumentsDescriptor& args_desc) {
  ASSERT(!function.IsNull());

  const intptr_t kNumCurrentTypeArgs = function.NumTypeParameters(thread);
  const intptr_t kNumParentTypeArgs = function.NumParentTypeParameters();
  const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs;
  // Non-generic functions don't receive type arguments.
  if (kNumTypeArgs == 0) return Object::empty_type_arguments().raw();
  // Closure functions require that the receiver be provided (and is a closure).
  ASSERT(!function.IsClosureFunction() || receiver.IsClosure());

  // Only closure functions should have possibly generic parents.
  ASSERT(function.IsClosureFunction() || kNumParentTypeArgs == 0);
  const auto& parent_type_args =
      function.IsClosureFunction()
          ? TypeArguments::Handle(
                zone, Closure::Cast(receiver).function_type_arguments())
          : Object::null_type_arguments();
  // We don't try to instantiate the parent type parameters to their bounds
  // if not provided or check any closed-over type arguments against the parent
  // type parameter bounds (since they have been type checked already).
  if (kNumCurrentTypeArgs == 0) return parent_type_args.raw();

  auto& function_type_args = TypeArguments::Handle(zone);
  // Source 1: delayed type arguments stored on the closure itself.
  if (function.IsClosureFunction()) {
    const auto& closure = Closure::Cast(receiver);
    function_type_args = closure.delayed_type_arguments();
    if (function_type_args.raw() == Object::empty_type_arguments().raw()) {
      // There are no delayed type arguments, so set back to null.
      function_type_args = TypeArguments::null();
    }
  }

  // Source 2: type arguments explicitly passed in the arguments array.
  if (function_type_args.IsNull() && args_desc.TypeArgsLen() > 0) {
    function_type_args ^= args.At(0);
  }

  if (function_type_args.IsNull()) {
    // We have no explicitly provided function type arguments, so generate
    // some by instantiating the parameters to bounds.
    return InstantiateTypeParametersToBounds(
        zone, function.token_pos(), type_params, instantiator_type_args,
        kNumParentTypeArgs, parent_type_args);
  }

  // Prepend the parent's type arguments to the local ones when needed.
  if (kNumParentTypeArgs > 0) {
    function_type_args = function_type_args.Prepend(
        zone, parent_type_args, kNumParentTypeArgs, kNumTypeArgs);
  }

  return function_type_args.raw();
}
8041
8042// Retrieves the instantiator type arguments, if any, from the receiver.
8043static TypeArgumentsPtr RetrieveInstantiatorTypeArguments(
8044 Zone* zone,
8045 const Function& function,
8046 const Instance& receiver) {
8047 if (function.IsClosureFunction()) {
8048 ASSERT(receiver.IsClosure());
8049 const auto& closure = Closure::Cast(receiver);
8050 return closure.instantiator_type_arguments();
8051 }
8052 if (!receiver.IsNull()) {
8053 const auto& cls = Class::Handle(zone, receiver.clazz());
8054 if (cls.NumTypeArguments() > 0) {
8055 return receiver.GetTypeArguments();
8056 }
8057 }
8058 return Object::empty_type_arguments().raw();
8059}
8060
8061ObjectPtr Function::DoArgumentTypesMatch(
8062 const Array& args,
8063 const ArgumentsDescriptor& args_desc) const {
8064 Thread* thread = Thread::Current();
8065 Zone* zone = thread->zone();
8066
8067 auto& receiver = Instance::Handle(zone);
8068 if (IsClosureFunction() || HasThisParameter()) {
8069 receiver ^= args.At(args_desc.FirstArgIndex());
8070 }
8071 const auto& instantiator_type_arguments = TypeArguments::Handle(
8072 zone, RetrieveInstantiatorTypeArguments(zone, *this, receiver));
8073 return Function::DoArgumentTypesMatch(args, args_desc,
8074 instantiator_type_arguments);
8075}
8076
8077ObjectPtr Function::DoArgumentTypesMatch(
8078 const Array& args,
8079 const ArgumentsDescriptor& args_desc,
8080 const TypeArguments& instantiator_type_arguments) const {
8081 Thread* thread = Thread::Current();
8082 Zone* zone = thread->zone();
8083
8084 auto& receiver = Instance::Handle(zone);
8085 if (IsClosureFunction() || HasThisParameter()) {
8086 receiver ^= args.At(args_desc.FirstArgIndex());
8087 }
8088
8089 const auto& params = TypeArguments::Handle(zone, type_parameters());
8090 const auto& function_type_arguments = TypeArguments::Handle(
8091 zone, RetrieveFunctionTypeArguments(thread, zone, *this, receiver,
8092 instantiator_type_arguments, params,
8093 args, args_desc));
8094 return Function::DoArgumentTypesMatch(
8095 args, args_desc, instantiator_type_arguments, function_type_arguments);
8096}
8097
// Type checks the provided arguments against the function signature: first
// the function type arguments against the type parameter bounds, then each
// positional and named argument against its parameter type. Returns null on
// success or an Error (from ThrowTypeError) on the first failure.
ObjectPtr Function::DoArgumentTypesMatch(
    const Array& args,
    const ArgumentsDescriptor& args_desc,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments) const {
  // We need a concrete (possibly empty) type arguments vector, not the
  // implicitly filled with dynamic one.
  ASSERT(!function_type_arguments.IsNull());

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Perform any non-covariant bounds checks on the provided function type
  // arguments to make sure they are appropriate subtypes of the bounds.
  const intptr_t kNumLocalTypeArgs = NumTypeParameters(thread);
  if (kNumLocalTypeArgs > 0) {
    const intptr_t kNumParentTypeArgs = NumParentTypeParameters();
    ASSERT_EQUAL(kNumLocalTypeArgs + kNumParentTypeArgs,
                 function_type_arguments.Length());
    const auto& params = TypeArguments::Handle(zone, type_parameters());
    const auto& result = Object::Handle(
        zone, TypeArgumentsAreBoundSubtypes(
                  zone, token_pos(), params, kNumParentTypeArgs,
                  instantiator_type_arguments, function_type_arguments));
    if (result.IsError()) {
      return result.raw();
    }
  } else {
    ASSERT_EQUAL(NumParentTypeParameters(), function_type_arguments.Length());
  }

  AbstractType& type = AbstractType::Handle(zone);
  Instance& argument = Instance::Handle(zone);

  // Returns true if |argument| is assignable to |type| (handling null
  // separately, since null's assignability depends only on the type).
  auto check_argument = [](const Instance& argument, const AbstractType& type,
                           const TypeArguments& instantiator_type_args,
                           const TypeArguments& function_type_args) -> bool {
    // If the argument type is the top type, no need to check.
    if (type.IsTopTypeForSubtyping()) return true;
    if (argument.IsNull()) {
      return Instance::NullIsAssignableTo(type);
    }
    return argument.IsAssignableTo(type, instantiator_type_args,
                                   function_type_args);
  };

  // Check types of the provided arguments against the expected parameter types.
  const intptr_t arg_offset = args_desc.FirstArgIndex();
  // Only check explicit arguments.
  const intptr_t arg_start = arg_offset + NumImplicitParameters();
  const intptr_t num_positional_args = args_desc.PositionalCount();
  for (intptr_t arg_index = arg_start; arg_index < num_positional_args;
       ++arg_index) {
    argument ^= args.At(arg_index);
    // Adjust for type arguments when they're present.
    const intptr_t param_index = arg_index - arg_offset;
    type = ParameterTypeAt(param_index);

    if (!check_argument(argument, type, instantiator_type_arguments,
                        function_type_arguments)) {
      auto& name = String::Handle(zone, ParameterNameAt(param_index));
      return ThrowTypeError(token_pos(), argument, type, name);
    }
  }

  const intptr_t num_named_arguments = args_desc.NamedCount();
  if (num_named_arguments == 0) {
    return Error::null();
  }

  const int num_parameters = NumParameters();
  const int num_fixed_params = num_fixed_parameters();

  String& argument_name = String::Handle(zone);
  String& parameter_name = String::Handle(zone);

  // Check types of named arguments against expected parameter type.
  for (intptr_t named_index = 0; named_index < num_named_arguments;
       named_index++) {
    argument_name = args_desc.NameAt(named_index);
    ASSERT(argument_name.IsSymbol());
    argument ^= args.At(args_desc.PositionAt(named_index));

    // Try to find the named parameter that matches the provided argument.
    // Even when annotated with @required, named parameters are still stored
    // as if they were optional and so come after the fixed parameters.
    // Currently O(n^2) as there's no guarantee from either the CFE or the
    // VM that named parameters and named arguments are sorted in the same way.
    intptr_t param_index = num_fixed_params;
    for (; param_index < num_parameters; param_index++) {
      parameter_name = ParameterNameAt(param_index);
      ASSERT(parameter_name.IsSymbol());

      if (!parameter_name.Equals(argument_name)) continue;

      type = ParameterTypeAt(param_index);
      if (!check_argument(argument, type, instantiator_type_arguments,
                          function_type_arguments)) {
        auto& name = String::Handle(zone, ParameterNameAt(param_index));
        return ThrowTypeError(token_pos(), argument, type, name);
      }
      break;
    }
    // Only should fail if AreValidArguments returns a false positive.
    ASSERT(param_index < num_parameters);
  }
  return Error::null();
}
8206
// Helper that allocates a C string buffer in the zone, prints the fully
// qualified name of a function into it, and replaces ':' with '_' so that the
// constructed name is a valid C++ identifier for debugging purposes.
// Sets 'chars' to the allocated buffer and returns the number of characters
// written.
8211
// Selects how the library component of a qualified function name is rendered.
enum QualifiedFunctionLibKind {
  kQualifiedFunctionLibKindLibUrl,   // Use the library's URL.
  kQualifiedFunctionLibKindLibName   // Use the library's name.
};
8216
// Recursively builds "<lib><class>_<outer>_..._<function>" into a zone
// buffer. The innermost call (null parent) allocates the buffer sized by the
// accumulated 'reserve_len'; each unwinding level appends its own name.
static intptr_t ConstructFunctionFullyQualifiedCString(
    const Function& function,
    char** chars,
    intptr_t reserve_len,
    bool with_lib,
    QualifiedFunctionLibKind lib_kind) {
  Zone* zone = Thread::Current()->zone();
  const char* name = String::Handle(zone, function.name()).ToCString();
  // Outer functions (reserve_len != 0 on entry) are separated by '_'.
  const char* function_format = (reserve_len == 0) ? "%s" : "%s_";
  // SNPrint with a NULL buffer only measures the required length.
  reserve_len += Utils::SNPrint(NULL, 0, function_format, name);
  const Function& parent = Function::Handle(zone, function.parent_function());
  intptr_t written = 0;
  if (parent.IsNull()) {
    // Base case: prepend the (optional) library and the owning class name,
    // and allocate the buffer for the whole qualified name.
    const Class& function_class = Class::Handle(zone, function.Owner());
    ASSERT(!function_class.IsNull());
    const char* class_name =
        String::Handle(zone, function_class.Name()).ToCString();
    ASSERT(class_name != NULL);
    const char* library_name = NULL;
    const char* lib_class_format = NULL;
    if (with_lib) {
      const Library& library = Library::Handle(zone, function_class.library());
      ASSERT(!library.IsNull());
      switch (lib_kind) {
        case kQualifiedFunctionLibKindLibUrl:
          library_name = String::Handle(zone, library.url()).ToCString();
          break;
        case kQualifiedFunctionLibKindLibName:
          library_name = String::Handle(zone, library.name()).ToCString();
          break;
        default:
          UNREACHABLE();
      }
      ASSERT(library_name != NULL);
      lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_";
    } else {
      library_name = "";
      lib_class_format = "%s%s.";
    }
    reserve_len +=
        Utils::SNPrint(NULL, 0, lib_class_format, library_name, class_name);
    ASSERT(chars != NULL);
    *chars = zone->Alloc<char>(reserve_len + 1);
    written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format,
                             library_name, class_name);
  } else {
    // Recursive case: let the parent write its prefix first.
    written = ConstructFunctionFullyQualifiedCString(parent, chars, reserve_len,
                                                     with_lib, lib_kind);
  }
  ASSERT(*chars != NULL);
  // Append this function's name after what has been written so far.
  char* next = *chars + written;
  written += Utils::SNPrint(next, reserve_len + 1, function_format, name);
  // Replace ":" with "_".
  while (true) {
    next = strchr(next, ':');
    if (next == NULL) break;
    *next = '_';
  }
  return written;
}
8277
8278const char* Function::ToFullyQualifiedCString() const {
8279 char* chars = NULL;
8280 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
8281 kQualifiedFunctionLibKindLibUrl);
8282 return chars;
8283}
8284
8285const char* Function::ToLibNamePrefixedQualifiedCString() const {
8286 char* chars = NULL;
8287 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
8288 kQualifiedFunctionLibKindLibName);
8289 return chars;
8290}
8291
8292const char* Function::ToQualifiedCString() const {
8293 char* chars = NULL;
8294 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, false,
8295 kQualifiedFunctionLibKindLibUrl);
8296 return chars;
8297}
8298
// Returns a new signature function whose result type, parameter types, and
// type-parameter bounds are instantiated from the given type arguments.
// Returns Function::null() if any instantiation fails (dead code detected by
// the optimizing compiler). When 'num_free_fun_type_params' is
// kCurrentAndEnclosingFree, the signature's own type parameters are deleted
// and all function type parameters are treated as free.
FunctionPtr Function::InstantiateSignatureFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space) const {
  Zone* zone = Thread::Current()->zone();
  const Object& owner = Object::Handle(zone, RawOwner());
  // Note that parent pointers in newly instantiated signatures still points to
  // the original uninstantiated parent signatures. That is not a problem.
  const Function& parent = Function::Handle(zone, parent_function());

  // See the comment on kCurrentAndEnclosingFree to understand why we don't
  // adjust 'num_free_fun_type_params' downward in this case.
  bool delete_type_parameters = false;
  if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
    num_free_fun_type_params = kAllFree;
    delete_type_parameters = true;
  } else {
    ASSERT(!HasInstantiatedSignature(kAny, num_free_fun_type_params));

    // A generic typedef may declare a non-generic function type and get
    // instantiated with unrelated function type parameters. In that case, its
    // signature is still uninstantiated, because these type parameters are
    // free (they are not declared by the typedef).
    // For that reason, we only adjust num_free_fun_type_params if this
    // signature is generic or has a generic parent.
    if (IsGeneric() || HasGenericParent()) {
      // We only consider the function type parameters declared by the parents
      // of this signature function as free.
      const int num_parent_type_params = NumParentTypeParameters();
      if (num_parent_type_params < num_free_fun_type_params) {
        num_free_fun_type_params = num_parent_type_params;
      }
    }
  }

  Function& sig = Function::Handle(Function::NewSignatureFunction(
      owner, parent, TokenPosition::kNoSource, space));
  AbstractType& type = AbstractType::Handle(zone);

  // Copy the type parameters and instantiate their bounds (if necessary).
  if (!delete_type_parameters) {
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, type_parameters());
    if (!type_params.IsNull()) {
      TypeArguments& instantiated_type_params = TypeArguments::Handle(zone);
      TypeParameter& type_param = TypeParameter::Handle(zone);
      Class& cls = Class::Handle(zone);
      String& param_name = String::Handle(zone);
      for (intptr_t i = 0; i < type_params.Length(); ++i) {
        type_param ^= type_params.TypeAt(i);
        type = type_param.bound();
        if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
          type = type.InstantiateFrom(instantiator_type_arguments,
                                      function_type_arguments,
                                      num_free_fun_type_params, space);
          // A returned null type indicates a failed instantiation in dead code
          // that must be propagated up to the caller, the optimizing compiler.
          if (type.IsNull()) {
            return Function::null();
          }
          cls = type_param.parameterized_class();
          param_name = type_param.name();
          ASSERT(type_param.IsFinalized());
          ASSERT(type_param.IsCanonical());
          // Rebuild the type parameter with its instantiated bound, owned by
          // the new signature function.
          type_param = TypeParameter::New(
              cls, sig, type_param.index(), param_name, type,
              type_param.IsGenericCovariantImpl(), type_param.nullability(),
              type_param.token_pos());
          type_param.SetIsFinalized();
          type_param.SetCanonical();
          type_param.SetDeclaration(true);
          // Lazily clone the type parameter vector the first time an
          // instantiated bound is encountered; copy the unchanged prefix.
          if (instantiated_type_params.IsNull()) {
            instantiated_type_params = TypeArguments::New(type_params.Length());
            for (intptr_t j = 0; j < i; ++j) {
              type = type_params.TypeAt(j);
              instantiated_type_params.SetTypeAt(j, type);
            }
          }
          instantiated_type_params.SetTypeAt(i, type_param);
        } else if (!instantiated_type_params.IsNull()) {
          instantiated_type_params.SetTypeAt(i, type_param);
        }
      }
      // Share the original vector if no bound needed instantiation.
      sig.set_type_parameters(instantiated_type_params.IsNull()
                                  ? type_params
                                  : instantiated_type_params);
    }
  }

  // Instantiate the result type if necessary.
  type = result_type();
  if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
    type = type.InstantiateFrom(instantiator_type_arguments,
                                function_type_arguments,
                                num_free_fun_type_params, space);
    // A returned null type indicates a failed instantiation in dead code that
    // must be propagated up to the caller, the optimizing compiler.
    if (type.IsNull()) {
      return Function::null();
    }
  }
  sig.set_result_type(type);
  // Copy the parameter shape, then instantiate each parameter type.
  const intptr_t num_params = NumParameters();
  sig.set_num_fixed_parameters(num_fixed_parameters());
  sig.SetNumOptionalParameters(NumOptionalParameters(),
                               HasOptionalPositionalParameters());
  sig.set_parameter_types(Array::Handle(Array::New(num_params, space)));
  for (intptr_t i = 0; i < num_params; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated(kAny, num_free_fun_type_params)) {
      type = type.InstantiateFrom(instantiator_type_arguments,
                                  function_type_arguments,
                                  num_free_fun_type_params, space);
      // A returned null type indicates a failed instantiation in dead code that
      // must be propagated up to the caller, the optimizing compiler.
      if (type.IsNull()) {
        return Function::null();
      }
    }
    sig.SetParameterTypeAt(i, type);
  }
  // Parameter names (and required-ness flags) are shared unchanged.
  sig.set_parameter_names(Array::Handle(zone, parameter_names()));

  if (delete_type_parameters) {
    ASSERT(sig.HasInstantiatedSignature(kFunctions));
  }
  return sig.raw();
}
8427
8428// Checks if the type of the specified parameter of this function is a supertype
8429// of the type of the specified parameter of the other function (i.e. check
8430// parameter contravariance).
8431// Note that types marked as covariant are already dealt with in the front-end.
8432bool Function::IsContravariantParameter(intptr_t parameter_position,
8433 const Function& other,
8434 intptr_t other_parameter_position,
8435 Heap::Space space) const {
8436 const AbstractType& param_type =
8437 AbstractType::Handle(ParameterTypeAt(parameter_position));
8438 if (param_type.IsTopTypeForSubtyping()) {
8439 return true;
8440 }
8441 const AbstractType& other_param_type =
8442 AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
8443 return other_param_type.IsSubtypeOf(param_type, space);
8444}
8445
// Returns true if this function and 'other' declare the same number of type
// parameters with equivalent bounds. In a subtype test, bounds that are
// mutual subtypes count as equal; otherwise equivalence per 'kind' is used.
bool Function::HasSameTypeParametersAndBounds(const Function& other,
                                              TypeEquality kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const intptr_t num_type_params = NumTypeParameters(thread);
  if (num_type_params != other.NumTypeParameters(thread)) {
    return false;
  }
  if (num_type_params > 0) {
    const TypeArguments& type_params =
        TypeArguments::Handle(zone, type_parameters());
    ASSERT(!type_params.IsNull());
    const TypeArguments& other_type_params =
        TypeArguments::Handle(zone, other.type_parameters());
    ASSERT(!other_type_params.IsNull());
    TypeParameter& type_param = TypeParameter::Handle(zone);
    TypeParameter& other_type_param = TypeParameter::Handle(zone);
    AbstractType& bound = AbstractType::Handle(zone);
    AbstractType& other_bound = AbstractType::Handle(zone);
    // Compare bounds pairwise by position.
    for (intptr_t i = 0; i < num_type_params; i++) {
      type_param ^= type_params.TypeAt(i);
      other_type_param ^= other_type_params.TypeAt(i);
      bound = type_param.bound();
      ASSERT(bound.IsFinalized());
      other_bound = other_type_param.bound();
      ASSERT(other_bound.IsFinalized());
      if (kind == TypeEquality::kInSubtypeTest) {
        // Bounds that are mutual subtypes are considered equal.
        if (!bound.IsSubtypeOf(other_bound, Heap::kOld) ||
            !other_bound.IsSubtypeOf(bound, Heap::kOld)) {
          return false;
        }
      } else {
        if (!bound.IsEquivalent(other_bound, kind)) {
          return false;
        }
      }
    }
  }
  return true;
}
8488
// Returns true if this function type is a subtype of 'other' per the Dart
// function subtyping rules: compatible parameter arity, equal type parameters
// and bounds, covariant result type, contravariant parameter types, matching
// optional named parameters, and (under null safety) matching required-ness.
bool Function::IsSubtypeOf(const Function& other, Heap::Space space) const {
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t other_num_fixed_params = other.num_fixed_parameters();
  const intptr_t other_num_opt_pos_params =
      other.NumOptionalPositionalParameters();
  const intptr_t other_num_opt_named_params =
      other.NumOptionalNamedParameters();
  // This function requires the same arguments or less and accepts the same
  // arguments or more. We can ignore implicit parameters.
  const intptr_t num_ignored_params = NumImplicitParameters();
  const intptr_t other_num_ignored_params = other.NumImplicitParameters();
  if (((num_fixed_params - num_ignored_params) >
       (other_num_fixed_params - other_num_ignored_params)) ||
      ((num_fixed_params - num_ignored_params + num_opt_pos_params) <
       (other_num_fixed_params - other_num_ignored_params +
        other_num_opt_pos_params)) ||
      (num_opt_named_params < other_num_opt_named_params)) {
    return false;
  }
  // Check the type parameters and bounds of generic functions.
  if (!HasSameTypeParametersAndBounds(other, TypeEquality::kInSubtypeTest)) {
    return false;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  // Check the result type (covariant position).
  const AbstractType& other_res_type =
      AbstractType::Handle(zone, other.result_type());
  // 'void Function()' is a subtype of 'Object Function()'.
  if (!other_res_type.IsTopTypeForSubtyping()) {
    const AbstractType& res_type = AbstractType::Handle(zone, result_type());
    if (!res_type.IsSubtypeOf(other_res_type, space)) {
      return false;
    }
  }
  // Check the types of fixed and optional positional parameters.
  for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params +
                            other_num_opt_pos_params);
       i++) {
    if (!IsContravariantParameter(i + num_ignored_params, other,
                                  i + other_num_ignored_params, space)) {
      return false;
    }
  }
  // Check that for each optional named parameter of type T of the other
  // function type, there exists an optional named parameter of this function
  // type with an identical name and with a type S that is a supertype of T.
  // Note that SetParameterNameAt() guarantees that names are symbols, so we
  // can compare their raw pointers.
  const int num_params = num_fixed_params + num_opt_named_params;
  const int other_num_params =
      other_num_fixed_params + other_num_opt_named_params;
  bool found_param_name;
  String& other_param_name = String::Handle(zone);
  for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
    other_param_name = other.ParameterNameAt(i);
    ASSERT(other_param_name.IsSymbol());
    found_param_name = false;
    for (intptr_t j = num_fixed_params; j < num_params; j++) {
      ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
      // Symbols are canonical, so identical names share one raw pointer.
      if (ParameterNameAt(j) == other_param_name.raw()) {
        found_param_name = true;
        if (!IsContravariantParameter(j, other, i, space)) {
          return false;
        }
        break;
      }
    }
    if (!found_param_name) {
      return false;
    }
  }
  if (isolate->null_safety()) {
    // Check that for each required named parameter in this function, there's a
    // corresponding required named parameter in the other function.
    String& param_name = other_param_name;
    for (intptr_t j = num_params - num_opt_named_params; j < num_params; j++) {
      if (IsRequiredAt(j)) {
        param_name = ParameterNameAt(j);
        ASSERT(param_name.IsSymbol());
        bool found = false;
        for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
          ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol());
          if (other.ParameterNameAt(i) == param_name.raw()) {
            found = true;
            if (!other.IsRequiredAt(i)) {
              return false;
            }
          }
        }
        if (!found) {
          return false;
        }
      }
    }
  }
  return true;
}
8590
8591// The compiler generates an implicit constructor if a class definition
8592// does not contain an explicit constructor or factory. The implicit
8593// constructor has the same token position as the owner class.
8594bool Function::IsImplicitConstructor() const {
8595 return IsGenerativeConstructor() && (token_pos() == end_token_pos());
8596}
8597
8598bool Function::IsImplicitStaticClosureFunction(FunctionPtr func) {
8599 NoSafepointScope no_safepoint;
8600 uint32_t kind_tag = func->ptr()->kind_tag_;
8601 return (KindBits::decode(kind_tag) ==
8602 FunctionLayout::kImplicitClosureFunction) &&
8603 StaticBit::decode(kind_tag);
8604}
8605
8606FunctionPtr Function::New(Heap::Space space) {
8607 ASSERT(Object::function_class() != Class::null());
8608 ObjectPtr raw =
8609 Object::Allocate(Function::kClassId, Function::InstanceSize(), space);
8610 return static_cast<FunctionPtr>(raw);
8611}
8612
8613FunctionPtr Function::New(const String& name,
8614 FunctionLayout::Kind kind,
8615 bool is_static,
8616 bool is_const,
8617 bool is_abstract,
8618 bool is_external,
8619 bool is_native,
8620 const Object& owner,
8621 TokenPosition token_pos,
8622 Heap::Space space) {
8623 ASSERT(!owner.IsNull() || (kind == FunctionLayout::kSignatureFunction));
8624 const Function& result = Function::Handle(Function::New(space));
8625 result.set_kind_tag(0);
8626 result.set_parameter_types(Object::empty_array());
8627 result.set_parameter_names(Object::empty_array());
8628 result.set_name(name);
8629 result.set_kind_tag(0); // Ensure determinism of uninitialized bits.
8630 result.set_kind(kind);
8631 result.set_recognized_kind(MethodRecognizer::kUnknown);
8632 result.set_modifier(FunctionLayout::kNoModifier);
8633 result.set_is_static(is_static);
8634 result.set_is_const(is_const);
8635 result.set_is_abstract(is_abstract);
8636 result.set_is_external(is_external);
8637 result.set_is_native(is_native);
8638 result.set_is_reflectable(true); // Will be computed later.
8639 result.set_is_visible(true); // Will be computed later.
8640 result.set_is_debuggable(true); // Will be computed later.
8641 result.set_is_intrinsic(false);
8642 result.set_is_redirecting(false);
8643 result.set_is_generated_body(false);
8644 result.set_has_pragma(false);
8645 result.set_is_polymorphic_target(false);
8646 result.set_is_synthetic(false);
8647 NOT_IN_PRECOMPILED(result.set_state_bits(0));
8648 result.set_owner(owner);
8649 NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
8650 NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos));
8651 result.set_num_fixed_parameters(0);
8652 result.SetNumOptionalParameters(0, false);
8653 NOT_IN_PRECOMPILED(result.set_usage_counter(0));
8654 NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0));
8655 NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0));
8656 NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0));
8657 NOT_IN_PRECOMPILED(result.set_inlining_depth(0));
8658 NOT_IN_PRECOMPILED(result.set_is_declared_in_bytecode(false));
8659 NOT_IN_PRECOMPILED(result.set_binary_declaration_offset(0));
8660 result.set_is_optimizable(is_native ? false : true);
8661 result.set_is_background_optimizable(is_native ? false : true);
8662 result.set_is_inlinable(true);
8663 result.reset_unboxed_parameters_and_return();
8664 result.SetInstructionsSafe(StubCode::LazyCompile());
8665 if (kind == FunctionLayout::kClosureFunction ||
8666 kind == FunctionLayout::kImplicitClosureFunction) {
8667 ASSERT(space == Heap::kOld);
8668 const ClosureData& data = ClosureData::Handle(ClosureData::New());
8669 result.set_data(data);
8670 } else if (kind == FunctionLayout::kSignatureFunction) {
8671 const SignatureData& data =
8672 SignatureData::Handle(SignatureData::New(space));
8673 result.set_data(data);
8674 } else if (kind == FunctionLayout::kFfiTrampoline) {
8675 const FfiTrampolineData& data =
8676 FfiTrampolineData::Handle(FfiTrampolineData::New());
8677 result.set_data(data);
8678 } else {
8679 // Functions other than signature functions have no reason to be allocated
8680 // in new space.
8681 ASSERT(space == Heap::kOld);
8682 }
8683
8684 // Force-optimized functions are not debuggable because they cannot
8685 // deoptimize.
8686 if (result.ForceOptimize()) {
8687 result.set_is_debuggable(false);
8688 }
8689
8690 return result.raw();
8691}
8692
8693FunctionPtr Function::NewClosureFunctionWithKind(FunctionLayout::Kind kind,
8694 const String& name,
8695 const Function& parent,
8696 TokenPosition token_pos,
8697 const Object& owner) {
8698 ASSERT((kind == FunctionLayout::kClosureFunction) ||
8699 (kind == FunctionLayout::kImplicitClosureFunction));
8700 ASSERT(!parent.IsNull());
8701 ASSERT(!owner.IsNull());
8702 const Function& result = Function::Handle(
8703 Function::New(name, kind,
8704 /* is_static = */ parent.is_static(),
8705 /* is_const = */ false,
8706 /* is_abstract = */ false,
8707 /* is_external = */ false,
8708 /* is_native = */ false, owner, token_pos));
8709 result.set_parent_function(parent);
8710 return result.raw();
8711}
8712
8713FunctionPtr Function::NewClosureFunction(const String& name,
8714 const Function& parent,
8715 TokenPosition token_pos) {
8716 // Use the owner defining the parent function and not the class containing it.
8717 const Object& parent_owner = Object::Handle(parent.RawOwner());
8718 return NewClosureFunctionWithKind(FunctionLayout::kClosureFunction, name,
8719 parent, token_pos, parent_owner);
8720}
8721
8722FunctionPtr Function::NewImplicitClosureFunction(const String& name,
8723 const Function& parent,
8724 TokenPosition token_pos) {
8725 // Use the owner defining the parent function and not the class containing it.
8726 const Object& parent_owner = Object::Handle(parent.RawOwner());
8727 return NewClosureFunctionWithKind(FunctionLayout::kImplicitClosureFunction,
8728 name, parent, token_pos, parent_owner);
8729}
8730
8731FunctionPtr Function::NewSignatureFunction(const Object& owner,
8732 const Function& parent,
8733 TokenPosition token_pos,
8734 Heap::Space space) {
8735 const Function& result = Function::Handle(Function::New(
8736 Symbols::AnonymousSignature(), FunctionLayout::kSignatureFunction,
8737 /* is_static = */ false,
8738 /* is_const = */ false,
8739 /* is_abstract = */ false,
8740 /* is_external = */ false,
8741 /* is_native = */ false,
8742 owner, // Same as function type scope class.
8743 token_pos, space));
8744 result.set_parent_function(parent);
8745 result.set_is_reflectable(false);
8746 result.set_is_visible(false);
8747 result.set_is_debuggable(false);
8748 return result.raw();
8749}
8750
8751FunctionPtr Function::NewEvalFunction(const Class& owner,
8752 const Script& script,
8753 bool is_static) {
8754 Thread* thread = Thread::Current();
8755 Zone* zone = thread->zone();
8756 const Function& result = Function::Handle(
8757 zone,
8758 Function::New(String::Handle(Symbols::New(thread, ":Eval")),
8759 FunctionLayout::kRegularFunction, is_static,
8760 /* is_const = */ false,
8761 /* is_abstract = */ false,
8762 /* is_external = */ false,
8763 /* is_native = */ false, owner, TokenPosition::kMinSource));
8764 ASSERT(!script.IsNull());
8765 result.set_is_debuggable(false);
8766 result.set_is_visible(true);
8767 result.set_eval_script(script);
8768 return result.raw();
8769}
8770
// Returns whether a tear-off of this function can be created.
bool Function::SafeToClosurize() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  // In AOT, implicit closure functions are pre-created at compile time, so a
  // tear-off is only possible if one already exists.
  return HasImplicitClosureFunction();
#else
  // In JIT mode an implicit closure function can be created on demand.
  return true;
#endif
}
8778
// Returns (creating and caching on first use, JIT only) the implicit closure
// function used when this method is torn off. The closure mirrors this
// function's signature, with the receiver (if any) replaced by the closure
// object as the first parameter.
FunctionPtr Function::ImplicitClosureFunction() const {
  // Return the existing implicit closure function if any.
  if (implicit_closure_function() != Function::null()) {
    return implicit_closure_function();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  // In AOT mode all implicit closures are pre-created.
  FATAL("Cannot create implicit closure in AOT!");
  return Function::null();
#else
  ASSERT(!IsSignatureFunction() && !IsClosureFunction());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // Create closure function.
  const String& closure_name = String::Handle(zone, name());
  const Function& closure_function = Function::Handle(
      zone, NewImplicitClosureFunction(closure_name, *this, token_pos()));

  // Set closure function's context scope.
  if (is_static()) {
    closure_function.set_context_scope(Object::empty_context_scope());
  } else {
    // Instance tear-offs capture the receiver in an implicit scope.
    const ContextScope& context_scope = ContextScope::Handle(
        zone, LocalScope::CreateImplicitClosureScope(*this));
    closure_function.set_context_scope(context_scope);
  }

  // Set closure function's type parameters.
  closure_function.set_type_parameters(
      TypeArguments::Handle(zone, type_parameters()));

  // Set closure function's result type to this result type.
  closure_function.set_result_type(AbstractType::Handle(zone, result_type()));

  // Set closure function's end token to this end token.
  closure_function.set_end_token_pos(end_token_pos());

  // The closurized method stub just calls into the original method and should
  // therefore be skipped by the debugger and in stack traces.
  closure_function.set_is_debuggable(false);
  closure_function.set_is_visible(false);

  // Set closure function's formal parameters to this formal parameters,
  // removing the receiver if this is an instance method and adding the closure
  // object as first parameter.
  const int kClosure = 1;
  const int has_receiver = is_static() ? 0 : 1;
  const int num_fixed_params = kClosure - has_receiver + num_fixed_parameters();
  const int num_opt_params = NumOptionalParameters();
  const bool has_opt_pos_params = HasOptionalPositionalParameters();
  const int num_params = num_fixed_params + num_opt_params;
  // Extra parameter_names slots beyond NumParameters() hold required flags.
  const int num_required_flags =
      Array::Handle(zone, parameter_names()).Length() - NumParameters();
  closure_function.set_num_fixed_parameters(num_fixed_params);
  closure_function.SetNumOptionalParameters(num_opt_params, has_opt_pos_params);
  closure_function.set_parameter_types(
      Array::Handle(zone, Array::New(num_params, Heap::kOld)));
  closure_function.set_parameter_names(Array::Handle(
      zone, Array::New(num_params + num_required_flags, Heap::kOld)));
  AbstractType& param_type = AbstractType::Handle(zone);
  String& param_name = String::Handle(zone);
  // Add implicit closure object parameter.
  param_type = Type::DynamicType();
  closure_function.SetParameterTypeAt(0, param_type);
  closure_function.SetParameterNameAt(0, Symbols::ClosureParameter());
  // Copy remaining parameters, shifted by (closure slot - receiver slot).
  for (int i = kClosure; i < num_params; i++) {
    param_type = ParameterTypeAt(has_receiver - kClosure + i);
    closure_function.SetParameterTypeAt(i, param_type);
    param_name = ParameterNameAt(has_receiver - kClosure + i);
    closure_function.SetParameterNameAt(i, param_name);
    if (IsRequiredAt(has_receiver - kClosure + i)) {
      closure_function.SetIsRequiredAt(i);
    }
  }
  closure_function.InheritBinaryDeclarationFrom(*this);

  // Change covariant parameter types to either Object? for an opted-in implicit
  // closure or to Object* for a legacy implicit closure.
  if (!is_static()) {
    BitVector is_covariant(zone, NumParameters());
    BitVector is_generic_covariant_impl(zone, NumParameters());
    kernel::ReadParameterCovariance(*this, &is_covariant,
                                    &is_generic_covariant_impl);

    Type& object_type = Type::Handle(zone, Type::ObjectType());
    ObjectStore* object_store = Isolate::Current()->object_store();
    object_type = nnbd_mode() == NNBDMode::kOptedInLib
                      ? object_store->nullable_object_type()
                      : object_store->legacy_object_type();
    for (intptr_t i = kClosure; i < num_params; ++i) {
      const intptr_t original_param_index = has_receiver - kClosure + i;
      if (is_covariant.Contains(original_param_index) ||
          is_generic_covariant_impl.Contains(original_param_index)) {
        closure_function.SetParameterTypeAt(i, object_type);
      }
    }
  }
  const Type& signature_type =
      Type::Handle(zone, closure_function.SignatureType());
  if (!signature_type.IsFinalized()) {
    ClassFinalizer::FinalizeType(Class::Handle(zone, Owner()), signature_type);
  }
  // Cache the result so subsequent tear-offs reuse the same function.
  set_implicit_closure_function(closure_function);
  ASSERT(closure_function.IsImplicitClosureFunction());
  return closure_function.raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
8886
8887void Function::DropUncompiledImplicitClosureFunction() const {
8888 if (implicit_closure_function() != Function::null()) {
8889 const Function& func = Function::Handle(implicit_closure_function());
8890 if (!func.HasCode()) {
8891 set_implicit_closure_function(Function::Handle());
8892 }
8893 }
8894}
8895
8896StringPtr Function::Signature() const {
8897 Thread* thread = Thread::Current();
8898 ZoneTextBuffer printer(thread->zone());
8899 PrintSignature(kInternalName, &printer);
8900 return Symbols::New(thread, printer.buffer());
8901}
8902
8903StringPtr Function::UserVisibleSignature() const {
8904 Thread* thread = Thread::Current();
8905 ZoneTextBuffer printer(thread->zone());
8906 PrintSignature(kUserVisibleName, &printer);
8907 return Symbols::New(thread, printer.buffer());
8908}
8909
// Prints this function's parameter list (without surrounding parentheses)
// into 'printer': fixed parameters first, then optional positional in "[...]"
// or optional named in "{...}". Implicit parameters are hidden for
// user-visible output. Named parameters include their names and a "required "
// prefix where applicable.
void Function::PrintSignatureParameters(Thread* thread,
                                        Zone* zone,
                                        NameVisibility name_visibility,
                                        BaseTextBuffer* printer) const {
  AbstractType& param_type = AbstractType::Handle(zone);
  const intptr_t num_params = NumParameters();
  const intptr_t num_fixed_params = num_fixed_parameters();
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_opt_named_params = NumOptionalNamedParameters();
  const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params;
  ASSERT((num_fixed_params + num_opt_params) == num_params);
  intptr_t i = 0;
  if (name_visibility == kUserVisibleName) {
    // Hide implicit parameters.
    i = NumImplicitParameters();
  }
  String& name = String::Handle(zone);
  while (i < num_fixed_params) {
    param_type = ParameterTypeAt(i);
    ASSERT(!param_type.IsNull());
    param_type.PrintName(name_visibility, printer);
    // No trailing comma after the very last parameter of the signature.
    if (i != (num_params - 1)) {
      printer->AddString(", ");
    }
    i++;
  }
  if (num_opt_params > 0) {
    // A function has either optional positional or optional named parameters,
    // never both; pick the bracket style accordingly.
    if (num_opt_pos_params > 0) {
      printer->AddString("[");
    } else {
      printer->AddString("{");
    }
    // NOTE: this inner 'i' shadows the outer loop counter above.
    for (intptr_t i = num_fixed_params; i < num_params; i++) {
      if (num_opt_named_params > 0 && IsRequiredAt(i)) {
        printer->AddString("required ");
      }
      param_type = ParameterTypeAt(i);
      ASSERT(!param_type.IsNull());
      param_type.PrintName(name_visibility, printer);
      // The parameter name of an optional positional parameter does not need
      // to be part of the signature, since it is not used.
      if (num_opt_named_params > 0) {
        name = ParameterNameAt(i);
        printer->AddString(" ");
        printer->AddString(name.ToCString());
      }
      if (i != (num_params - 1)) {
        printer->AddString(", ");
      }
    }
    if (num_opt_pos_params > 0) {
      printer->AddString("]");
    } else {
      printer->AddString("}");
    }
  }
}
8967
8968InstancePtr Function::ImplicitStaticClosure() const {
8969 ASSERT(IsImplicitStaticClosureFunction());
8970 if (implicit_static_closure() == Instance::null()) {
8971 Zone* zone = Thread::Current()->zone();
8972 const Context& context = Context::Handle(zone);
8973 Instance& closure =
8974 Instance::Handle(zone, Closure::New(Object::null_type_arguments(),
8975 Object::null_type_arguments(),
8976 *this, context, Heap::kOld));
8977 set_implicit_static_closure(closure);
8978 }
8979 return implicit_static_closure();
8980}
8981
8982InstancePtr Function::ImplicitInstanceClosure(const Instance& receiver) const {
8983 ASSERT(IsImplicitClosureFunction());
8984 Zone* zone = Thread::Current()->zone();
8985 const Context& context = Context::Handle(zone, Context::New(1));
8986 context.SetAt(0, receiver);
8987 TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
8988 if (!HasInstantiatedSignature(kCurrentClass)) {
8989 instantiator_type_arguments = receiver.GetTypeArguments();
8990 }
8991 ASSERT(HasInstantiatedSignature(kFunctions)); // No generic parent function.
8992 return Closure::New(instantiator_type_arguments,
8993 Object::null_type_arguments(), *this, context);
8994}
8995
8996intptr_t Function::ComputeClosureHash() const {
8997 ASSERT(IsClosureFunction());
8998 const Class& cls = Class::Handle(Owner());
8999 uintptr_t result = String::Handle(name()).Hash();
9000 result += String::Handle(Signature()).Hash();
9001 result += String::Handle(cls.Name()).Hash();
9002 return result;
9003}
9004
// Prints this function's full signature into 'printer' in the form
// "<T extends B, ...>(params) => ResultType", using the requested name
// visibility for all embedded type names.
void Function::PrintSignature(NameVisibility name_visibility,
                              BaseTextBuffer* printer) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  String& name = String::Handle(zone);
  const TypeArguments& type_params =
      TypeArguments::Handle(zone, type_parameters());
  if (!type_params.IsNull()) {
    const intptr_t num_type_params = type_params.Length();
    ASSERT(num_type_params > 0);
    TypeParameter& type_param = TypeParameter::Handle(zone);
    AbstractType& bound = AbstractType::Handle(zone);
    printer->AddString("<");
    for (intptr_t i = 0; i < num_type_params; i++) {
      type_param ^= type_params.TypeAt(i);
      name = type_param.name();
      printer->AddString(name.ToCString());
      bound = type_param.bound();
      // Do not print default bound or non-nullable Object bound in weak mode.
      if (!bound.IsNull() &&
          (!bound.IsObjectType() ||
           (isolate->null_safety() && bound.IsNonNullable()))) {
        printer->AddString(" extends ");
        bound.PrintName(name_visibility, printer);
      }
      if (i < num_type_params - 1) {
        printer->AddString(", ");
      }
    }
    printer->AddString(">");
  }
  printer->AddString("(");
  PrintSignatureParameters(thread, zone, name_visibility, printer);
  printer->AddString(") => ");
  const AbstractType& res_type = AbstractType::Handle(zone, result_type());
  res_type.PrintName(name_visibility, printer);
}
9043
// Returns true if this function's signature (result type, parameter types,
// and type-parameter bounds) contains no uninstantiated type with respect to
// |genericity| and the first |num_free_fun_type_params| function type
// parameters. |trail| breaks cycles in recursive type checks.
bool Function::HasInstantiatedSignature(Genericity genericity,
                                        intptr_t num_free_fun_type_params,
                                        TrailPtr trail) const {
  if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
    num_free_fun_type_params = kAllFree;
  } else if (genericity != kCurrentClass) {
    // A generic typedef may declare a non-generic function type and get
    // instantiated with unrelated function type parameters. In that case, its
    // signature is still uninstantiated, because these type parameters are
    // free (they are not declared by the typedef).
    // For that reason, we only adjust num_free_fun_type_params if this
    // signature is generic or has a generic parent.
    if (IsGeneric() || HasGenericParent()) {
      // We only consider the function type parameters declared by the parents
      // of this signature function as free.
      const int num_parent_type_params = NumParentTypeParameters();
      if (num_parent_type_params < num_free_fun_type_params) {
        num_free_fun_type_params = num_parent_type_params;
      }
    }
  }
  // Check the result type first, then each parameter type, then each
  // type-parameter bound; any uninstantiated one fails the whole signature.
  AbstractType& type = AbstractType::Handle(result_type());
  if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
    return false;
  }
  const intptr_t num_parameters = NumParameters();
  for (intptr_t i = 0; i < num_parameters; i++) {
    type = ParameterTypeAt(i);
    if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  TypeArguments& type_params = TypeArguments::Handle(type_parameters());
  TypeParameter& type_param = TypeParameter::Handle();
  for (intptr_t i = 0; i < type_params.Length(); ++i) {
    type_param ^= type_params.TypeAt(i);
    type = type_param.bound();
    if (!type.IsInstantiated(genericity, num_free_fun_type_params, trail)) {
      return false;
    }
  }
  return true;
}
9087
9088ClassPtr Function::Owner() const {
9089 if (raw_ptr()->owner_ == Object::null()) {
9090 ASSERT(IsSignatureFunction());
9091 return Class::null();
9092 }
9093 if (raw_ptr()->owner_->IsClass()) {
9094 return Class::RawCast(raw_ptr()->owner_);
9095 }
9096 const Object& obj = Object::Handle(raw_ptr()->owner_);
9097 ASSERT(obj.IsPatchClass());
9098 return PatchClass::Cast(obj).patched_class();
9099}
9100
9101ClassPtr Function::origin() const {
9102 if (raw_ptr()->owner_ == Object::null()) {
9103 ASSERT(IsSignatureFunction());
9104 return Class::null();
9105 }
9106 if (raw_ptr()->owner_->IsClass()) {
9107 return Class::RawCast(raw_ptr()->owner_);
9108 }
9109 const Object& obj = Object::Handle(raw_ptr()->owner_);
9110 ASSERT(obj.IsPatchClass());
9111 return PatchClass::Cast(obj).origin_class();
9112}
9113
// Copies the binary declaration info (kernel/bytecode offset and which of the
// two it is) from another function. JIT-only: the precompiled runtime does
// not keep binary declaration data.
void Function::InheritBinaryDeclarationFrom(const Function& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  // The bitfield encodes both the offset and the declared-in-bytecode flag,
  // so a single raw copy transfers everything.
  StoreNonPointer(&raw_ptr()->binary_declaration_,
                  src.raw_ptr()->binary_declaration_);
#endif
}
9122
// Copies the binary declaration info from a field (used e.g. for synthetic
// accessors), preserving whether the field was declared in bytecode or
// kernel. JIT-only.
void Function::InheritBinaryDeclarationFrom(const Field& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  if (src.is_declared_in_bytecode()) {
    set_is_declared_in_bytecode(true);
    set_bytecode_offset(src.bytecode_offset());
  } else {
    set_kernel_offset(src.kernel_offset());
  }
#endif
}
9135
9136void Function::SetKernelDataAndScript(const Script& script,
9137 const ExternalTypedData& data,
9138 intptr_t offset) const {
9139 Array& data_field = Array::Handle(Array::New(3));
9140 data_field.SetAt(0, script);
9141 data_field.SetAt(1, data);
9142 data_field.SetAt(2, Smi::Handle(Smi::New(offset)));
9143 set_data(data_field);
9144}
9145
// Returns the script this function was declared in, trying several sources
// in priority order: forwarding target, accessor field, the data_ triple set
// by SetKernelDataAndScript, an eval script, the owning patch class, the
// parent closure chain, and finally the owning class.
ScriptPtr Function::script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  const Object& data = Object::Handle(raw_ptr()->data_);
  if (IsDynamicInvocationForwarder()) {
    // Forwarders share the script of the function they forward to.
    const auto& forwarding_target = Function::Handle(ForwardingTarget());
    return forwarding_target.script();
  }
  if (IsImplicitGetterOrSetter()) {
    // Implicit accessors share the script of their backing field.
    const auto& field = Field::Handle(accessor_field());
    return field.Script();
  }
  if (data.IsArray()) {
    // data_ may hold the [script, kernel data, offset] triple.
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return Script::Cast(script).raw();
    }
  }
  if (token_pos() == TokenPosition::kMinSource) {
    // Testing for position 0 is an optimization that relies on temporary
    // eval functions having token position 0.
    const Script& script = Script::Handle(eval_script());
    if (!script.IsNull()) {
      return script.raw();
    }
  }
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsPatchClass()) {
    return PatchClass::Cast(obj).script();
  }
  if (IsClosureFunction()) {
    // Closures inherit the script of their enclosing function.
    return Function::Handle(parent_function()).script();
  }
  if (obj.IsNull()) {
    ASSERT(IsSignatureFunction());
    return Script::null();
  }
  ASSERT(obj.IsClass());
  return Class::Cast(obj).script();
}
9186
// Returns the kernel binary blob containing this function's declaration:
// either the per-function triple stored in data_, the parent closure's data,
// or the owning library's kernel data.
ExternalTypedDataPtr Function::KernelData() const {
  Object& data = Object::Handle(raw_ptr()->data_);
  if (data.IsArray()) {
    // data_ may hold the [script, kernel data, offset] triple; slot 0 being a
    // Script identifies that layout.
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return ExternalTypedData::RawCast(Array::Cast(data).At(1));
    }
  }
  if (IsClosureFunction()) {
    // Closures share the kernel data of their enclosing function.
    Function& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    return parent.KernelData();
  }

  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_data();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_data();
}
9209
// Returns the offset of this function's library within the kernel program
// blob (or the per-function offset from the data_ triple). Synthetic
// dispatchers and FFI trampolines have no kernel representation and return 0.
intptr_t Function::KernelDataProgramOffset() const {
  ASSERT(!is_declared_in_bytecode());
  if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher() ||
      IsFfiTrampoline()) {
    return 0;
  }
  Object& data = Object::Handle(raw_ptr()->data_);
  if (data.IsArray()) {
    // data_ may hold the [script, kernel data, offset] triple.
    Object& script = Object::Handle(Array::Cast(data).At(0));
    if (script.IsScript()) {
      return Smi::Value(Smi::RawCast(Array::Cast(data).At(2)));
    }
  }
  if (IsClosureFunction()) {
    // Closures share the kernel offset of their enclosing function.
    Function& parent = Function::Handle(parent_function());
    ASSERT(!parent.IsNull());
    return parent.KernelDataProgramOffset();
  }

  const Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    ASSERT(!lib.is_declared_in_bytecode());
    return lib.kernel_offset();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_offset();
}
9238
9239bool Function::HasOptimizedCode() const {
9240 return HasCode() && Code::Handle(CurrentCode()).is_optimized();
9241}
9242
9243bool Function::ShouldCompilerOptimize() const {
9244 return !FLAG_enable_interpreter ||
9245 ((unoptimized_code() != Object::null()) && WasCompiled()) ||
9246 ForceOptimize();
9247}
9248
9249const char* Function::NameCString(NameVisibility name_visibility) const {
9250 switch (name_visibility) {
9251 case kInternalName:
9252 return String::Handle(name()).ToCString();
9253 case kScrubbedName:
9254 case kUserVisibleName:
9255 return UserVisibleNameCString();
9256 }
9257 UNREACHABLE();
9258 return nullptr;
9259}
9260
// Returns the user-visible (scrubbed) name as a C string; with
// --show-internal-names the raw internal name is returned instead.
const char* Function::UserVisibleNameCString() const {
  if (FLAG_show_internal_names) {
    return String::Handle(name()).ToCString();
  }
  return String::ScrubName(String::Handle(name()), is_extension_member());
}
9267
// Returns the user-visible (scrubbed) name as an interned Symbol; with
// --show-internal-names the raw internal name is returned instead.
StringPtr Function::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  return Symbols::New(
      Thread::Current(),
      String::ScrubName(String::Handle(name()), is_extension_member()));
}
9276
// Returns the fully qualified scrubbed name (including enclosing class /
// parent function, per PrintName's default formatting) as a Symbol.
StringPtr Function::QualifiedScrubbedName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kScrubbedName), &printer);
  return Symbols::New(thread, printer.buffer());
}
9283
// Returns the fully qualified user-visible name (including enclosing class /
// parent function, per PrintName's default formatting) as a Symbol.
StringPtr Function::QualifiedUserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(NameFormattingParams(kUserVisibleName), &printer);
  return Symbols::New(thread, printer.buffer());
}
9290
9291void Function::PrintName(const NameFormattingParams& params,
9292 BaseTextBuffer* printer) const {
9293 // If |this| is the generated asynchronous body closure, use the
9294 // name of the parent function.
9295 Function& fun = Function::Handle(raw());
9296
9297 if (params.disambiguate_names) {
9298 if (fun.IsInvokeFieldDispatcher()) {
9299 printer->AddString("[invoke-field] ");
9300 }
9301 if (fun.IsImplicitClosureFunction()) {
9302 printer->AddString("[tear-off] ");
9303 }
9304 if (fun.IsMethodExtractor()) {
9305 printer->AddString("[tear-off-extractor] ");
9306 }
9307 }
9308
9309 if (fun.IsNonImplicitClosureFunction()) {
9310 // Sniff the parent function.
9311 fun = fun.parent_function();
9312 ASSERT(!fun.IsNull());
9313 if (!fun.IsAsyncGenerator() && !fun.IsAsyncFunction() &&
9314 !fun.IsSyncGenerator()) {
9315 // Parent function is not the generator of an asynchronous body closure,
9316 // start at |this|.
9317 fun = raw();
9318 }
9319 }
9320 if (IsClosureFunction()) {
9321 if (fun.IsLocalFunction() && !fun.IsImplicitClosureFunction()) {
9322 Function& parent = Function::Handle(fun.parent_function());
9323 if (parent.IsAsyncClosure() || parent.IsSyncGenClosure() ||
9324 parent.IsAsyncGenClosure()) {
9325 // Skip the closure and use the real function name found in
9326 // the parent.
9327 parent = parent.parent_function();
9328 }
9329 if (params.include_parent_name) {
9330 parent.PrintName(params, printer);
9331 // A function's scrubbed name and its user visible name are identical.
9332 printer->AddString(".");
9333 }
9334 if (params.disambiguate_names &&
9335 fun.name() == Symbols::AnonymousClosure().raw()) {
9336 printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos());
9337 } else {
9338 printer->AddString(fun.NameCString(params.name_visibility));
9339 }
9340 // If we skipped rewritten async/async*/sync* body then append a suffix
9341 // to the end of the name.
9342 if (fun.raw() != raw() && params.disambiguate_names) {
9343 printer->AddString("{body}");
9344 }
9345 return;
9346 }
9347 }
9348
9349 if (fun.kind() == FunctionLayout::kConstructor) {
9350 printer->AddString("new ");
9351 } else if (params.include_class_name) {
9352 const Class& cls = Class::Handle(Owner());
9353 if (!cls.IsTopLevel()) {
9354 const Class& mixin = Class::Handle(cls.Mixin());
9355 printer->AddString(params.name_visibility == kUserVisibleName
9356 ? mixin.UserVisibleNameCString()
9357 : cls.NameCString(params.name_visibility));
9358 printer->AddString(".");
9359 }
9360 }
9361
9362 printer->AddString(fun.NameCString(params.name_visibility));
9363
9364 // If we skipped rewritten async/async*/sync* body then append a suffix
9365 // to the end of the name.
9366 if (fun.raw() != raw() && params.disambiguate_names) {
9367 printer->AddString("{body}");
9368 }
9369
9370 // Field dispatchers are specialized for an argument descriptor so there
9371 // might be multiples of them with the same name but different argument
9372 // descriptors. Add a suffix to disambiguate.
9373 if (params.disambiguate_names && fun.IsInvokeFieldDispatcher()) {
9374 printer->AddString(" ");
9375 if (NumTypeParameters() != 0) {
9376 printer->Printf("<%" Pd ">", fun.NumTypeParameters());
9377 }
9378 printer->AddString("(");
9379 printer->Printf("%" Pd "", fun.num_fixed_parameters());
9380 if (fun.NumOptionalPositionalParameters() != 0) {
9381 printer->Printf(" [%" Pd "]", fun.NumOptionalPositionalParameters());
9382 }
9383 if (fun.NumOptionalNamedParameters() != 0) {
9384 printer->AddString(" {");
9385 String& name = String::Handle();
9386 for (intptr_t i = 0; i < fun.NumOptionalNamedParameters(); i++) {
9387 name = fun.ParameterNameAt(fun.num_fixed_parameters() + i);
9388 printer->Printf("%s%s", i > 0 ? ", " : "", name.ToCString());
9389 }
9390 printer->AddString("}");
9391 }
9392 printer->AddString(")");
9393 }
9394}
9395
// Returns the source text of this function, extracted from its script as a
// snippet between its start and end token positions. Returns null for
// functions without a meaningful body in source.
StringPtr Function::GetSource() const {
  if (IsImplicitConstructor() || IsSignatureFunction() || is_synthetic()) {
    // We may need to handle more cases when the restrictions on mixins are
    // relaxed. In particular we might start associating some source with the
    // forwarding constructors when it becomes possible to specify a particular
    // constructor from the mixin to use.
    return String::null();
  }
  Zone* zone = Thread::Current()->zone();
  const Script& func_script = Script::Handle(zone, script());

  intptr_t from_line;
  intptr_t from_col;
  intptr_t to_line;
  intptr_t to_col;
  intptr_t to_length;
  func_script.GetTokenLocation(token_pos(), &from_line, &from_col);
  func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col, &to_length);

  if (to_length == 1) {
    // Handle special cases for end tokens of closures (where we exclude the
    // last token):
    // (1) "foo(() => null, bar);": End token is `,', but we don't print it.
    // (2) "foo(() => null);": End token is ')`, but we don't print it.
    // (3) "var foo = () => null;": End token is `;', but in this case the
    //     token semicolon belongs to the assignment so we skip it.
    const String& src = String::Handle(func_script.Source());
    if (src.IsNull() || src.Length() == 0) {
      // No source available (e.g. stripped scripts).
      return Symbols::OptimizedOut().raw();
    }
    uint16_t end_char = src.CharAt(end_token_pos().value());
    if ((end_char == ',') ||  // Case 1.
        (end_char == ')') ||  // Case 2.
        (end_char == ';' && String::Handle(zone, name())
                                .Equals("<anonymous closure>"))) {  // Case 3.
      to_length = 0;
    }
  }

  return func_script.GetSnippet(from_line, from_col, to_line,
                                to_col + to_length);
}
9438
// Construct fingerprint from token stream. The token stream contains also
// arguments.
// Used to detect when a recognized method's source has changed (see
// CheckSourceFingerprint). Bytecode and kernel declarations use different
// fingerprint algorithms; the precompiled runtime returns a dummy value.
int32_t Function::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (is_declared_in_bytecode()) {
    return kernel::BytecodeFingerprintHelper::CalculateFunctionFingerprint(
        *this);
  }
  return kernel::KernelSourceFingerprintHelper::CalculateFunctionFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
9453
// Persists the compiler's deopt-id -> ICData mapping onto the function in a
// compact array: slot 0 holds the edge counters array, followed by the
// non-null ICData entries in deopt-id order. JIT-only.
void Function::SaveICDataMap(
    const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
    const Array& edge_counters_array) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Compute number of ICData objects to save.
  // Store edge counter array in the first slot.
  intptr_t count = 1;
  for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
    if (deopt_id_to_ic_data[i] != NULL) {
      count++;
    }
  }
  const Array& array = Array::Handle(Array::New(count, Heap::kOld));
  count = 1;
  for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
    if (deopt_id_to_ic_data[i] != NULL) {
      // Entries are stored densely but keep their deopt-id ordering, which
      // RestoreICDataMap relies on (last entry has the max deopt id).
      ASSERT(i == deopt_id_to_ic_data[i]->deopt_id());
      array.SetAt(count++, *deopt_id_to_ic_data[i]);
    }
  }
  array.SetAt(0, edge_counters_array);
  set_ic_data_array(array);
#else   // DART_PRECOMPILED_RUNTIME
  UNREACHABLE();
#endif  // DART_PRECOMPILED_RUNTIME
}
9480
// Rebuilds the deopt-id -> ICData mapping previously stored by SaveICDataMap.
// Optionally clones each ICData (always done for background compilation) so
// the compiler works on private copies. JIT-only.
void Function::RestoreICDataMap(
    ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
    bool clone_ic_data) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_force_clone_compiler_objects) {
    clone_ic_data = true;
  }
  ASSERT(deopt_id_to_ic_data->is_empty());
  Zone* zone = Thread::Current()->zone();
  const Array& saved_ic_data = Array::Handle(zone, ic_data_array());
  if (saved_ic_data.IsNull()) {
    // Could happen with deferred loading.
    return;
  }
  const intptr_t saved_length = saved_ic_data.Length();
  ASSERT(saved_length > 0);
  if (saved_length > 1) {
    // Entries were saved in deopt-id order, so the last entry's deopt id
    // bounds the size of the sparse map.
    const intptr_t restored_length =
        ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1)))
            .deopt_id() +
        1;
    deopt_id_to_ic_data->SetLength(restored_length);
    for (intptr_t i = 0; i < restored_length; i++) {
      (*deopt_id_to_ic_data)[i] = NULL;
    }
    // Slot 0 holds the edge counters array, so ICData entries start at 1.
    for (intptr_t i = 1; i < saved_length; i++) {
      // A fresh ZoneHandle per iteration: its address is stored in the map
      // below and must remain stable.
      ICData& ic_data = ICData::ZoneHandle(zone);
      ic_data ^= saved_ic_data.At(i);
      if (clone_ic_data) {
        const ICData& original_ic_data = ICData::Handle(zone, ic_data.raw());
        ic_data = ICData::Clone(ic_data);
        ic_data.SetOriginal(original_ic_data);
      }
      ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr);
      (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data;
    }
  }
#else   // DART_PRECOMPILED_RUNTIME
  UNREACHABLE();
#endif  // DART_PRECOMPILED_RUNTIME
}
9522
// Stores the saved ICData array with release semantics so readers using the
// acquire load below observe a fully initialized array.
void Function::set_ic_data_array(const Array& value) const {
  StorePointer<ArrayPtr, std::memory_order_release>(&raw_ptr()->ic_data_array_,
                                                    value.raw());
}

// Acquire load paired with the release store in set_ic_data_array().
ArrayPtr Function::ic_data_array() const {
  return LoadPointer<ArrayPtr, std::memory_order_acquire>(
      &raw_ptr()->ic_data_array_);
}

// Drops the saved ICData array (e.g. when the mapping becomes stale).
void Function::ClearICDataArray() const {
  set_ic_data_array(Array::null_array());
}
9536
9537ICDataPtr Function::FindICData(intptr_t deopt_id) const {
9538 const Array& array = Array::Handle(ic_data_array());
9539 ICData& ic_data = ICData::Handle();
9540 for (intptr_t i = 1; i < array.Length(); i++) {
9541 ic_data ^= array.At(i);
9542 if (ic_data.deopt_id() == deopt_id) {
9543 return ic_data.raw();
9544 }
9545 }
9546 return ICData::null();
9547}
9548
9549void Function::SetDeoptReasonForAll(intptr_t deopt_id,
9550 ICData::DeoptReasonId reason) {
9551 const Array& array = Array::Handle(ic_data_array());
9552 ICData& ic_data = ICData::Handle();
9553 for (intptr_t i = 1; i < array.Length(); i++) {
9554 ic_data ^= array.At(i);
9555 if (ic_data.deopt_id() == deopt_id) {
9556 ic_data.AddDeoptReason(reason);
9557 }
9558 }
9559}
9560
// Debug-only sanity check that a recognized method's source fingerprint still
// matches the value recorded in recognized_methods_list.h. On mismatch it
// prints a sed-ready replacement line and returns false.
bool Function::CheckSourceFingerprint(int32_t fp) const {
#if !defined(DEBUG)
  return true;  // Only check on debug.
#endif

  // Any transformation of the kernel (obfuscation, AOT, snapshots) changes
  // fingerprints, so the check only makes sense in a plain JIT debug build.
  if (Isolate::Current()->obfuscate() || FLAG_precompiled_mode ||
      (Dart::vm_snapshot_kind() != Snapshot::kNone)) {
    return true;  // The kernel structure has been altered, skip checking.
  }

  if (is_declared_in_bytecode()) {
    // AST and bytecode compute different fingerprints, and we only track one
    // fingerprint set.
    return true;
  }

  if (SourceFingerprint() != fp) {
    // This output can be copied into a file, then used with sed
    // to replace the old values.
    // sed -i.bak -f /tmp/newkeys \
    //    runtime/vm/compiler/recognized_methods_list.h
    THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
    return false;
  }
  return true;
}
9587
// Returns this function's current code, compiling it first if necessary.
// Compile-time errors are thrown as Dart exceptions; other errors are
// propagated. Must run on the mutator thread with an exit frame handler.
CodePtr Function::EnsureHasCode() const {
  if (HasCode()) return CurrentCode();
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  Zone* zone = thread->zone();
  const Object& result =
      Object::Handle(zone, Compiler::CompileFunction(thread, *this));
  if (result.IsError()) {
    if (result.IsLanguageError()) {
      // Language errors become catchable compile-time error exceptions.
      Exceptions::ThrowCompileTimeError(LanguageError::Cast(result));
      UNREACHABLE();
    }
    Exceptions::PropagateError(Error::Cast(result));
    UNREACHABLE();
  }
  // Compiling in unoptimized mode should never fail if there are no errors.
  ASSERT(HasCode());
  ASSERT(ForceOptimize() || unoptimized_code() == result.raw());
  return CurrentCode();
}
9609
// Returns whether this function's code must include a monomorphic checked
// entry point, i.e. whether switchable call sites may transition into calling
// it monomorphically. JIT/precompiler-only.
bool Function::NeedsMonomorphicCheckedEntry(Zone* zone) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Only dynamically invoked instance functions can be reached through
  // switchable calls.
  if (!IsDynamicFunction()) {
    return false;
  }

  // For functions which need an args descriptor the switchable call sites will
  // transition directly to calling via a stub (and therefore never call the
  // monomorphic entry).
  //
  // See runtime_entry.cc:DEFINE_RUNTIME_ENTRY(UnlinkedCall)
  if (PrologueNeedsArgumentsDescriptor()) {
    return false;
  }

  // All dyn:* forwarders are called via SwitchableCalls and all except the ones
  // with `PrologueNeedsArgumentsDescriptor()` transition into monomorphic
  // state.
  if (Function::IsDynamicInvocationForwarderName(name())) {
    return true;
  }

  // If table dispatch is disabled, all instance calls use switchable calls.
  if (!(FLAG_precompiled_mode && FLAG_use_bare_instructions &&
        FLAG_use_table_dispatch)) {
    return true;
  }

  // Only if there are dynamic callers and if we didn't create a dyn:* forwarder
  // for it do we need the monomorphic checked entry.
  return HasDynamicCallers(zone) &&
         !kernel::NeedsDynamicInvocationForwarder(*this);
#else
  UNREACHABLE();
  return true;
#endif
}
9647
// Returns whether TFA metadata indicates this function may be invoked
// dynamically (via switchable calls). Conservatively true for closure
// call/get:call. JIT/precompiler-only.
bool Function::HasDynamicCallers(Zone* zone) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Issue(dartbug.com/42719):
  // Right now the metadata of _Closure.call says there are no dynamic callers -
  // even though there can be. To be conservative we return true.
  if ((name() == Symbols::GetCall().raw() || name() == Symbols::Call().raw()) &&
      Class::IsClosureClass(Owner())) {
    return true;
  }

  // Use the results of TFA to determine whether this function is ever
  // called dynamically, i.e. using switchable calls.
  kernel::ProcedureAttributesMetadata metadata;
  metadata = kernel::ProcedureAttributesOf(*this, zone);
  // Getters (including method extractors) are tracked separately from
  // methods/setters in the TFA metadata.
  if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) {
    return metadata.getter_called_dynamically;
  } else {
    return metadata.method_or_setter_called_dynamically;
  }
#else
  UNREACHABLE();
  return true;
#endif
}
9672
// Returns whether this function's prologue must inspect the arguments
// descriptor (to bind type arguments or optional parameters).
bool Function::PrologueNeedsArgumentsDescriptor() const {
  // The prologue of those functions need to examine the arg descriptor for
  // various purposes.
  return IsGeneric() || HasOptionalParameters();
}
9678
// Returns whether this function may be compiled with a second, unchecked
// entry point that skips argument type checks.
bool Function::MayHaveUncheckedEntryPoint() const {
  return FLAG_enable_multiple_entrypoints &&
         (NeedsArgumentTypeChecks() || IsImplicitClosureFunction());
}
9683
// Returns a debugging description of this function: its name, modifiers,
// kind, and (for dispatchers) the specializing arguments descriptor.
const char* Function::ToCString() const {
  if (IsNull()) {
    return "Function: null";
  }
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer buffer(zone);
  buffer.Printf("Function '%s':", String::Handle(zone, name()).ToCString());
  if (is_static()) {
    buffer.AddString(" static");
  }
  if (is_abstract()) {
    buffer.AddString(" abstract");
  }
  // Append a kind-specific tag; plain functions and closures get none.
  switch (kind()) {
    case FunctionLayout::kRegularFunction:
    case FunctionLayout::kClosureFunction:
    case FunctionLayout::kImplicitClosureFunction:
    case FunctionLayout::kGetterFunction:
    case FunctionLayout::kSetterFunction:
      break;
    case FunctionLayout::kSignatureFunction:
      buffer.AddString(" signature");
      break;
    case FunctionLayout::kConstructor:
      buffer.AddString(is_static() ? " factory" : " constructor");
      break;
    case FunctionLayout::kImplicitGetter:
      buffer.AddString(" getter");
      break;
    case FunctionLayout::kImplicitSetter:
      buffer.AddString(" setter");
      break;
    case FunctionLayout::kImplicitStaticGetter:
      buffer.AddString(" static-getter");
      break;
    case FunctionLayout::kFieldInitializer:
      buffer.AddString(" field-initializer");
      break;
    case FunctionLayout::kMethodExtractor:
      buffer.AddString(" method-extractor");
      break;
    case FunctionLayout::kNoSuchMethodDispatcher:
      buffer.AddString(" no-such-method-dispatcher");
      break;
    case FunctionLayout::kDynamicInvocationForwarder:
      buffer.AddString(" dynamic-invocation-forwarder");
      break;
    case FunctionLayout::kInvokeFieldDispatcher:
      buffer.AddString(" invoke-field-dispatcher");
      break;
    case FunctionLayout::kIrregexpFunction:
      buffer.AddString(" irregexp-function");
      break;
    case FunctionLayout::kFfiTrampoline:
      buffer.AddString(" ffi-trampoline-function");
      break;
    default:
      UNREACHABLE();
  }
  if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher()) {
    // Dispatchers are specialized per arguments descriptor; show it.
    const auto& args_desc_array = Array::Handle(zone, saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    buffer.AddChar('[');
    args_desc.PrintTo(&buffer);
    buffer.AddChar(']');
  }
  if (is_const()) {
    buffer.AddString(" const");
  }
  buffer.AddChar('.');
  return buffer.buffer();
}
9756
// Sets the context scope describing the variables captured by the closure.
void ClosureData::set_context_scope(const ContextScope& value) const {
  StorePointer(&raw_ptr()->context_scope_, value.raw());
}

// Caches the canonical implicit static closure instance; may be set only once.
void ClosureData::set_implicit_static_closure(const Instance& closure) const {
  ASSERT(!closure.IsNull());
  ASSERT(raw_ptr()->closure_ == Instance::null());
  StorePointer(&raw_ptr()->closure_, closure.raw());
}

// Sets the enclosing function of the closure.
void ClosureData::set_parent_function(const Function& value) const {
  StorePointer(&raw_ptr()->parent_function_, value.raw());
}

// Sets the cached signature type of the closure function.
void ClosureData::set_signature_type(const Type& value) const {
  StorePointer(&raw_ptr()->signature_type_, value.raw());
}
9774
// Allocates a new, zero-initialized ClosureData object in old space.
ClosureDataPtr ClosureData::New() {
  ASSERT(Object::closure_data_class() != Class::null());
  ObjectPtr raw = Object::Allocate(ClosureData::kClassId,
                                   ClosureData::InstanceSize(), Heap::kOld);
  return static_cast<ClosureDataPtr>(raw);
}
9781
// Returns a debugging description of this ClosureData's fields.
const char* ClosureData::ToCString() const {
  if (IsNull()) {
    return "ClosureData: null";
  }
  const Function& parent = Function::Handle(parent_function());
  const Type& type = Type::Handle(signature_type());
  // Context scope and implicit closure are printed as raw addresses; the
  // other fields are expanded recursively.
  return OS::SCreate(Thread::Current()->zone(),
                     "ClosureData: context_scope: 0x%" Px
                     " parent_function: %s signature_type: %s"
                     " implicit_static_closure: 0x%" Px,
                     static_cast<uword>(context_scope()),
                     parent.IsNull() ? "null" : parent.ToCString(),
                     type.IsNull() ? "null" : type.ToCString(),
                     static_cast<uword>(implicit_static_closure()));
}
9797
// Sets the function enclosing this signature function, if any.
void SignatureData::set_parent_function(const Function& value) const {
  StorePointer(&raw_ptr()->parent_function_, value.raw());
}

// Sets the cached signature type of the signature function.
void SignatureData::set_signature_type(const Type& value) const {
  StorePointer(&raw_ptr()->signature_type_, value.raw());
}
9805
// Allocates a new, zero-initialized SignatureData object in |space|.
SignatureDataPtr SignatureData::New(Heap::Space space) {
  ASSERT(Object::signature_data_class() != Class::null());
  ObjectPtr raw = Object::Allocate(SignatureData::kClassId,
                                   SignatureData::InstanceSize(), space);
  return static_cast<SignatureDataPtr>(raw);
}
9812
9813const char* SignatureData::ToCString() const {
9814 if (IsNull()) {
9815 return "SignatureData: null";
9816 }
9817 const Function& parent = Function::Handle(parent_function());
9818 const Type& type = Type::Handle(signature_type());
9819 return OS::SCreate(Thread::Current()->zone(),
9820 "SignatureData parent_function: %s signature_type: %s",
9821 parent.IsNull() ? "null" : parent.ToCString(),
9822 type.IsNull() ? "null" : type.ToCString());
9823}
9824
// Sets the type the redirecting factory redirects to; must be non-null.
void RedirectionData::set_type(const Type& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->type_, value.raw());
}

// Sets the identifier naming the target constructor.
void RedirectionData::set_identifier(const String& value) const {
  StorePointer(&raw_ptr()->identifier_, value.raw());
}

// Sets the resolved target function of the redirection.
void RedirectionData::set_target(const Function& value) const {
  StorePointer(&raw_ptr()->target_, value.raw());
}
9837
// Allocates a new, zero-initialized RedirectionData object in old space.
RedirectionDataPtr RedirectionData::New() {
  ASSERT(Object::redirection_data_class() != Class::null());
  ObjectPtr raw = Object::Allocate(RedirectionData::kClassId,
                                   RedirectionData::InstanceSize(), Heap::kOld);
  return static_cast<RedirectionDataPtr>(raw);
}
9844
// Returns a debugging description of this RedirectionData's fields.
const char* RedirectionData::ToCString() const {
  if (IsNull()) {
    return "RedirectionData: null";
  }
  const Type& redir_type = Type::Handle(type());
  const String& ident = String::Handle(identifier());
  const Function& target_fun = Function::Handle(target());
  return OS::SCreate(Thread::Current()->zone(),
                     "RedirectionData: type: %s identifier: %s target: %s",
                     redir_type.IsNull() ? "null" : redir_type.ToCString(),
                     ident.IsNull() ? "null" : ident.ToCString(),
                     target_fun.IsNull() ? "null" : target_fun.ToCString());
}
9858
// Sets the Dart signature type of the FFI trampoline.
void FfiTrampolineData::set_signature_type(const Type& value) const {
  StorePointer(&raw_ptr()->signature_type_, value.raw());
}

// Sets the function describing the native (C) signature.
void FfiTrampolineData::set_c_signature(const Function& value) const {
  StorePointer(&raw_ptr()->c_signature_, value.raw());
}

// Sets the Dart function invoked when the native callback fires.
void FfiTrampolineData::set_callback_target(const Function& value) const {
  StorePointer(&raw_ptr()->callback_target_, value.raw());
}

// Sets the id used to look up this callback's metadata at runtime.
void FfiTrampolineData::set_callback_id(int32_t callback_id) const {
  StoreNonPointer(&raw_ptr()->callback_id_, callback_id);
}

// Sets the value returned to native code if the callback throws.
void FfiTrampolineData::set_callback_exceptional_return(
    const Instance& value) const {
  StorePointer(&raw_ptr()->callback_exceptional_return_, value.raw());
}
9879
// Allocates a new FfiTrampolineData object in old space with callback_id 0.
FfiTrampolineDataPtr FfiTrampolineData::New() {
  ASSERT(Object::ffi_trampoline_data_class() != Class::null());
  ObjectPtr raw =
      Object::Allocate(FfiTrampolineData::kClassId,
                       FfiTrampolineData::InstanceSize(), Heap::kOld);
  FfiTrampolineDataPtr data = static_cast<FfiTrampolineDataPtr>(raw);
  // callback_id_ is a non-pointer field and is not cleared by Allocate.
  data->ptr()->callback_id_ = 0;
  return data;
}
9889
9890const char* FfiTrampolineData::ToCString() const {
9891 Type& signature_type = Type::Handle(this->signature_type());
9892 String& signature_type_name =
9893 String::Handle(signature_type.UserVisibleName());
9894 return OS::SCreate(
9895 Thread::Current()->zone(), "TrampolineData: signature=%s",
9896 signature_type_name.IsNull() ? "null" : signature_type_name.ToCString());
9897}
9898
// Returns whether compilers should work on clones of fields rather than the
// originals (always true for background compilation).
bool Field::ShouldCloneFields() {
  return Compiler::IsBackgroundCompilation() ||
         FLAG_force_clone_compiler_objects;
}
9903
// Creates a clone of this (original) field that points back at it.
FieldPtr Field::CloneFromOriginal() const {
  return this->Clone(*this);
}
9907
9908FieldPtr Field::Original() const {
9909 if (IsNull()) {
9910 return Field::null();
9911 }
9912 Object& obj = Object::Handle(raw_ptr()->owner_);
9913 if (obj.IsField()) {
9914 return Field::RawCast(obj.raw());
9915 } else {
9916 return this->raw();
9917 }
9918}
9919
9920const Object* Field::CloneForUnboxed(const Object& value) const {
9921 if (is_unboxing_candidate() && !is_nullable()) {
9922 switch (guarded_cid()) {
9923 case kDoubleCid:
9924 case kFloat32x4Cid:
9925 case kFloat64x2Cid:
9926 return &Object::Handle(Object::Clone(value, Heap::kNew));
9927 default:
9928 // Not a supported unboxed field type.
9929 return &value;
9930 }
9931 }
9932 return &value;
9933}
9934
// Links this clone back to its original field via the owner_ slot.
void Field::SetOriginal(const Field& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->owner_, static_cast<ObjectPtr>(value.raw()));
}
9940
9941StringPtr Field::GetterName(const String& field_name) {
9942 return String::Concat(Symbols::GetterPrefix(), field_name);
9943}
9944
// Returns getter-prefix + |field_name| as a canonicalized symbol.
StringPtr Field::GetterSymbol(const String& field_name) {
  return Symbols::FromGet(Thread::Current(), field_name);
}
9948
// Looks up (without creating) the getter-name symbol for |field_name|.
StringPtr Field::LookupGetterSymbol(const String& field_name) {
  return Symbols::LookupFromGet(Thread::Current(), field_name);
}
9952
// Returns setter-prefix + |field_name| as a plain (non-symbol) string.
StringPtr Field::SetterName(const String& field_name) {
  return String::Concat(Symbols::SetterPrefix(), field_name);
}
9956
// Returns setter-prefix + |field_name| as a canonicalized symbol.
StringPtr Field::SetterSymbol(const String& field_name) {
  return Symbols::FromSet(Thread::Current(), field_name);
}
9960
// Looks up (without creating) the setter-name symbol for |field_name|.
StringPtr Field::LookupSetterSymbol(const String& field_name) {
  return Symbols::LookupFromSet(Thread::Current(), field_name);
}
9964
// Strips the getter prefix from |getter_name|, returning the field name as
// a symbol.
StringPtr Field::NameFromGetter(const String& getter_name) {
  return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength,
                      getter_name.Length() - kGetterPrefixLength);
}
9969
// Strips the setter prefix from |setter_name|, returning the field name as
// a symbol.
StringPtr Field::NameFromSetter(const String& setter_name) {
  return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength,
                      setter_name.Length() - kSetterPrefixLength);
}
9974
// Strips the init prefix from |init_name|, returning the field name as a
// symbol.
StringPtr Field::NameFromInit(const String& init_name) {
  return Symbols::New(Thread::Current(), init_name, kInitPrefixLength,
                      init_name.Length() - kInitPrefixLength);
}
9979
// True if |function_name| carries the getter prefix.
bool Field::IsGetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::GetterPrefix());
}
9983
// True if |function_name| carries the setter prefix.
bool Field::IsSetterName(const String& function_name) {
  return function_name.StartsWith(Symbols::SetterPrefix());
}
9987
// True if |function_name| carries the init prefix.
bool Field::IsInitName(const String& function_name) {
  return function_name.StartsWith(Symbols::InitPrefix());
}
9991
// Sets the field's name. The name must be a canonical symbol, and only the
// original field (never a clone) may be renamed.
void Field::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  ASSERT(IsOriginal());
  StorePointer(&raw_ptr()->name_, value.raw());
}
9997
9998ObjectPtr Field::RawOwner() const {
9999 if (IsOriginal()) {
10000 return raw_ptr()->owner_;
10001 } else {
10002 const Field& field = Field::Handle(Original());
10003 ASSERT(field.IsOriginal());
10004 ASSERT(!Object::Handle(field.raw_ptr()->owner_).IsField());
10005 return field.raw_ptr()->owner_;
10006 }
10007}
10008
10009ClassPtr Field::Owner() const {
10010 const Field& field = Field::Handle(Original());
10011 ASSERT(field.IsOriginal());
10012 const Object& obj = Object::Handle(field.raw_ptr()->owner_);
10013 if (obj.IsClass()) {
10014 return Class::Cast(obj).raw();
10015 }
10016 ASSERT(obj.IsPatchClass());
10017 return PatchClass::Cast(obj).patched_class();
10018}
10019
// Returns the class in which this field was originally declared: the owner
// itself, or for patched classes the PatchClass's origin class.
ClassPtr Field::Origin() const {
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.raw_ptr()->owner_);
  if (obj.IsClass()) {
    return Class::Cast(obj).raw();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).origin_class();
}
10030
// Returns the script in which this field is declared, resolved through the
// owning Class or PatchClass of the original field.
ScriptPtr Field::Script() const {
  // NOTE(turnidge): If you update this function, you probably want to
  // update Class::PatchFieldsAndFunctions() at the same time.
  const Field& field = Field::Handle(Original());
  ASSERT(field.IsOriginal());
  const Object& obj = Object::Handle(field.raw_ptr()->owner_);
  if (obj.IsClass()) {
    return Class::Cast(obj).script();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).script();
}
10043
// Returns the kernel binary data of the library this field belongs to,
// resolved through the owner (original field, Class, or PatchClass).
ExternalTypedDataPtr Field::KernelData() const {
  const Object& obj = Object::Handle(this->raw_ptr()->owner_);
  // During background JIT compilation field objects are copied
  // and copy points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelData();
  } else if (obj.IsClass()) {
    Library& library = Library::Handle(Class::Cast(obj).library());
    return library.kernel_data();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_data();
}
10057
// Copies |src|'s binary declaration info (kernel/bytecode offset bits) into
// this field. Not available in the precompiled runtime.
void Field::InheritBinaryDeclarationFrom(const Field& src) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&raw_ptr()->binary_declaration_,
                  src.raw_ptr()->binary_declaration_);
#endif
}
10066
// Returns the offset of the owning library's kernel data within the kernel
// program. Only valid for fields declared in kernel (not bytecode).
intptr_t Field::KernelDataProgramOffset() const {
  ASSERT(!is_declared_in_bytecode());
  const Object& obj = Object::Handle(raw_ptr()->owner_);
  // During background JIT compilation field objects are copied
  // and copy points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelDataProgramOffset();
  } else if (obj.IsClass()) {
    Library& lib = Library::Handle(Class::Cast(obj).library());
    ASSERT(!lib.is_declared_in_bytecode());
    return lib.kernel_offset();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).library_kernel_offset();
}
10082
// Called at finalization time
// Sets this field's declared static type. Mutator-thread only, and only on
// the original field.
void Field::SetFieldType(const AbstractType& value) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  ASSERT(!value.IsNull());
  // Skip the store (and its write barrier) if the type is unchanged.
  if (value.raw() != type()) {
    StorePointer(&raw_ptr()->type_, value.raw());
  }
}
10092
// Allocates an uninitialized Field in old space; callers are expected to
// initialize it via InitializeNew.
FieldPtr Field::New() {
  ASSERT(Object::field_class() != Class::null());
  ObjectPtr raw =
      Object::Allocate(Field::kClassId, Field::InstanceSize(), Heap::kOld);
  return static_cast<FieldPtr>(raw);
}
10099
// Initializes the freshly allocated field |result|. Shared by Field::New
// and Field::NewTopLevel; the declared type is set separately via
// SetFieldType.
void Field::InitializeNew(const Field& result,
                          const String& name,
                          bool is_static,
                          bool is_final,
                          bool is_const,
                          bool is_reflectable,
                          bool is_late,
                          const Object& owner,
                          TokenPosition token_pos,
                          TokenPosition end_token_pos) {
  result.set_kind_bits(0);
  result.set_name(name);
  result.set_is_static(is_static);
  if (!is_static) {
    result.SetOffset(0, 0);
  }
  result.set_is_final(is_final);
  result.set_is_const(is_const);
  result.set_is_reflectable(is_reflectable);
  result.set_is_late(is_late);
  result.set_is_double_initialized(false);
  result.set_owner(owner);
  result.set_token_pos(token_pos);
  result.set_end_token_pos(end_token_pos);
  result.set_has_nontrivial_initializer(false);
  result.set_has_initializer(false);
  if (FLAG_precompiled_mode) {
    // May be updated by KernelLoader::ReadInferredType
    result.set_is_unboxing_candidate(false);
  } else {
    // Only non-final, non-late instance fields start as unboxing candidates.
    result.set_is_unboxing_candidate(!is_final && !is_late && !is_static);
  }
  result.set_initializer_changed_after_initialization(false);
  NOT_IN_PRECOMPILED(result.set_is_declared_in_bytecode(false));
  NOT_IN_PRECOMPILED(result.set_binary_declaration_offset(0));
  result.set_has_pragma(false);
  result.set_static_type_exactness_state(
      StaticTypeExactnessState::NotTracking());
  Isolate* isolate = Isolate::Current();
  if (is_static) {
    isolate->RegisterStaticField(result);
  }

// Use field guards if they are enabled and the isolate has never reloaded.
// TODO(johnmccutchan): The reload case assumes the worst case (everything is
// dynamic and possibly null). Attempt to relax this later.
#if defined(PRODUCT)
  const bool use_guarded_cid =
      FLAG_precompiled_mode || isolate->use_field_guards();
#else
  const bool use_guarded_cid =
      FLAG_precompiled_mode ||
      (isolate->use_field_guards() && !isolate->HasAttemptedReload());
#endif  // defined(PRODUCT)
  // kIllegalCid marks a guard awaiting its first assignment; kDynamicCid
  // disables guarding entirely.
  result.set_guarded_cid(use_guarded_cid ? kIllegalCid : kDynamicCid);
  result.set_is_nullable(use_guarded_cid ? false : true);
  result.set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  // Presently, we only attempt to remember the list length for final fields.
  if (is_final && use_guarded_cid) {
    result.set_guarded_list_length(Field::kUnknownFixedLength);
  } else {
    result.set_guarded_list_length(Field::kNoFixedLength);
  }
}
10164
// Allocates and fully initializes a new field with the given attributes,
// owner, declared |type| and source range.
FieldPtr Field::New(const String& name,
                    bool is_static,
                    bool is_final,
                    bool is_const,
                    bool is_reflectable,
                    bool is_late,
                    const Object& owner,
                    const AbstractType& type,
                    TokenPosition token_pos,
                    TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
                is_late, owner, token_pos, end_token_pos);
  result.SetFieldType(type);
  return result.raw();
}
10182
// Allocates a top-level (library-scope) field: always static and
// reflectable. Note: the declared type is NOT set here, unlike Field::New.
FieldPtr Field::NewTopLevel(const String& name,
                            bool is_final,
                            bool is_const,
                            bool is_late,
                            const Object& owner,
                            TokenPosition token_pos,
                            TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, true, /* is_static */
                is_final, is_const, true, /* is_reflectable */
                is_late, owner, token_pos, end_token_pos);
  return result.raw();
}
10197
// Clones this field into old space and links the clone back to |original|
// (in practice this == original; see CloneFromOriginal).
FieldPtr Field::Clone(const Field& original) const {
  if (original.IsNull()) {
    return Field::null();
  }
  ASSERT(original.IsOriginal());
  Field& clone = Field::Handle();
  clone ^= Object::Clone(*this, Heap::kOld);
  clone.SetOriginal(original);
  clone.InheritBinaryDeclarationFrom(original);
  return clone.raw();
}
10209
// Returns a fingerprint of this field's kernel source, or 0 when no
// fingerprint is available (bytecode-declared fields, precompiled runtime).
int32_t Field::SourceFingerprint() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (is_declared_in_bytecode()) {
    return 0;  // TODO(37353): Implement or remove.
  }
  return kernel::KernelSourceFingerprintHelper::CalculateFieldFingerprint(
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
10221
// Unimplemented: initializer source text is not recoverable here; callers
// must not reach this.
StringPtr Field::InitializingExpression() const {
  UNREACHABLE();
  return String::null();
}
10226
// Returns the user-visible (scrubbed) field name as a C string; with
// --show-internal-names the raw internal name is returned instead.
const char* Field::UserVisibleNameCString() const {
  NoSafepointScope no_safepoint;
  if (FLAG_show_internal_names) {
    return String::Handle(name()).ToCString();
  }
  return String::ScrubName(String::Handle(name()), is_extension_member());
}
10234
// Returns the user-visible (scrubbed) field name as a symbol; with
// --show-internal-names the raw internal name is returned instead.
StringPtr Field::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  return Symbols::New(
      Thread::Current(),
      String::ScrubName(String::Handle(name()), is_extension_member()));
}
10243
// Returns the guarded fixed list length (kUnknownFixedLength /
// kNoFixedLength are sentinels; see field guard code below).
intptr_t Field::guarded_list_length() const {
  return Smi::Value(raw_ptr()->guarded_list_length_);
}
10247
// Stores the guarded list length. Mutator-thread only; only the original
// field's guard state may be mutated.
void Field::set_guarded_list_length(intptr_t list_length) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  StoreSmi(&raw_ptr()->guarded_list_length_, Smi::New(list_length));
}
10253
// Returns the in-object offset of the guarded list's length field. Stored
// biased by -kHeapObjectTag (so it fits an int8_t); the bias is undone here.
intptr_t Field::guarded_list_length_in_object_offset() const {
  return raw_ptr()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
}
10257
// Stores the in-object length offset, biased by -kHeapObjectTag to fit an
// int8_t; the trailing assert checks the round trip is lossless.
void Field::set_guarded_list_length_in_object_offset(
    intptr_t list_length_offset) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  StoreNonPointer(&raw_ptr()->guarded_list_length_in_object_offset_,
                  static_cast<int8_t>(list_length_offset - kHeapObjectTag));
  ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
}
10266
10267bool Field::NeedsSetter() const {
10268 // Late fields always need a setter, unless they're static and non-final, or
10269 // final with an initializer.
10270 if (is_late()) {
10271 if (is_static() && !is_final()) {
10272 return false;
10273 }
10274 if (is_final() && has_initializer()) {
10275 return false;
10276 }
10277 return true;
10278 }
10279
10280 // Non-late static fields never need a setter.
10281 if (is_static()) {
10282 return false;
10283 }
10284
10285 // Otherwise, the field only needs a setter if it isn't final.
10286 return !is_final();
10287}
10288
10289bool Field::NeedsGetter() const {
10290 // All instance fields need a getter.
10291 if (!is_static()) return true;
10292
10293 // Static fields also need a getter if they have a non-trivial initializer,
10294 // because it needs to be initialized lazily.
10295 if (has_nontrivial_initializer()) return true;
10296
10297 // Static late fields with no initializer also need a getter, to check if it's
10298 // been initialized.
10299 return is_late() && !has_initializer();
10300}
10301
10302const char* Field::ToCString() const {
10303 NoSafepointScope no_safepoint;
10304 if (IsNull()) {
10305 return "Field: null";
10306 }
10307 const char* kF0 = is_static() ? " static" : "";
10308 const char* kF1 = is_late() ? " late" : "";
10309 const char* kF2 = is_final() ? " final" : "";
10310 const char* kF3 = is_const() ? " const" : "";
10311 const char* field_name = String::Handle(name()).ToCString();
10312 const Class& cls = Class::Handle(Owner());
10313 const char* cls_name = String::Handle(cls.Name()).ToCString();
10314 return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s%s",
10315 cls_name, field_name, kF0, kF1, kF2, kF3);
10316}
10317
// Build a closure object that gets (or sets) the contents of a static
// field f and cache the closure in a newly created static field
// named #f (or ##f in case of a setter — the hash mark is prepended twice).
InstancePtr Field::AccessorClosure(bool make_setter) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(is_static());
  const Class& field_owner = Class::Handle(zone, Owner());

  String& closure_name = String::Handle(zone, this->name());
  closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  if (make_setter) {
    // Setter cache fields get a second hash mark prepended.
    closure_name =
        Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
  }

  Field& closure_field = Field::Handle(zone);
  closure_field = field_owner.LookupStaticField(closure_name);
  if (!closure_field.IsNull()) {
    // Cache hit: the closure was created previously and stored in the
    // cache field's static value.
    ASSERT(closure_field.is_static());
    const Instance& closure =
        Instance::Handle(zone, closure_field.StaticValue());
    ASSERT(!closure.IsNull());
    ASSERT(closure.IsClosure());
    return closure.raw();
  }

  // The cache field is expected to exist already; creating it here is not
  // supported on this path.
  UNREACHABLE();
  return Instance::null();
}
10348
// Returns the cached getter closure for this static field.
InstancePtr Field::GetterClosure() const {
  return AccessorClosure(false);
}
10352
// Returns the cached setter closure for this static field.
InstancePtr Field::SetterClosure() const {
  return AccessorClosure(true);
}
10356
// Returns the array of optimized Code objects that depend on this field's
// guard state (see FieldDependentArray below).
ArrayPtr Field::dependent_code() const {
  return raw_ptr()->dependent_code_;
}
10360
// Replaces the dependent-code array; only the original field carries guard
// state, hence the IsOriginal assertion.
void Field::set_dependent_code(const Array& array) const {
  ASSERT(IsOriginal());
  StorePointer(&raw_ptr()->dependent_code_, array.raw());
}
10365
// Manages the weak list of optimized code that depends on a field's guard
// state, and reports when such code is deoptimized or switched back to
// unoptimized code after a guard violation.
class FieldDependentArray : public WeakCodeReferences {
 public:
  explicit FieldDependentArray(const Field& field)
      : WeakCodeReferences(Array::Handle(field.dependent_code())),
        field_(field) {}

  // Stores the (possibly reallocated) backing array back onto the field.
  virtual void UpdateArrayTo(const Array& value) {
    field_.set_dependent_code(value);
  }

  // Tracing hook invoked when a dependent frame is deoptimized.
  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because guard on field %s failed.\n",
                function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

  // Tracing hook invoked when a dependent function's code is switched back
  // to its unoptimized version.
  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching '%s' to unoptimized code because guard"
          " on field '%s' was violated.\n",
          function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

 private:
  const Field& field_;
  DISALLOW_COPY_AND_ASSIGN(FieldDependentArray);
};
10398
// Registers optimized |code| as dependent on this field's guard state so it
// can be invalidated when the guard changes.
void Field::RegisterDependentCode(const Code& code) const {
  ASSERT(IsOriginal());
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(code.is_optimized());
  FieldDependentArray a(*this);
  a.Register(code);
}
10406
// Disables (deoptimizes) all code registered as dependent on this field's
// guard state. Mutator-thread only.
void Field::DeoptimizeDependentCode() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsOriginal());
  FieldDependentArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    THR_Print("Deopt for field guard (field %s)\n", ToCString());
  }
  a.DisableCode();
}
10416
// True if this field's guard state (guarded cid, nullability, list length,
// unboxing candidacy, and static-type exactness) matches |other|'s.
bool Field::IsConsistentWith(const Field& other) const {
  return (raw_ptr()->guarded_cid_ == other.raw_ptr()->guarded_cid_) &&
         (raw_ptr()->is_nullable_ == other.raw_ptr()->is_nullable_) &&
         (raw_ptr()->guarded_list_length_ ==
          other.raw_ptr()->guarded_list_length_) &&
         (is_unboxing_candidate() == other.is_unboxing_candidate()) &&
         (static_type_exactness_state().Encode() ==
          other.static_type_exactness_state().Encode());
}
10426
// True if this static field still holds the uninitialized sentinel. The
// transition sentinel (initialization in progress) must not be observed
// here.
bool Field::IsUninitialized() const {
  Thread* thread = Thread::Current();
  const FieldTable* field_table = thread->isolate()->field_table();
  const InstancePtr raw_value = field_table->At(field_id());
  ASSERT(raw_value != Object::transition_sentinel().raw());
  return raw_value == Object::sentinel().raw();
}
10434
// Returns this field's initializer function, creating it from kernel on
// first use. Uses double-checked locking: an unlocked read first, then a
// re-check under initializer_functions_mutex before compiling.
FunctionPtr Field::EnsureInitializerFunction() const {
  ASSERT(has_nontrivial_initializer());
  ASSERT(IsOriginal());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& initializer = Function::Handle(zone, InitializerFunction());
  if (initializer.IsNull()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    // Initializer functions are precompiled; creating one here is invalid.
    UNREACHABLE();
#else
    SafepointMutexLocker ml(
        thread->isolate()->group()->initializer_functions_mutex());
    // Double check after grabbing the lock.
    initializer = InitializerFunction();
    if (initializer.IsNull()) {
      initializer = kernel::CreateFieldInitializerFunction(thread, zone, *this);
    }
#endif
  }
  return initializer.raw();
}
10456
// Publishes |initializer| as this field's initializer function. The caller
// must hold initializer_functions_mutex (asserted below).
void Field::SetInitializerFunction(const Function& initializer) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsOriginal());
  ASSERT(IsolateGroup::Current()
             ->initializer_functions_mutex()
             ->IsOwnedByCurrentThread());
  // We have to ensure that all stores into the initializer function object
  // happen before releasing the pointer to the initializer as it may be
  // accessed without grabbing the lock.
  StorePointer<FunctionPtr, std::memory_order_release>(
      &raw_ptr()->initializer_function_, initializer.raw());
#endif
}
10472
// True if an initializer function has already been created for this field.
bool Field::HasInitializerFunction() const {
  return raw_ptr()->initializer_function_ != Function::null();
}
10476
// Computes and stores the initial value of this instance field in
// |instance| (which must still hold the uninitialized sentinel). Returns an
// error raised by the initializer; throws a LateInitializationError for
// late fields without initializer and for late final fields found already
// assigned after the initializer ran.
ErrorPtr Field::InitializeInstance(const Instance& instance) const {
  ASSERT(IsOriginal());
  ASSERT(is_instance());
  ASSERT(instance.GetField(*this) == Object::sentinel().raw());
  Object& value = Object::Handle();
  if (has_nontrivial_initializer()) {
    const Function& initializer = Function::Handle(EnsureInitializerFunction());
    const Array& args = Array::Handle(Array::New(1));
    args.SetAt(0, instance);
    value = DartEntry::InvokeFunction(initializer, args);
    if (!value.IsNull() && value.IsError()) {
      return Error::Cast(value).raw();
    }
  } else {
    if (is_late() && !has_initializer()) {
      // Reading an unassigned late field without an initializer is an error.
      Exceptions::ThrowLateInitializationError(String::Handle(name()));
      UNREACHABLE();
    }
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    value = saved_initial_value();
#endif
  }
  ASSERT(value.IsNull() || value.IsInstance());
  if (is_late() && is_final() &&
      (instance.GetField(*this) != Object::sentinel().raw())) {
    // The initializer itself assigned this late final field (e.g.
    // re-entrantly); a second assignment is an error.
    Exceptions::ThrowLateInitializationError(String::Handle(name()));
    UNREACHABLE();
  }
  instance.SetField(*this, value);
  return Error::null();
}
10510
// Lazily initializes this static field if it still holds the uninitialized
// sentinel. Non-late fields use the transition sentinel to detect cyclic
// initialization; late fields throw LateInitializationError instead.
// Returns an error if the initializer evaluation failed.
ErrorPtr Field::InitializeStatic() const {
  ASSERT(IsOriginal());
  ASSERT(is_static());
  if (StaticValue() == Object::sentinel().raw()) {
    auto& value = Object::Handle();
    if (is_late()) {
      if (!has_initializer()) {
        Exceptions::ThrowLateInitializationError(String::Handle(name()));
        UNREACHABLE();
      }
      value = EvaluateInitializer();
      if (value.IsError()) {
        return Error::Cast(value).raw();
      }
      if (is_final() && (StaticValue() != Object::sentinel().raw())) {
        // The initializer itself assigned this late final field.
        Exceptions::ThrowLateInitializationError(String::Handle(name()));
        UNREACHABLE();
      }
    } else {
      // Mark initialization as in progress so a re-entrant read detects
      // the cycle (see the transition_sentinel branch below).
      SetStaticValue(Object::transition_sentinel());
      value = EvaluateInitializer();
      if (value.IsError()) {
        // Reset so a later read does not report a spurious cycle.
        SetStaticValue(Object::null_instance());
        return Error::Cast(value).raw();
      }
    }
    ASSERT(value.IsNull() || value.IsInstance());
    SetStaticValue(value.IsNull() ? Instance::null_instance()
                                  : Instance::Cast(value));
    return Error::null();
  } else if (StaticValue() == Object::transition_sentinel().raw()) {
    // Initialization re-entered while already in progress: cyclic.
    ASSERT(!is_late());
    const Array& ctor_args = Array::Handle(Array::New(1));
    const String& field_name = String::Handle(name());
    ctor_args.SetAt(0, field_name);
    Exceptions::ThrowByType(Exceptions::kCyclicInitializationError, ctor_args);
    UNREACHABLE();
  }
  return Error::null();
}
10551
// Runs this field's initializer function with no arguments and returns its
// result (an instance or an error). OOB messages and reloads are blocked
// for the duration of the call.
ObjectPtr Field::EvaluateInitializer() const {
  Thread* const thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  NoOOBMessageScope no_msg_scope(thread);
  NoReloadScope no_reload_scope(thread->isolate(), thread);
  const Function& initializer = Function::Handle(EnsureInitializerFunction());
  return DartEntry::InvokeFunction(initializer, Object::empty_array());
}
10560
10561static intptr_t GetListLength(const Object& value) {
10562 if (value.IsTypedData() || value.IsTypedDataView() ||
10563 value.IsExternalTypedData()) {
10564 return TypedDataBase::Cast(value).Length();
10565 } else if (value.IsArray()) {
10566 return Array::Cast(value).Length();
10567 } else if (value.IsGrowableObjectArray()) {
10568 // List length is variable.
10569 return Field::kNoFixedLength;
10570 }
10571 return Field::kNoFixedLength;
10572}
10573
10574static intptr_t GetListLengthOffset(intptr_t cid) {
10575 if (IsTypedDataClassId(cid) || IsTypedDataViewClassId(cid) ||
10576 IsExternalTypedDataClassId(cid)) {
10577 return TypedData::length_offset();
10578 } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
10579 return Array::length_offset();
10580 } else if (cid == kGrowableObjectArrayCid) {
10581 // List length is variable.
10582 return Field::kUnknownLengthOffset;
10583 }
10584 return Field::kUnknownLengthOffset;
10585}
10586
// Returns a human-readable description of this field's guard state:
// "<?>" for an unassigned guard, "<*>" for a disabled (dynamic) guard,
// otherwise the guarded class, nullability, optional fixed list length
// and static-type exactness.
const char* Field::GuardedPropertiesAsCString() const {
  if (guarded_cid() == kIllegalCid) {
    return "<?>";
  } else if (guarded_cid() == kDynamicCid) {
    ASSERT(!static_type_exactness_state().IsExactOrUninitialized());
    return "<*>";
  }

  Zone* zone = Thread::Current()->zone();

  const char* exactness = "";
  if (static_type_exactness_state().IsTracking()) {
    exactness =
        zone->PrintToString(" {%s}", static_type_exactness_state().ToCString());
  }

  const Class& cls =
      Class::Handle(Isolate::Current()->class_table()->At(guarded_cid()));
  const char* class_name = String::Handle(cls.Name()).ToCString();

  // Final non-nullable list fields also report their tracked length.
  if (IsBuiltinListClassId(guarded_cid()) && !is_nullable() && is_final()) {
    ASSERT(guarded_list_length() != kUnknownFixedLength);
    if (guarded_list_length() == kNoFixedLength) {
      return zone->PrintToString("<%s [*]%s>", class_name, exactness);
    } else {
      return zone->PrintToString(
          "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(),
          guarded_list_length_in_object_offset(), exactness);
    }
  }

  return zone->PrintToString("<%s %s%s>",
                             is_nullable() ? "nullable" : "not-nullable",
                             class_name, exactness);
}
10622
// Records where in the guarded object the list length can be loaded from,
// or kUnknownLengthOffset when no fixed length is being tracked.
void Field::InitializeGuardedListLengthInObjectOffset() const {
  ASSERT(IsOriginal());
  if (needs_length_check() &&
      (guarded_list_length() != Field::kUnknownFixedLength)) {
    const intptr_t offset = GetListLengthOffset(guarded_cid());
    set_guarded_list_length_in_object_offset(offset);
    ASSERT(offset != Field::kUnknownLengthOffset);
  } else {
    set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  }
}
10634
// Folds the class id (and, when tracked, the list length) of an assigned
// |value| into this field's guard state. Returns true if the guard state
// changed, false if |value| already fit the guard or this was the very
// first assignment.
bool Field::UpdateGuardedCidAndLength(const Object& value) const {
  ASSERT(IsOriginal());
  const intptr_t cid = value.GetClassId();

  if (guarded_cid() == kIllegalCid) {
    // Field is assigned first time.
    set_guarded_cid(cid);
    set_is_nullable(cid == kNullCid);

    // Start tracking length if needed.
    ASSERT((guarded_list_length() == Field::kUnknownFixedLength) ||
           (guarded_list_length() == Field::kNoFixedLength));
    if (needs_length_check()) {
      ASSERT(guarded_list_length() == Field::kUnknownFixedLength);
      set_guarded_list_length(GetListLength(value));
      InitializeGuardedListLengthInObjectOffset();
    }

    if (FLAG_trace_field_guards) {
      THR_Print("    => %s\n", GuardedPropertiesAsCString());
    }

    return false;
  }

  if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) {
    // Class id of the assigned value matches expected class id and nullability.

    // If we are tracking length check if it has matches.
    if (needs_length_check() &&
        (guarded_list_length() != GetListLength(value))) {
      ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
      set_guarded_list_length(Field::kNoFixedLength);
      set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
      return true;
    }

    // Everything matches.
    return false;
  }

  if ((cid == kNullCid) && !is_nullable()) {
    // Assigning null value to a non-nullable field makes it nullable.
    set_is_nullable(true);
  } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) {
    // Assigning non-null value to a field that previously contained only null
    // turns it into a nullable field with the given class id.
    ASSERT(is_nullable());
    set_guarded_cid(cid);
  } else {
    // Give up on tracking class id of values contained in this field.
    ASSERT(guarded_cid() != cid);
    set_guarded_cid(kDynamicCid);
    set_is_nullable(true);
  }

  // If we were tracking length drop collected feedback.
  if (needs_length_check()) {
    ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
    set_guarded_list_length(Field::kNoFixedLength);
    set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  }

  // Expected class id or nullability of the field changed.
  return true;
}
10701
// Given the type G<T0, ..., Tn> and class C<U0, ..., Un> find path to C at G.
// This path can be used to compute type arguments of C at G.
//
// Note: we are relying on the restriction that the same class can only occur
// once among the supertype.
//
// On success |path| holds the chain of supertypes from |cls| up to |type|'s
// class; entries pushed during a failed branch are popped again.
static bool FindInstantiationOf(const Type& type,
                                const Class& cls,
                                GrowableArray<const AbstractType*>* path,
                                bool consider_only_super_classes) {
  if (type.type_class() == cls.raw()) {
    return true;  // Found instantiation.
  }

  // First follow the superclass chain.
  Class& cls2 = Class::Handle();
  AbstractType& super_type = AbstractType::Handle();
  super_type = cls.super_type();
  if (!super_type.IsNull() && !super_type.IsObjectType()) {
    cls2 = super_type.type_class();
    path->Add(&super_type);
    if (FindInstantiationOf(type, cls2, path, consider_only_super_classes)) {
      return true;  // Found instantiation.
    }
    path->RemoveLast();
  }

  // Then, unless restricted to superclasses, search implemented interfaces.
  if (!consider_only_super_classes) {
    Array& super_interfaces = Array::Handle(cls.interfaces());
    for (intptr_t i = 0; i < super_interfaces.Length(); i++) {
      super_type ^= super_interfaces.At(i);
      cls2 = super_type.type_class();
      path->Add(&super_type);
      if (FindInstantiationOf(type, cls2, path,
                              /*consider_only_super_classes=*/false)) {
        return true;  // Found instantiation.
      }
      path->RemoveLast();
    }
  }

  return false;  // Not found.
}
10743
// Stores |value| into this static field's field-table slot; optionally also
// records it as the saved initial value (JIT only — the saved value is not
// kept in the precompiled runtime).
void Field::SetStaticValue(const Instance& value,
                           bool save_initial_value) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(is_static());  // Valid only for static dart fields.
  Isolate* isolate = Isolate::Current();
  const intptr_t id = field_id();
  isolate->field_table()->SetAt(id, value.raw());
  if (save_initial_value) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    StorePointer(&raw_ptr()->saved_initial_value_, value.raw());
#endif
  }
}
10757
10758static StaticTypeExactnessState TrivialTypeExactnessFor(const Class& cls) {
10759 const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset();
10760 ASSERT(type_arguments_offset != Class::kNoTypeArguments);
10761 if (StaticTypeExactnessState::CanRepresentAsTriviallyExact(
10762 type_arguments_offset / kWordSize)) {
10763 return StaticTypeExactnessState::TriviallyExact(type_arguments_offset /
10764 kWordSize);
10765 } else {
10766 return StaticTypeExactnessState::NotExact();
10767 }
10768}
10769
10770static const char* SafeTypeArgumentsToCString(const TypeArguments& args) {
10771 return (args.raw() == TypeArguments::null()) ? "<null>" : args.ToCString();
10772}
10773
10774StaticTypeExactnessState StaticTypeExactnessState::Compute(
10775 const Type& static_type,
10776 const Instance& value,
10777 bool print_trace /* = false */) {
10778 ASSERT(!value.IsNull()); // Should be handled by the caller.
10779 ASSERT(value.raw() != Object::sentinel().raw());
10780 ASSERT(value.raw() != Object::transition_sentinel().raw());
10781
10782 const TypeArguments& static_type_args =
10783 TypeArguments::Handle(static_type.arguments());
10784
10785 TypeArguments& args = TypeArguments::Handle();
10786
10787 ASSERT(static_type.IsFinalized());
10788 const Class& cls = Class::Handle(value.clazz());
10789 GrowableArray<const AbstractType*> path(10);
10790
10791 bool is_super_class = true;
10792 if (!FindInstantiationOf(static_type, cls, &path,
10793 /*consider_only_super_classes=*/true)) {
10794 is_super_class = false;
10795 bool found_super_interface = FindInstantiationOf(
10796 static_type, cls, &path, /*consider_only_super_classes=*/false);
10797 ASSERT(found_super_interface);
10798 }
10799
10800 // Trivial case: field has type G<T0, ..., Tn> and value has type
10801 // G<U0, ..., Un>. Check if type arguments match.
10802 if (path.is_empty()) {
10803 ASSERT(cls.raw() == static_type.type_class());
10804 args = value.GetTypeArguments();
10805 // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
10806 // disregards superclass own arguments) improves precision of the
10807 // tracking.
10808 if (args.raw() == static_type_args.raw()) {
10809 return TrivialTypeExactnessFor(cls);
10810 }
10811
10812 if (print_trace) {
10813 THR_Print(" expected %s got %s type arguments\n",
10814 SafeTypeArgumentsToCString(static_type_args),
10815 SafeTypeArgumentsToCString(args));
10816 }
10817 return StaticTypeExactnessState::NotExact();
10818 }
10819
10820 // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and G != C.
10821 // Compute C<X0, ..., Xn> at G (Xi are free type arguments).
10822 // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn,
10823 // such that S0 is an immediate supertype of C and Sn is G<...>.
10824 // Each Si might depend on type parameters of the previous supertype S{i-1}.
10825 // To compute C<X0, ..., Xn> at G we walk the chain backwards and
10826 // instantiate Si using type parameters of S{i-1} which gives us a type
10827 // depending on type parameters of S{i-2}.
10828 AbstractType& type = AbstractType::Handle(path.Last()->raw());
10829 for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
10830 i--) {
10831 args = path[i]->arguments();
10832 type = type.InstantiateFrom(args, TypeArguments::null_type_arguments(),
10833 kAllFree, Heap::kNew);
10834 }
10835
10836 if (type.IsInstantiated()) {
10837 // C<X0, ..., Xn> at G is fully instantiated and does not depend on
10838 // Xi. In this case just check if type arguments match.
10839 args = type.arguments();
10840 if (args.Equals(static_type_args)) {
10841 return is_super_class ? StaticTypeExactnessState::HasExactSuperClass()
10842 : StaticTypeExactnessState::HasExactSuperType();
10843 }
10844
10845 if (print_trace) {
10846 THR_Print(" expected %s got %s type arguments\n",
10847 SafeTypeArgumentsToCString(static_type_args),
10848 SafeTypeArgumentsToCString(args));
10849 }
10850
10851 return StaticTypeExactnessState::NotExact();
10852 }
10853
10854 // The most complicated case: C<X0, ..., Xn> at G depends on
10855 // Xi values. To compare type arguments we would need to instantiate
10856 // it fully from value's type arguments and compare with <U0, ..., Un>.
10857 // However this would complicate fast path in the native code. To avoid this
10858 // complication we would optimize for the trivial case: we check if
10859 // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn> which means we can simply
10860 // compare values type arguements (<T0, ..., Tn>) to fields type arguments
10861 // (<U0, ..., Un>) to establish if field type is exact.
10862 ASSERT(cls.IsGeneric());
10863 const intptr_t num_type_params = cls.NumTypeParameters();
10864 bool trivial_case =
10865 (num_type_params ==
10866 Class::Handle(static_type.type_class()).NumTypeParameters()) &&
10867 (value.GetTypeArguments() == static_type.arguments());
10868 if (!trivial_case && FLAG_trace_field_guards) {
10869 THR_Print("Not a simple case: %" Pd " vs %" Pd
10870 " type parameters, %s vs %s type arguments\n",
10871 num_type_params,
10872 Class::Handle(static_type.type_class()).NumTypeParameters(),
10873 SafeTypeArgumentsToCString(
10874 TypeArguments::Handle(value.GetTypeArguments())),
10875 SafeTypeArgumentsToCString(static_type_args));
10876 }
10877
10878 AbstractType& type_arg = AbstractType::Handle();
10879 args = type.arguments();
10880 for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) {
10881 type_arg = args.TypeAt(i);
10882 if (!type_arg.IsTypeParameter() ||
10883 (TypeParameter::Cast(type_arg).index() != i)) {
10884 if (FLAG_trace_field_guards) {
10885 THR_Print(" => encountered %s at index % " Pd "\n",
10886 type_arg.ToCString(), i);
10887 }
10888 trivial_case = false;
10889 }
10890 }
10891
10892 return trivial_case ? TrivialTypeExactnessFor(cls)
10893 : StaticTypeExactnessState::NotExact();
10894}
10895
10896const char* StaticTypeExactnessState::ToCString() const {
10897 if (!IsTracking()) {
10898 return "not-tracking";
10899 } else if (!IsExactOrUninitialized()) {
10900 return "not-exact";
10901 } else if (IsTriviallyExact()) {
10902 return Thread::Current()->zone()->PrintToString(
10903 "trivially-exact(%hhu)", GetTypeArgumentsOffsetInWords());
10904 } else if (IsHasExactSuperType()) {
10905 return "has-exact-super-type";
10906 } else if (IsHasExactSuperClass()) {
10907 return "has-exact-super-class";
10908 } else {
10909 ASSERT(IsUninitialized());
10910 return "uninitialized-exactness";
10911 }
10912}
10913
// Refines this field's static-type-exactness guard after a store of |value|.
// Returns true if the guard state changed (callers must then deoptimize
// dependent code); returns false if nothing needed updating.
bool Field::UpdateGuardedExactnessState(const Object& value) const {
  if (!static_type_exactness_state().IsExactOrUninitialized()) {
    // Nothing to update.
    return false;
  }

  // A dynamic guarded cid means the cid guard already gave up, so exactness
  // tracking cannot be maintained either.
  if (guarded_cid() == kDynamicCid) {
    if (FLAG_trace_field_guards) {
      THR_Print(
          "  => switching off exactness tracking because guarded cid is "
          "dynamic\n");
    }
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return true;  // Invalidate.
  }

  // If we are storing null into a field or we have an exact super type
  // then there is nothing to do.
  if (value.IsNull() || static_type_exactness_state().IsHasExactSuperType() ||
      static_type_exactness_state().IsHasExactSuperClass()) {
    return false;
  }

  // If we are storing a non-null value into a field that is considered
  // to be trivially exact then we need to check if value has an appropriate
  // type.
  ASSERT(guarded_cid() != kNullCid);

  const Type& field_type = Type::Cast(AbstractType::Handle(type()));
  const TypeArguments& field_type_args =
      TypeArguments::Handle(field_type.arguments());

  const Instance& instance = Instance::Cast(value);
  TypeArguments& args = TypeArguments::Handle();
  if (static_type_exactness_state().IsTriviallyExact()) {
    // Trivial exactness holds only while every stored value carries exactly
    // the field's type arguments; a raw-pointer comparison suffices.
    args = instance.GetTypeArguments();
    if (args.raw() == field_type_args.raw()) {
      return false;
    }

    if (FLAG_trace_field_guards) {
      THR_Print("  expected %s got %s type arguments\n",
                field_type_args.ToCString(), args.ToCString());
    }

    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return true;
  }

  // First non-null store: compute the initial exactness state from scratch.
  ASSERT(static_type_exactness_state().IsUninitialized());
  set_static_type_exactness_state(StaticTypeExactnessState::Compute(
      field_type, instance, FLAG_trace_field_guards));
  return true;
}
10968
10969void Field::RecordStore(const Object& value) const {
10970 ASSERT(IsOriginal());
10971 if (!Isolate::Current()->use_field_guards()) {
10972 return;
10973 }
10974
10975 // We should never try to record a sentinel.
10976 ASSERT(value.raw() != Object::sentinel().raw());
10977
10978 if ((guarded_cid() == kDynamicCid) ||
10979 (is_nullable() && value.raw() == Object::null())) {
10980 // Nothing to do: the field is not guarded or we are storing null into
10981 // a nullable field.
10982 return;
10983 }
10984
10985 if (FLAG_trace_field_guards) {
10986 THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(),
10987 value.ToCString());
10988 }
10989
10990 bool invalidate = false;
10991 if (UpdateGuardedCidAndLength(value)) {
10992 invalidate = true;
10993 }
10994 if (UpdateGuardedExactnessState(value)) {
10995 invalidate = true;
10996 }
10997
10998 if (invalidate) {
10999 if (FLAG_trace_field_guards) {
11000 THR_Print(" => %s\n", GuardedPropertiesAsCString());
11001 }
11002
11003 DeoptimizeDependentCode();
11004 }
11005}
11006
// Widens all of this field's guards to their most permissive settings
// (dynamic cid, nullable, unknown list length, exactness off) and then
// deoptimizes any code compiled against the old, stricter guards.
void Field::ForceDynamicGuardedCidAndLength() const {
  // Assume nothing about this field.
  set_is_unboxing_candidate(false);
  set_guarded_cid(kDynamicCid);
  set_is_nullable(true);
  set_guarded_list_length(Field::kNoFixedLength);
  set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset);
  if (static_type_exactness_state().IsTracking()) {
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
  }
  // Drop any code that relied on the above assumptions.
  DeoptimizeDependentCode();
}
11020
#if !defined(DART_PRECOMPILED_RUNTIME)
// Setter: caches the subtype-test cache used when type-checking stores into
// this field (JIT only; the field does not exist in the AOT runtime).
void Field::set_type_test_cache(const SubtypeTestCache& cache) const {
  StorePointer(&raw_ptr()->type_test_cache_, cache.raw());
}
#endif
11026
// Returns true if this script has source text attached.
bool Script::HasSource() const {
  return raw_ptr()->source_ != String::null();
}

// Returns the script's source text, or String::null() if none is attached.
StringPtr Script::Source() const {
  return raw_ptr()->source_;
}
11034
11035bool Script::IsPartOfDartColonLibrary() const {
11036 const String& script_url = String::Handle(url());
11037 return (script_url.StartsWith(Symbols::DartScheme()) ||
11038 script_url.StartsWith(Symbols::DartSchemePrivate()));
11039}
11040
#if !defined(DART_PRECOMPILED_RUNTIME)
// Restores this script's source text by looking it up in the given kernel
// binary, keyed by the script's resolved URL. Stores whatever the loader
// returns (possibly null) as the new source.
void Script::LoadSourceFromKernel(const uint8_t* kernel_buffer,
                                  intptr_t kernel_buffer_len) const {
  String& uri = String::Handle(resolved_url());
  String& source = String::Handle(kernel::KernelLoader::FindSourceForScript(
      kernel_buffer, kernel_buffer_len, uri));
  set_source(source);
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
11050
// Setter: caches compile-time constants evaluated for this script.
void Script::set_compile_time_constants(const Array& value) const {
  StorePointer(&raw_ptr()->compile_time_constants_, value.raw());
}

// Setter: associates this script with its kernel program's metadata.
void Script::set_kernel_program_info(const KernelProgramInfo& info) const {
  StorePointer(&raw_ptr()->kernel_program_info_, info.raw());
}

// Setter: index of this script within its kernel program's source table.
void Script::set_kernel_script_index(const intptr_t kernel_script_index) const {
  StoreNonPointer(&raw_ptr()->kernel_script_index_, kernel_script_index);
}

// Returns the string-offsets table of the kernel program this script belongs
// to. The script must have kernel program info attached.
TypedDataPtr Script::kernel_string_offsets() const {
  KernelProgramInfo& program_info =
      KernelProgramInfo::Handle(kernel_program_info());
  ASSERT(!program_info.IsNull());
  return program_info.string_offsets();
}
11069
// Lazily resolves this script's source text and line-starts table by finding
// an already-loaded script with the same resolved URI in any loaded library.
// No-op unless the script was flagged for lazy lookup; the flag is cleared
// afterwards even if nothing was found. JIT only (no-op in AOT).
void Script::LookupSourceAndLineStarts(Zone* zone) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (!IsLazyLookupSourceAndLineStarts()) {
    return;
  }
  const String& uri = String::Handle(zone, resolved_url());
  ASSERT(uri.IsSymbol());
  if (uri.Length() > 0) {
    // Entry included only to provide URI - actual source should already exist
    // in the VM, so try to find it.
    Library& lib = Library::Handle(zone);
    Script& script = Script::Handle(zone);
    const GrowableObjectArray& libs = GrowableObjectArray::Handle(
        zone, Isolate::Current()->object_store()->libraries());
    for (intptr_t i = 0; i < libs.Length(); i++) {
      lib ^= libs.At(i);
      script = lib.LookupScript(uri, /* useResolvedUri = */ true);
      if (!script.IsNull()) {
        // Adopt the first candidate that carries source or line starts.
        const auto& source = String::Handle(zone, script.Source());
        const auto& line_starts = TypedData::Handle(zone, script.line_starts());
        if (!source.IsNull() || !line_starts.IsNull()) {
          set_source(source);
          set_line_starts(line_starts);
          break;
        }
      }
    }
  }
  SetLazyLookupSourceAndLineStarts(false);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
11101
// Builds the flat line-number table used by the service protocol: a growable
// array of the form [null, line, tokenPos, column, tokenPos, column, ...,
// null, line, ...], where each null starts a new line record. Returns an
// array with just the leading null when no line-starts data is available.
GrowableObjectArrayPtr Script::GenerateLineNumberArray() const {
  Zone* zone = Thread::Current()->zone();
  const GrowableObjectArray& info =
      GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
  const Object& line_separator = Object::Handle(zone);
  LookupSourceAndLineStarts(zone);
  if (line_starts() == TypedData::null()) {
    // Scripts in the AOT snapshot do not have a line starts array.
    // Neither do some scripts coming from bytecode.
    // A well-formed line number array has a leading null.
    info.Add(line_separator);  // New line.
    return info.raw();
  }
#if !defined(DART_PRECOMPILED_RUNTIME)
  Smi& value = Smi::Handle(zone);
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  intptr_t line_count = line_starts_data.Length();
  const Array& debug_positions_array = Array::Handle(debug_positions());
  intptr_t token_count = debug_positions_array.Length();
  int token_index = 0;

  // line_starts stores deltas, so each line's start offset is accumulated
  // from the previous line's start.
  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  intptr_t previous_start = 0;
  for (int line_index = 0; line_index < line_count; ++line_index) {
    intptr_t start = previous_start + line_starts_reader.DeltaAt(line_index);
    // Output the rest of the tokens if we have no next line.
    intptr_t end = TokenPosition::kMaxSourcePos;
    if (line_index + 1 < line_count) {
      end = start + line_starts_reader.DeltaAt(line_index + 1);
    }
    bool first = true;
    while (token_index < token_count) {
      value ^= debug_positions_array.At(token_index);
      intptr_t debug_position = value.Value();
      if (debug_position >= end) break;

      if (first) {
        // Emit the line header lazily, only when the line has tokens.
        info.Add(line_separator);        // New line.
        value = Smi::New(line_index + 1);  // Line number.
        info.Add(value);
        first = false;
      }

      value ^= debug_positions_array.At(token_index);
      info.Add(value);  // Token position.
      value = Smi::New(debug_position - start + 1);  // Column.
      info.Add(value);
      ++token_index;
    }
    previous_start = start;
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return info.raw();
}
11156
// Setter: the script's (possibly unresolved) URL.
void Script::set_url(const String& value) const {
  StorePointer(&raw_ptr()->url_, value.raw());
}

// Setter: the script's canonical, resolved URL.
void Script::set_resolved_url(const String& value) const {
  StorePointer(&raw_ptr()->resolved_url_, value.raw());
}

// Setter: the script's source text.
void Script::set_source(const String& value) const {
  StorePointer(&raw_ptr()->source_, value.raw());
}

// Setter: delta-encoded table of line start offsets (kernel format).
void Script::set_line_starts(const TypedData& value) const {
  StorePointer(&raw_ptr()->line_starts_, value.raw());
}

// Setter: sorted array of token positions that carry debug information.
void Script::set_debug_positions(const Array& value) const {
  StorePointer(&raw_ptr()->debug_positions_, value.raw());
}

// Returns the raw line-starts table, or TypedData::null() if absent.
TypedDataPtr Script::line_starts() const {
  return raw_ptr()->line_starts_;
}
11180
// Returns the array of debug token positions for this script, computing it
// lazily on first access (JIT only; in AOT the stored value is returned
// as-is and may be null).
ArrayPtr Script::debug_positions() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Array& debug_positions_array = Array::Handle(raw_ptr()->debug_positions_);
  if (debug_positions_array.IsNull()) {
    // This is created lazily. Now we need it.
    // CollectTokenPositionsFor() stores the result via set_debug_positions().
    kernel::CollectTokenPositionsFor(*this);
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return raw_ptr()->debug_positions_;
}
11191
// Setter: raw flags byte (see ScriptLayout for the bit assignments).
void Script::set_flags(uint8_t value) const {
  StoreNonPointer(&raw_ptr()->flags_, value);
}

// Sets/clears the bit that marks this script for lazy source and
// line-starts resolution (see LookupSourceAndLineStarts).
void Script::SetLazyLookupSourceAndLineStarts(bool value) const {
  set_flags(ScriptLayout::LazyLookupSourceAndLineStartsBit::update(
      value, raw_ptr()->flags_));
}

// Returns true if source/line-starts still need to be lazily resolved.
bool Script::IsLazyLookupSourceAndLineStarts() const {
  return ScriptLayout::LazyLookupSourceAndLineStartsBit::decode(
      raw_ptr()->flags_);
}

// Setter: wall-clock time (ms) at which the script was loaded.
void Script::set_load_timestamp(int64_t value) const {
  StoreNonPointer(&raw_ptr()->load_timestamp_, value);
}
11209
// Sets the line/column offsets applied when mapping token positions to
// source locations (used for scripts embedded inside another source, e.g.
// at a non-zero position). Both offsets must be non-negative.
void Script::SetLocationOffset(intptr_t line_offset,
                               intptr_t col_offset) const {
  ASSERT(line_offset >= 0);
  ASSERT(col_offset >= 0);
  StoreNonPointer(&raw_ptr()->line_offset_, line_offset);
  StoreNonPointer(&raw_ptr()->col_offset_, col_offset);
}
11217
11218// Specialized for AOT compilation, which does this lookup for every token
11219// position that could be part of a stack trace.
// Maps |target_token_pos| to a 1-based line and column using only the
// line-starts table (no source text needed). Returns false when the mapping
// is impossible: AOT runtime, synthetic (negative) token positions, or a
// script without line-starts data.
bool Script::GetTokenLocationUsingLineStarts(TokenPosition target_token_pos,
                                             intptr_t* line,
                                             intptr_t* column) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return false;
#else
  // Negative positions denote positions that do not correspond to Dart code.
  if (target_token_pos.value() < 0) return false;

  Zone* zone = Thread::Current()->zone();
  TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  // Scripts loaded from bytecode may have null line_starts().
  if (line_starts_data.IsNull()) return false;

  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  line_starts_reader.LocationForPosition(target_token_pos.value(), line,
                                         column);
  // The line and column numbers returned are ordinals, so we shouldn't get 0.
  ASSERT(*line > 0);
  ASSERT(*column > 0);
  return true;
#endif
}
11243
11244#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns true only for ASCII letters; all other code points are rejected.
static bool IsLetter(int32_t c) {
  const bool is_upper = (c >= 'A') && (c <= 'Z');
  const bool is_lower = (c >= 'a') && (c <= 'z');
  return is_upper || is_lower;
}
11248
// Returns true only for the ASCII digits '0' through '9'.
static bool IsDecimalDigit(int32_t c) {
  return (c >= '0') && (c <= '9');
}
11252
11253static bool IsIdentStartChar(int32_t c) {
11254 return IsLetter(c) || (c == '_') || (c == '$');
11255}
11256
11257static bool IsIdentChar(int32_t c) {
11258 return IsLetter(c) || IsDecimalDigit(c) || (c == '_') || (c == '$');
11259}
11260#endif // !defined(DART_PRECOMPILED_RUNTIME)
11261
// Maps |token_pos| to a 1-based line (required) and optionally column and
// token length. If no line-starts table exists, reports line/column -1 and
// token length 1. Token length is recovered by scanning the identifier
// characters in the source text starting at the token position.
void Script::GetTokenLocation(TokenPosition token_pos,
                              intptr_t* line,
                              intptr_t* column,
                              intptr_t* token_len) const {
  ASSERT(line != NULL);
  Zone* zone = Thread::Current()->zone();

  LookupSourceAndLineStarts(zone);
  if (line_starts() == TypedData::null()) {
    // Scripts in the AOT snapshot do not have a line starts array.
    // Neither do some scripts coming from bytecode.
    *line = -1;
    if (column != NULL) {
      *column = -1;
    }
    if (token_len != NULL) {
      *token_len = 1;
    }
    return;
  }
#if !defined(DART_PRECOMPILED_RUNTIME)
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  line_starts_reader.LocationForPosition(token_pos.value(), line, column);
  if (token_len != NULL) {
    *token_len = 1;
    // We don't explicitly save this data: Load the source
    // and find it from there.
    const String& source = String::Handle(zone, Source());
    if (!source.IsNull()) {
      intptr_t offset = token_pos.value();
      // Only identifiers get a length > 1; any other token reports 1.
      if (offset < source.Length() && IsIdentStartChar(source.CharAt(offset))) {
        for (intptr_t i = offset + 1;
             i < source.Length() && IsIdentChar(source.CharAt(i)); ++i) {
          ++*token_len;
        }
      }
    }
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
11303
11304void Script::TokenRangeAtLine(intptr_t line_number,
11305 TokenPosition* first_token_index,
11306 TokenPosition* last_token_index) const {
11307 ASSERT(first_token_index != NULL && last_token_index != NULL);
11308 ASSERT(line_number > 0);
11309
11310 Thread* thread = Thread::Current();
11311 Zone* zone = thread->zone();
11312 LookupSourceAndLineStarts(zone);
11313 if (line_starts() == TypedData::null()) {
11314 // Scripts in the AOT snapshot do not have a line starts array.
11315 // Neither do some scripts coming from bytecode.
11316 *first_token_index = TokenPosition::kNoSource;
11317 *last_token_index = TokenPosition::kNoSource;
11318 return;
11319 }
11320#if !defined(DART_PRECOMPILED_RUNTIME)
11321 const String& source = String::Handle(zone, Source());
11322 intptr_t source_length;
11323 if (source.IsNull()) {
11324 Smi& value = Smi::Handle(zone);
11325 const Array& debug_positions_array = Array::Handle(zone, debug_positions());
11326 value ^= debug_positions_array.At(debug_positions_array.Length() - 1);
11327 source_length = value.Value();
11328 } else {
11329 source_length = source.Length();
11330 }
11331 const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
11332 kernel::KernelLineStartsReader line_starts_reader(line_starts_data,
11333 Thread::Current()->zone());
11334 line_starts_reader.TokenRangeAtLine(source_length, line_number,
11335 first_token_index, last_token_index);
11336#endif // !defined(DART_PRECOMPILED_RUNTIME)
11337}
11338
// Returns the text of the 1-based |line_number| (after subtracting this
// script's line offset), without the trailing newline. Returns
// "<optimized out>" when no source is attached (AOT) and the empty string
// when the line does not exist. Never returns null.
StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const {
  const String& src = String::Handle(Source());
  if (src.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Symbols::OptimizedOut().raw();
  }
  intptr_t relative_line_number = line_number - line_offset();
  intptr_t current_line = 1;
  intptr_t line_start_idx = -1;
  intptr_t last_char_idx = -1;
  for (intptr_t ix = 0;
       (ix < src.Length()) && (current_line <= relative_line_number); ix++) {
    if ((current_line == relative_line_number) && (line_start_idx < 0)) {
      line_start_idx = ix;  // First character of the requested line.
    }
    if (src.CharAt(ix) == '\n') {
      current_line++;
    } else if (src.CharAt(ix) == '\r') {
      // Treat "\r\n" as a single line break (the '\n' advances the line).
      // NOTE(review): a lone '\r' as the very last character does not count
      // as a line break here -- confirm that is intentional.
      if ((ix + 1 != src.Length()) && (src.CharAt(ix + 1) != '\n')) {
        current_line++;
      }
    } else {
      last_char_idx = ix;  // Last non-newline character seen so far.
    }
  }
  // Guarantee that returned string is never NULL.

  if (line_start_idx >= 0) {
    return String::SubString(src, line_start_idx,
                             last_char_idx - line_start_idx + 1, space);
  } else {
    return Symbols::Empty().raw();
  }
}
11373
// Returns the source text between token positions |from| and |to| by first
// translating both to line/column coordinates.
StringPtr Script::GetSnippet(TokenPosition from, TokenPosition to) const {
  intptr_t from_line;
  intptr_t from_column;
  intptr_t to_line;
  intptr_t to_column;
  GetTokenLocation(from, &from_line, &from_column);
  GetTokenLocation(to, &to_line, &to_column);
  return GetSnippet(from_line, from_column, to_line, to_column);
}
11383
// Returns the source text between (from_line, from_column) and
// (to_line, to_column), both 1-based. Returns "<optimized out>" when no
// source is attached and String::null() when either endpoint cannot be
// located in the source.
StringPtr Script::GetSnippet(intptr_t from_line,
                             intptr_t from_column,
                             intptr_t to_line,
                             intptr_t to_column) const {
  const String& src = String::Handle(Source());
  if (src.IsNull()) {
    return Symbols::OptimizedOut().raw();
  }
  intptr_t length = src.Length();
  intptr_t line = 1 + line_offset();
  intptr_t column = 1;
  intptr_t scan_position = 0;
  intptr_t snippet_start = -1;
  intptr_t snippet_end = -1;
  // The column offset only applies to the first line of the script.
  if (from_line - line_offset() == 1) {
    column += col_offset();
  }

  while (scan_position != length) {
    if (snippet_start == -1) {
      if ((line == from_line) && (column == from_column)) {
        snippet_start = scan_position;
      }
    }

    char c = src.CharAt(scan_position);
    if (c == '\n') {
      line++;
      column = 0;  // Incremented to 1 below after consuming the newline.
    } else if (c == '\r') {
      line++;
      column = 0;
      // Consume "\r\n" as a single line break.
      if ((scan_position + 1 != length) &&
          (src.CharAt(scan_position + 1) == '\n')) {
        scan_position++;
      }
    }
    scan_position++;
    column++;

    if ((line == to_line) && (column == to_column)) {
      snippet_end = scan_position;
      break;
    }
  }
  String& snippet = String::Handle();
  if ((snippet_start != -1) && (snippet_end != -1)) {
    snippet =
        String::SubString(src, snippet_start, snippet_end - snippet_start);
  }
  return snippet.raw();
}
11436
// Allocates a new, uninitialized Script object in old space.
ScriptPtr Script::New() {
  ASSERT(Object::script_class() != Class::null());
  ObjectPtr raw =
      Object::Allocate(Script::kClassId, Script::InstanceSize(), Heap::kOld);
  return static_cast<ScriptPtr>(raw);
}

// Convenience constructor: the URL doubles as the resolved URL.
ScriptPtr Script::New(const String& url, const String& source) {
  return Script::New(url, url, source);
}
11447
// Creates and initializes a Script with the given URLs (both canonicalized
// to symbols) and source text. Location offsets default to (0, 0) and the
// load timestamp is recorded unless suppressed for tests.
ScriptPtr Script::New(const String& url,
                      const String& resolved_url,
                      const String& source) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Script& result = Script::Handle(zone, Script::New());
  result.set_url(String::Handle(zone, Symbols::New(thread, url)));
  result.set_resolved_url(
      String::Handle(zone, Symbols::New(thread, resolved_url)));
  result.set_source(source);
  result.SetLocationOffset(0, 0);
  result.set_flags(0);
  result.set_kernel_script_index(0);
  // A zero timestamp keeps test output deterministic.
  result.set_load_timestamp(
      FLAG_remove_script_timestamps_for_test ? 0 : OS::GetCurrentTimeMillis());
  return result.raw();
}
11465
// Returns a zone-allocated debug description of the form "Script(<url>)".
const char* Script::ToCString() const {
  const String& name = String::Handle(url());
  return OS::SCreate(Thread::Current()->zone(), "Script(%s)", name.ToCString());
}
11470
// Returns the loaded library whose LoadedScripts() array contains this
// script, or Library::null() if no library references it. Linear scan over
// all libraries and their scripts.
LibraryPtr Script::FindLibrary() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, isolate->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Array& scripts = Array::Handle(zone);
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    scripts = lib.LoadedScripts();
    for (intptr_t j = 0; j < scripts.Length(); j++) {
      // Raw-pointer identity is sufficient: scripts are canonical objects.
      if (scripts.At(j) == raw()) {
        return lib.raw();
      }
    }
  }
  return Library::null();
}
11490
// Iterates over the used entries of a library's dictionary, skipping null
// (empty) slots.
DictionaryIterator::DictionaryIterator(const Library& library)
    : array_(Array::Handle(library.dictionary())),
      // Last element in array is a Smi indicating the number of entries used.
      size_(Array::Handle(library.dictionary()).Length() - 1),
      next_ix_(0) {
  // Position next_ix_ on the first non-null entry (if any).
  MoveToNextObject();
}
11498
11499ObjectPtr DictionaryIterator::GetNext() {
11500 ASSERT(HasNext());
11501 int ix = next_ix_++;
11502 MoveToNextObject();
11503 ASSERT(array_.At(ix) != Object::null());
11504 return array_.At(ix);
11505}
11506
// Advances next_ix_ past null (empty) slots until it refers to a non-null
// entry or the iterator is exhausted.
// NOTE(review): the initial read happens before any HasNext() check, so when
// the iterator is exhausted this reads the trailing used-count Smi slot at
// index size_ (non-null, so the loop exits immediately) -- confirm against
// the dictionary layout.
void DictionaryIterator::MoveToNextObject() {
  Object& obj = Object::Handle(array_.At(next_ix_));
  while (obj.IsNull() && HasNext()) {
    next_ix_++;
    obj = array_.At(next_ix_);
  }
}
11514
// Iterates over the classes in a library's dictionary. With kIteratePrivate
// the library's top-level class is also yielded (as the final element);
// otherwise only dictionary entries that are classes are returned.
ClassDictionaryIterator::ClassDictionaryIterator(const Library& library,
                                                 IterationKind kind)
    : DictionaryIterator(library),
      toplevel_class_(Class::Handle((kind == kIteratePrivate)
                                        ? library.toplevel_class()
                                        : Class::null())) {
  MoveToNextClass();
}
11523
// Returns the next class. Dictionary classes are yielded first; once they
// are exhausted the (single-use) top-level class is returned and cleared.
ClassPtr ClassDictionaryIterator::GetNextClass() {
  ASSERT(HasNext());
  Class& cls = Class::Handle();
  if (next_ix_ < size_) {
    int ix = next_ix_++;
    cls ^= array_.At(ix);
    MoveToNextClass();
    return cls.raw();
  }
  // Dictionary exhausted: hand out the top-level class exactly once.
  ASSERT(!toplevel_class_.IsNull());
  cls = toplevel_class_.raw();
  toplevel_class_ = Class::null();
  return cls.raw();
}
11538
11539void ClassDictionaryIterator::MoveToNextClass() {
11540 Object& obj = Object::Handle();
11541 while (next_ix_ < size_) {
11542 obj = array_.At(next_ix_);
11543 if (obj.IsClass()) {
11544 return;
11545 }
11546 next_ix_++;
11547 }
11548}
11549
// Reports a fatal compile-time error when a library exceeds the 16-bit
// import-count limit (see Library::set_num_imports). Does not return.
static void ReportTooManyImports(const Library& lib) {
  const String& url = String::Handle(lib.url());
  Report::MessageF(Report::kError, Script::Handle(lib.LookupScript(url)),
                   TokenPosition::kNoSource, Report::AtLocation,
                   "too many imports in library '%s'", url.ToCString());
  UNREACHABLE();
}
11557
11558bool Library::IsAnyCoreLibrary() const {
11559 String& url_str = Thread::Current()->StringHandle();
11560 url_str = url();
11561 return url_str.StartsWith(Symbols::DartScheme()) ||
11562 url_str.StartsWith(Symbols::DartSchemePrivate());
11563}
11564
// Setter: number of imports. The field is 16 bits wide; exceeding the limit
// reports a fatal error instead of silently truncating.
void Library::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    ReportTooManyImports(*this);
  }
  StoreNonPointer(&raw_ptr()->num_imports_, value);
}

// Setter: the library name; must already be canonicalized as a symbol.
void Library::set_name(const String& name) const {
  ASSERT(name.IsSymbol());
  StorePointer(&raw_ptr()->name_, name.raw());
}

// Setter: the library URL.
void Library::set_url(const String& name) const {
  StorePointer(&raw_ptr()->url_, name.raw());
}

// Setter: the kernel binary data backing this library.
void Library::set_kernel_data(const ExternalTypedData& data) const {
  StorePointer(&raw_ptr()->kernel_data_, data.raw());
}

// Setter: the deferred-loading unit this library belongs to.
void Library::set_loading_unit(const LoadingUnit& value) const {
  StorePointer(&raw_ptr()->loading_unit_, value.raw());
}

// Sets the library name; only legal before the library finishes loading.
void Library::SetName(const String& name) const {
  // Only set name once.
  ASSERT(!Loaded());
  set_name(name);
}

// Load-state transition: (allocated | requested) -> in progress.
void Library::SetLoadInProgress() const {
  // Must not already be in the process of being loaded.
  ASSERT(raw_ptr()->load_state_ <= LibraryLayout::kLoadRequested);
  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoadInProgress);
}

// Load-state transition: allocated -> requested.
void Library::SetLoadRequested() const {
  // Must not be already loaded.
  ASSERT(raw_ptr()->load_state_ == LibraryLayout::kAllocated);
  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoadRequested);
}

// Load-state transition: (in progress | requested) -> loaded.
void Library::SetLoaded() const {
  // Should not be already loaded or just allocated.
  ASSERT(LoadInProgress() || LoadRequested());
  StoreNonPointer(&raw_ptr()->load_state_, LibraryLayout::kLoaded);
}
11612
// Builds the synthetic metadata-field name for a class: "@<ClassName>".
static StringPtr MakeClassMetaName(Thread* thread,
                                   Zone* zone,
                                   const Class& cls) {
  return Symbols::FromConcat(thread, Symbols::At(),
                             String::Handle(zone, cls.Name()));
}

// Builds the synthetic metadata-field name for a field:
// "@<OriginClass>@<fieldName>".
static StringPtr MakeFieldMetaName(Thread* thread,
                                   Zone* zone,
                                   const Field& field) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone, Class::Handle(zone, field.Origin())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(zone, field.name()));
  return Symbols::FromConcatAll(thread, pieces);
}

// Builds the synthetic metadata-field name for a function:
// "@<OriginClass>@<functionName>".
static StringPtr MakeFunctionMetaName(Thread* thread,
                                      Zone* zone,
                                      const Function& func) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone, Class::Handle(zone, func.origin())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(zone, func.name()));
  return Symbols::FromConcatAll(thread, pieces);
}

// Builds the synthetic metadata-field name for a type parameter:
// "@<ParameterizedClass>@<parameterName>".
static StringPtr MakeTypeParameterMetaName(Thread* thread,
                                           Zone* zone,
                                           const TypeParameter& param) {
  const String& cname = String::Handle(
      zone,
      MakeClassMetaName(thread, zone,
                        Class::Handle(zone, param.parameterized_class())));
  GrowableHandlePtrArray<const String> pieces(zone, 3);
  pieces.Add(cname);
  pieces.Add(Symbols::At());
  pieces.Add(String::Handle(zone, param.name()));
  return Symbols::FromConcatAll(thread, pieces);
}
11659
// Registers metadata for a program element by creating a hidden top-level
// field named |name| whose value is evaluated lazily from kernel/bytecode
// (the field initially holds the empty array as a placeholder). JIT only.
void Library::AddMetadata(const Object& owner,
                          const String& name,
                          TokenPosition token_pos,
                          intptr_t kernel_offset,
                          intptr_t bytecode_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  Zone* zone = thread->zone();
  const String& metaname = String::Handle(zone, Symbols::New(thread, name));
  const Field& field =
      Field::Handle(zone, Field::NewTopLevel(metaname,
                                             false,  // is_final
                                             false,  // is_const
                                             false,  // is_late
                                             owner, token_pos, token_pos));
  field.SetFieldType(Object::dynamic_type());
  // Metadata fields are implementation details: hide them from reflection.
  field.set_is_reflectable(false);
  field.SetStaticValue(Array::empty_array(), true);
  // Record where the metadata expression lives so it can be evaluated later.
  if (bytecode_offset > 0) {
    field.set_is_declared_in_bytecode(true);
    field.set_bytecode_offset(bytecode_offset);
  } else {
    field.set_kernel_offset(kernel_offset);
  }
  GrowableObjectArray& metadata =
      GrowableObjectArray::Handle(zone, this->metadata());
  metadata.Add(field, Heap::kOld);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
11692
// Registers metadata attached to a class declaration.
void Library::AddClassMetadata(const Class& cls,
                               const Object& tl_owner,
                               TokenPosition token_pos,
                               intptr_t kernel_offset,
                               intptr_t bytecode_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // We use the toplevel class as the owner of a class's metadata field because
  // a class's metadata is in scope of the library, not the class.
  AddMetadata(tl_owner,
              String::Handle(zone, MakeClassMetaName(thread, zone, cls)),
              token_pos, kernel_offset, bytecode_offset);
}

// Registers metadata attached to a field declaration.
void Library::AddFieldMetadata(const Field& field,
                               TokenPosition token_pos,
                               intptr_t kernel_offset,
                               intptr_t bytecode_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const auto& owner = Object::Handle(zone, field.RawOwner());
  const auto& name =
      String::Handle(zone, MakeFieldMetaName(thread, zone, field));
  AddMetadata(owner, name, token_pos, kernel_offset, bytecode_offset);
}

// Registers metadata attached to a function declaration.
void Library::AddFunctionMetadata(const Function& func,
                                  TokenPosition token_pos,
                                  intptr_t kernel_offset,
                                  intptr_t bytecode_offset) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const auto& owner = Object::Handle(zone, func.RawOwner());
  const auto& name =
      String::Handle(zone, MakeFunctionMetaName(thread, zone, func));
  AddMetadata(owner, name, token_pos, kernel_offset, bytecode_offset);
}

// Registers metadata attached to a type-parameter declaration. No kernel or
// bytecode offset is recorded (both passed as 0).
void Library::AddTypeParameterMetadata(const TypeParameter& param,
                                       TokenPosition token_pos) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const auto& owner = Class::Handle(zone, param.parameterized_class());
  const auto& name =
      String::Handle(zone, MakeTypeParameterMetaName(thread, zone, param));
  AddMetadata(owner, name, token_pos, 0, 0);
}

// Registers metadata attached to the library declaration itself.
void Library::AddLibraryMetadata(const Object& tl_owner,
                                 TokenPosition token_pos,
                                 intptr_t kernel_offset,
                                 intptr_t bytecode_offset) const {
  AddMetadata(tl_owner, Symbols::TopLevel(), token_pos, kernel_offset,
              bytecode_offset);
}
11748
11749StringPtr Library::MakeMetadataName(const Object& obj) const {
11750 Thread* thread = Thread::Current();
11751 Zone* zone = thread->zone();
11752 if (obj.IsClass()) {
11753 return MakeClassMetaName(thread, zone, Class::Cast(obj));
11754 } else if (obj.IsField()) {
11755 return MakeFieldMetaName(thread, zone, Field::Cast(obj));
11756 } else if (obj.IsFunction()) {
11757 return MakeFunctionMetaName(thread, zone, Function::Cast(obj));
11758 } else if (obj.IsLibrary()) {
11759 return Symbols::TopLevel().raw();
11760 } else if (obj.IsTypeParameter()) {
11761 return MakeTypeParameterMetaName(thread, zone, TypeParameter::Cast(obj));
11762 }
11763 UNIMPLEMENTED();
11764 return String::null();
11765}
11766
11767FieldPtr Library::GetMetadataField(const String& metaname) const {
11768 const GrowableObjectArray& metadata =
11769 GrowableObjectArray::Handle(this->metadata());
11770 Field& entry = Field::Handle();
11771 String& entryname = String::Handle();
11772 intptr_t num_entries = metadata.Length();
11773 for (intptr_t i = 0; i < num_entries; i++) {
11774 entry ^= metadata.At(i);
11775 entryname = entry.name();
11776 if (entryname.Equals(metaname)) {
11777 return entry.raw();
11778 }
11779 }
11780 return Field::null();
11781}
11782
// Copies the metadata (if any) attached to |from_fun| in |from_library| onto
// |to_fun| in this library, preserving whether the metadata was declared in
// bytecode or in kernel. No-op when |from_fun| carries no metadata.
void Library::CloneMetadataFrom(const Library& from_library,
                                const Function& from_fun,
                                const Function& to_fun) const {
  const String& metaname = String::Handle(MakeMetadataName(from_fun));
  const Field& from_field =
      Field::Handle(from_library.GetMetadataField(metaname));
  if (!from_field.IsNull()) {
    if (from_field.is_declared_in_bytecode()) {
      // Bytecode-declared: propagate the bytecode offset (kernel offset 0).
      AddFunctionMetadata(to_fun, from_field.token_pos(), 0,
                          from_field.bytecode_offset());
    } else {
      // Kernel-declared: propagate the kernel offset (bytecode offset 0).
      AddFunctionMetadata(to_fun, from_field.token_pos(),
                          from_field.kernel_offset(), 0);
    }
  }
}
11799
// Returns the evaluated metadata (annotations) of |obj|, caching the result
// in the backing metadata field's static value. Returns the canonical empty
// array when |obj| has no metadata, and always in the precompiled runtime,
// where metadata is not available.
ObjectPtr Library::GetMetadata(const Object& obj) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().raw();
#else
  if (!obj.IsClass() && !obj.IsField() && !obj.IsFunction() &&
      !obj.IsLibrary() && !obj.IsTypeParameter()) {
    UNREACHABLE();
  }
  if (obj.IsLibrary()) {
    // Ensure top-level class is loaded as it may contain annotations of
    // a library.
    const auto& cls = Class::Handle(toplevel_class());
    if (!cls.IsNull()) {
      cls.EnsureDeclarationLoaded();
    }
  }
  const String& metaname = String::Handle(MakeMetadataName(obj));
  Field& field = Field::Handle(GetMetadataField(metaname));
  if (field.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().raw();
  }
  Object& metadata = Object::Handle(field.StaticValue());
  // The canonical empty array is the sentinel for "not yet evaluated".
  if (metadata.raw() == Object::empty_array().raw()) {
    if (field.is_declared_in_bytecode()) {
      metadata = kernel::BytecodeReader::ReadAnnotation(field);
    } else {
      ASSERT(field.kernel_offset() > 0);
      metadata = kernel::EvaluateMetadata(
          field, /* is_annotations_offset = */ obj.IsLibrary());
    }
    if (metadata.IsArray() || metadata.IsNull()) {
      ASSERT(metadata.raw() != Object::empty_array().raw());
      // Cache the evaluated result; the background compiler skips the
      // write. Null is cached as the canonical null array.
      if (!Compiler::IsBackgroundCompilation()) {
        field.SetStaticValue(
            metadata.IsNull() ? Object::null_array() : Array::Cast(metadata),
            true);
      }
    }
  }
  if (metadata.IsNull()) {
    // Metadata field exists in order to reference extended metadata.
    return Object::empty_array().raw();
  }
  return metadata.raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
11847
// Returns an array of |count| extended annotations of |obj| (a function or
// library only). Extended annotations are read from bytecode, hence the
// is_declared_in_bytecode() assert. Returns the empty array when |obj| has
// no metadata field, and always in the precompiled runtime.
ArrayPtr Library::GetExtendedMetadata(const Object& obj, intptr_t count) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().raw();
#else
  RELEASE_ASSERT(obj.IsFunction() || obj.IsLibrary());
  const String& metaname = String::Handle(MakeMetadataName(obj));
  Field& field = Field::Handle(GetMetadataField(metaname));
  if (field.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().raw();
  }
  ASSERT(field.is_declared_in_bytecode());
  return kernel::BytecodeReader::ReadExtendedAnnotations(field, count);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
11863
11864static bool ShouldBePrivate(const String& name) {
11865 return (name.Length() >= 1 && name.CharAt(0) == '_') ||
11866 (name.Length() >= 5 &&
11867 (name.CharAt(4) == '_' &&
11868 (name.CharAt(0) == 'g' || name.CharAt(0) == 's') &&
11869 name.CharAt(1) == 'e' && name.CharAt(2) == 't' &&
11870 name.CharAt(3) == ':'));
11871}
11872
// Resolves |name| in this library's scope: the local dictionary first, then
// the mangled getter/setter forms of the name, and finally the imports (for
// non-private names). Results are memoized in the resolved-names cache,
// including failed lookups, which are cached as null.
ObjectPtr Library::ResolveName(const String& name) const {
  Object& obj = Object::Handle();
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &obj)) {
    return obj.raw();
  }
  EnsureTopLevelClassIsFinalized();
  obj = LookupLocalObject(name);
  if (!obj.IsNull()) {
    // Names that are in this library's dictionary and are unmangled
    // are not cached. This reduces the size of the cache.
    return obj.raw();
  }
  // Not found as-is; retry with the getter mangling of the name.
  String& accessor_name = String::Handle(Field::LookupGetterSymbol(name));
  if (!accessor_name.IsNull()) {
    obj = LookupLocalObject(accessor_name);
  }
  if (obj.IsNull()) {
    // Then with the setter mangling.
    accessor_name = Field::LookupSetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = LookupLocalObject(accessor_name);
    }
    // Private names are never visible through imports.
    if (obj.IsNull() && !ShouldBePrivate(name)) {
      obj = LookupImportedObject(name);
    }
  }
  // Cache the outcome, even if the lookup failed (obj is null).
  AddToResolvedNamesCache(name, obj);
  return obj.raw();
}
11901
// Hash-table traits keyed by String, comparing string contents (not
// identity). Used for the per-library resolved-/exported-names caches.
class StringEqualsTraits {
 public:
  static const char* Name() { return "StringEqualsTraits"; }
  static bool ReportStats() { return false; }

  // Keys match when the two strings have equal contents.
  static bool IsMatch(const Object& a, const Object& b) {
    return String::Cast(a).Equals(String::Cast(b));
  }
  static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
};
typedef UnorderedHashMap<StringEqualsTraits> ResolvedNamesMap;
11913
// Returns true if the name is found in the cache, false no cache hit.
// obj is set to the cached entry. It may be null, indicating that the
// name does not resolve to anything in this library.
bool Library::LookupResolvedNamesCache(const String& name, Object* obj) const {
  if (resolved_names() == Array::null()) {
    // The cache has not been allocated yet.
    return false;
  }
  ResolvedNamesMap cache(resolved_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
// Mutator compiler thread may add entries and therefore
// change 'resolved_names()' while running a background compilation;
// ASSERT that 'resolved_names()' has not changed only in mutator.
#if defined(DEBUG)
  if (Thread::Current()->IsMutatorThread()) {
    ASSERT(cache.Release().raw() == resolved_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}
11937
// Add a name to the resolved name cache. This name resolves to the
// given object in this library scope. obj may be null, which means
// the name does not resolve to anything in this library scope.
void Library::AddToResolvedNamesCache(const String& name,
                                      const Object& obj) const {
  // Skip when caching is disabled, and never mutate the cache from the
  // background compiler.
  if (!FLAG_use_lib_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (resolved_names() == Array::null()) {
    InitResolvedNamesCache();
  }
  ResolvedNamesMap cache(resolved_names());
  cache.UpdateOrInsert(name, obj);
  // Insertion may have grown/replaced the backing array; store it back.
  StorePointer(&raw_ptr()->resolved_names_, cache.Release().raw());
}
11953
// Returns true if |name| has an entry in the exported-names cache; *obj is
// set to the cached value, which may be null (a cached failed lookup).
bool Library::LookupExportedNamesCache(const String& name, Object* obj) const {
  ASSERT(FLAG_use_exp_cache);
  if (exported_names() == Array::null()) {
    // The cache has not been allocated yet.
    return false;
  }
  ResolvedNamesMap cache(exported_names());
  bool present = false;
  *obj = cache.GetOrNull(name, &present);
// Mutator compiler thread may add entries and therefore
// change 'exported_names()' while running a background compilation;
// do not ASSERT that 'exported_names()' has not changed.
#if defined(DEBUG)
  if (Thread::Current()->IsMutatorThread()) {
    ASSERT(cache.Release().raw() == exported_names());
  } else {
    // Release must be called in debug mode.
    cache.Release();
  }
#endif
  return present;
}
11975
// Adds |name| -> |obj| to the exported-names cache; obj may be null to
// record a failed lookup. Skips when caching is disabled and when running
// the background compiler.
void Library::AddToExportedNamesCache(const String& name,
                                      const Object& obj) const {
  if (!FLAG_use_exp_cache || Compiler::IsBackgroundCompilation()) {
    return;
  }
  if (exported_names() == Array::null()) {
    InitExportedNamesCache();
  }
  ResolvedNamesMap cache(exported_names());
  cache.UpdateOrInsert(name, obj);
  // Insertion may have grown/replaced the backing array; store it back.
  StorePointer(&raw_ptr()->exported_names_, cache.Release().raw());
}
11988
// Invalidates cache entries that may have become stale for |name|: this
// library's resolved-names cache, and the exported-names cache of every
// library in the isolate that has an entry for |name|.
void Library::InvalidateResolvedName(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Object& entry = Object::Handle(zone);
  if (FLAG_use_lib_cache && LookupResolvedNamesCache(name, &entry)) {
    // TODO(koda): Support deleted sentinel in snapshots and remove only 'name'.
    ClearResolvedNamesCache();
  }
  if (!FLAG_use_exp_cache) {
    return;
  }
  // When a new name is added to a library, we need to invalidate all
  // caches that contain an entry for this name. If the name was previously
  // looked up but could not be resolved, the cache contains a null entry.
  GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, thread->isolate()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  intptr_t num_libs = libs.Length();
  for (intptr_t i = 0; i < num_libs; i++) {
    lib ^= libs.At(i);
    if (lib.LookupExportedNamesCache(name, &entry)) {
      // Coarse-grained: drop the library's entire exported-names cache.
      lib.ClearExportedNamesCache();
    }
  }
}
12014
12015// Invalidate all exported names caches in the isolate.
12016void Library::InvalidateExportedNamesCaches() {
12017 GrowableObjectArray& libs = GrowableObjectArray::Handle(
12018 Isolate::Current()->object_store()->libraries());
12019 Library& lib = Library::Handle();
12020 intptr_t num_libs = libs.Length();
12021 for (intptr_t i = 0; i < num_libs; i++) {
12022 lib ^= libs.At(i);
12023 lib.ClearExportedNamesCache();
12024 }
12025}
12026
12027void Library::RehashDictionary(const Array& old_dict,
12028 intptr_t new_dict_size) const {
12029 intptr_t old_dict_size = old_dict.Length() - 1;
12030 const Array& new_dict =
12031 Array::Handle(Array::New(new_dict_size + 1, Heap::kOld));
12032 // Rehash all elements from the original dictionary
12033 // to the newly allocated array.
12034 Object& entry = Class::Handle();
12035 String& entry_name = String::Handle();
12036 Object& new_entry = Object::Handle();
12037 intptr_t used = 0;
12038 for (intptr_t i = 0; i < old_dict_size; i++) {
12039 entry = old_dict.At(i);
12040 if (!entry.IsNull()) {
12041 entry_name = entry.DictionaryName();
12042 ASSERT(!entry_name.IsNull());
12043 const intptr_t hash = entry_name.Hash();
12044 intptr_t index = hash % new_dict_size;
12045 new_entry = new_dict.At(index);
12046 while (!new_entry.IsNull()) {
12047 index = (index + 1) % new_dict_size; // Move to next element.
12048 new_entry = new_dict.At(index);
12049 }
12050 new_dict.SetAt(index, entry);
12051 used++;
12052 }
12053 }
12054 // Set used count.
12055 ASSERT(used < new_dict_size); // Need at least one empty slot.
12056 new_entry = Smi::New(used);
12057 new_dict.SetAt(new_dict_size, new_entry);
12058 // Remember the new dictionary now.
12059 StorePointer(&raw_ptr()->dictionary_, new_dict.raw());
12060}
12061
// Inserts |obj| under |name| into the library dictionary, an open-addressed
// hash table with linear probing whose last array slot holds the used-entry
// count. The table is grown when it exceeds 75% occupancy, so probing is
// guaranteed to find an empty slot. The caller must ensure |name| is not
// already present (asserted below).
void Library::AddObject(const Object& obj, const String& name) const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() ||
         obj.IsLibraryPrefix());
  ASSERT(name.Equals(String::Handle(obj.DictionaryName())));
  ASSERT(LookupLocalObject(name) == Object::null());
  const Array& dict = Array::Handle(dictionary());
  intptr_t dict_size = dict.Length() - 1;
  intptr_t index = name.Hash() % dict_size;

  Object& entry = Object::Handle();
  entry = dict.At(index);
  // An empty spot will be found because we keep the hash set at most 75% full.
  while (!entry.IsNull()) {
    index = (index + 1) % dict_size;
    entry = dict.At(index);
  }

  // Insert the object at the empty slot.
  dict.SetAt(index, obj);
  // One more element added.
  intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1;
  const Smi& used = Smi::Handle(Smi::New(used_elements));
  dict.SetAt(dict_size, used);  // Update used count.

  // Rehash if symbol_table is 75% full.
  if (used_elements > ((dict_size / 4) * 3)) {
    // TODO(iposva): Avoid exponential growth.
    RehashDictionary(dict, 2 * dict_size);
  }

  // Invalidate the cache of loaded scripts.
  if (loaded_scripts() != Array::null()) {
    StorePointer(&raw_ptr()->loaded_scripts_, Array::null());
  }
}
12098
12099// Lookup a name in the library's re-export namespace.
12100// This lookup can occur from two different threads: background compiler and
12101// mutator thread.
12102ObjectPtr Library::LookupReExport(const String& name,
12103 ZoneGrowableArray<intptr_t>* trail) const {
12104 if (!HasExports()) {
12105 return Object::null();
12106 }
12107
12108 if (trail == NULL) {
12109 trail = new ZoneGrowableArray<intptr_t>();
12110 }
12111 Object& obj = Object::Handle();
12112 if (FLAG_use_exp_cache && LookupExportedNamesCache(name, &obj)) {
12113 return obj.raw();
12114 }
12115
12116 const intptr_t lib_id = this->index();
12117 ASSERT(lib_id >= 0); // We use -1 to indicate that a cycle was found.
12118 trail->Add(lib_id);
12119 const Array& exports = Array::Handle(this->exports());
12120 Namespace& ns = Namespace::Handle();
12121 for (int i = 0; i < exports.Length(); i++) {
12122 ns ^= exports.At(i);
12123 obj = ns.Lookup(name, trail);
12124 if (!obj.IsNull()) {
12125 // The Lookup call above may return a setter x= when we are looking
12126 // for the name x. Make sure we only return when a matching name
12127 // is found.
12128 String& obj_name = String::Handle(obj.DictionaryName());
12129 if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) {
12130 break;
12131 }
12132 }
12133 }
12134 bool in_cycle = (trail->RemoveLast() < 0);
12135 if (FLAG_use_exp_cache && !in_cycle && !Compiler::IsBackgroundCompilation()) {
12136 AddToExportedNamesCache(name, obj);
12137 }
12138 return obj.raw();
12139}
12140
// Probes the library dictionary for |name|. Returns the matching entry, or
// null on a miss. On return, *index is the slot of the match on a hit, or
// the empty slot that terminated the probe on a miss.
ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  // Borrow the thread's reusable handles instead of allocating new ones.
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_STRING_HANDLESCOPE(thread);
  Array& dict = thread->ArrayHandle();
  dict = dictionary();
  // The last array slot holds the used-entry count, not an entry.
  intptr_t dict_size = dict.Length() - 1;
  *index = name.Hash() % dict_size;
  Object& entry = thread->ObjectHandle();
  String& entry_name = thread->StringHandle();
  entry = dict.At(*index);
  // Search the entry in the hash set.
  while (!entry.IsNull()) {
    entry_name = entry.DictionaryName();
    ASSERT(!entry_name.IsNull());
    if (entry_name.Equals(name)) {
      return entry.raw();
    }
    *index = (*index + 1) % dict_size;
    entry = dict.At(*index);
  }
  return Object::null();
}
12166
12167void Library::AddClass(const Class& cls) const {
12168 ASSERT(!Compiler::IsBackgroundCompilation());
12169 const String& class_name = String::Handle(cls.Name());
12170 AddObject(cls, class_name);
12171 // Link class to this library.
12172 cls.set_library(*this);
12173 InvalidateResolvedName(class_name);
12174}
12175
12176static void AddScriptIfUnique(const GrowableObjectArray& scripts,
12177 const Script& candidate) {
12178 if (candidate.IsNull()) {
12179 return;
12180 }
12181 Script& script_obj = Script::Handle();
12182
12183 for (int i = 0; i < scripts.Length(); i++) {
12184 script_obj ^= scripts.At(i);
12185 if (script_obj.raw() == candidate.raw()) {
12186 // We already have a reference to this script.
12187 return;
12188 }
12189 }
12190 // Add script to the list of scripts.
12191 scripts.Add(candidate);
12192}
12193
// Returns the de-duplicated array of all scripts contributing to this
// library, computed lazily and cached in loaded_scripts_ (the cache is
// cleared when objects are added to the library; see AddObject).
ArrayPtr Library::LoadedScripts() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  // We compute the list of loaded scripts lazily. The result is
  // cached in loaded_scripts_.
  if (loaded_scripts() == Array::null()) {
    // TODO(jensj): This can be cleaned up.
    // It really should just return the content of `used_scripts`, and there
    // should be no need to do the O(n) call to `AddScriptIfUnique` per script.

    // Iterate over the library dictionary and collect all scripts.
    const GrowableObjectArray& scripts =
        GrowableObjectArray::Handle(GrowableObjectArray::New(8));
    Object& entry = Object::Handle();
    Class& cls = Class::Handle();
    Script& owner_script = Script::Handle();
    DictionaryIterator it(*this);
    while (it.HasNext()) {
      entry = it.GetNext();
      // Only classes, functions and fields carry a script; skip other
      // dictionary entries (e.g. library prefixes).
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else if (entry.IsFunction()) {
        owner_script = Function::Cast(entry).script();
      } else if (entry.IsField()) {
        owner_script = Field::Cast(entry).Script();
      } else {
        continue;
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    // Add all scripts from patch classes.
    GrowableObjectArray& patches = GrowableObjectArray::Handle(used_scripts());
    for (intptr_t i = 0; i < patches.Length(); i++) {
      entry = patches.At(i);
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else {
        ASSERT(entry.IsScript());
        owner_script = Script::Cast(entry).raw();
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    cls = toplevel_class();
    if (!cls.IsNull()) {
      owner_script = cls.script();
      AddScriptIfUnique(scripts, owner_script);
      // Special case: Scripts that only contain external top-level functions
      // are not included above, but can be referenced through a library's
      // anonymous classes. Example: dart-core:identical.dart.
      Function& func = Function::Handle();
      Array& functions = Array::Handle(cls.functions());
      for (intptr_t j = 0; j < functions.Length(); j++) {
        func ^= functions.At(j);
        if (func.is_external()) {
          owner_script = func.script();
          AddScriptIfUnique(scripts, owner_script);
        }
      }
    }

    // Create the array of scripts and cache it in loaded_scripts_.
    const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
    StorePointer(&raw_ptr()->loaded_scripts_, scripts_array.raw());
  }
  return loaded_scripts();
}
12261
12262// TODO(hausner): we might want to add a script dictionary to the
12263// library class to make this lookup faster.
12264ScriptPtr Library::LookupScript(const String& url,
12265 bool useResolvedUri /* = false */) const {
12266 const intptr_t url_length = url.Length();
12267 if (url_length == 0) {
12268 return Script::null();
12269 }
12270 const Array& scripts = Array::Handle(LoadedScripts());
12271 Script& script = Script::Handle();
12272 String& script_url = String::Handle();
12273 const intptr_t num_scripts = scripts.Length();
12274 for (int i = 0; i < num_scripts; i++) {
12275 script ^= scripts.At(i);
12276 if (useResolvedUri) {
12277 // Use for urls with 'org-dartlang-sdk:' or 'file:' schemes
12278 script_url = script.resolved_url();
12279 } else {
12280 // Use for urls with 'dart:', 'package:', or 'file:' schemes
12281 script_url = script.url();
12282 }
12283 const intptr_t start_idx = script_url.Length() - url_length;
12284 if ((start_idx == 0) && url.Equals(script_url)) {
12285 return script.raw();
12286 } else if (start_idx > 0) {
12287 // If we do a suffix match, only match if the partial path
12288 // starts at or immediately after the path separator.
12289 if (((url.CharAt(0) == '/') ||
12290 (script_url.CharAt(start_idx - 1) == '/')) &&
12291 url.Equals(script_url, start_idx, url_length)) {
12292 return script.raw();
12293 }
12294 }
12295 }
12296 return Script::null();
12297}
12298
12299void Library::EnsureTopLevelClassIsFinalized() const {
12300 if (toplevel_class() == Object::null()) {
12301 return;
12302 }
12303 Thread* thread = Thread::Current();
12304 const Class& cls = Class::Handle(thread->zone(), toplevel_class());
12305 if (cls.is_finalized()) {
12306 return;
12307 }
12308 const Error& error =
12309 Error::Handle(thread->zone(), cls.EnsureIsFinalized(thread));
12310 if (!error.IsNull()) {
12311 Exceptions::PropagateError(error);
12312 }
12313}
12314
12315ObjectPtr Library::LookupLocalObject(const String& name) const {
12316 intptr_t index;
12317 return LookupEntry(name, &index);
12318}
12319
12320ObjectPtr Library::LookupLocalOrReExportObject(const String& name) const {
12321 intptr_t index;
12322 EnsureTopLevelClassIsFinalized();
12323 const Object& result = Object::Handle(LookupEntry(name, &index));
12324 if (!result.IsNull() && !result.IsLibraryPrefix()) {
12325 return result.raw();
12326 }
12327 return LookupReExport(name);
12328}
12329
12330FieldPtr Library::LookupFieldAllowPrivate(const String& name) const {
12331 EnsureTopLevelClassIsFinalized();
12332 Object& obj = Object::Handle(LookupObjectAllowPrivate(name));
12333 if (obj.IsField()) {
12334 return Field::Cast(obj).raw();
12335 }
12336 return Field::null();
12337}
12338
12339FieldPtr Library::LookupLocalField(const String& name) const {
12340 EnsureTopLevelClassIsFinalized();
12341 Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
12342 if (obj.IsField()) {
12343 return Field::Cast(obj).raw();
12344 }
12345 return Field::null();
12346}
12347
12348FunctionPtr Library::LookupFunctionAllowPrivate(const String& name) const {
12349 EnsureTopLevelClassIsFinalized();
12350 Object& obj = Object::Handle(LookupObjectAllowPrivate(name));
12351 if (obj.IsFunction()) {
12352 return Function::Cast(obj).raw();
12353 }
12354 return Function::null();
12355}
12356
12357FunctionPtr Library::LookupLocalFunction(const String& name) const {
12358 EnsureTopLevelClassIsFinalized();
12359 Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
12360 if (obj.IsFunction()) {
12361 return Function::Cast(obj).raw();
12362 }
12363 return Function::null();
12364}
12365
12366ObjectPtr Library::LookupLocalObjectAllowPrivate(const String& name) const {
12367 Thread* thread = Thread::Current();
12368 Zone* zone = thread->zone();
12369 Object& obj = Object::Handle(zone, Object::null());
12370 obj = LookupLocalObject(name);
12371 if (obj.IsNull() && ShouldBePrivate(name)) {
12372 String& private_name = String::Handle(zone, PrivateName(name));
12373 obj = LookupLocalObject(private_name);
12374 }
12375 return obj.raw();
12376}
12377
12378ObjectPtr Library::LookupObjectAllowPrivate(const String& name) const {
12379 // First check if name is found in the local scope of the library.
12380 Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
12381 if (!obj.IsNull()) {
12382 return obj.raw();
12383 }
12384
12385 // Do not look up private names in imported libraries.
12386 if (ShouldBePrivate(name)) {
12387 return Object::null();
12388 }
12389
12390 // Now check if name is found in any imported libs.
12391 return LookupImportedObject(name);
12392}
12393
// Resolves |name| through this library's imports, applying shadowing rules:
// a declaration from a non-"dart:" library hides one from a "dart:" library,
// and an unmangled/getter match supersedes a previously found setter.
// Returns null when two genuinely different declarations with the same
// (unmangled) name collide.
ObjectPtr Library::LookupImportedObject(const String& name) const {
  Object& obj = Object::Handle();
  Namespace& import = Namespace::Handle();
  Library& import_lib = Library::Handle();
  String& import_lib_url = String::Handle();
  String& first_import_lib_url = String::Handle();
  Object& found_obj = Object::Handle();
  String& found_obj_name = String::Handle();
  // Private names are never resolved through imports.
  ASSERT(!ShouldBePrivate(name));
  for (intptr_t i = 0; i < num_imports(); i++) {
    import = ImportAt(i);
    obj = import.Lookup(name);
    if (!obj.IsNull()) {
      import_lib = import.library();
      import_lib_url = import_lib.url();
      if (found_obj.raw() != obj.raw()) {
        if (first_import_lib_url.IsNull() ||
            first_import_lib_url.StartsWith(Symbols::DartScheme())) {
          // This is the first object we found, or the
          // previously found object is exported from a Dart
          // system library. The newly found object hides the one
          // from the Dart library.
          first_import_lib_url = import_lib.url();
          found_obj = obj.raw();
          found_obj_name = obj.DictionaryName();
        } else if (import_lib_url.StartsWith(Symbols::DartScheme())) {
          // The newly found object is exported from a Dart system
          // library. It is hidden by the previously found object.
          // We continue to search.
        } else if (Field::IsSetterName(found_obj_name) &&
                   !Field::IsSetterName(name)) {
          // We are looking for an unmangled name or a getter, but
          // the first object we found is a setter. Replace the first
          // object with the one we just found.
          first_import_lib_url = import_lib.url();
          found_obj = obj.raw();
          found_obj_name = found_obj.DictionaryName();
        } else {
          // We found two different objects with the same name.
          // Note that we need to compare the names again because
          // looking up an unmangled name can return a getter or a
          // setter. A getter name is the same as the unmangled name,
          // but a setter name is different from an unmangled name or a
          // getter name.
          if (Field::IsGetterName(found_obj_name)) {
            found_obj_name = Field::NameFromGetter(found_obj_name);
          }
          String& second_obj_name = String::Handle(obj.DictionaryName());
          if (Field::IsGetterName(second_obj_name)) {
            second_obj_name = Field::NameFromGetter(second_obj_name);
          }
          if (found_obj_name.Equals(second_obj_name)) {
            // A true ambiguity: report "not found".
            return Object::null();
          }
        }
      }
    }
  }
  return found_obj.raw();
}
12454
12455ClassPtr Library::LookupClass(const String& name) const {
12456 Object& obj = Object::Handle(LookupLocalObject(name));
12457 if (obj.IsNull() && !ShouldBePrivate(name)) {
12458 obj = LookupImportedObject(name);
12459 }
12460 if (obj.IsClass()) {
12461 return Class::Cast(obj).raw();
12462 }
12463 return Class::null();
12464}
12465
12466ClassPtr Library::LookupLocalClass(const String& name) const {
12467 Object& obj = Object::Handle(LookupLocalObject(name));
12468 if (obj.IsClass()) {
12469 return Class::Cast(obj).raw();
12470 }
12471 return Class::null();
12472}
12473
12474ClassPtr Library::LookupClassAllowPrivate(const String& name) const {
12475 // See if the class is available in this library or in the top level
12476 // scope of any imported library.
12477 Zone* zone = Thread::Current()->zone();
12478 const Class& cls = Class::Handle(zone, LookupClass(name));
12479 if (!cls.IsNull()) {
12480 return cls.raw();
12481 }
12482
12483 // Now try to lookup the class using its private name, but only in
12484 // this library (not in imported libraries).
12485 if (ShouldBePrivate(name)) {
12486 String& private_name = String::Handle(zone, PrivateName(name));
12487 const Object& obj = Object::Handle(LookupLocalObject(private_name));
12488 if (obj.IsClass()) {
12489 return Class::Cast(obj).raw();
12490 }
12491 }
12492 return Class::null();
12493}
12494
12495// Mixin applications can have multiple private keys from different libraries.
12496ClassPtr Library::SlowLookupClassAllowMultiPartPrivate(
12497 const String& name) const {
12498 Array& dict = Array::Handle(dictionary());
12499 Object& entry = Object::Handle();
12500 String& cls_name = String::Handle();
12501 for (intptr_t i = 0; i < dict.Length(); i++) {
12502 entry = dict.At(i);
12503 if (entry.IsClass()) {
12504 cls_name = Class::Cast(entry).Name();
12505 // Warning: comparison is not symmetric.
12506 if (String::EqualsIgnoringPrivateKey(cls_name, name)) {
12507 return Class::Cast(entry).raw();
12508 }
12509 }
12510 }
12511 return Class::null();
12512}
12513
12514LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const {
12515 const Object& obj = Object::Handle(LookupLocalObject(name));
12516 if (obj.IsLibraryPrefix()) {
12517 return LibraryPrefix::Cast(obj).raw();
12518 }
12519 return LibraryPrefix::null();
12520}
12521
// Sets this library's top-level class; may only be set once (asserted).
void Library::set_toplevel_class(const Class& value) const {
  ASSERT(raw_ptr()->toplevel_class_ == Class::null());
  StorePointer(&raw_ptr()->toplevel_class_, value.raw());
}
12526
// Replaces this library's dependency list.
void Library::set_dependencies(const Array& deps) const {
  StorePointer(&raw_ptr()->dependencies_, deps.raw());
}
12530
// Replaces this library's list of metadata fields.
void Library::set_metadata(const GrowableObjectArray& value) const {
  StorePointer(&raw_ptr()->metadata_, value.raw());
}
12534
12535LibraryPtr Library::ImportLibraryAt(intptr_t index) const {
12536 Namespace& import = Namespace::Handle(ImportAt(index));
12537 if (import.IsNull()) {
12538 return Library::null();
12539 }
12540 return import.library();
12541}
12542
12543NamespacePtr Library::ImportAt(intptr_t index) const {
12544 if ((index < 0) || index >= num_imports()) {
12545 return Namespace::null();
12546 }
12547 const Array& import_list = Array::Handle(imports());
12548 return Namespace::RawCast(import_list.At(index));
12549}
12550
void Library::DropDependenciesAndCaches() const {
  // We need to preserve the "dart-ext:" imports because they are used by
  // Loader::ReloadNativeExtensions().
  //
  // First pass: count the native-extension imports so the replacement
  // import array can be allocated at its exact final size.
  intptr_t native_import_count = 0;
  Array& imports = Array::Handle(raw_ptr()->imports_);
  Namespace& ns = Namespace::Handle();
  Library& lib = Library::Handle();
  String& url = String::Handle();
  for (int i = 0; i < imports.Length(); ++i) {
    ns = Namespace::RawCast(imports.At(i));
    if (ns.IsNull()) continue;
    lib = ns.library();
    url = lib.url();
    if (url.StartsWith(Symbols::DartExtensionScheme())) {
      native_import_count++;
    }
  }
  // Second pass: copy only the "dart-ext:" imports into the new array.
  Array& new_imports =
      Array::Handle(Array::New(native_import_count, Heap::kOld));
  for (int i = 0, j = 0; i < imports.Length(); ++i) {
    ns = Namespace::RawCast(imports.At(i));
    if (ns.IsNull()) continue;
    lib = ns.library();
    url = lib.url();
    if (url.StartsWith(Symbols::DartExtensionScheme())) {
      new_imports.SetAt(j++, ns);
    }
  }

  // Install the filtered import list and drop exports, name-lookup caches,
  // loaded scripts and recorded dependencies.
  // NOTE(review): num_imports_ is reset to 0 even when new_imports is
  // non-empty; presumably the preserved entries are read directly off the
  // raw imports array during reload -- confirm against callers.
  StorePointer(&raw_ptr()->imports_, new_imports.raw());
  StorePointer(&raw_ptr()->exports_, Object::empty_array().raw());
  StoreNonPointer(&raw_ptr()->num_imports_, 0);
  StorePointer(&raw_ptr()->resolved_names_, Array::null());
  StorePointer(&raw_ptr()->exported_names_, Array::null());
  StorePointer(&raw_ptr()->loaded_scripts_, Array::null());
  StorePointer(&raw_ptr()->dependencies_, Array::null());
}
12588
12589void Library::AddImport(const Namespace& ns) const {
12590 Array& imports = Array::Handle(this->imports());
12591 intptr_t capacity = imports.Length();
12592 if (num_imports() == capacity) {
12593 capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
12594 imports = Array::Grow(imports, capacity);
12595 StorePointer(&raw_ptr()->imports_, imports.raw());
12596 }
12597 intptr_t index = num_imports();
12598 imports.SetAt(index, ns);
12599 set_num_imports(index + 1);
12600}
12601
// Convenience function to determine whether the export list is
// non-empty. An empty export list is always the canonical empty array,
// so an identity comparison suffices.
bool Library::HasExports() const {
  return exports() != Object::empty_array().raw();
}
12607
12608// We add one namespace at a time to the exports array and don't
12609// pre-allocate any unused capacity. The assumption is that
12610// re-exports are quite rare.
12611void Library::AddExport(const Namespace& ns) const {
12612 Array& exports = Array::Handle(this->exports());
12613 intptr_t num_exports = exports.Length();
12614 exports = Array::Grow(exports, num_exports + 1);
12615 StorePointer(&raw_ptr()->exports_, exports.raw());
12616 exports.SetAt(num_exports, ns);
12617}
12618
12619static ArrayPtr NewDictionary(intptr_t initial_size) {
12620 const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld));
12621 // The last element of the dictionary specifies the number of in use slots.
12622 dict.SetAt(initial_size, Object::smi_zero());
12623 return dict.raw();
12624}
12625
// Creates a fresh (empty) cache for names resolved in this library.
void Library::InitResolvedNamesCache() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  // Borrow the thread's reusable Array handle to avoid a Zone allocation.
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& cache = thread->ArrayHandle();
  cache = HashTables::New<ResolvedNamesMap>(64);
  StorePointer(&raw_ptr()->resolved_names_, cache.raw());
}
12634
// Drops the resolved-names cache; it is lazily recreated on next use.
void Library::ClearResolvedNamesCache() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  StorePointer(&raw_ptr()->resolved_names_, Array::null());
}
12639
// Creates a fresh (empty) cache for names this library exports.
void Library::InitExportedNamesCache() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  // Borrow the thread's reusable Array handle to avoid a Zone allocation.
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& cache = thread->ArrayHandle();
  cache = HashTables::New<ResolvedNamesMap>(16);
  StorePointer(&raw_ptr()->exported_names_, cache.raw());
}
12648
// Drops the exported-names cache; it is lazily recreated on next use.
void Library::ClearExportedNamesCache() const {
  StorePointer(&raw_ptr()->exported_names_, Array::null());
}
12652
// Creates the library's top-level dictionary (maps names to declarations).
void Library::InitClassDictionary() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  // Borrow the thread's reusable Array handle to avoid a Zone allocation.
  REUSABLE_FUNCTION_HANDLESCOPE(thread);
  Array& dictionary = thread->ArrayHandle();
  // TODO(iposva): Find reasonable initial size.
  const int kInitialElementCount = 16;
  dictionary = NewDictionary(kInitialElementCount);
  StorePointer(&raw_ptr()->dictionary_, dictionary.raw());
}
12663
// Allocates an empty import list with initial capacity and resets the
// in-use import count to zero.
void Library::InitImportList() const {
  const Array& imports =
      Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
  StorePointer(&raw_ptr()->imports_, imports.raw());
  StoreNonPointer(&raw_ptr()->num_imports_, 0);
}
12670
// Allocates a raw, uninitialized Library object in old space. Callers
// (e.g. NewLibraryHelper) are responsible for initializing every field.
LibraryPtr Library::New() {
  ASSERT(Object::library_class() != Class::null());
  ObjectPtr raw =
      Object::Allocate(Library::kClassId, Library::InstanceSize(), Heap::kOld);
  return static_cast<LibraryPtr>(raw);
}
12677
// Allocates and fully initializes a new Library for |url|. When
// |import_core_lib| is true, dart:core is added to the import list.
// Every raw field is initialized here before the library escapes.
LibraryPtr Library::NewLibraryHelper(const String& url, bool import_core_lib) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsMutatorThread());
  // Force the url to have a hash code.
  url.Hash();
  const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
  const Library& result = Library::Handle(zone, Library::New());
  result.StorePointer(&result.raw_ptr()->name_, Symbols::Empty().raw());
  result.StorePointer(&result.raw_ptr()->url_, url.raw());
  result.StorePointer(&result.raw_ptr()->resolved_names_, Array::null());
  result.StorePointer(&result.raw_ptr()->exported_names_, Array::null());
  result.StorePointer(&result.raw_ptr()->dictionary_,
                      Object::empty_array().raw());
  GrowableObjectArray& list = GrowableObjectArray::Handle(zone);
  list = GrowableObjectArray::New(4, Heap::kOld);
  result.StorePointer(&result.raw_ptr()->metadata_, list.raw());
  result.StorePointer(&result.raw_ptr()->toplevel_class_, Class::null());
  list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld);
  result.StorePointer(&result.raw_ptr()->used_scripts_, list.raw());
  result.StorePointer(&result.raw_ptr()->imports_, Object::empty_array().raw());
  result.StorePointer(&result.raw_ptr()->exports_, Object::empty_array().raw());
  result.StorePointer(&result.raw_ptr()->loaded_scripts_, Array::null());
  result.set_native_entry_resolver(NULL);
  result.set_native_entry_symbol_resolver(NULL);
  result.set_flags(0);
  result.set_is_in_fullsnapshot(false);
  result.set_is_nnbd(false);
  if (dart_scheme) {
    // Only debug dart: libraries if we have been requested to show invisible
    // frames.
    result.set_debuggable(FLAG_show_invisible_frames);
  } else {
    // Default to debuggable for all other libraries.
    result.set_debuggable(true);
  }
  result.set_is_dart_scheme(dart_scheme);
  NOT_IN_PRECOMPILED(result.set_is_declared_in_bytecode(false));
  NOT_IN_PRECOMPILED(result.set_binary_declaration_offset(0));
  result.StoreNonPointer(&result.raw_ptr()->load_state_,
                         LibraryLayout::kAllocated);
  // index_ == -1 marks the library as not yet registered (see Register()).
  result.StoreNonPointer(&result.raw_ptr()->index_, -1);
  result.InitClassDictionary();
  result.InitImportList();
  result.AllocatePrivateKey();
  if (import_core_lib) {
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());
    const Namespace& ns = Namespace::Handle(
        zone,
        Namespace::New(core_lib, Object::null_array(), Object::null_array()));
    result.AddImport(ns);
  }
  return result.raw();
}
12733
// Creates a new library for |url| without importing dart:core.
LibraryPtr Library::New(const String& url) {
  return NewLibraryHelper(url, false);
}
12737
// Overwrites the whole flags byte at once (individual flag setters
// elsewhere update single bits).
void Library::set_flags(uint8_t flags) const {
  StoreNonPointer(&raw_ptr()->flags_, flags);
}
12741
// Creates and registers the dart:core library for |isolate| and records it
// in the object store. The root library is reset to null here; it is set
// later when the user program is loaded.
void Library::InitCoreLibrary(Isolate* isolate) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& core_lib_url = Symbols::DartCore();
  const Library& core_lib =
      Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false));
  core_lib.SetLoadRequested();
  core_lib.Register(thread);
  isolate->object_store()->set_bootstrap_library(ObjectStore::kCore, core_lib);
  isolate->object_store()->set_root_library(Library::Handle());
}
12753
// Invoke the function, or noSuchMethod if it is null.
static ObjectPtr InvokeInstanceFunction(
    const Instance& receiver,
    const Function& function,
    const String& target_name,
    const Array& args,
    const Array& args_descriptor_array,
    bool respect_reflectable,
    const TypeArguments& instantiator_type_args) {
  // Note "args" is already the internal arguments with the receiver as the
  // first element.
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  // Fall back to noSuchMethod when there is no target, when the supplied
  // arguments do not fit its signature, or when reflection on a
  // non-reflectable member is disallowed.
  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return DartEntry::InvokeNoSuchMethod(receiver, target_name, args,
                                         args_descriptor_array);
  }
  // Functions that cannot be invoked dynamically need an explicit argument
  // type check here; a type error is returned (not thrown).
  if (!function.CanReceiveDynamicInvocation()) {
    ObjectPtr type_error = function.DoArgumentTypesMatch(
        args, args_descriptor, instantiator_type_args);
    if (type_error != Error::null()) {
      return type_error;
    }
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
12781
// Reads a top-level value named |getter_name|: a static field's value, the
// result of a top-level getter, or a closurized top-level method.
// Returns Object::sentinel() when nothing is found and
// |throw_nsm_if_absent| is false; callers must not let the sentinel leak
// into Dart code.
ObjectPtr Library::InvokeGetter(const String& getter_name,
                                bool throw_nsm_if_absent,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Object& obj = Object::Handle(LookupLocalOrReExportObject(getter_name));
  Function& getter = Function::Handle();
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
    }
    if (!field.IsUninitialized()) {
      return field.StaticValue();
    }
    // An uninitialized field was found. Check for a getter in the field's
    // owner class.
    const Class& klass = Class::Handle(field.Owner());
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    getter = klass.LookupStaticFunction(internal_getter_name);
  } else {
    // No field found. Check for a getter in the lib.
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    obj = LookupLocalOrReExportObject(internal_getter_name);
    if (obj.IsFunction()) {
      getter = Function::Cast(obj).raw();
      if (check_is_entrypoint) {
        CHECK_ERROR(getter.VerifyCallEntryPoint());
      }
    } else {
      // No getter function either; fall back to the plain name, which may
      // resolve to a regular method that can be closurized.
      obj = LookupLocalOrReExportObject(getter_name);
      // Normally static top-level methods cannot be closurized through the
      // native API even if they are marked as entry-points, with the one
      // exception of "main".
      if (obj.IsFunction() && check_is_entrypoint) {
        if (!getter_name.Equals(String::Handle(String::New("main"))) ||
            raw() != Isolate::Current()->object_store()->root_library()) {
          CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint());
        }
      }
      if (obj.IsFunction() && Function::Cast(obj).SafeToClosurize()) {
        // Looking for a getter but found a regular method: closurize it.
        const Function& closure_function =
            Function::Handle(Function::Cast(obj).ImplicitClosureFunction());
        return closure_function.ImplicitStaticClosure();
      }
    }
  }

  if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
    if (throw_nsm_if_absent) {
      return ThrowNoSuchMethod(
          AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
          getter_name, Object::null_array(), Object::null_array(),
          InvocationMirror::kTopLevel, InvocationMirror::kGetter);
    }

    // Fall through case: Indicate that we didn't find any function or field
    // using a special null instance. This is different from a field being null.
    // Callers make sure that this null does not leak into Dartland.
    return Object::sentinel().raw();
  }

  // Invoke the getter and return the result.
  return DartEntry::InvokeFunction(getter, Object::empty_array());
}
12849
// Stores |value| into the top-level member named |setter_name|: either a
// static field (type-checked, and rejected if final) or a top-level setter
// function. Throws NoSuchMethod / TypeError as appropriate and returns the
// resulting error object, otherwise returns the stored value (for fields)
// or the setter's result.
ObjectPtr Library::InvokeSetter(const String& setter_name,
                                const Instance& value,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Object& obj = Object::Handle(LookupLocalOrReExportObject(setter_name));
  const String& internal_setter_name =
      String::Handle(Field::SetterName(setter_name));
  AbstractType& setter_type = AbstractType::Handle();
  AbstractType& argument_type = AbstractType::Handle(value.GetType(Heap::kOld));
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
    }
    // Check the assigned value against the field's declared type. A null
    // value or a dynamic field type always passes.
    setter_type = field.type();
    if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
        !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                            Object::null_type_arguments())) {
      return ThrowTypeError(field.token_pos(), value, setter_type, setter_name);
    }
    // Final fields (and non-reflectable ones when reflection is respected)
    // cannot be assigned: report NoSuchMethod for the setter.
    if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
      const int kNumArgs = 1;
      const Array& args = Array::Handle(Array::New(kNumArgs));
      args.SetAt(0, value);

      return ThrowNoSuchMethod(
          AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
          internal_setter_name, args, Object::null_array(),
          InvocationMirror::kTopLevel, InvocationMirror::kSetter);
    }
    field.SetStaticValue(value);
    return value.raw();
  }

  // Not a field: look for an explicit top-level setter function.
  Function& setter = Function::Handle();
  obj = LookupLocalOrReExportObject(internal_setter_name);
  if (obj.IsFunction()) {
    setter ^= obj.raw();
  }

  if (!setter.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(setter.VerifyCallEntryPoint());
  }

  const int kNumArgs = 1;
  const Array& args = Array::Handle(Array::New(kNumArgs));
  args.SetAt(0, value);
  if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(Class::Handle(toplevel_class()).RareType()),
        internal_setter_name, args, Object::null_array(),
        InvocationMirror::kTopLevel, InvocationMirror::kSetter);
  }

  // Check the value against the setter's parameter type before invoking.
  setter_type = setter.ParameterTypeAt(0);
  if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
      !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                          Object::null_type_arguments())) {
    return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name);
  }

  return DartEntry::InvokeFunction(setter, args);
}
12913
// Invokes the top-level function |function_name| with |args|/|arg_names|.
// If no function is found, tries evaluating a getter of that name and
// calling the returned object. Throws NoSuchMethod on failure and returns
// the resulting error object; otherwise returns the call's result.
ObjectPtr Library::Invoke(const String& function_name,
                          const Array& args,
                          const Array& arg_names,
                          bool respect_reflectable,
                          bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  auto& function = Function::Handle(zone);
  auto& result =
      Object::Handle(zone, LookupLocalOrReExportObject(function_name));
  if (result.IsFunction()) {
    function ^= result.raw();
  }

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.raw() != Object::sentinel().raw()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      // The getter's result becomes the callable receiver, so the argument
      // count grows by one in the new descriptor.
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return InvokeCallableWithChecks(zone, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(zone,
                             Class::Handle(zone, toplevel_class()).RareType()),
        function_name, args, arg_names, InvocationMirror::kTopLevel,
        InvocationMirror::kMethod);
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  if (!function.CanReceiveDynamicInvocation()) {
    ObjectPtr type_error = function.DoArgumentTypesMatch(
        args, args_descriptor, Object::empty_type_arguments());
    if (type_error != Error::null()) {
      return type_error;
    }
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
12983
// Evaluates a compiled expression (kernel blob) in the scope of this
// library. The empty class name selects the library's top-level scope.
ObjectPtr Library::EvaluateCompiledExpression(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  return EvaluateCompiledExpressionHelper(
      kernel_buffer, type_definitions, String::Handle(url()), String::Handle(),
      arguments, type_arguments);
}
12993
// Creates the dart:nativewrappers library and populates it with the
// NativeFieldWrapperClass1..4 classes used to attach native fields to
// Dart instances.
void Library::InitNativeWrappersLibrary(Isolate* isolate, bool is_kernel) {
  static const int kNumNativeWrappersClasses = 4;
  // The class-name buffer below leaves room for a single digit, so the
  // class count must stay in 1..9.
  COMPILE_ASSERT((kNumNativeWrappersClasses > 0) &&
                 (kNumNativeWrappersClasses < 10));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& native_flds_lib_url = Symbols::DartNativeWrappers();
  const Library& native_flds_lib = Library::Handle(
      zone, Library::NewLibraryHelper(native_flds_lib_url, false));
  const String& native_flds_lib_name = Symbols::DartNativeWrappersLibName();
  native_flds_lib.SetName(native_flds_lib_name);
  native_flds_lib.SetLoadRequested();
  native_flds_lib.Register(thread);
  native_flds_lib.SetLoadInProgress();
  isolate->object_store()->set_native_wrappers_library(native_flds_lib);
  static const char* const kNativeWrappersClass = "NativeFieldWrapperClass";
  // kNameLength = strlen(prefix) + one digit + the terminating NUL.
  static const int kNameLength = 25;
  ASSERT(kNameLength == (strlen(kNativeWrappersClass) + 1 + 1));
  char name_buffer[kNameLength];
  String& cls_name = String::Handle(zone);
  for (int fld_cnt = 1; fld_cnt <= kNumNativeWrappersClasses; fld_cnt++) {
    Utils::SNPrint(name_buffer, kNameLength, "%s%d", kNativeWrappersClass,
                   fld_cnt);
    cls_name = Symbols::New(thread, name_buffer);
    Class::NewNativeWrapper(native_flds_lib, cls_name, fld_cnt);
  }
  // NOTE: If we bootstrap from a Kernel IR file we want to generate the
  // synthetic constructors for the native wrapper classes. We leave this up to
  // the [KernelLoader] who will take care of it later.
  if (!is_kernel) {
    native_flds_lib.SetLoaded();
  }
}
13027
// LibraryLookupMap maps URIs to libraries.
// Hash-table traits for looking libraries up by URI string.
class LibraryLookupTraits {
 public:
  static const char* Name() { return "LibraryLookupTraits"; }
  static bool ReportStats() { return false; }

  // Keys are URI strings; equality is string content equality.
  static bool IsMatch(const Object& a, const Object& b) {
    const String& a_str = String::Cast(a);
    const String& b_str = String::Cast(b);

    // Hashes must already be computed so lookups never mutate the keys.
    ASSERT(a_str.HasHash() && b_str.HasHash());
    return a_str.Equals(b_str);
  }

  static uword Hash(const Object& key) { return String::Cast(key).Hash(); }

  static ObjectPtr NewKey(const String& str) { return str.raw(); }
};
typedef UnorderedHashMap<LibraryLookupTraits> LibraryLookupMap;
13047
// Loads the expression-evaluation function from |kernel_buffer| into the
// scope identified by |library_url| / |klass| and invokes it with
// |arguments| (prefixed by |type_arguments| when type parameters are
// involved). Returns the evaluation result or an error object. Not
// supported in the precompiled runtime.
static ObjectPtr EvaluateCompiledExpressionHelper(
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const String& library_url,
    const String& klass,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  Zone* zone = Thread::Current()->zone();
#if defined(DART_PRECOMPILED_RUNTIME)
  const String& error_str = String::Handle(
      zone,
      String::New("Expression evaluation not available in precompiled mode."));
  return ApiError::New(error_str);
#else
  std::unique_ptr<kernel::Program> kernel_pgm =
      kernel::Program::ReadFromTypedData(kernel_buffer);

  if (kernel_pgm == NULL) {
    return ApiError::New(String::Handle(
        zone, String::New("Kernel isolate returned ill-formed kernel.")));
  }

  kernel::KernelLoader loader(kernel_pgm.get(),
                              /*uri_to_source_table=*/nullptr);
  auto& result = Object::Handle(
      zone, loader.LoadExpressionEvaluationFunction(library_url, klass));
  // Release the kernel program as soon as loading is done.
  kernel_pgm.reset();

  if (result.IsError()) return result.raw();

  const auto& callee = Function::CheckedHandle(zone, result.raw());

  // type_arguments is null if all type arguments are dynamic.
  if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
    result = DartEntry::InvokeFunction(callee, arguments);
  } else {
    // Pass the type-argument vector as an extra leading argument.
    intptr_t num_type_args = type_arguments.Length();
    Array& real_arguments =
        Array::Handle(zone, Array::New(arguments.Length() + 1));
    real_arguments.SetAt(0, type_arguments);
    Object& arg = Object::Handle(zone);
    for (intptr_t i = 0; i < arguments.Length(); ++i) {
      arg = arguments.At(i);
      real_arguments.SetAt(i + 1, arg);
    }

    const Array& args_desc =
        Array::Handle(zone, ArgumentsDescriptor::NewBoxed(
                                num_type_args, arguments.Length(), Heap::kNew));
    result = DartEntry::InvokeFunction(callee, real_arguments, args_desc);
  }

  if (callee.is_declared_in_bytecode()) {
    // Expression evaluation binary expires immediately after evaluation is
    // finished. However, hot reload may still find corresponding
    // KernelProgramInfo object in the heap and it would try to patch it.
    // To prevent accessing stale kernel binary in ResetObjectTable, bytecode
    // component of the callee's KernelProgramInfo is reset here.
    const auto& script = Script::Handle(zone, callee.script());
    const auto& info =
        KernelProgramInfo::Handle(zone, script.kernel_program_info());
    info.set_bytecode_component(Object::null_array());
  }

  return result.raw();
#endif
}
13115
// Returns the library with the given url in the current isolate, or
// Library::null() if no such library is registered.
LibraryPtr Library::LookupLibrary(Thread* thread, const String& url) {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ObjectStore* object_store = isolate->object_store();

  // Make sure the URL string has an associated hash code
  // to speed up the repeated equality checks.
  url.Hash();

  // Use the libraries map to lookup the library by URL.
  Library& lib = Library::Handle(zone);
  if (object_store->libraries_map() == Array::null()) {
    return Library::null();
  } else {
    LibraryLookupMap map(object_store->libraries_map());
    lib ^= map.GetOrNull(url);
    // A read-only lookup must not have replaced the backing storage.
    ASSERT(map.Release().raw() == object_store->libraries_map());
  }
  return lib.raw();
}
13137
13138bool Library::IsPrivate(const String& name) {
13139 if (ShouldBePrivate(name)) return true;
13140 // Factory names: List._fromLiteral.
13141 for (intptr_t i = 1; i < name.Length() - 1; i++) {
13142 if (name.CharAt(i) == '.') {
13143 if (name.CharAt(i + 1) == '_') {
13144 return true;
13145 }
13146 }
13147 }
13148 return false;
13149}
13150
// Create a private key for this library. It is based on the hash of the
// library URI and the sequence number of the library to guarantee unique
// private keys without having to verify.
void Library::AllocatePrivateKey() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  if (isolate->group()->IsReloading()) {
    // When reloading, we need to make sure we use the original private key
    // if this library previously existed.
    IsolateReloadContext* reload_context = isolate->reload_context();
    const String& original_key =
        String::Handle(reload_context->FindLibraryPrivateKey(*this));
    if (!original_key.IsNull()) {
      StorePointer(&raw_ptr()->private_key_, original_key.raw());
      return;
    }
  }
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

  // Format of the private key is: "@<sequence number><6 digits of hash>
  const intptr_t hash_mask = 0x7FFFF;

  const String& url = String::Handle(zone, this->url());
  intptr_t hash_value = url.Hash() & hash_mask;

  // The current library count serves as the unique sequence number.
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, isolate->object_store()->libraries());
  intptr_t sequence_value = libs.Length();

  char private_key[32];
  Utils::SNPrint(private_key, sizeof(private_key), "%c%" Pd "%06" Pd "",
                 kPrivateKeySeparator, sequence_value, hash_value);
  const String& key =
      String::Handle(zone, String::New(private_key, Heap::kOld));
  key.Hash();  // This string may end up in the VM isolate.
  StorePointer(&raw_ptr()->private_key_, key.raw());
}
13191
// Returns |member| mangled with dart:core's private key, as a zone handle.
const String& Library::PrivateCoreLibName(const String& member) {
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const String& private_name = String::ZoneHandle(core_lib.PrivateName(member));
  return private_name;
}
13197
// Returns true if |name| is exactly |member| mangled with dart:core's
// private key (i.e. name == member + private_key).
bool Library::IsPrivateCoreLibName(const String& name, const String& member) {
  Zone* zone = Thread::Current()->zone();
  const auto& core_lib = Library::Handle(zone, Library::CoreLibrary());
  const auto& private_key = String::Handle(zone, core_lib.private_key());

  ASSERT(core_lib.IsPrivate(member));
  return name.EqualsConcat(member, private_key);
}
13206
// Looks up |class_name| in dart:core, mangling private names with the
// core library's private key first.
ClassPtr Library::LookupCoreClass(const String& class_name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
  String& name = String::Handle(zone, class_name.raw());
  if (class_name.CharAt(0) == kPrivateIdentifierStart) {
    // Private identifiers are mangled on a per library basis.
    name = Symbols::FromConcat(thread, name,
                               String::Handle(zone, core_lib.private_key()));
  }
  return core_lib.LookupClass(name);
}
13219
13220// Cannot handle qualified names properly as it only appends private key to
13221// the end (e.g. _Alfa.foo -> _Alfa.foo@...).
13222StringPtr Library::PrivateName(const String& name) const {
13223 Thread* thread = Thread::Current();
13224 Zone* zone = thread->zone();
13225 ASSERT(IsPrivate(name));
13226 // ASSERT(strchr(name, '@') == NULL);
13227 String& str = String::Handle(zone);
13228 str = name.raw();
13229 str = Symbols::FromConcat(thread, str,
13230 String::Handle(zone, this->private_key()));
13231 return str.raw();
13232}
13233
13234LibraryPtr Library::GetLibrary(intptr_t index) {
13235 Thread* thread = Thread::Current();
13236 Zone* zone = thread->zone();
13237 Isolate* isolate = thread->isolate();
13238 const GrowableObjectArray& libs =
13239 GrowableObjectArray::Handle(zone, isolate->object_store()->libraries());
13240 ASSERT(!libs.IsNull());
13241 if ((0 <= index) && (index < libs.Length())) {
13242 Library& lib = Library::Handle(zone);
13243 lib ^= libs.At(index);
13244 return lib.raw();
13245 }
13246 return Library::null();
13247}
13248
// Registers this library with the isolate. The URL must not already be
// registered.
void Library::Register(Thread* thread) const {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  ObjectStore* object_store = isolate->object_store();

  // A library is "registered" in two places:
  // - A growable array mapping from index to library.
  const String& lib_url = String::Handle(zone, url());
  ASSERT(Library::LookupLibrary(thread, lib_url) == Library::null());
  ASSERT(lib_url.HasHash());
  GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, object_store->libraries());
  ASSERT(!libs.IsNull());
  // The library's index is its position in the list.
  set_index(libs.Length());
  libs.Add(*this);

  // - A map from URL string to library.
  if (object_store->libraries_map() == Array::null()) {
    // Lazily create the map on the first registration.
    LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
    object_store->set_libraries_map(map.Release());
  }

  LibraryLookupMap map(object_store->libraries_map());
  bool present = map.UpdateOrInsert(lib_url, *this);
  ASSERT(!present);
  // Insertion may have grown the table; store the (possibly new) backing
  // array back into the object store.
  object_store->set_libraries_map(map.Release());
}
13276
// Replaces the isolate's library list with |libs| and rebuilds the
// URL-to-library lookup map from scratch.
void Library::RegisterLibraries(Thread* thread,
                                const GrowableObjectArray& libs) {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  Library& lib = Library::Handle(zone);
  String& lib_url = String::Handle(zone);

  LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));

  intptr_t len = libs.Length();
  for (intptr_t i = 0; i < len; i++) {
    lib ^= libs.At(i);
    lib_url = lib.url();
    map.InsertNewOrGetValue(lib_url, lib);
  }
  // Now remember these in the isolate's object store.
  isolate->object_store()->set_libraries(libs);
  isolate->object_store()->set_libraries_map(map.Release());
}
13296
// Convenience accessors for the well-known bootstrap libraries. Each
// returns the library recorded in the current isolate's object store.
LibraryPtr Library::AsyncLibrary() {
  return Isolate::Current()->object_store()->async_library();
}

LibraryPtr Library::ConvertLibrary() {
  return Isolate::Current()->object_store()->convert_library();
}

LibraryPtr Library::CoreLibrary() {
  return Isolate::Current()->object_store()->core_library();
}

LibraryPtr Library::CollectionLibrary() {
  return Isolate::Current()->object_store()->collection_library();
}

LibraryPtr Library::DeveloperLibrary() {
  return Isolate::Current()->object_store()->developer_library();
}

LibraryPtr Library::FfiLibrary() {
  return Isolate::Current()->object_store()->ffi_library();
}

LibraryPtr Library::InternalLibrary() {
  return Isolate::Current()->object_store()->_internal_library();
}

LibraryPtr Library::IsolateLibrary() {
  return Isolate::Current()->object_store()->isolate_library();
}

LibraryPtr Library::MathLibrary() {
  return Isolate::Current()->object_store()->math_library();
}

// dart:mirrors is not supported in the precompiled runtime.
#if !defined(DART_PRECOMPILED_RUNTIME)
LibraryPtr Library::MirrorsLibrary() {
  return Isolate::Current()->object_store()->mirrors_library();
}
#endif

LibraryPtr Library::NativeWrappersLibrary() {
  return Isolate::Current()->object_store()->native_wrappers_library();
}

LibraryPtr Library::ProfilerLibrary() {
  return Isolate::Current()->object_store()->profiler_library();
}

LibraryPtr Library::TypedDataLibrary() {
  return Isolate::Current()->object_store()->typed_data_library();
}

LibraryPtr Library::VMServiceLibrary() {
  return Isolate::Current()->object_store()->_vmservice_library();
}

LibraryPtr Library::WasmLibrary() {
  return Isolate::Current()->object_store()->wasm_library();
}
13358
// Returns a zone-allocated debug string of the form "Library:'<url>'".
const char* Library::ToCString() const {
  NoSafepointScope no_safepoint;
  const String& name = String::Handle(url());
  return OS::SCreate(Thread::Current()->zone(), "Library:'%s'",
                     name.ToCString());
}
13365
13366LibraryPtr LibraryPrefix::GetLibrary(int index) const {
13367 if ((index >= 0) || (index < num_imports())) {
13368 const Array& imports = Array::Handle(this->imports());
13369 Namespace& import = Namespace::Handle();
13370 import ^= imports.At(index);
13371 return import.library();
13372 }
13373 return Library::null();
13374}
13375
// Appends |import| to this prefix's import list, growing the backing array
// when full. num_imports() tracks the number of used slots, which may be
// smaller than the backing array's length.
void LibraryPrefix::AddImport(const Namespace& import) const {
  intptr_t num_current_imports = num_imports();

  // Prefixes with deferred libraries can only contain one library.
  ASSERT((num_current_imports == 0) || !is_deferred_load());

  // The library needs to be added to the list.
  Array& imports = Array::Handle(this->imports());
  const intptr_t length = (imports.IsNull()) ? 0 : imports.Length();
  // Grow the list if it is full.
  if (num_current_imports >= length) {
    // Grow by a fixed increment plus 25% of the current capacity.
    const intptr_t new_length = length + kIncrementSize + (length >> 2);
    imports = Array::Grow(imports, new_length, Heap::kOld);
    set_imports(imports);
  }
  imports.SetAt(num_current_imports, import);
  set_num_imports(num_current_imports + 1);
}
13394
13395LibraryPrefixPtr LibraryPrefix::New() {
13396 ObjectPtr raw = Object::Allocate(LibraryPrefix::kClassId,
13397 LibraryPrefix::InstanceSize(), Heap::kOld);
13398 return static_cast<LibraryPrefixPtr>(raw);
13399}
13400
// Allocates and fully initializes a LibraryPrefix named |name| inside
// |importer|, seeded with the single namespace |import|. A deferred prefix
// starts out not-loaded; a non-deferred one is considered loaded immediately.
LibraryPrefixPtr LibraryPrefix::New(const String& name,
                                    const Namespace& import,
                                    bool deferred_load,
                                    const Library& importer) {
  const LibraryPrefix& result = LibraryPrefix::Handle(LibraryPrefix::New());
  result.set_name(name);
  result.set_num_imports(0);
  result.set_importer(importer);
  result.StoreNonPointer(&result.raw_ptr()->is_deferred_load_, deferred_load);
  result.StoreNonPointer(&result.raw_ptr()->is_loaded_, !deferred_load);
  result.set_imports(Array::Handle(Array::New(kInitialSize)));
  result.AddImport(import);
  return result.raw();
}
13415
// Sets the prefix name; prefix names are always canonicalized symbols.
void LibraryPrefix::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
}
13420
// Replaces the backing array holding this prefix's imported namespaces.
void LibraryPrefix::set_imports(const Array& value) const {
  StorePointer(&raw_ptr()->imports_, value.raw());
}
13424
// Sets the used-slot count. The field is 16 bits wide, so exceeding 65535
// imports reports a (non-returning) compile-time error against the importer.
void LibraryPrefix::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    ReportTooManyImports(Library::Handle(importer()));
  }
  StoreNonPointer(&raw_ptr()->num_imports_, value);
}
13431
// Records the library in which this prefix appears.
void LibraryPrefix::set_importer(const Library& value) const {
  StorePointer(&raw_ptr()->importer_, value.raw());
}
13435
13436const char* LibraryPrefix::ToCString() const {
13437 const String& prefix = String::Handle(name());
13438 return prefix.ToCString();
13439}
13440
// Sets the synthetic field that carries this namespace's annotations.
void Namespace::set_metadata_field(const Field& value) const {
  StorePointer(&raw_ptr()->metadata_field_, value.raw());
}
13444
// Attaches metadata (annotations) to this namespace by creating a synthetic,
// non-reflectable top-level field whose static value is lazily evaluated from
// the kernel annotations at |kernel_offset| (see GetMetadata below).
void Namespace::AddMetadata(const Object& owner,
                            TokenPosition token_pos,
                            intptr_t kernel_offset) {
  // Metadata may be attached at most once.
  ASSERT(Field::Handle(metadata_field()).IsNull());
  Field& field = Field::Handle(Field::NewTopLevel(Symbols::TopLevel(),
                                                  false,  // is_final
                                                  false,  // is_const
                                                  false,  // is_late
                                                  owner, token_pos, token_pos));
  field.set_is_reflectable(false);
  field.SetFieldType(Object::dynamic_type());
  // empty_array is the sentinel meaning "not yet evaluated".
  field.SetStaticValue(Array::empty_array(), true);
  field.set_kernel_offset(kernel_offset);
  set_metadata_field(field);
}
13460
// Returns this namespace's annotations, evaluating them lazily from kernel on
// first access and caching the result in the synthetic metadata field.
// Returns the empty array when there is no metadata (and always in the
// precompiled runtime, where annotations are not retained).
ObjectPtr Namespace::GetMetadata() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().raw();
#else
  Field& field = Field::Handle(metadata_field());
  if (field.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().raw();
  }
  Object& metadata = Object::Handle();
  metadata = field.StaticValue();
  // empty_array is the "not yet evaluated" sentinel set by AddMetadata.
  if (field.StaticValue() == Object::empty_array().raw()) {
    if (field.kernel_offset() > 0) {
      metadata =
          kernel::EvaluateMetadata(field, /* is_annotations_offset = */ true);
    } else {
      UNREACHABLE();
    }
    // Cache the evaluated annotations; evaluation errors (non-Array results)
    // are returned but not cached.
    if (metadata.IsArray()) {
      ASSERT(Array::Cast(metadata).raw() != Object::empty_array().raw());
      field.SetStaticValue(Array::Cast(metadata), true);
    }
  }
  return metadata.raw();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
13487
13488const char* Namespace::ToCString() const {
13489 const Library& lib = Library::Handle(library());
13490 return OS::SCreate(Thread::Current()->zone(), "Namespace for library '%s'",
13491 lib.ToCString());
13492}
13493
13494bool Namespace::HidesName(const String& name) const {
13495 // Quick check for common case with no combinators.
13496 if (hide_names() == show_names()) {
13497 ASSERT(hide_names() == Array::null());
13498 return false;
13499 }
13500 const String* plain_name = &name;
13501 if (Field::IsGetterName(name)) {
13502 plain_name = &String::Handle(Field::NameFromGetter(name));
13503 } else if (Field::IsSetterName(name)) {
13504 plain_name = &String::Handle(Field::NameFromSetter(name));
13505 }
13506 // Check whether the name is in the list of explicitly hidden names.
13507 if (hide_names() != Array::null()) {
13508 const Array& names = Array::Handle(hide_names());
13509 String& hidden = String::Handle();
13510 intptr_t num_names = names.Length();
13511 for (intptr_t i = 0; i < num_names; i++) {
13512 hidden ^= names.At(i);
13513 if (plain_name->Equals(hidden)) {
13514 return true;
13515 }
13516 }
13517 }
13518 // The name is not explicitly hidden. Now check whether it is in the
13519 // list of explicitly visible names, if there is one.
13520 if (show_names() != Array::null()) {
13521 const Array& names = Array::Handle(show_names());
13522 String& shown = String::Handle();
13523 intptr_t num_names = names.Length();
13524 for (intptr_t i = 0; i < num_names; i++) {
13525 shown ^= names.At(i);
13526 if (plain_name->Equals(shown)) {
13527 return false;
13528 }
13529 }
13530 // There is a list of visible names. The name we're looking for is not
13531 // contained in the list, so it is hidden.
13532 return true;
13533 }
13534 // The name is not filtered out.
13535 return false;
13536}
13537
// Look up object with given name in library and filter out hidden
// names. Also look up getters and setters.
//
// |trail| records library indices already visited while following re-exports;
// it is used to detect (and break) cycles in the re-export graph.
ObjectPtr Namespace::Lookup(const String& name,
                            ZoneGrowableArray<intptr_t>* trail) const {
  Zone* zone = Thread::Current()->zone();
  const Library& lib = Library::Handle(zone, library());

  if (trail != NULL) {
    // Look for cycle in reexport graph.
    for (int i = 0; i < trail->length(); i++) {
      if (trail->At(i) == lib.index()) {
        // Cycle found: invalidate the trail entries added after the repeated
        // library and report "not found" for this branch.
        for (int j = i + 1; j < trail->length(); j++) {
          (*trail)[j] = -1;
        }
        return Object::null();
      }
    }
  }

  lib.EnsureTopLevelClassIsFinalized();

  intptr_t ignore = 0;
  // Lookup the name in the library's symbols.
  Object& obj = Object::Handle(zone, lib.LookupEntry(name, &ignore));
  // If a plain name was not found directly, try its getter and then its
  // setter mangling.
  if (!Field::IsGetterName(name) && !Field::IsSetterName(name) &&
      (obj.IsNull() || obj.IsLibraryPrefix())) {
    String& accessor_name = String::Handle(zone);
    accessor_name = Field::LookupGetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = lib.LookupEntry(accessor_name, &ignore);
    }
    if (obj.IsNull()) {
      accessor_name = Field::LookupSetterSymbol(name);
      if (!accessor_name.IsNull()) {
        obj = lib.LookupEntry(accessor_name, &ignore);
      }
    }
  }

  // Library prefixes are not exported.
  if (obj.IsNull() || obj.IsLibraryPrefix()) {
    // Lookup in the re-exported symbols.
    obj = lib.LookupReExport(name, trail);
    if (obj.IsNull() && !Field::IsSetterName(name)) {
      // LookupReExport() only returns objects that match the given name.
      // If there is no field/func/getter, try finding a setter.
      const String& setter_name =
          String::Handle(zone, Field::LookupSetterSymbol(name));
      if (!setter_name.IsNull()) {
        obj = lib.LookupReExport(setter_name, trail);
      }
    }
  }
  // Apply this namespace's show/hide combinators last.
  if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
    return Object::null();
  }
  return obj.raw();
}
13596
13597NamespacePtr Namespace::New() {
13598 ASSERT(Object::namespace_class() != Class::null());
13599 ObjectPtr raw = Object::Allocate(Namespace::kClassId,
13600 Namespace::InstanceSize(), Heap::kOld);
13601 return static_cast<NamespacePtr>(raw);
13602}
13603
// Allocates a Namespace over |library| with the given show/hide combinator
// lists. A null list means "no combinator of that kind"; non-null lists must
// be non-empty.
NamespacePtr Namespace::New(const Library& library,
                            const Array& show_names,
                            const Array& hide_names) {
  ASSERT(show_names.IsNull() || (show_names.Length() > 0));
  ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
  const Namespace& result = Namespace::Handle(Namespace::New());
  result.StorePointer(&result.raw_ptr()->library_, library.raw());
  result.StorePointer(&result.raw_ptr()->show_names_, show_names.raw());
  result.StorePointer(&result.raw_ptr()->hide_names_, hide_names.raw());
  return result.raw();
}
13615
13616KernelProgramInfoPtr KernelProgramInfo::New() {
13617 ObjectPtr raw =
13618 Object::Allocate(KernelProgramInfo::kClassId,
13619 KernelProgramInfo::InstanceSize(), Heap::kOld);
13620 return static_cast<KernelProgramInfoPtr>(raw);
13621}
13622
// Allocates a KernelProgramInfo populated with the kernel binary's component
// tables (strings, canonical names, metadata, constants), the script list,
// the library/class lookup caches, and a reference keeping the kernel blob
// alive for as long as this info is reachable.
KernelProgramInfoPtr KernelProgramInfo::New(
    const TypedData& string_offsets,
    const ExternalTypedData& string_data,
    const TypedData& canonical_names,
    const ExternalTypedData& metadata_payloads,
    const ExternalTypedData& metadata_mappings,
    const ExternalTypedData& constants_table,
    const Array& scripts,
    const Array& libraries_cache,
    const Array& classes_cache,
    const Object& retained_kernel_blob,
    const uint32_t binary_version) {
  const KernelProgramInfo& info =
      KernelProgramInfo::Handle(KernelProgramInfo::New());
  info.StorePointer(&info.raw_ptr()->string_offsets_, string_offsets.raw());
  info.StorePointer(&info.raw_ptr()->string_data_, string_data.raw());
  info.StorePointer(&info.raw_ptr()->canonical_names_, canonical_names.raw());
  info.StorePointer(&info.raw_ptr()->metadata_payloads_,
                    metadata_payloads.raw());
  info.StorePointer(&info.raw_ptr()->metadata_mappings_,
                    metadata_mappings.raw());
  info.StorePointer(&info.raw_ptr()->scripts_, scripts.raw());
  info.StorePointer(&info.raw_ptr()->constants_table_, constants_table.raw());
  info.StorePointer(&info.raw_ptr()->libraries_cache_, libraries_cache.raw());
  info.StorePointer(&info.raw_ptr()->classes_cache_, classes_cache.raw());
  info.StorePointer(&info.raw_ptr()->retained_kernel_blob_,
                    retained_kernel_blob.raw());
  info.set_kernel_binary_version(binary_version);
  return info.raw();
}
13653
// Fixed printable representation; instances carry no distinguishing name.
const char* KernelProgramInfo::ToCString() const {
  return "[KernelProgramInfo]";
}
13657
13658ScriptPtr KernelProgramInfo::ScriptAt(intptr_t index) const {
13659 const Array& all_scripts = Array::Handle(scripts());
13660 ObjectPtr script = all_scripts.At(index);
13661 return Script::RawCast(script);
13662}
13663
// Replaces the program's script table.
void KernelProgramInfo::set_scripts(const Array& scripts) const {
  StorePointer(&raw_ptr()->scripts_, scripts.raw());
}
13667
// Replaces the evaluated-constants array.
void KernelProgramInfo::set_constants(const Array& constants) const {
  StorePointer(&raw_ptr()->constants_, constants.raw());
}
13671
// Records the kernel binary format version this program was compiled from.
void KernelProgramInfo::set_kernel_binary_version(uint32_t version) const {
  StoreNonPointer(&raw_ptr()->kernel_binary_version_, version);
}
13675
// Replaces the raw (unevaluated) kernel constants table.
void KernelProgramInfo::set_constants_table(
    const ExternalTypedData& value) const {
  StorePointer(&raw_ptr()->constants_table_, value.raw());
}
13680
// Records functions that may turn out to be native and need later patching.
void KernelProgramInfo::set_potential_natives(
    const GrowableObjectArray& candidates) const {
  StorePointer(&raw_ptr()->potential_natives_, candidates.raw());
}
13685
// Records functions that may carry pragma annotations of interest.
void KernelProgramInfo::set_potential_pragma_functions(
    const GrowableObjectArray& candidates) const {
  StorePointer(&raw_ptr()->potential_pragma_functions_, candidates.raw());
}
13690
// Replaces the canonical-name -> Library lookup cache backing store.
void KernelProgramInfo::set_libraries_cache(const Array& cache) const {
  StorePointer(&raw_ptr()->libraries_cache_, cache.raw());
}
13694
// Hash map keyed by Smi, used for the kernel library/class caches below.
typedef UnorderedHashMap<SmiTraits> IntHashMap;
13696
// Looks up the Library registered for kernel canonical name |name_index|,
// or Library::null() if absent. Guarded by the isolate's kernel-data library
// cache mutex; uses reusable thread-local handles to avoid zone allocation.
LibraryPtr KernelProgramInfo::LookupLibrary(Thread* thread,
                                            const Smi& name_index) const {
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_LIBRARY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_SMI_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  Library& result = thread->LibraryHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    Isolate* isolate = thread->isolate();
    SafepointMutexLocker ml(isolate->kernel_data_lib_cache_mutex());
    data = libraries_cache();
    ASSERT(!data.IsNull());
    IntHashMap table(&key, &value, &data);
    result ^= table.GetOrNull(name_index);
    // Read-only access: Release() without storing back.
    table.Release();
  }
  return result.raw();
}
13718
// Inserts |lib| under kernel canonical name |name_index|, returning the
// already-registered library if one raced us in. The table may rehash, so the
// (possibly new) backing array is stored back under the cache mutex.
LibraryPtr KernelProgramInfo::InsertLibrary(Thread* thread,
                                            const Smi& name_index,
                                            const Library& lib) const {
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_LIBRARY_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_SMI_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  Library& result = thread->LibraryHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    Isolate* isolate = thread->isolate();
    SafepointMutexLocker ml(isolate->kernel_data_lib_cache_mutex());
    data = libraries_cache();
    ASSERT(!data.IsNull());
    IntHashMap table(&key, &value, &data);
    result ^= table.InsertOrGetValue(name_index, lib);
    set_libraries_cache(table.Release());
  }
  return result.raw();
}
13741
// Replaces the canonical-name -> Class lookup cache backing store.
void KernelProgramInfo::set_classes_cache(const Array& cache) const {
  StorePointer(&raw_ptr()->classes_cache_, cache.raw());
}
13745
// Looks up the Class registered for kernel canonical name |name_index|, or
// Class::null() if absent. Mirrors LookupLibrary, but uses the class cache
// and its dedicated mutex.
ClassPtr KernelProgramInfo::LookupClass(Thread* thread,
                                        const Smi& name_index) const {
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_CLASS_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_SMI_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  Class& result = thread->ClassHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    Isolate* isolate = thread->isolate();
    SafepointMutexLocker ml(isolate->kernel_data_class_cache_mutex());
    data = classes_cache();
    ASSERT(!data.IsNull());
    IntHashMap table(&key, &value, &data);
    result ^= table.GetOrNull(name_index);
    // Read-only access: Release() without storing back.
    table.Release();
  }
  return result.raw();
}
13767
// Inserts |klass| under kernel canonical name |name_index|, returning the
// already-registered class if present. Mirrors InsertLibrary; the possibly
// rehashed backing array is stored back under the class-cache mutex.
ClassPtr KernelProgramInfo::InsertClass(Thread* thread,
                                        const Smi& name_index,
                                        const Class& klass) const {
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  REUSABLE_CLASS_HANDLESCOPE(thread);
  REUSABLE_OBJECT_HANDLESCOPE(thread);
  REUSABLE_SMI_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  Class& result = thread->ClassHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    Isolate* isolate = thread->isolate();
    SafepointMutexLocker ml(isolate->kernel_data_class_cache_mutex());
    data = classes_cache();
    ASSERT(!data.IsNull());
    IntHashMap table(&key, &value, &data);
    result ^= table.InsertOrGetValue(name_index, klass);
    set_classes_cache(table.Release());
  }
  return result.raw();
}
13790
// Replaces the bytecode component descriptor array.
void KernelProgramInfo::set_bytecode_component(
    const Array& bytecode_component) const {
  StorePointer(&raw_ptr()->bytecode_component_, bytecode_component.raw());
}
13795
// Eagerly finalizes and compiles every class in every loaded library, then
// compiles all closures (including those discovered during compilation).
// Returns the first error encountered, or continues past errors when
// |ignore_error| is set. Returns Error::null() on success.
ErrorPtr Library::CompileAll(bool ignore_error /* = false */) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Error& error = Error::Handle(zone);
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      Isolate::Current()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (int i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      error = cls.EnsureIsFinalized(thread);
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.raw();
      }
      error = Compiler::CompileAllFunctions(cls);
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.raw();
      }
    }
  }

  // Inner functions get added to the closures array. As part of compilation
  // more closures can be added to the end of the array. Compile all the
  // closures until we have reached the end of the "worklist".
  Object& result = Object::Handle(zone);
  const GrowableObjectArray& closures = GrowableObjectArray::Handle(
      zone, Isolate::Current()->object_store()->closure_functions());
  Function& func = Function::Handle(zone);
  // Note: closures.Length() is re-read each iteration on purpose, since
  // compiling one closure may append more.
  for (int i = 0; i < closures.Length(); i++) {
    func ^= closures.At(i);
    if (!func.HasCode()) {
      result = Compiler::CompileFunction(thread, func);
      if (result.IsError()) {
        if (ignore_error) continue;
        return Error::Cast(result).raw();
      }
    }
  }
  return Error::null();
}
13841
13842#if !defined(DART_PRECOMPILED_RUNTIME)
13843
// Finalizes every class of every library in the isolate. Fails with an
// ApiError if any library has not finished loading, or with the first class
// finalization error. Returns Error::null() on success. Mutator thread only.
ErrorPtr Library::FinalizeAllClasses() {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  Zone* zone = thread->zone();
  Error& error = Error::Handle(zone);
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      Isolate::Current()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (int i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    if (!lib.Loaded()) {
      String& uri = String::Handle(zone, lib.url());
      String& msg = String::Handle(
          zone,
          String::NewFormatted("Library '%s' is not loaded. "
                               "Did you forget to call Dart_FinalizeLoading?",
                               uri.ToCString()));
      return ApiError::New(msg);
    }
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      error = cls.EnsureIsFinalized(thread);
      if (!error.IsNull()) {
        return error.raw();
      }
    }
  }
  return Error::null();
}
13875
// Finalizes every class and reads bytecode for all of its functions.
// Returns the first error encountered, or Error::null() on success.
// Mutator thread only.
ErrorPtr Library::ReadAllBytecode() {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsMutatorThread());
  Zone* zone = thread->zone();
  Error& error = Error::Handle(zone);
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      Isolate::Current()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (int i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      error = cls.EnsureIsFinalized(thread);
      if (!error.IsNull()) {
        return error.raw();
      }
      error = Compiler::ReadAllBytecode(cls);
      if (!error.IsNull()) {
        return error.raw();
      }
    }
  }

  return Error::null();
}
13903#endif // !defined(DART_PRECOMPILED_RUNTIME)
13904
// Return Function::null() if function does not exist in libs.
//
// |class_name| of "::" means a top-level (library-scope) function. A
// |function_name| starting with '.' denotes a constructor and is prefixed
// with the class name before lookup.
FunctionPtr Library::GetFunction(const GrowableArray<Library*>& libs,
                                 const char* class_name,
                                 const char* function_name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& func = Function::Handle(zone);
  String& class_str = String::Handle(zone);
  String& func_str = String::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (intptr_t l = 0; l < libs.length(); l++) {
    const Library& lib = *libs[l];
    if (strcmp(class_name, "::") == 0) {
      func_str = Symbols::New(thread, function_name);
      func = lib.LookupFunctionAllowPrivate(func_str);
    } else {
      class_str = String::New(class_name);
      cls = lib.LookupClassAllowPrivate(class_str);
      if (!cls.IsNull()) {
        func_str = String::New(function_name);
        if (function_name[0] == '.') {
          // Constructor: lookup uses the "ClassName.name" form.
          func_str = String::Concat(class_str, func_str);
        }
        func = cls.LookupFunctionAllowPrivate(func_str);
      }
    }
    if (!func.IsNull()) {
      return func.raw();
    }
  }
  return Function::null();
}
13937
// Returns a closure over the top-level function |name| in this library.
// Falls back to re-exports, then to invoking a getter of the same name
// (returning the getter's result). Returns Closure::null() if nothing
// matches; may return an Error from getter invocation.
ObjectPtr Library::GetFunctionClosure(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& func = Function::Handle(zone, LookupFunctionAllowPrivate(name));
  if (func.IsNull()) {
    // Check whether the function is reexported into the library.
    const Object& obj = Object::Handle(zone, LookupReExport(name));
    if (obj.IsFunction()) {
      func ^= obj.raw();
    } else {
      // Check if there is a getter of 'name', in which case invoke it
      // and return the result.
      const String& getter_name = String::Handle(zone, Field::GetterName(name));
      func = LookupFunctionAllowPrivate(getter_name);
      if (func.IsNull()) {
        return Closure::null();
      }
      // Invoke the getter and return the result.
      return DartEntry::InvokeFunction(func, Object::empty_array());
    }
  }
  func = func.ImplicitClosureFunction();
  return func.ImplicitStaticClosure();
}
13962
#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
// Debug-only sanity check: verifies the source fingerprints of all
// compiler-recognized methods and factories against the values recorded in
// recognized_methods_list.h, aborting with FATAL on any mismatch so that
// changed method bodies are not silently miscompiled by recognition.
void Library::CheckFunctionFingerprints() {
  GrowableArray<Library*> all_libs;
  Function& func = Function::Handle();
  bool fingerprints_match = true;

#define CHECK_FINGERPRINTS(class_name, function_name, dest, fp)                \
  func = GetFunction(all_libs, #class_name, #function_name);                   \
  if (func.IsNull()) {                                                         \
    fingerprints_match = false;                                                \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #function_name);   \
  } else {                                                                     \
    fingerprints_match =                                                       \
        func.CheckSourceFingerprint(fp) && fingerprints_match;                 \
  }

#define CHECK_FINGERPRINTS2(class_name, function_name, dest, fp)               \
  CHECK_FINGERPRINTS(class_name, function_name, dest, fp)

  // Core-library-only entries are checked before widening the search set.
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);

  // These lists may reference any of the libraries below, so all are added.
  all_libs.Add(&Library::ZoneHandle(Library::AsyncLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::ConvertLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::InternalLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::FfiLibrary()));
  ASYNC_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
  INTERNAL_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS2);
  POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
  MATH_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  TYPED_DATA_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);

#undef CHECK_FINGERPRINTS
#undef CHECK_FINGERPRINTS2

#define CHECK_FACTORY_FINGERPRINTS(symbol, class_name, factory_name, cid, fp)  \
  func = GetFunction(all_libs, #class_name, #factory_name);                    \
  if (func.IsNull()) {                                                         \
    fingerprints_match = false;                                                \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #factory_name);    \
  } else {                                                                     \
    fingerprints_match =                                                       \
        func.CheckSourceFingerprint(fp) && fingerprints_match;                 \
  }

  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  RECOGNIZED_LIST_FACTORY_LIST(CHECK_FACTORY_FINGERPRINTS);

#undef CHECK_FACTORY_FINGERPRINTS

  if (!fingerprints_match) {
    FATAL(
        "FP mismatch while recognizing methods. If the behavior of "
        "these functions has changed, then changes are also needed in "
        "the VM's compiler. Otherwise the fingerprint can simply be "
        "updated in recognized_methods_list.h\n");
  }
}
#endif  // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME).
14037
// Allocates an Instructions object with room for |size| bytes of machine
// code in code space. The payload is not initialized here; callers copy the
// assembled code in afterwards.
InstructionsPtr Instructions::New(intptr_t size, bool has_monomorphic_entry) {
  ASSERT(size >= 0);
  ASSERT(Object::instructions_class() != Class::null());
  if (size < 0 || size > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in Instructions::New: invalid size %" Pd "\n", size);
  }
  Instructions& result = Instructions::Handle();
  {
    uword aligned_size = Instructions::InstanceSize(size);
    ObjectPtr raw =
        Object::Allocate(Instructions::kClassId, aligned_size, Heap::kCode);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetSize(size);
    result.SetHasMonomorphicEntry(has_monomorphic_entry);
    result.set_stats(nullptr);
  }
  return result.raw();
}
14058
// Fixed printable representation; disassembly is handled elsewhere.
const char* Instructions::ToCString() const {
  return "Instructions";
}
14062
// Returns the code-size statistics attached to this Instructions object via
// the heap peer table (precompiler builds only; nullptr otherwise).
CodeStatistics* Instructions::stats() const {
#if defined(DART_PRECOMPILER)
  return reinterpret_cast<CodeStatistics*>(
      Thread::Current()->heap()->GetPeer(raw()));
#else
  return nullptr;
#endif
}
14071
// Attaches code-size statistics as this object's heap peer (precompiler
// builds only; a no-op elsewhere).
void Instructions::set_stats(CodeStatistics* stats) const {
#if defined(DART_PRECOMPILER)
  Thread::Current()->heap()->SetPeer(raw(), stats);
#endif
}
14077
// Fixed printable representation.
const char* InstructionsSection::ToCString() const {
  return "InstructionsSection";
}
14081
14082// Encode integer |value| in SLEB128 format and store into |data|.
14083static void EncodeSLEB128(GrowableArray<uint8_t>* data, intptr_t value) {
14084 bool is_last_part = false;
14085 while (!is_last_part) {
14086 uint8_t part = value & 0x7f;
14087 value >>= 7;
14088 if ((value == 0 && (part & 0x40) == 0) ||
14089 (value == static_cast<intptr_t>(-1) && (part & 0x40) != 0)) {
14090 is_last_part = true;
14091 } else {
14092 part |= 0x80;
14093 }
14094 data->Add(part);
14095 }
14096}
14097
// Encode integer in SLEB128 format, appending the bytes to |data|.
void PcDescriptors::EncodeInteger(GrowableArray<uint8_t>* data,
                                  intptr_t value) {
  return EncodeSLEB128(data, value);
}
14103
// Decode SLEB128 encoded integer. Update byte_index to the next integer.
intptr_t PcDescriptors::DecodeInteger(intptr_t* byte_index) const {
  NoSafepointScope no_safepoint;
  const uint8_t* data = raw_ptr()->data();
  return Utils::DecodeSLEB128<intptr_t>(data, Length(), byte_index);
}
14110
// Allocates an ObjectPool with |len| entries in old space. Every entry is
// initialized to type kImmediate/kPatchable; callers overwrite entries with
// their real types and values.
ObjectPoolPtr ObjectPool::New(intptr_t len) {
  ASSERT(Object::object_pool_class() != Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in ObjectPool::New: invalid length %" Pd "\n", len);
  }
  ObjectPool& result = ObjectPool::Handle();
  {
    uword size = ObjectPool::InstanceSize(len);
    ObjectPtr raw = Object::Allocate(ObjectPool::kClassId, size, Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    for (intptr_t i = 0; i < len; i++) {
      result.SetTypeAt(i, ObjectPool::EntryType::kImmediate,
                       ObjectPool::Patchability::kPatchable);
    }
  }

  return result.raw();
}
14132
14133#if !defined(DART_PRECOMPILED_RUNTIME)
// Materializes a runtime ObjectPool from the compiler's pool builder,
// copying each entry's type, patchability, and object/raw value. An empty
// builder yields the canonical shared empty pool.
ObjectPoolPtr ObjectPool::NewFromBuilder(
    const compiler::ObjectPoolBuilder& builder) {
  const intptr_t len = builder.CurrentLength();
  if (len == 0) {
    return Object::empty_object_pool().raw();
  }
  const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
  for (intptr_t i = 0; i < len; i++) {
    auto entry = builder.EntryAt(i);
    auto type = entry.type();
    auto patchable = entry.patchable();
    result.SetTypeAt(i, type, patchable);
    if (type == EntryType::kTaggedObject) {
      result.SetObjectAt(i, *entry.obj_);
    } else {
      result.SetRawValueAt(i, entry.raw_value_);
    }
  }
  return result.raw();
}
14154
// Copies this pool's entries into an (empty) compiler pool builder — the
// inverse of NewFromBuilder.
void ObjectPool::CopyInto(compiler::ObjectPoolBuilder* builder) const {
  ASSERT(builder->CurrentLength() == 0);
  for (intptr_t i = 0; i < Length(); i++) {
    auto type = TypeAt(i);
    auto patchable = PatchableAt(i);
    switch (type) {
      case compiler::ObjectPoolBuilderEntry::kTaggedObject: {
        compiler::ObjectPoolBuilderEntry entry(&Object::ZoneHandle(ObjectAt(i)),
                                               patchable);
        builder->AddObject(entry);
        break;
      }
      case compiler::ObjectPoolBuilderEntry::kImmediate:
      case compiler::ObjectPoolBuilderEntry::kNativeFunction:
      case compiler::ObjectPoolBuilderEntry::kNativeFunctionWrapper: {
        compiler::ObjectPoolBuilderEntry entry(RawValueAt(i), type, patchable);
        builder->AddObject(entry);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(builder->CurrentLength() == Length());
}
14180#endif
14181
14182const char* ObjectPool::ToCString() const {
14183 Zone* zone = Thread::Current()->zone();
14184 return zone->PrintToString("ObjectPool len:%" Pd, Length());
14185}
14186
// Dumps every pool entry with its pool-pointer-relative offset, formatting
// each according to its entry type (object, native function, wrapper, or raw
// immediate).
void ObjectPool::DebugPrint() const {
  THR_Print("ObjectPool len:%" Pd " {\n", Length());
  for (intptr_t i = 0; i < Length(); i++) {
    intptr_t offset = OffsetFromIndex(i);
    THR_Print(" [pp+0x%" Px "] ", offset);
    if ((TypeAt(i) == EntryType::kTaggedObject) ||
        (TypeAt(i) == EntryType::kNativeEntryData)) {
      const Object& obj = Object::Handle(ObjectAt(i));
      THR_Print("%s (obj)\n", obj.ToCString());
    } else if (TypeAt(i) == EntryType::kNativeFunction) {
      // Try to resolve the address to a symbol name for readability.
      uword pc = RawValueAt(i);
      uintptr_t start = 0;
      char* name = NativeSymbolResolver::LookupSymbolName(pc, &start);
      if (name != NULL) {
        THR_Print("%s (native function)\n", name);
        NativeSymbolResolver::FreeSymbolName(name);
      } else {
        THR_Print("0x%" Px " (native function)\n", pc);
      }
    } else if (TypeAt(i) == EntryType::kNativeFunctionWrapper) {
      THR_Print("0x%" Px " (native function wrapper)\n", RawValueAt(i));
    } else {
      THR_Print("0x%" Px " (raw)\n", RawValueAt(i));
    }
  }
  THR_Print("}\n");
}
14214
// Length of the encoded descriptor byte stream.
intptr_t PcDescriptors::Length() const {
  return raw_ptr()->length_;
}
14218
// Sets the encoded byte stream length (set once at allocation).
void PcDescriptors::SetLength(intptr_t value) const {
  StoreNonPointer(&raw_ptr()->length_, value);
}
14222
// Copies the delta-encoded descriptor bytes into this object's payload.
// Length() must already match the source length.
void PcDescriptors::CopyData(GrowableArray<uint8_t>* delta_encoded_data) {
  NoSafepointScope no_safepoint;
  uint8_t* data = UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
  for (intptr_t i = 0; i < delta_encoded_data->length(); ++i) {
    data[i] = (*delta_encoded_data)[i];
  }
}
14230
// Allocates a PcDescriptors table in old space and fills it with the given
// delta-encoded bytes.
PcDescriptorsPtr PcDescriptors::New(GrowableArray<uint8_t>* data) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    uword size = PcDescriptors::InstanceSize(data->length());
    ObjectPtr raw = Object::Allocate(PcDescriptors::kClassId, size, Heap::kOld);
    // No safepoint between allocation and initialization so the GC never
    // observes a partially initialized object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(data->length());
    result.CopyData(data);
  }
  return result.raw();
}

// Allocates a PcDescriptors table in old space with room for 'length' bytes;
// the payload is left for the caller to fill.
PcDescriptorsPtr PcDescriptors::New(intptr_t length) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    uword size = PcDescriptors::InstanceSize(length);
    ObjectPtr raw = Object::Allocate(PcDescriptors::kClassId, size, Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  return result.raw();
}
14259
// Returns a fixed-width label for 'kind'; the padding aligns the columns
// produced by ToCString()/PrintHeaderString(). kAnyKind is a query mask used
// by iterators and never appears as a stored kind.
const char* PcDescriptors::KindAsStr(PcDescriptorsLayout::Kind kind) {
  switch (kind) {
    case PcDescriptorsLayout::kDeopt:
      return "deopt        ";
    case PcDescriptorsLayout::kIcCall:
      return "ic-call      ";
    case PcDescriptorsLayout::kUnoptStaticCall:
      return "unopt-call   ";
    case PcDescriptorsLayout::kRuntimeCall:
      return "runtime-call ";
    case PcDescriptorsLayout::kOsrEntry:
      return "osr-entry    ";
    case PcDescriptorsLayout::kRewind:
      return "rewind       ";
    case PcDescriptorsLayout::kBSSRelocation:
      return "bss reloc    ";
    case PcDescriptorsLayout::kOther:
      return "other        ";
    case PcDescriptorsLayout::kAnyKind:
      UNREACHABLE();
      break;
  }
  // Out-of-enum values are a hard error.
  UNREACHABLE();
  return "";
}
14285
// Prints the column header matching the rows emitted by ToCString().
void PcDescriptors::PrintHeaderString() {
  // 4 bits per hex digit + 2 for "0x".
  const int addr_width = (kBitsPerWord / 4) + 2;
  // "*" in a printf format specifier tells it to read the field width from
  // the printf argument list.
  THR_Print("%-*s\tkind    \tdeopt-id\ttok-ix\ttry-ix\tyield-idx\n", addr_width,
            "pc");
}
14294
// Renders all descriptors, one per line. Uses the classic two-pass SNPrint
// idiom: a sizing pass with a NULL buffer, then a formatting pass into a
// zone-allocated buffer of exactly the computed size.
const char* PcDescriptors::ToCString() const {
// "*" in a printf format specifier tells it to read the field width from
// the printf argument list.
#define FORMAT "%#-*" Px "\t%s\t%" Pd "\t\t%s\t%" Pd "\t%" Pd "\n"
  if (Length() == 0) {
    return "empty PcDescriptors\n";
  }
  // 4 bits per hex digit.
  const int addr_width = kBitsPerWord / 4;
  // First compute the buffer size required.
  intptr_t len = 1;  // Trailing '\0'.
  {
    Iterator iter(*this, PcDescriptorsLayout::kAnyKind);
    while (iter.MoveNext()) {
      len += Utils::SNPrint(NULL, 0, FORMAT, addr_width, iter.PcOffset(),
                            KindAsStr(iter.Kind()), iter.DeoptId(),
                            iter.TokenPos().ToCString(), iter.TryIndex(),
                            iter.YieldIndex());
    }
  }
  // Allocate the buffer.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Layout the fields in the buffer.
  intptr_t index = 0;
  Iterator iter(*this, PcDescriptorsLayout::kAnyKind);
  while (iter.MoveNext()) {
    index += Utils::SNPrint((buffer + index), (len - index), FORMAT, addr_width,
                            iter.PcOffset(), KindAsStr(iter.Kind()),
                            iter.DeoptId(), iter.TokenPos().ToCString(),
                            iter.TryIndex(), iter.YieldIndex());
  }
  return buffer;
#undef FORMAT
}
14329
// Verify assumptions (in debug mode only).
// - No two deopt descriptors have the same deoptimization id.
// - No two ic-call descriptors have the same deoptimization id (type feedback).
// A function without unique ids is marked as non-optimizable (e.g., because of
// finally blocks).
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
  // Only check ids for unoptimized code that is optimizable.
  if (!function.IsOptimizable()) {
    return;
  }
  // First pass: find the largest deopt id so the bit vectors below can be
  // sized to cover the full id range.
  intptr_t max_deopt_id = 0;
  Iterator max_iter(*this,
                    PcDescriptorsLayout::kDeopt | PcDescriptorsLayout::kIcCall);
  while (max_iter.MoveNext()) {
    if (max_iter.DeoptId() > max_deopt_id) {
      max_deopt_id = max_iter.DeoptId();
    }
  }

  Zone* zone = Thread::Current()->zone();
  // One bit per id, tracked separately per descriptor kind.
  BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  Iterator iter(*this,
                PcDescriptorsLayout::kDeopt | PcDescriptorsLayout::kIcCall);
  while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
    if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
      // TODO(vegorov): some instructions contain multiple calls and have
      // multiple "after" targets recorded. Right now it is benign but might
      // lead to issues in the future. Fix that and enable verification.
      continue;
    }
    if (iter.Kind() == PcDescriptorsLayout::kDeopt) {
      ASSERT(!deopt_ids->Contains(iter.DeoptId()));
      deopt_ids->Add(iter.DeoptId());
    } else {
      ASSERT(!iccall_ids->Contains(iter.DeoptId()));
      iccall_ids->Add(iter.DeoptId());
    }
  }
#endif  // DEBUG
}
14373
// Stores the byte length of the source-map data (plain integer field).
void CodeSourceMap::SetLength(intptr_t value) const {
  StoreNonPointer(&raw_ptr()->length_, value);
}

// Allocates a CodeSourceMap of 'length' bytes in old space; the payload is
// left for the caller to fill.
CodeSourceMapPtr CodeSourceMap::New(intptr_t length) {
  ASSERT(Object::code_source_map_class() != Class::null());
  Thread* thread = Thread::Current();
  CodeSourceMap& result = CodeSourceMap::Handle(thread->zone());
  {
    uword size = CodeSourceMap::InstanceSize(length);
    ObjectPtr raw = Object::Allocate(CodeSourceMap::kClassId, size, Heap::kOld);
    // Initialize before any safepoint can expose the object to the GC.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(length);
  }
  return result.raw();
}

const char* CodeSourceMap::ToCString() const {
  return "CodeSourceMap";
}
14395
// Hash over the payload bytes, seeded with the payload size so maps of
// different lengths hash differently even with a shared prefix.
intptr_t CompressedStackMaps::Hashcode() const {
  uint32_t hash = payload_size();
  for (uintptr_t i = 0; i < payload_size(); i++) {
    uint8_t byte = PayloadByte(i);
    hash = CombineHashes(hash, byte);
  }
  return FinalizeHash(hash, kHashBits);
}
14404
// Allocates a CompressedStackMaps in old space and copies 'payload' into it.
// 'is_global_table' marks the isolate-wide canonical entry table;
// 'uses_global_table' marks per-code maps whose entries refer into that
// table. A map cannot be both at once.
CompressedStackMapsPtr CompressedStackMaps::New(
    const GrowableArray<uint8_t>& payload,
    bool is_global_table,
    bool uses_global_table) {
  ASSERT(Object::compressed_stackmaps_class() != Class::null());
  // We don't currently allow both flags to be true.
  ASSERT(!is_global_table || !uses_global_table);
  auto& result = CompressedStackMaps::Handle();

  // The size must fit in the bits reserved for it next to the two flags.
  const uintptr_t payload_size = payload.length();
  if (!CompressedStackMapsLayout::SizeField::is_valid(payload_size)) {
    FATAL1(
        "Fatal error in CompressedStackMaps::New: "
        "invalid payload size %" Pu "\n",
        payload_size);
  }
  {
    // CompressedStackMaps data objects are associated with a code object,
    // allocate them in old generation.
    ObjectPtr raw = Object::Allocate(
        CompressedStackMaps::kClassId,
        CompressedStackMaps::InstanceSize(payload_size), Heap::kOld);
    // Keep the GC away until flags, size, and payload are all written.
    NoSafepointScope no_safepoint;
    result ^= raw;
    // Flags and size share a single non-pointer word.
    result.StoreNonPointer(
        &result.raw_ptr()->flags_and_size_,
        CompressedStackMapsLayout::GlobalTableBit::encode(is_global_table) |
            CompressedStackMapsLayout::UsesTableBit::encode(uses_global_table) |
            CompressedStackMapsLayout::SizeField::encode(payload_size));
    auto cursor = result.UnsafeMutableNonPointer(result.raw_ptr()->data());
    memcpy(cursor, payload.data(), payload.length());  // NOLINT
  }

  ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());

  return result.raw();
}
14442
// Renders the entries of a non-global-table map, resolving shared entries
// through the isolate's canonical global table.
const char* CompressedStackMaps::ToCString() const {
  ASSERT(!IsGlobalTable());
  auto const t = Thread::Current();
  auto zone = t->zone();
  const auto& global_table = CompressedStackMaps::Handle(
      zone, t->isolate()->object_store()->canonicalized_stack_map_entries());
  CompressedStackMapsIterator it(*this, global_table);
  return it.ToCString(zone);
}
14452
// Returns the name of the variable at 'var_index'.
StringPtr LocalVarDescriptors::GetName(intptr_t var_index) const {
  ASSERT(var_index < Length());
  ASSERT(Object::Handle(*raw()->ptr()->nameAddrAt(var_index)).IsString());
  return *raw()->ptr()->nameAddrAt(var_index);
}

// Writes the name (heap pointer, via barrier-aware StorePointer) and the
// VarInfo record (plain data, copied by value) for slot 'var_index'.
void LocalVarDescriptors::SetVar(
    intptr_t var_index,
    const String& name,
    LocalVarDescriptorsLayout::VarInfo* info) const {
  ASSERT(var_index < Length());
  ASSERT(!name.IsNull());
  StorePointer(raw()->ptr()->nameAddrAt(var_index), name.raw());
  raw()->ptr()->data()[var_index] = *info;
}

// Copies the VarInfo record for slot 'var_index' into '*info'.
void LocalVarDescriptors::GetInfo(
    intptr_t var_index,
    LocalVarDescriptorsLayout::VarInfo* info) const {
  ASSERT(var_index < Length());
  *info = raw()->ptr()->data()[var_index];
}
14475
// Formats one variable-descriptor row into 'buffer' (or just measures when
// buffer is NULL/len is 0, per snprintf semantics). Returns the number of
// characters the row needs, excluding the terminating NUL.
static int PrintVarInfo(char* buffer,
                        int len,
                        intptr_t i,
                        const String& var_name,
                        const LocalVarDescriptorsLayout::VarInfo& info) {
  const LocalVarDescriptorsLayout::VarInfoKind kind = info.kind();
  const int32_t index = info.index();
  if (kind == LocalVarDescriptorsLayout::kContextLevel) {
    // Context-level records have no name; 'index' holds the context level.
    return Utils::SNPrint(buffer, len,
                          "%2" Pd
                          " %-13s level=%-3d"
                          " begin=%-3d end=%d\n",
                          i, LocalVarDescriptors::KindToCString(kind), index,
                          static_cast<int>(info.begin_pos.value()),
                          static_cast<int>(info.end_pos.value()));
  } else if (kind == LocalVarDescriptorsLayout::kContextVar) {
    // Context variables print their context level (scope_id) and slot index.
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s level=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  } else {
    // Stack variables and saved-context records print scope and index.
    return Utils::SNPrint(
        buffer, len,
        "%2" Pd
        " %-13s scope=%-3d index=%-3d"
        " begin=%-3d end=%-3d name=%s\n",
        i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
        static_cast<int>(info.begin_pos.Pos()),
        static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
  }
}
14511
// Renders all variable descriptors, one row per variable, using the two-pass
// measure-then-format idiom around PrintVarInfo().
const char* LocalVarDescriptors::ToCString() const {
  if (IsNull()) {
    return "LocalVarDescriptors: null";
  }
  if (Length() == 0) {
    return "empty LocalVarDescriptors";
  }
  intptr_t len = 1;  // Trailing '\0'.
  String& var_name = String::Handle();
  for (intptr_t i = 0; i < Length(); i++) {
    LocalVarDescriptorsLayout::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    len += PrintVarInfo(NULL, 0, i, var_name, info);
  }
  // NOTE(review): 'len' already counts the trailing NUL, so the "+ 1" here
  // allocates one spare byte; the sibling ToCString() implementations
  // allocate exactly 'len'. Harmless, but worth confirming/normalizing.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len + 1);
  buffer[0] = '\0';
  intptr_t num_chars = 0;
  for (intptr_t i = 0; i < Length(); i++) {
    LocalVarDescriptorsLayout::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
                              var_name, info);
  }
  return buffer;
}
14539
// Human-readable name for a VarInfoKind; used by PrintVarInfo().
const char* LocalVarDescriptors::KindToCString(
    LocalVarDescriptorsLayout::VarInfoKind kind) {
  switch (kind) {
    case LocalVarDescriptorsLayout::kStackVar:
      return "StackVar";
    case LocalVarDescriptorsLayout::kContextVar:
      return "ContextVar";
    case LocalVarDescriptorsLayout::kContextLevel:
      return "ContextLevel";
    case LocalVarDescriptorsLayout::kSavedCurrentContext:
      return "CurrentCtx";
    default:
      UNIMPLEMENTED();
      return NULL;
  }
}
14556
// Allocates a LocalVarDescriptors with 'num_variables' slots in old space.
// Individual slots are filled later via SetVar().
LocalVarDescriptorsPtr LocalVarDescriptors::New(intptr_t num_variables) {
  ASSERT(Object::var_descriptors_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    // NOTE(review): the range check uses kMaxElements but the message prints
    // kMaxIndex — confirm the two constants agree.
    FATAL2(
        "Fatal error in LocalVarDescriptors::New: "
        "invalid num_variables %" Pd ". Maximum is: %d\n",
        num_variables, LocalVarDescriptorsLayout::kMaxIndex);
  }
  LocalVarDescriptors& result = LocalVarDescriptors::Handle();
  {
    uword size = LocalVarDescriptors::InstanceSize(num_variables);
    ObjectPtr raw =
        Object::Allocate(LocalVarDescriptors::kClassId, size, Heap::kOld);
    // Initialize the entry count before the GC can see the object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_variables);
  }
  return result.raw();
}

// Number of variable slots in this descriptor table.
intptr_t LocalVarDescriptors::Length() const {
  return raw_ptr()->num_entries_;
}
14581
// Number of try-block handler entries in this table.
intptr_t ExceptionHandlers::num_entries() const {
  return raw_ptr()->num_entries_;
}

// Fills in the handler record for 'try_index'. All fields are plain data;
// the NoSafepointScope keeps the raw interior pointer 'info' stable while
// the record is written field by field.
void ExceptionHandlers::SetHandlerInfo(intptr_t try_index,
                                       intptr_t outer_try_index,
                                       uword handler_pc_offset,
                                       bool needs_stacktrace,
                                       bool has_catch_all,
                                       bool is_generated) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  NoSafepointScope no_safepoint;
  ExceptionHandlerInfo* info =
      UnsafeMutableNonPointer(&raw_ptr()->data()[try_index]);
  info->outer_try_index = outer_try_index;
  // Some C compilers warn about the comparison always being true when using <=
  // due to limited range of data type.
  ASSERT((handler_pc_offset == static_cast<uword>(kMaxUint32)) ||
         (handler_pc_offset < static_cast<uword>(kMaxUint32)));
  info->handler_pc_offset = handler_pc_offset;
  info->needs_stacktrace = static_cast<int8_t>(needs_stacktrace);
  info->has_catch_all = static_cast<int8_t>(has_catch_all);
  info->is_generated = static_cast<int8_t>(is_generated);
}
14606
// Copies the full handler record for 'try_index' into '*info'.
void ExceptionHandlers::GetHandlerInfo(intptr_t try_index,
                                       ExceptionHandlerInfo* info) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(info != NULL);
  *info = raw_ptr()->data()[try_index];
}

// PC offset (relative to the code's entry) of the handler for 'try_index'.
uword ExceptionHandlers::HandlerPCOffset(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return raw_ptr()->data()[try_index].handler_pc_offset;
}

// Index of the enclosing try block, as recorded by SetHandlerInfo().
intptr_t ExceptionHandlers::OuterTryIndex(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return raw_ptr()->data()[try_index].outer_try_index;
}

// Whether this handler requires a stack trace object.
bool ExceptionHandlers::NeedsStackTrace(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return raw_ptr()->data()[try_index].needs_stacktrace != 0;
}

// Whether this handler was compiler-generated (vs. written by the user).
bool ExceptionHandlers::IsGenerated(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return raw_ptr()->data()[try_index].is_generated != 0;
}

// Whether this handler catches all exception types.
bool ExceptionHandlers::HasCatchAll(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return raw_ptr()->data()[try_index].has_catch_all != 0;
}
14638
// Stores the array of types handled by the catch clause at 'try_index' into
// the per-handler slot of handled_types_data_.
void ExceptionHandlers::SetHandledTypes(intptr_t try_index,
                                        const Array& handled_types) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(!handled_types.IsNull());
  const Array& handled_types_data =
      Array::Handle(raw_ptr()->handled_types_data_);
  handled_types_data.SetAt(try_index, handled_types);
}

// Returns the array of types handled at 'try_index' (may be null if never
// set).
ArrayPtr ExceptionHandlers::GetHandledTypes(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  Array& array = Array::Handle(raw_ptr()->handled_types_data_);
  array ^= array.At(try_index);
  return array.raw();
}

// Replaces the backing array holding per-handler type lists.
void ExceptionHandlers::set_handled_types_data(const Array& value) const {
  StorePointer(&raw_ptr()->handled_types_data_, value.raw());
}
14658
// Allocates an ExceptionHandlers table with 'num_handlers' entries in old
// space, plus a fresh backing array for the per-handler type lists.
ExceptionHandlersPtr ExceptionHandlers::New(intptr_t num_handlers) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL1(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  ExceptionHandlers& result = ExceptionHandlers::Handle();
  {
    uword size = ExceptionHandlers::InstanceSize(num_handlers);
    ObjectPtr raw =
        Object::Allocate(ExceptionHandlers::kClassId, size, Heap::kOld);
    // Set the entry count before the GC can observe the object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_handlers);
  }
  // Share the canonical empty array when there are no handlers.
  const Array& handled_types_data =
      (num_handlers == 0) ? Object::empty_array()
                          : Array::Handle(Array::New(num_handlers, Heap::kOld));
  result.set_handled_types_data(handled_types_data);
  return result.raw();
}

// Like the overload above, but adopts an existing per-handler type-list
// array; its length determines the number of handler entries.
ExceptionHandlersPtr ExceptionHandlers::New(const Array& handled_types_data) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  const intptr_t num_handlers = handled_types_data.Length();
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL1(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  ExceptionHandlers& result = ExceptionHandlers::Handle();
  {
    uword size = ExceptionHandlers::InstanceSize(num_handlers);
    ObjectPtr raw =
        Object::Allocate(ExceptionHandlers::kClassId, size, Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StoreNonPointer(&result.raw_ptr()->num_entries_, num_handlers);
  }
  result.set_handled_types_data(handled_types_data);
  return result.raw();
}
14704
// Renders every handler entry and its handled types using the two-pass
// measure-then-format SNPrint idiom.
const char* ExceptionHandlers::ToCString() const {
#define FORMAT1 "%" Pd " => %#x  (%" Pd " types) (outer %d)%s%s\n"
#define FORMAT2 "  %d. %s\n"
  if (num_entries() == 0) {
    return "empty ExceptionHandlers\n";
  }
  Array& handled_types = Array::Handle();
  Type& type = Type::Handle();
  ExceptionHandlerInfo info;
  // First compute the buffer size required.
  intptr_t len = 1;  // Trailing '\0'.
  for (intptr_t i = 0; i < num_entries(); i++) {
    GetHandlerInfo(i, &info);
    handled_types = GetHandledTypes(i);
    const intptr_t num_types =
        handled_types.IsNull() ? 0 : handled_types.Length();
    len += Utils::SNPrint(
        NULL, 0, FORMAT1, i, info.handler_pc_offset, num_types,
        info.outer_try_index,
        ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
        ((info.is_generated != 0) ? " (generated)" : ""));
    for (int k = 0; k < num_types; k++) {
      type ^= handled_types.At(k);
      ASSERT(!type.IsNull());
      len += Utils::SNPrint(NULL, 0, FORMAT2, k, type.ToCString());
    }
  }
  // Allocate the buffer.
  char* buffer = Thread::Current()->zone()->Alloc<char>(len);
  // Layout the fields in the buffer.
  intptr_t num_chars = 0;
  for (intptr_t i = 0; i < num_entries(); i++) {
    GetHandlerInfo(i, &info);
    handled_types = GetHandledTypes(i);
    const intptr_t num_types =
        handled_types.IsNull() ? 0 : handled_types.Length();
    num_chars += Utils::SNPrint(
        (buffer + num_chars), (len - num_chars), FORMAT1, i,
        info.handler_pc_offset, num_types, info.outer_try_index,
        ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
        ((info.is_generated != 0) ? " (generated)" : ""));
    for (int k = 0; k < num_types; k++) {
      type ^= handled_types.At(k);
      num_chars += Utils::SNPrint((buffer + num_chars), (len - num_chars),
                                  FORMAT2, k, type.ToCString());
    }
  }
  return buffer;
#undef FORMAT1
#undef FORMAT2
}
14756
// Simple heap-pointer field setters; each uses the barrier-aware
// StorePointer.
void ParameterTypeCheck::set_type_or_bound(const AbstractType& value) const {
  StorePointer(&raw_ptr()->type_or_bound_, value.raw());
}

void ParameterTypeCheck::set_param(const AbstractType& value) const {
  StorePointer(&raw_ptr()->param_, value.raw());
}

void ParameterTypeCheck::set_name(const String& value) const {
  StorePointer(&raw_ptr()->name_, value.raw());
}

void ParameterTypeCheck::set_cache(const SubtypeTestCache& value) const {
  StorePointer(&raw_ptr()->cache_, value.raw());
}
14772
// Summarizes index, parameter, checked type/bound, and name.
const char* ParameterTypeCheck::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  return zone->PrintToString("ParameterTypeCheck(%" Pd " %s %s %s)", index(),
                             Object::Handle(zone, param()).ToCString(),
                             Object::Handle(zone, type_or_bound()).ToCString(),
                             Object::Handle(zone, name()).ToCString());
}

// Allocates a ParameterTypeCheck in old space with index 0; the remaining
// fields are set by the caller via the setters above.
ParameterTypeCheckPtr ParameterTypeCheck::New() {
  ParameterTypeCheck& result = ParameterTypeCheck::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(ParameterTypeCheck::kClassId,
                         ParameterTypeCheck::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_index(0);
  return result.raw();
}
14793
// Stores the cached target code (heap pointer, barrier-aware).
void SingleTargetCache::set_target(const Code& value) const {
  StorePointer(&raw_ptr()->target_, value.raw());
}

const char* SingleTargetCache::ToCString() const {
  return "SingleTargetCache";
}
14801
// Allocates a SingleTargetCache in old space, initialized to an empty target
// and an impossible (kIllegalCid) class-id range so it matches nothing until
// filled in.
SingleTargetCachePtr SingleTargetCache::New() {
  SingleTargetCache& result = SingleTargetCache::Handle();
  {
    // IC data objects are long living objects, allocate them in old generation.
    ObjectPtr raw =
        Object::Allocate(SingleTargetCache::kClassId,
                         SingleTargetCache::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_target(Code::Handle());
  result.set_entry_point(0);
  result.set_lower_limit(kIllegalCid);
  result.set_upper_limit(kIllegalCid);
  return result.raw();
}
14818
// Plain boolean flag; no write barrier needed.
void UnlinkedCall::set_can_patch_to_monomorphic(bool value) const {
  StoreNonPointer(&raw_ptr()->can_patch_to_monomorphic_, value);
}

// Hash is derived from the target selector name only; Equals() below is the
// full equality used alongside it.
intptr_t UnlinkedCall::Hashcode() const {
  return String::Handle(target_name()).Hash();
}
14826
14827bool UnlinkedCall::Equals(const UnlinkedCall& other) const {
14828 return (target_name() == other.target_name()) &&
14829 (arguments_descriptor() == other.arguments_descriptor()) &&
14830 (can_patch_to_monomorphic() == other.can_patch_to_monomorphic());
14831}
14832
const char* UnlinkedCall::ToCString() const {
  return "UnlinkedCall";
}

// Allocates an UnlinkedCall in old space. Patching to monomorphic is allowed
// only outside precompiled (AOT) mode.
UnlinkedCallPtr UnlinkedCall::New() {
  UnlinkedCall& result = UnlinkedCall::Handle();
  result ^= Object::Allocate(UnlinkedCall::kClassId,
                             UnlinkedCall::InstanceSize(), Heap::kOld);
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode);
  return result.raw();
}
14844
// Allocates a MonomorphicSmiableCall in old space, caching the target code,
// the expected receiver class id, and the target's entry point (the latter
// duplicated so the call path need not load it from the Code object).
MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
                                                      const Code& target) {
  auto& result = MonomorphicSmiableCall::Handle();
  result ^=
      Object::Allocate(MonomorphicSmiableCall::kClassId,
                       MonomorphicSmiableCall::InstanceSize(), Heap::kOld);
  result.StorePointer(&result.raw_ptr()->target_, target.raw());
  result.StoreNonPointer(&result.raw_ptr()->expected_cid_, expected_cid);
  result.StoreNonPointer(&result.raw_ptr()->entrypoint_, target.EntryPoint());
  return result.raw();
}

const char* MonomorphicSmiableCall::ToCString() const {
  return "MonomorphicSmiableCall";
}
14860
const char* CallSiteData::ToCString() const {
  // CallSiteData is an abstract class.  We should never reach here.
  UNREACHABLE();
  return "CallSiteData";
}

// Selector name of the call site; must never be null.
void CallSiteData::set_target_name(const String& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->target_name_, value.raw());
}

// Arguments descriptor of the call site; must never be null.
void CallSiteData::set_arguments_descriptor(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->args_descriptor_, value.raw());
}
14876
#if !defined(DART_PRECOMPILED_RUNTIME)
// Records the receiver's static type for this call site and, on x64 only,
// enables exactness tracking when the type is a fully instantiated generic
// class (single-argument-tested, non-FutureOr case).
void ICData::SetReceiversStaticType(const AbstractType& type) const {
  StorePointer(&raw_ptr()->receivers_static_type_, type.raw());

#if defined(TARGET_ARCH_X64)
  if (!type.IsNull() && type.HasTypeClass() && (NumArgsTested() == 1) &&
      type.IsInstantiated() && !type.IsFutureOrType()) {
    const Class& cls = Class::Handle(type.type_class());
    if (cls.IsGeneric()) {
      set_tracking_exactness(true);
    }
  }
#endif  // defined(TARGET_ARCH_X64)
}
#endif
14892
// Summarizes the call site: selector plus check/argument statistics.
const char* ICData::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  const String& name = String::Handle(zone, target_name());
  const intptr_t num_args = NumArgsTested();
  const intptr_t num_checks = NumberOfChecks();
  const intptr_t type_args_len = TypeArgsLen();
  return zone->PrintToString(
      "ICData(%s num-args: %" Pd " num-checks: %" Pd " type-args-len: %" Pd ")",
      name.ToCString(), num_args, num_checks, type_args_len);
}
14903
// Returns the function that owns this ICData. owner_ holds either the
// owning Function directly (original ICData) or another ICData (a clone),
// in which case we recurse through the chain to the original's owner. A
// null owner only occurs in full-AOT snapshots.
FunctionPtr ICData::Owner() const {
  Object& obj = Object::Handle(raw_ptr()->owner_);
  if (obj.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Function::null();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).raw();
  } else {
    ICData& original = ICData::Handle();
    original ^= obj.raw();
    return original.Owner();
  }
}
14917
14918ICDataPtr ICData::Original() const {
14919 if (IsNull()) {
14920 return ICData::null();
14921 }
14922 Object& obj = Object::Handle(raw_ptr()->owner_);
14923 if (obj.IsFunction()) {
14924 return this->raw();
14925 } else {
14926 return ICData::RawCast(obj.raw());
14927 }
14928}
14929
// Marks this ICData as a clone of 'value' by pointing owner_ at the original
// (see Original() above). The original must itself not be a clone.
void ICData::SetOriginal(const ICData& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->owner_, static_cast<ObjectPtr>(value.raw()));
}

// Sets the owning function, making this an original (non-clone) ICData.
void ICData::set_owner(const Function& value) const {
  StorePointer(&raw_ptr()->owner_, static_cast<ObjectPtr>(value.raw()));
}
14939
// Stores the deopt id (32-bit field; unavailable in precompiled runtime).
void ICData::set_deopt_id(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(value <= kMaxInt32);
  StoreNonPointer(&raw_ptr()->deopt_id_, value);
#endif
}

// Publishes a new check-entry array. The release store pairs with readers
// that load entries_ with acquire semantics.
void ICData::set_entries(const Array& value) const {
  ASSERT(!value.IsNull());
  StorePointer<ArrayPtr, std::memory_order_release>(&raw_ptr()->entries_,
                                                    value.raw());
}
14954
// Number of leading arguments whose class ids are tested at this call site
// (stored as a 2-bit field inside state_bits_).
intptr_t ICData::NumArgsTested() const {
  return NumArgsTestedBits::decode(raw_ptr()->state_bits_);
}

void ICData::SetNumArgsTested(intptr_t value) const {
  ASSERT(Utils::IsUint(2, value));
  StoreNonPointer(&raw_ptr()->state_bits_,
                  NumArgsTestedBits::update(value, raw_ptr()->state_bits_));
}
14964
// The following queries all delegate to the call site's ArgumentsDescriptor.

// Number of type arguments passed at the call site.
intptr_t ICData::TypeArgsLen() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.TypeArgsLen();
}

// Argument count including the type-argument vector slot (if any).
intptr_t ICData::CountWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.CountWithTypeArgs();
}

// Argument count excluding type arguments.
intptr_t ICData::CountWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Count();
}

// Argument size in words, excluding type arguments.
intptr_t ICData::SizeWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Size();
}

// Argument size in words, including the type-argument vector slot (if any).
intptr_t ICData::SizeWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.SizeWithTypeArgs();
}
14989
// Bit set of recorded deopt reasons (a bit field inside state_bits_).
uint32_t ICData::DeoptReasons() const {
  return DeoptReasonBits::decode(raw_ptr()->state_bits_);
}

void ICData::SetDeoptReasons(uint32_t reasons) const {
  StoreNonPointer(&raw_ptr()->state_bits_,
                  DeoptReasonBits::update(reasons, raw_ptr()->state_bits_));
}
14998
14999bool ICData::HasDeoptReason(DeoptReasonId reason) const {
15000 ASSERT(reason <= kLastRecordedDeoptReason);
15001 return (DeoptReasons() & (1 << reason)) != 0;
15002}
15003
15004void ICData::AddDeoptReason(DeoptReasonId reason) const {
15005 if (reason <= kLastRecordedDeoptReason) {
15006 SetDeoptReasons(DeoptReasons() | (1 << reason));
15007 }
15008}
15009
// Name of a RebindRule enumerator, or nullptr for out-of-range values.
// The case list is generated from FOR_EACH_REBIND_RULE so it stays in sync
// with the enum.
const char* ICData::RebindRuleToCString(RebindRule r) {
  switch (r) {
#define RULE_CASE(Name)                                                        \
  case RebindRule::k##Name:                                                    \
    return #Name;
    FOR_EACH_REBIND_RULE(RULE_CASE)
#undef RULE_CASE
    default:
      return nullptr;
  }
}

// Inverse of RebindRuleToCString: parses 'str' into '*out'. Returns false
// (leaving '*out' untouched) when no enumerator name matches.
bool ICData::ParseRebindRule(const char* str, RebindRule* out) {
#define RULE_CASE(Name)                                                        \
  if (strcmp(str, #Name) == 0) {                                               \
    *out = RebindRule::k##Name;                                                \
    return true;                                                               \
  }
  FOR_EACH_REBIND_RULE(RULE_CASE)
#undef RULE_CASE
  return false;
}
15032
// Rebind rule (a bit field inside state_bits_) controlling how this call
// site may be re-bound, e.g. after reload.
ICData::RebindRule ICData::rebind_rule() const {
  return (ICData::RebindRule)RebindRuleBits::decode(raw_ptr()->state_bits_);
}

void ICData::set_rebind_rule(uint32_t rebind_rule) const {
  StoreNonPointer(&raw_ptr()->state_bits_,
                  RebindRuleBits::update(rebind_rule, raw_ptr()->state_bits_));
}

// Every rule other than kInstance denotes a static call site.
bool ICData::is_static_call() const {
  return rebind_rule() != kInstance;
}

// Overwrites the entire packed state word (num-args, deopt reasons, rebind
// rule, etc.) at once.
void ICData::set_state_bits(uint32_t bits) const {
  StoreNonPointer(&raw_ptr()->state_bits_, bits);
}
15049
// Width (in array slots) of one check entry: the tested class ids, the
// target function, the hit count, and optionally the exactness state.
intptr_t ICData::TestEntryLengthFor(intptr_t num_args,
                                    bool tracking_exactness) {
  return num_args + 1 /* target function*/ + 1 /* frequency */ +
         (tracking_exactness ? 1 : 0) /* exactness state */;
}

// Entry width for this particular ICData's configuration.
intptr_t ICData::TestEntryLength() const {
  return TestEntryLengthFor(NumArgsTested(), is_tracking_exactness());
}

// Total number of entry rows in the entries array, including the trailing
// sentinel row.
intptr_t ICData::Length() const {
  return (Smi::Value(entries()->ptr()->length_) / TestEntryLength());
}
15063
// Number of real (non-sentinel) check entries. The entries array always
// terminates with a sentinel row, so the scan must find one.
intptr_t ICData::NumberOfChecks() const {
  const intptr_t length = Length();
  for (intptr_t i = 0; i < length; i++) {
    if (IsSentinelAt(i)) {
      return i;
    }
  }
  UNREACHABLE();
  return -1;
}
15074
15075bool ICData::NumberOfChecksIs(intptr_t n) const {
15076 const intptr_t length = Length();
15077 for (intptr_t i = 0; i < length; i++) {
15078 if (i == n) {
15079 return IsSentinelAt(i);
15080 } else {
15081 if (IsSentinelAt(i)) return false;
15082 }
15083 }
15084 return n == length;
15085}
15086
15087// Discounts any checks with usage of zero.
15088intptr_t ICData::NumberOfUsedChecks() const {
15089 intptr_t n = NumberOfChecks();
15090 if (n == 0) {
15091 return 0;
15092 }
15093 intptr_t count = 0;
15094 for (intptr_t i = 0; i < n; i++) {
15095 if (GetCountAt(i) > 0) {
15096 count++;
15097 }
15098 }
15099 return count;
15100}
15101
// Fills the last entry row of 'data' with the sentinel value (the
// kIllegalCid Smi) so scans like NumberOfChecks() terminate.
void ICData::WriteSentinel(const Array& data, intptr_t test_entry_length) {
  ASSERT(!data.IsNull());
  RELEASE_ASSERT(smi_illegal_cid().Value() == kIllegalCid);
  for (intptr_t i = 1; i <= test_entry_length; i++) {
    data.SetAt(data.Length() - i, smi_illegal_cid());
  }
}
15109
#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool ICData::HasCheck(const GrowableArray<intptr_t>& cids) const {
  return FindCheck(cids) != -1;
}
#endif  // DEBUG
15116
15117intptr_t ICData::FindCheck(const GrowableArray<intptr_t>& cids) const {
15118 const intptr_t len = NumberOfChecks();
15119 for (intptr_t i = 0; i < len; i++) {
15120 GrowableArray<intptr_t> class_ids;
15121 GetClassIdsAt(i, &class_ids);
15122 bool matches = true;
15123 for (intptr_t k = 0; k < class_ids.length(); k++) {
15124 ASSERT(class_ids[k] != kIllegalCid);
15125 if (class_ids[k] != cids[k]) {
15126 matches = false;
15127 break;
15128 }
15129 }
15130 if (matches) {
15131 return i;
15132 }
15133 }
15134 return -1;
15135}
15136
15137void ICData::WriteSentinelAt(intptr_t index) const {
15138 const intptr_t len = Length();
15139 ASSERT(index >= 0);
15140 ASSERT(index < len);
15141 Thread* thread = Thread::Current();
15142 REUSABLE_ARRAY_HANDLESCOPE(thread);
15143 Array& data = thread->ArrayHandle();
15144 data = entries();
15145 const intptr_t start = index * TestEntryLength();
15146 const intptr_t end = start + TestEntryLength();
15147 for (intptr_t i = start; i < end; i++) {
15148 data.SetAt(i, smi_illegal_cid());
15149 }
15150}
15151
15152void ICData::ClearCountAt(intptr_t index) const {
15153 ASSERT(index >= 0);
15154 ASSERT(index < NumberOfChecks());
15155 SetCountAt(index, 0);
15156}
15157
// Resets this ICData so it holds only |func| as a static target with a zero
// count, discarding previously collected feedback. No-op when the entries
// array is one of the shared immutable empties.
void ICData::ClearAndSetStaticTarget(const Function& func) const {
  if (IsImmutable()) {
    return;
  }
  const intptr_t len = Length();
  if (len == 0) {
    return;
  }
  // The final entry is always the sentinel.
  ASSERT(IsSentinelAt(len - 1));
  const intptr_t num_args_tested = NumArgsTested();
  if (num_args_tested == 0) {
    // No type feedback is being collected.
    Thread* thread = Thread::Current();
    REUSABLE_ARRAY_HANDLESCOPE(thread);
    Array& data = thread->ArrayHandle();
    data = entries();
    // Static calls with no argument checks hold only one target and the
    // sentinel value.
    ASSERT(len == 2);
    // Static calls with no argument checks only need two words.
    ASSERT(TestEntryLength() == 2);
    // Set the target.
    data.SetAt(TargetIndexFor(num_args_tested), func);
    // Set count to 0 as this is called during compilation, before the
    // call has been executed.
    data.SetAt(CountIndexFor(num_args_tested), Object::smi_zero());
  } else {
    // Type feedback on arguments is being collected.
    // Fill all but the first entry with the sentinel.
    for (intptr_t i = len - 1; i > 0; i--) {
      WriteSentinelAt(i);
    }
    Thread* thread = Thread::Current();
    REUSABLE_ARRAY_HANDLESCOPE(thread);
    Array& data = thread->ArrayHandle();
    data = entries();
    // Rewrite the dummy entry.
    const Smi& object_cid = Smi::Handle(Smi::New(kObjectCid));
    for (intptr_t i = 0; i < NumArgsTested(); i++) {
      data.SetAt(i, object_cid);
    }
    data.SetAt(TargetIndexFor(num_args_tested), func);
    data.SetAt(CountIndexFor(num_args_tested), Object::smi_zero());
  }
}
15204
// Add an initial Smi/Smi check with count 0.
// Returns true if this two-argument call site is (or has just been seeded as)
// a (Smi, Smi) operation resolving to the Smi class's operator, i.e. it is
// eligible for the fast Smi stubs.
bool ICData::AddSmiSmiCheckForFastSmiStubs() const {
  bool is_smi_two_args_op = false;

  ASSERT(NumArgsTested() == 2);
  Zone* zone = Thread::Current()->zone();
  const String& name = String::Handle(zone, target_name());
  const Class& smi_class = Class::Handle(zone, Smi::Class());
  Function& smi_op_target = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, smi_class, name));

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Dynamic invocation forwarders carry a mangled selector; retry resolution
  // with the demangled name.
  if (smi_op_target.IsNull() &&
      Function::IsDynamicInvocationForwarderName(name)) {
    const String& demangled = String::Handle(
        zone, Function::DemangleDynamicInvocationForwarderName(name));
    smi_op_target = Resolver::ResolveDynamicAnyArgs(zone, smi_class, demangled);
  }
#endif

  if (NumberOfChecksIs(0)) {
    // No feedback yet: seed the ICData with a (Smi, Smi) check.
    GrowableArray<intptr_t> class_ids(2);
    class_ids.Add(kSmiCid);
    class_ids.Add(kSmiCid);
    AddCheck(class_ids, smi_op_target);
    // 'AddCheck' sets the initial count to 1.
    SetCountAt(0, 0);
    is_smi_two_args_op = true;
  } else if (NumberOfChecksIs(1)) {
    // One recorded check: qualifies only if it is exactly the
    // (Smi, Smi) -> smi_op_target check.
    GrowableArray<intptr_t> class_ids(2);
    Function& target = Function::Handle();
    GetCheckAt(0, &class_ids, &target);
    if ((target.raw() == smi_op_target.raw()) && (class_ids[0] == kSmiCid) &&
        (class_ids[1] == kSmiCid)) {
      is_smi_two_args_op = true;
    }
  }
  return is_smi_two_args_op;
}
15244
// Used for unoptimized static calls when no class-ids are checked.
// When arguments are tested, stores |target| behind a dummy kObjectCid
// entry; otherwise appends a (target, count=0) entry directly.
void ICData::AddTarget(const Function& target) const {
  ASSERT(!target.IsNull());
  if (NumArgsTested() > 0) {
    // Create a fake cid entry, so that we can store the target.
    if (NumArgsTested() == 1) {
      AddReceiverCheck(kObjectCid, target, 1);
    } else {
      GrowableArray<intptr_t> class_ids(NumArgsTested());
      for (intptr_t i = 0; i < NumArgsTested(); i++) {
        class_ids.Add(kObjectCid);
      }
      AddCheck(class_ids, target);
    }
    return;
  }
  ASSERT(NumArgsTested() == 0);
  // Can add only once.
  const intptr_t old_num = NumberOfChecks();
  ASSERT(old_num == 0);
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  // Grow by one test entry and re-terminate with the sentinel.
  const intptr_t new_len = data.Length() + TestEntryLength();
  data = Array::Grow(data, new_len, Heap::kOld);
  WriteSentinel(data, TestEntryLength());
  intptr_t data_pos = old_num * TestEntryLength();
  ASSERT(!target.IsNull());
  data.SetAt(data_pos + TargetIndexFor(NumArgsTested()), target);
  // Set count to 0 as this is called during compilation, before the
  // call has been executed.
  data.SetAt(data_pos + CountIndexFor(NumArgsTested()), Object::smi_zero());
  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
15282
// Assert-helper: checks that |target| is a legitimate target for this call
// site even though its name may differ from target_name() — either a
// dynamic invocation forwarder whose demangled name matches, or one of the
// special _simpleInstanceOf intrinsic targets.
bool ICData::ValidateInterceptor(const Function& target) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const String& name = String::Handle(target_name());
  if (Function::IsDynamicInvocationForwarderName(name)) {
    return Function::DemangleDynamicInvocationForwarderName(name) ==
           target.name();
  }
#endif
  ObjectStore* store = Isolate::Current()->object_store();
  ASSERT((target.raw() == store->simple_instance_of_true_function()) ||
         (target.raw() == store->simple_instance_of_false_function()));
  const String& instance_of_name = String::Handle(
      Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).raw());
  ASSERT(target_name() == instance_of_name.raw());
  return true;
}
15299
// Appends a new check with the given class ids and |target| at frequency
// |count|. Only valid for call sites testing more than one argument and not
// tracking exactness; a dummy kObjectCid entry left by AddTarget is replaced
// in place instead of appending.
void ICData::AddCheck(const GrowableArray<intptr_t>& class_ids,
                      const Function& target,
                      intptr_t count) const {
  ASSERT(!is_tracking_exactness());
  ASSERT(!target.IsNull());
  ASSERT((target.name() == target_name()) || ValidateInterceptor(target));
  DEBUG_ASSERT(!HasCheck(class_ids));
  ASSERT(NumArgsTested() > 1);  // Otherwise use 'AddReceiverCheck'.
  const intptr_t num_args_tested = NumArgsTested();
  ASSERT(class_ids.length() == num_args_tested);
  const intptr_t old_num = NumberOfChecks();
  Array& data = Array::Handle(entries());
  // ICData of static calls with NumArgsTested() > 0 have initially a
  // dummy set of cids entered (see ICData::AddTarget). That entry is
  // overwritten by first real type feedback data.
  if (old_num == 1) {
    bool has_dummy_entry = true;
    for (intptr_t i = 0; i < num_args_tested; i++) {
      if (Smi::Value(Smi::RawCast(data.At(i))) != kObjectCid) {
        has_dummy_entry = false;
        break;
      }
    }
    if (has_dummy_entry) {
      ASSERT(target.raw() == data.At(TargetIndexFor(num_args_tested)));
      // Replace dummy entry.
      Smi& value = Smi::Handle();
      for (intptr_t i = 0; i < NumArgsTested(); i++) {
        ASSERT(class_ids[i] != kIllegalCid);
        value = Smi::New(class_ids[i]);
        data.SetAt(i, value);
      }
      return;
    }
  }
  intptr_t index = -1;
  data = Grow(&index);
  ASSERT(!data.IsNull());
  intptr_t data_pos = index * TestEntryLength();
  Smi& value = Smi::Handle();
  for (intptr_t i = 0; i < class_ids.length(); i++) {
    // kIllegalCid is used as terminating value, do not add it.
    ASSERT(class_ids[i] != kIllegalCid);
    value = Smi::New(class_ids[i]);
    data.SetAt(data_pos + i, value);
  }
  ASSERT(!target.IsNull());
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
  value = Smi::New(count);
  data.SetAt(data_pos + CountIndexFor(num_args_tested), value);
  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
15354
// Grows the entries array by one test entry and writes a fresh terminating
// sentinel. On return, *index is the position of the former sentinel entry,
// which the caller may now fill in; the caller must publish the returned
// array via set_entries() afterwards.
ArrayPtr ICData::Grow(intptr_t* index) const {
  Array& data = Array::Handle(entries());
  // Last entry in array should be a sentinel and will be the new entry
  // that can be updated after growing.
  *index = Length() - 1;
  ASSERT(*index >= 0);
  ASSERT(IsSentinelAt(*index));
  // Grow the array and write the new final sentinel into place.
  const intptr_t new_len = data.Length() + TestEntryLength();
  data = Array::Grow(data, new_len, Heap::kOld);
  WriteSentinel(data, TestEntryLength());
  return data.raw();
}
15368
15369void ICData::DebugDump() const {
15370 const Function& owner = Function::Handle(Owner());
15371 THR_Print("ICData::DebugDump\n");
15372 THR_Print("Owner = %s [deopt=%" Pd "]\n", owner.ToCString(), deopt_id());
15373 THR_Print("NumArgsTested = %" Pd "\n", NumArgsTested());
15374 THR_Print("Length = %" Pd "\n", Length());
15375 THR_Print("NumberOfChecks = %" Pd "\n", NumberOfChecks());
15376
15377 GrowableArray<intptr_t> class_ids;
15378 for (intptr_t i = 0; i < NumberOfChecks(); i++) {
15379 THR_Print("Check[%" Pd "]:", i);
15380 GetClassIdsAt(i, &class_ids);
15381 for (intptr_t c = 0; c < class_ids.length(); c++) {
15382 THR_Print(" %" Pd "", class_ids[c]);
15383 }
15384 THR_Print("--- %" Pd " hits\n", GetCountAt(i));
15385 }
15386}
15387
// Appends a single-receiver-class check. In JIT mode the entry stores the
// target function, the count and (optionally) the exactness state; in AOT
// mode it stores the target's Code and entry point instead. A Smi receiver
// check is kept at position 0.
void ICData::AddReceiverCheck(intptr_t receiver_class_id,
                              const Function& target,
                              intptr_t count,
                              StaticTypeExactnessState exactness) const {
#if defined(DEBUG)
  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  ASSERT(!HasCheck(class_ids));
#endif  // DEBUG
  ASSERT(!target.IsNull());
  const intptr_t kNumArgsTested = 1;
  ASSERT(NumArgsTested() == kNumArgsTested);  // Otherwise use 'AddCheck'.
  ASSERT(receiver_class_id != kIllegalCid);

  intptr_t index = -1;
  Array& data = Array::Handle(Grow(&index));
  intptr_t data_pos = index * TestEntryLength();
  if ((receiver_class_id == kSmiCid) && (data_pos > 0)) {
    ASSERT(GetReceiverClassIdAt(0) != kSmiCid);
    // Move class occupying position 0 to the data_pos.
    for (intptr_t i = 0; i < TestEntryLength(); i++) {
      data.SetAt(data_pos + i, Object::Handle(data.At(i)));
    }
    // Insert kSmiCid in position 0.
    data_pos = 0;
  }
  data.SetAt(data_pos, Smi::Handle(Smi::New(receiver_class_id)));

#if !defined(DART_PRECOMPILED_RUNTIME)
  // JIT
  data.SetAt(data_pos + TargetIndexFor(kNumArgsTested), target);
  data.SetAt(data_pos + CountIndexFor(kNumArgsTested),
             Smi::Handle(Smi::New(count)));
  if (is_tracking_exactness()) {
    data.SetAt(data_pos + ExactnessIndexFor(kNumArgsTested),
               Smi::Handle(Smi::New(exactness.Encode())));
  }
#else
  // AOT
  ASSERT(target.HasCode());
  const Code& code = Code::Handle(target.CurrentCode());
  const Smi& entry_point =
      Smi::Handle(Smi::FromAlignedAddress(code.EntryPoint()));
  data.SetAt(data_pos + CodeIndexFor(kNumArgsTested), code);
  data.SetAt(data_pos + EntryPointIndexFor(kNumArgsTested), entry_point);
#endif

  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
15439
15440StaticTypeExactnessState ICData::GetExactnessAt(intptr_t index) const {
15441 if (!is_tracking_exactness()) {
15442 return StaticTypeExactnessState::NotTracking();
15443 }
15444 Thread* thread = Thread::Current();
15445 REUSABLE_ARRAY_HANDLESCOPE(thread);
15446 Array& data = thread->ArrayHandle();
15447 data = entries();
15448 intptr_t data_pos =
15449 index * TestEntryLength() + ExactnessIndexFor(NumArgsTested());
15450 return StaticTypeExactnessState::Decode(
15451 Smi::Value(Smi::RawCast(data.At(data_pos))));
15452}
15453
15454void ICData::GetCheckAt(intptr_t index,
15455 GrowableArray<intptr_t>* class_ids,
15456 Function* target) const {
15457 ASSERT(index < NumberOfChecks());
15458 ASSERT(class_ids != NULL);
15459 ASSERT(target != NULL);
15460 class_ids->Clear();
15461 Thread* thread = Thread::Current();
15462 REUSABLE_ARRAY_HANDLESCOPE(thread);
15463 Array& data = thread->ArrayHandle();
15464 data = entries();
15465 intptr_t data_pos = index * TestEntryLength();
15466 for (intptr_t i = 0; i < NumArgsTested(); i++) {
15467 class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos + i))));
15468 }
15469 (*target) ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
15470}
15471
15472bool ICData::IsSentinelAt(intptr_t index) const {
15473 ASSERT(index < Length());
15474 Thread* thread = Thread::Current();
15475 REUSABLE_ARRAY_HANDLESCOPE(thread);
15476 Array& data = thread->ArrayHandle();
15477 data = entries();
15478 const intptr_t entry_length = TestEntryLength();
15479 intptr_t data_pos = index * TestEntryLength();
15480 for (intptr_t i = 0; i < entry_length; i++) {
15481 if (data.At(data_pos++) != smi_illegal_cid().raw()) {
15482 return false;
15483 }
15484 }
15485 // The entry at |index| was filled with the value kIllegalCid.
15486 return true;
15487}
15488
15489void ICData::GetClassIdsAt(intptr_t index,
15490 GrowableArray<intptr_t>* class_ids) const {
15491 ASSERT(index < Length());
15492 ASSERT(class_ids != NULL);
15493 ASSERT(!IsSentinelAt(index));
15494 class_ids->Clear();
15495 Thread* thread = Thread::Current();
15496 REUSABLE_ARRAY_HANDLESCOPE(thread);
15497 Array& data = thread->ArrayHandle();
15498 data = entries();
15499 intptr_t data_pos = index * TestEntryLength();
15500 for (intptr_t i = 0; i < NumArgsTested(); i++) {
15501 class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos++))));
15502 }
15503}
15504
15505void ICData::GetOneClassCheckAt(intptr_t index,
15506 intptr_t* class_id,
15507 Function* target) const {
15508 ASSERT(class_id != NULL);
15509 ASSERT(target != NULL);
15510 ASSERT(NumArgsTested() == 1);
15511 Thread* thread = Thread::Current();
15512 REUSABLE_ARRAY_HANDLESCOPE(thread);
15513 Array& data = thread->ArrayHandle();
15514 data = entries();
15515 const intptr_t data_pos = index * TestEntryLength();
15516 *class_id = Smi::Value(Smi::RawCast(data.At(data_pos)));
15517 *target ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
15518}
15519
15520intptr_t ICData::GetCidAt(intptr_t index) const {
15521 ASSERT(NumArgsTested() == 1);
15522 Thread* thread = Thread::Current();
15523 REUSABLE_ARRAY_HANDLESCOPE(thread);
15524 Array& data = thread->ArrayHandle();
15525 data = entries();
15526 const intptr_t data_pos = index * TestEntryLength();
15527 return Smi::Value(Smi::RawCast(data.At(data_pos)));
15528}
15529
// Returns the class id tested for argument |arg_nr| in the check at |index|.
intptr_t ICData::GetClassIdAt(intptr_t index, intptr_t arg_nr) const {
  GrowableArray<intptr_t> class_ids;
  GetClassIdsAt(index, &class_ids);
  return class_ids[arg_nr];
}

// Returns the receiver (first tested argument) class id of the check at
// |index|. Reads the raw entries array directly, so no handle allocation
// occurs; the NoSafepointScope keeps the raw pointer valid.
intptr_t ICData::GetReceiverClassIdAt(intptr_t index) const {
  ASSERT(index < Length());
  ASSERT(!IsSentinelAt(index));
  const intptr_t data_pos = index * TestEntryLength();
  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return Smi::Value(Smi::RawCast(raw_data->ptr()->data()[data_pos]));
}
15544
// Returns the target function of the check at |index|. JIT-only: AOT
// entries store (code, entry point) in these slots instead.
FunctionPtr ICData::GetTargetAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return nullptr;
#else
  const intptr_t data_pos =
      index * TestEntryLength() + TargetIndexFor(NumArgsTested());
  ASSERT(Object::Handle(Array::Handle(entries()).At(data_pos)).IsFunction());

  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return static_cast<FunctionPtr>(raw_data->ptr()->data()[data_pos]);
#endif
}

// Returns the raw object stored in the target slot of the check at |index|.
// The same slot index holds a Function in JIT mode and (per SetCodeAt /
// AddReceiverCheck) Code-related data in AOT mode.
ObjectPtr ICData::GetTargetOrCodeAt(intptr_t index) const {
  const intptr_t data_pos =
      index * TestEntryLength() + TargetIndexFor(NumArgsTested());

  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return raw_data->ptr()->data()[data_pos];
}
15568
15569void ICData::IncrementCountAt(intptr_t index, intptr_t value) const {
15570 ASSERT(0 <= value);
15571 ASSERT(value <= Smi::kMaxValue);
15572 SetCountAt(index, Utils::Minimum(GetCountAt(index) + value, Smi::kMaxValue));
15573}
15574
15575void ICData::SetCountAt(intptr_t index, intptr_t value) const {
15576 ASSERT(0 <= value);
15577 ASSERT(value <= Smi::kMaxValue);
15578
15579 Thread* thread = Thread::Current();
15580 REUSABLE_ARRAY_HANDLESCOPE(thread);
15581 Array& data = thread->ArrayHandle();
15582 data = entries();
15583 const intptr_t data_pos =
15584 index * TestEntryLength() + CountIndexFor(NumArgsTested());
15585 data.SetAt(data_pos, Smi::Handle(Smi::New(value)));
15586}
15587
// Returns the usage count of the check at |index|, resetting a (rare)
// overflowed negative counter back to zero. JIT-only: AOT entries have no
// count slot.
intptr_t ICData::GetCountAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return 0;
#else
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CountIndexFor(NumArgsTested());
  intptr_t value = Smi::Value(Smi::RawCast(data.At(data_pos)));
  if (value >= 0) return value;

  // The counter very rarely overflows to a negative value, but if it does, we
  // would rather just reset it to zero.
  SetCountAt(index, 0);
  return 0;
#endif
}
15608
15609intptr_t ICData::AggregateCount() const {
15610 if (IsNull()) return 0;
15611 const intptr_t len = NumberOfChecks();
15612 intptr_t count = 0;
15613 for (intptr_t i = 0; i < len; i++) {
15614 count += GetCountAt(i);
15615 }
15616 return count;
15617}
15618
// Stores |value| into the code slot of the check at |index|. AOT-only: the
// JIT entry layout has no code slot.
void ICData::SetCodeAt(intptr_t index, const Code& value) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CodeIndexFor(NumArgsTested());
  data.SetAt(data_pos, value);
#endif
}

// Stores |value| into the entry-point slot of the check at |index|.
// AOT-only: the JIT entry layout has no entry-point slot.
void ICData::SetEntryPointAt(intptr_t index, const Smi& value) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  Thread* thread = Thread::Current();
  REUSABLE_ARRAY_HANDLESCOPE(thread);
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + EntryPointIndexFor(NumArgsTested());
  data.SetAt(data_pos, value);
#endif
}
15646
15647#if !defined(DART_PRECOMPILED_RUNTIME)
// Builds a fresh one-argument ICData containing a single check (|cid| ->
// |target|), preserving the first check's "count == 0" vs "count > 0" state.
ICDataPtr ICData::AsUnaryClassChecksForCid(intptr_t cid,
                                           const Function& target) const {
  ASSERT(!IsNull());
  const intptr_t kNumArgsTested = 1;
  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));

  // Copy count so that we copy the state "count == 0" vs "count > 0".
  result.AddReceiverCheck(cid, target, GetCountAt(0));
  return result.raw();
}
15658
// Projects this (possibly multi-argument) ICData onto argument |arg_nr|,
// producing a one-argument ICData. Zero-count checks are dropped, and checks
// that collapse onto the same class id have their counts merged.
ICDataPtr ICData::AsUnaryClassChecksForArgNr(intptr_t arg_nr) const {
  ASSERT(!IsNull());
  ASSERT(NumArgsTested() > arg_nr);
  if ((arg_nr == 0) && (NumArgsTested() == 1)) {
    // Frequent case.
    return raw();
  }
  const intptr_t kNumArgsTested = 1;
  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  const intptr_t len = NumberOfChecks();
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, arg_nr);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    // Merge with an existing check for the same class id, if any.
    intptr_t duplicate_class_id = -1;
    const intptr_t result_len = result.NumberOfChecks();
    for (intptr_t k = 0; k < result_len; k++) {
      if (class_id == result.GetReceiverClassIdAt(k)) {
        duplicate_class_id = k;
        break;
      }
    }
    if (duplicate_class_id >= 0) {
      // This check is valid only when checking the receiver.
      ASSERT((arg_nr != 0) ||
             (result.GetTargetAt(duplicate_class_id) == GetTargetAt(i)));
      result.IncrementCountAt(duplicate_class_id, count);
    } else {
      // This will make sure that Smi is first if it exists.
      result.AddReceiverCheck(class_id, Function::Handle(GetTargetAt(i)),
                              count);
    }
  }

  return result.raw();
}
15697
15698// (cid, count) tuple used to sort ICData by count.
15699struct CidCount {
15700 CidCount(intptr_t cid_, intptr_t count_, Function* f_)
15701 : cid(cid_), count(count_), function(f_) {}
15702
15703 static int HighestCountFirst(const CidCount* a, const CidCount* b);
15704
15705 intptr_t cid;
15706 intptr_t count;
15707 Function* function;
15708};
15709
15710int CidCount::HighestCountFirst(const CidCount* a, const CidCount* b) {
15711 if (a->count > b->count) {
15712 return -1;
15713 }
15714 return (a->count < b->count) ? 1 : 0;
15715}
15716
// Builds a one-argument ICData whose checks are aggregated per class id
// (counts summed, zero-count checks dropped) and ordered by descending
// count.
ICDataPtr ICData::AsUnaryClassChecksSortedByCount() const {
  ASSERT(!IsNull());
  const intptr_t kNumArgsTested = 1;
  const intptr_t len = NumberOfChecks();
  if (len <= 1) {
    // No sorting needed.
    return AsUnaryClassChecks();
  }
  // Aggregate counts per class id of the first tested argument.
  GrowableArray<CidCount> aggregate;
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, 0);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    bool found = false;
    for (intptr_t r = 0; r < aggregate.length(); r++) {
      if (aggregate[r].cid == class_id) {
        aggregate[r].count += count;
        found = true;
        break;
      }
    }
    if (!found) {
      aggregate.Add(
          CidCount(class_id, count, &Function::ZoneHandle(GetTargetAt(i))));
    }
  }
  aggregate.Sort(CidCount::HighestCountFirst);

  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  ASSERT(result.NumberOfChecksIs(0));
  // Room for all entries and the sentinel.
  const intptr_t data_len = result.TestEntryLength() * (aggregate.length() + 1);
  // Allocate the array but do not assign it to result until we have populated
  // it with the aggregate data and the terminating sentinel.
  const Array& data = Array::Handle(Array::New(data_len, Heap::kOld));
  intptr_t pos = 0;
  for (intptr_t i = 0; i < aggregate.length(); i++) {
    data.SetAt(pos + 0, Smi::Handle(Smi::New(aggregate[i].cid)));
    data.SetAt(pos + TargetIndexFor(1), *aggregate[i].function);
    data.SetAt(pos + CountIndexFor(1),
               Smi::Handle(Smi::New(aggregate[i].count)));

    pos += result.TestEntryLength();
  }
  WriteSentinel(data, result.TestEntryLength());
  result.set_entries(data);
  ASSERT(result.NumberOfChecksIs(aggregate.length()));
  return result.raw();
}
15768
// Creates an UnlinkedCall carrying this call site's selector and arguments
// descriptor, for use when the call site is (re)set to the unlinked state.
UnlinkedCallPtr ICData::AsUnlinkedCall() const {
  ASSERT(NumArgsTested() == 1);
  ASSERT(!is_tracking_exactness());
  const UnlinkedCall& result = UnlinkedCall::Handle(UnlinkedCall::New());
  result.set_target_name(String::Handle(target_name()));
  result.set_arguments_descriptor(Array::Handle(arguments_descriptor()));
  // In precompiled mode, patching to monomorphic is only allowed when the
  // receiver is known to never be a Smi — presumably because the
  // monomorphic entry check cannot handle Smi receivers; confirm.
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode ||
                                      receiver_cannot_be_smi());
  return result.raw();
}
15779
15780bool ICData::HasReceiverClassId(intptr_t class_id) const {
15781 ASSERT(NumArgsTested() > 0);
15782 const intptr_t len = NumberOfChecks();
15783 for (intptr_t i = 0; i < len; i++) {
15784 if (IsUsedAt(i)) {
15785 const intptr_t test_class_id = GetReceiverClassIdAt(i);
15786 if (test_class_id == class_id) {
15787 return true;
15788 }
15789 }
15790 }
15791 return false;
15792}
15793#endif
15794
15795bool ICData::IsUsedAt(intptr_t i) const {
15796 if (GetCountAt(i) <= 0) {
15797 // Do not mistake unoptimized static call ICData for unused.
15798 // See ICData::AddTarget.
15799 // TODO(srdjan): Make this test more robust.
15800 if (NumArgsTested() > 0) {
15801 const intptr_t cid = GetReceiverClassIdAt(i);
15802 if (cid == kObjectCid) {
15803 return true;
15804 }
15805 }
15806 return false;
15807 }
15808 return true;
15809}
15810
// Pre-allocates the shared immutable empty entry arrays: one per argument
// count without exactness tracking, plus the single one-argument array with
// exactness tracking.
void ICData::Init() {
  for (int i = 0; i <= kCachedICDataMaxArgsTestedWithoutExactnessTracking;
       i++) {
    cached_icdata_arrays_
        [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx + i] =
            ICData::NewNonCachedEmptyICDataArray(i, false);
  }
  cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx] =
      ICData::NewNonCachedEmptyICDataArray(1, true);
}
15821
15822void ICData::Cleanup() {
15823 for (int i = 0; i < kCachedICDataArrayCount; ++i) {
15824 cached_icdata_arrays_[i] = NULL;
15825 }
15826}
15827
15828ArrayPtr ICData::NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
15829 bool tracking_exactness) {
15830 // IC data array must be null terminated (sentinel entry).
15831 const intptr_t len = TestEntryLengthFor(num_args_tested, tracking_exactness);
15832 const Array& array = Array::Handle(Array::New(len, Heap::kOld));
15833 WriteSentinel(array, len);
15834 array.MakeImmutable();
15835 return array.raw();
15836}
15837
15838ArrayPtr ICData::CachedEmptyICDataArray(intptr_t num_args_tested,
15839 bool tracking_exactness) {
15840 if (tracking_exactness) {
15841 ASSERT(num_args_tested == 1);
15842 return cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx];
15843 } else {
15844 ASSERT(num_args_tested >= 0);
15845 ASSERT(num_args_tested <=
15846 kCachedICDataMaxArgsTestedWithoutExactnessTracking);
15847 return cached_icdata_arrays_
15848 [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
15849 num_args_tested];
15850 }
15851}
15852
// Does not initialize ICData array.
// Allocates and fills in the metadata of an ICData (owner, selector,
// arguments descriptor, deopt id, number of tested args, rebind rule);
// the caller is responsible for installing an entries array.
ICDataPtr ICData::NewDescriptor(Zone* zone,
                                const Function& owner,
                                const String& target_name,
                                const Array& arguments_descriptor,
                                intptr_t deopt_id,
                                intptr_t num_args_tested,
                                RebindRule rebind_rule,
                                const AbstractType& receivers_static_type) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // We should only have null owners in the precompiled runtime, if the
  // owning function for a Code object was optimized out.
  ASSERT(!owner.IsNull());
#endif
  ASSERT(!target_name.IsNull());
  ASSERT(!arguments_descriptor.IsNull());
  ASSERT(Object::icdata_class() != Class::null());
  ASSERT(num_args_tested >= 0);
  ICData& result = ICData::Handle(zone);
  {
    // IC data objects are long living objects, allocate them in old generation.
    ObjectPtr raw =
        Object::Allocate(ICData::kClassId, ICData::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_owner(owner);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  NOT_IN_PRECOMPILED(result.set_deopt_id(deopt_id));
  result.set_state_bits(0);
  result.set_rebind_rule(rebind_rule);
  result.SetNumArgsTested(num_args_tested);
  NOT_IN_PRECOMPILED(result.SetReceiversStaticType(receivers_static_type));
  return result.raw();
}
15889
// True if the entries array is immutable, i.e. this ICData still points at
// one of the shared empty arrays and must not be written in place.
bool ICData::IsImmutable() const {
  return entries()->IsImmutableArray();
}
15893
// Allocates a bare ICData with only deopt id and state bits initialized;
// all other fields are left unset and must be filled in by the caller.
ICDataPtr ICData::New() {
  ICData& result = ICData::Handle();
  {
    // IC data objects are long living objects, allocate them in old generation.
    ObjectPtr raw =
        Object::Allocate(ICData::kClassId, ICData::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_deopt_id(DeoptId::kNone);
  result.set_state_bits(0);
  return result.raw();
}
15907
// Allocates an ICData with the given call-site metadata and installs the
// shared (immutable) empty entries array matching its configuration.
ICDataPtr ICData::New(const Function& owner,
                      const String& target_name,
                      const Array& arguments_descriptor,
                      intptr_t deopt_id,
                      intptr_t num_args_tested,
                      RebindRule rebind_rule,
                      const AbstractType& receivers_static_type) {
  Zone* zone = Thread::Current()->zone();
  const ICData& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receivers_static_type));
  result.set_entries(Array::Handle(
      zone,
      CachedEmptyICDataArray(num_args_tested, result.is_tracking_exactness())));
  return result.raw();
}
15925
15926#if !defined(DART_PRECOMPILED_RUNTIME)
// Creates a new, empty ICData with the same call-site metadata as |from|
// but testing |num_args_tested| arguments. Deopt reasons and the
// megamorphic bit are carried over; recorded checks are not.
ICDataPtr ICData::NewFrom(const ICData& from, intptr_t num_args_tested) {
  // See comment in [ICData::Clone] why we access the megamorphic bit first.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(ICData::New(
      Function::Handle(from.Owner()), String::Handle(from.target_name()),
      Array::Handle(from.arguments_descriptor()), from.deopt_id(),
      num_args_tested, from.rebind_rule(),
      AbstractType::Handle(from.receivers_static_type())));
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);
  return result.raw();
}
15941
// Deep-copies |from|: same metadata plus a fresh copy of its entries array,
// deopt reasons and megamorphic bit.
ICDataPtr ICData::Clone(const ICData& from) {
  Zone* zone = Thread::Current()->zone();

  // We have to check the megamorphic bit before accessing the entries of the
  // ICData to ensure all writes to the entries have been flushed and are
  // visible at this point.
  //
  // This will allow us to maintain the invariant that if the megamorphic bit is
  // set, the number of entries in the ICData have reached the limit.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(
      zone, ICData::NewDescriptor(
                zone, Function::Handle(zone, from.Owner()),
                String::Handle(zone, from.target_name()),
                Array::Handle(zone, from.arguments_descriptor()),
                from.deopt_id(), from.NumArgsTested(), from.rebind_rule(),
                AbstractType::Handle(zone, from.receivers_static_type())));
  // Clone entry array.
  const Array& from_array = Array::Handle(zone, from.entries());
  const intptr_t len = from_array.Length();
  const Array& cloned_array = Array::Handle(zone, Array::New(len, Heap::kOld));
  Object& obj = Object::Handle(zone);
  for (intptr_t i = 0; i < len; i++) {
    obj = from_array.At(i);
    cloned_array.SetAt(i, obj);
  }
  result.set_entries(cloned_array);
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);

  RELEASE_ASSERT(!is_megamorphic ||
                 result.NumberOfChecks() >= FLAG_max_polymorphic_checks);

  return result.raw();
}
15979#endif
15980
const char* WeakSerializationReference::ToCString() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  // In the precompiled runtime the target may have been dropped from the
  // snapshot, so print the "optimized out" marker instead.
  return Symbols::OptimizedOut().ToCString();
#else
  return Object::Handle(target()).ToCString();
#endif
}
15988
15989#if defined(DART_PRECOMPILER)
15990bool WeakSerializationReference::CanWrap(const Object& object) {
15991 // Currently we do not wrap the null object (which cannot be dropped from
15992 // snapshots), non-heap objects, and WSRs (as there is no point in deeply
15993 // nesting them). We also only wrap objects in the precompiler.
15994 return FLAG_precompiled_mode && !object.IsNull() &&
15995 object.raw()->IsHeapObject() && !object.IsWeakSerializationReference();
15996}
15997
// Returns a new WSR whose target is |target|, or |target| itself when it
// cannot be wrapped (see CanWrap above).
ObjectPtr WeakSerializationReference::Wrap(Zone* zone, const Object& target) {
  if (!CanWrap(target)) return target.raw();
  ASSERT(Object::weak_serialization_reference_class() != Class::null());
  WeakSerializationReference& result = WeakSerializationReference::Handle(zone);
  {
    ObjectPtr raw = Object::Allocate(WeakSerializationReference::kClassId,
                                     WeakSerializationReference::InstanceSize(),
                                     Heap::kOld);
    // No safepoint may occur while we hold the untracked |raw| pointer and
    // initialize the freshly allocated object.
    NoSafepointScope no_safepoint;

    result ^= raw;
    result.StorePointer(&result.raw_ptr()->target_, target.raw());
  }
  return result.raw();
}
16013#endif
16014
16015Code::Comments& Code::Comments::New(intptr_t count) {
16016 Comments* comments;
16017 if (count < 0 || count > (kIntptrMax / kNumberOfEntries)) {
16018 // This should be caught before we reach here.
16019 FATAL1("Fatal error in Code::Comments::New: invalid count %" Pd "\n",
16020 count);
16021 }
16022 if (count == 0) {
16023 comments = new Comments(Object::empty_array());
16024 } else {
16025 const Array& data =
16026 Array::Handle(Array::New(count * kNumberOfEntries, Heap::kOld));
16027 comments = new Comments(data);
16028 }
16029 return *comments;
16030}
16031
16032intptr_t Code::Comments::Length() const {
16033 if (comments_.IsNull()) {
16034 return 0;
16035 }
16036 return comments_.Length() / kNumberOfEntries;
16037}
16038
16039intptr_t Code::Comments::PCOffsetAt(intptr_t idx) const {
16040 return Smi::Value(
16041 Smi::RawCast(comments_.At(idx * kNumberOfEntries + kPCOffsetEntry)));
16042}
16043
16044void Code::Comments::SetPCOffsetAt(intptr_t idx, intptr_t pc) {
16045 comments_.SetAt(idx * kNumberOfEntries + kPCOffsetEntry,
16046 Smi::Handle(Smi::New(pc)));
16047}
16048
16049StringPtr Code::Comments::CommentAt(intptr_t idx) const {
16050 return String::RawCast(comments_.At(idx * kNumberOfEntries + kCommentEntry));
16051}
16052
16053void Code::Comments::SetCommentAt(intptr_t idx, const String& comment) {
16054 comments_.SetAt(idx * kNumberOfEntries + kCommentEntry, comment);
16055}
16056
// Wraps |comments| as the backing entry array; does not copy it.
Code::Comments::Comments(const Array& comments) : comments_(comments) {}
16058
16059const char* Code::EntryKindToCString(EntryKind kind) {
16060 switch (kind) {
16061 case EntryKind::kNormal:
16062 return "Normal";
16063 case EntryKind::kUnchecked:
16064 return "Unchecked";
16065 case EntryKind::kMonomorphic:
16066 return "Monomorphic";
16067 case EntryKind::kMonomorphicUnchecked:
16068 return "MonomorphicUnchecked";
16069 default:
16070 UNREACHABLE();
16071 return nullptr;
16072 }
16073}
16074
16075bool Code::ParseEntryKind(const char* str, EntryKind* out) {
16076 if (strcmp(str, "Normal") == 0) {
16077 *out = EntryKind::kNormal;
16078 return true;
16079 } else if (strcmp(str, "Unchecked") == 0) {
16080 *out = EntryKind::kUnchecked;
16081 return true;
16082 } else if (strcmp(str, "Monomorphic") == 0) {
16083 *out = EntryKind::kMonomorphic;
16084 return true;
16085 } else if (strcmp(str, "MonomorphicUnchecked") == 0) {
16086 *out = EntryKind::kMonomorphicUnchecked;
16087 return true;
16088 }
16089 return false;
16090}
16091
// Returns the local variable descriptors, asking the compiler to compute
// them on first access for unoptimized code (ComputeLocalVarDescriptors is
// expected to fill in var_descriptors(), which is re-read on return).
LocalVarDescriptorsPtr Code::GetLocalVarDescriptors() const {
  const LocalVarDescriptors& v = LocalVarDescriptors::Handle(var_descriptors());
  if (v.IsNull()) {
    ASSERT(!is_optimized());
    const Function& f = Function::Handle(function());
    ASSERT(!f.IsIrregexpFunction());  // Not yet implemented.
    Compiler::ComputeLocalVarDescriptors(*this);
  }
  return var_descriptors();
}
16102
// Sets the owner of this code object. The owner may be wrapped in a
// WeakSerializationReference; unwrapped it must be a Function (function
// code), Class (allocation stub), or AbstractType (type testing stub).
void Code::set_owner(const Object& owner) const {
#if defined(DEBUG)
  const auto& unwrapped_owner =
      Object::Handle(WeakSerializationReference::Unwrap(owner));
  ASSERT(unwrapped_owner.IsFunction() || unwrapped_owner.IsClass() ||
         unwrapped_owner.IsAbstractType());
#endif
  StorePointer(&raw_ptr()->owner_, owner.raw());
}
16112
// Overwrites the packed state bit field (see the *Bit setters below).
void Code::set_state_bits(intptr_t bits) const {
  StoreNonPointer(&raw_ptr()->state_bits_, bits);
}
16116
// Updates only the "optimized" bit within state_bits_.
void Code::set_is_optimized(bool value) const {
  set_state_bits(OptimizedBit::update(value, raw_ptr()->state_bits_));
}
16120
// Updates only the "force optimized" bit within state_bits_.
void Code::set_is_force_optimized(bool value) const {
  set_state_bits(ForceOptimizedBit::update(value, raw_ptr()->state_bits_));
}
16124
// Updates only the "alive" bit within state_bits_.
void Code::set_is_alive(bool value) const {
  set_state_bits(AliveBit::update(value, raw_ptr()->state_bits_));
}
16128
// Attaches the (old-space) compressed stack maps for this code object.
void Code::set_compressed_stackmaps(const CompressedStackMaps& maps) const {
  ASSERT(maps.IsOld());
  StorePointer(&raw_ptr()->compressed_stackmaps_, maps.raw());
}
16133
16134#if !defined(DART_PRECOMPILED_RUNTIME)
// In JIT mode the |catch_entry_| slot holds the number of variables as a
// Smi (in precompiled mode the same slot holds the catch entry moves maps,
// see catch_entry_moves_maps below).
intptr_t Code::num_variables() const {
  ASSERT(!FLAG_precompiled_mode);
  return Smi::Value(Smi::RawCast(raw_ptr()->catch_entry_));
}
// JIT-mode counterpart of num_variables(): stores the count as a Smi in the
// shared |catch_entry_| slot.
void Code::set_num_variables(intptr_t num_variables) const {
  ASSERT(!FLAG_precompiled_mode);
  // Object::RawCast is needed for StorePointer template argument resolution.
  StorePointer(&raw_ptr()->catch_entry_,
               Object::RawCast(Smi::New(num_variables)));
}
16145#endif
16146
16147#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
// In precompiled mode the |catch_entry_| slot holds the catch entry moves
// maps as a TypedData (in JIT mode the same slot holds num_variables).
TypedDataPtr Code::catch_entry_moves_maps() const {
  ASSERT(FLAG_precompiled_mode);
  return TypedData::RawCast(raw_ptr()->catch_entry_);
}
// Precompiled-mode counterpart of catch_entry_moves_maps(): stores the maps
// in the shared |catch_entry_| slot.
void Code::set_catch_entry_moves_maps(const TypedData& maps) const {
  ASSERT(FLAG_precompiled_mode);
  // Object::RawCast is needed for StorePointer template argument resolution.
  StorePointer(&raw_ptr()->catch_entry_, Object::RawCast(maps.raw()));
}
16157#endif
16158
// Attaches the (old-space) deopt info table; JIT only — the precompiled
// runtime never deoptimizes.
void Code::set_deopt_info_array(const Array& array) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(array.IsOld());
  StorePointer(&raw_ptr()->deopt_info_array_, array.raw());
#endif
}
16167
// Attaches the static calls target table; JIT only. In DEBUG builds,
// verifies the invariant that BinarySearchInSCallTable relies on: entries
// are sorted by strictly increasing pc offset.
void Code::set_static_calls_target_table(const Array& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StorePointer(&raw_ptr()->static_calls_target_table_, value.raw());
#endif
#if defined(DEBUG)
  // Check that the table is sorted by pc offsets.
  // FlowGraphCompiler::AddStaticCallTarget adds pc-offsets to the table while
  // emitting assembly. This guarantees that every succeeding pc-offset is
  // larger than the previously added one.
  StaticCallsTable entries(value);
  const intptr_t count = entries.Length();
  for (intptr_t i = 0; i < count - 1; ++i) {
    auto left = Smi::Value(entries[i].Get<kSCallTableKindAndOffset>());
    auto right = Smi::Value(entries[i + 1].Get<kSCallTableKindAndOffset>());
    ASSERT(OffsetField::decode(left) < OffsetField::decode(right));
  }
#endif  // DEBUG
}
16188
// Returns the pool this code uses: the isolate-wide global pool in bare
// instructions mode, otherwise this code object's own pool.
ObjectPoolPtr Code::GetObjectPool() const {
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    return Isolate::Current()->object_store()->global_object_pool();
  }
#endif
  return object_pool();
}
16197
// Whether the debugger has a breakpoint in this code; always false in
// PRODUCT builds, which have no debugger.
bool Code::HasBreakpoint() const {
#if defined(PRODUCT)
  return false;
#else
  return Isolate::Current()->debugger()->HasBreakpoint(*this);
#endif
}
16205
// Looks up the deopt info for |pc| in this (optimized) code's deopt table,
// filling in |deopt_reason| and |deopt_flags| on a hit. Returns null (with
// |deopt_reason| set to kDeoptUnknown) when there is no matching entry, and
// always returns null in AOT, which has no deopt table.
TypedDataPtr Code::GetDeoptInfoAtPc(uword pc,
                                    ICData::DeoptReasonId* deopt_reason,
                                    uint32_t* deopt_flags) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
  return TypedData::null();
#else
  ASSERT(is_optimized());
  const Instructions& instrs = Instructions::Handle(instructions());
  uword code_entry = instrs.PayloadStart();
  const Array& table = Array::Handle(deopt_info_array());
  if (table.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return TypedData::null();
  }
  // Linear search for the PC offset matching the target PC.
  intptr_t length = DeoptTable::GetLength(table);
  Smi& offset = Smi::Handle();
  Smi& reason_and_flags = Smi::Handle();
  TypedData& info = TypedData::Handle();
  for (intptr_t i = 0; i < length; ++i) {
    DeoptTable::GetEntry(table, i, &offset, &info, &reason_and_flags);
    if (pc == (code_entry + offset.Value())) {
      ASSERT(!info.IsNull());
      *deopt_reason = DeoptTable::ReasonField::decode(reason_and_flags.Value());
      *deopt_flags = DeoptTable::FlagsField::decode(reason_and_flags.Value());
      return info.raw();
    }
  }
  *deopt_reason = ICData::kDeoptUnknown;
  return TypedData::null();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
16239
// Returns the index of the static-calls-table entry whose pc offset equals
// |pc - PayloadStart()|, or -1 if there is none. Relies on the table being
// sorted by pc offset (checked in set_static_calls_target_table).
intptr_t Code::BinarySearchInSCallTable(uword pc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  // Raw table accesses below must not be interrupted by a safepoint.
  NoSafepointScope no_safepoint;
  const Array& table = Array::Handle(raw_ptr()->static_calls_target_table_);
  StaticCallsTable entries(table);
  const intptr_t pc_offset = pc - PayloadStart();
  intptr_t imin = 0;
  intptr_t imax = (table.Length() / kSCallTableEntryLength) - 1;
  while (imax >= imin) {
    const intptr_t imid = imin + (imax - imin) / 2;
    const auto offset = OffsetField::decode(
        Smi::Value(entries[imid].Get<kSCallTableKindAndOffset>()));
    if (offset < pc_offset) {
      imin = imid + 1;
    } else if (offset > pc_offset) {
      imax = imid - 1;
    } else {
      return imid;
    }
  }
#endif
  return -1;
}
16265
// Returns the function targeted by the static call at |pc|, or null when
// |pc| has no entry in the static calls table.
FunctionPtr Code::GetStaticCallTargetFunctionAt(uword pc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return Function::null();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  if (i < 0) {
    return Function::null();
  }
  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table_);
  StaticCallsTable entries(array);
  return entries[i].Get<kSCallTableFunctionTarget>();
#endif
}
16280
// Re-points the static call at |pc| to |code|. The entry must exist, and
// |code| (when non-null) must belong to the function recorded for it.
void Code::SetStaticCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table_);
  StaticCallsTable entries(array);
  ASSERT(code.IsNull() ||
         (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
16294
// Like SetStaticCallTargetCodeAt, but also accepts entries that have no
// recorded function target (stub calls) — in that case |code| must be a
// class-owned stub.
void Code::SetStubCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  const Array& array = Array::Handle(raw_ptr()->static_calls_target_table_);
  StaticCallsTable entries(array);
#if defined(DEBUG)
  if (entries[i].Get<kSCallTableFunctionTarget>() == Function::null()) {
    ASSERT(!code.IsNull() && Object::Handle(code.owner()).IsClass());
  } else {
    ASSERT(code.IsNull() ||
           (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  }
#endif
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
16314
16315void Code::Disassemble(DisassemblyFormatter* formatter) const {
16316#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
16317 if (!FLAG_support_disassembler) {
16318 return;
16319 }
16320 const uword start = PayloadStart();
16321 if (formatter == NULL) {
16322 Disassembler::Disassemble(start, start + Size(), *this);
16323 } else {
16324 Disassembler::Disassemble(start, start + Size(), formatter, *this);
16325 }
16326#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
16327}
16328
// Returns a fresh Comments wrapper over this code's comment array (an empty
// wrapper in PRODUCT, where comments are not stored). Note the wrapper is
// heap-allocated and returned by reference, matching Comments::New.
const Code::Comments& Code::comments() const {
#if defined(PRODUCT)
  Comments* comments = new Code::Comments(Array::Handle());
#else
  Comments* comments = new Code::Comments(Array::Handle(raw_ptr()->comments_));
#endif
  return *comments;
}
16337
// Attaches |comments|' (old-space) backing array to this code object; not
// available in PRODUCT builds.
void Code::set_comments(const Code::Comments& comments) const {
#if defined(PRODUCT)
  UNREACHABLE();
#else
  ASSERT(comments.comments_.IsOld());
  StorePointer(&raw_ptr()->comments_, comments.comments_.raw());
#endif
}
16346
// Records the prologue offset as a Smi in |return_address_metadata_|;
// read back by GetPrologueOffset. Not available in PRODUCT builds.
void Code::SetPrologueOffset(intptr_t offset) const {
#if defined(PRODUCT)
  UNREACHABLE();
#else
  ASSERT(offset >= 0);
  StoreSmi(
      reinterpret_cast<SmiPtr const*>(&raw_ptr()->return_address_metadata_),
      Smi::New(offset));
#endif
}
16357
// Returns the prologue offset stored by SetPrologueOffset, or -1 when none
// was recorded (or the slot holds something other than a Smi).
intptr_t Code::GetPrologueOffset() const {
#if defined(PRODUCT)
  UNREACHABLE();
  return -1;
#else
  const Object& object = Object::Handle(raw_ptr()->return_address_metadata_);
  // In the future we may put something other than a smi in
  // |return_address_metadata_|.
  if (object.IsNull() || !object.IsSmi()) {
    return -1;
  }
  return Smi::Cast(object).Value();
#endif
}
16372
// The inline-id -> function mapping consumed by CodeSourceMapReader.
ArrayPtr Code::inlined_id_to_function() const {
  return raw_ptr()->inlined_id_to_function_;
}
16376
// Attaches the (old-space) inline-id -> function mapping.
void Code::set_inlined_id_to_function(const Array& value) const {
  ASSERT(value.IsOld());
  StorePointer(&raw_ptr()->inlined_id_to_function_, value.raw());
}
16381
// Allocates a bare Code object in old space with room for
// |pointer_offsets_length| embedded-pointer offsets, with all state bits
// cleared and empty descriptors/comments. Instructions are attached later
// (see FinalizeCode).
CodePtr Code::New(intptr_t pointer_offsets_length) {
  if (pointer_offsets_length < 0 || pointer_offsets_length > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in Code::New: invalid pointer_offsets_length %" Pd "\n",
           pointer_offsets_length);
  }
  ASSERT(Object::code_class() != Class::null());
  Code& result = Code::Handle();
  {
    uword size = Code::InstanceSize(pointer_offsets_length);
    ObjectPtr raw = Object::Allocate(Code::kClassId, size, Heap::kOld);
    // Initialize the uninitialized object without allowing a safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.set_pointer_offsets_length(pointer_offsets_length);
    result.set_is_optimized(false);
    result.set_is_force_optimized(false);
    result.set_is_alive(false);
    NOT_IN_PRODUCT(result.set_comments(Comments::New(0)));
    NOT_IN_PRODUCT(result.set_compile_timestamp(0));
    result.set_pc_descriptors(Object::empty_descriptors());
  }
  return result.raw();
}
16405
16406#if !defined(DART_PRECOMPILED_RUNTIME)
// Finalizes the assembled code and notifies code observers, naming the code
// after |function|. See FinalizeCode for the heavy lifting.
CodePtr Code::FinalizeCodeAndNotify(const Function& function,
                                    FlowGraphCompiler* compiler,
                                    compiler::Assembler* assembler,
                                    PoolAttachment pool_attachment,
                                    bool optimized,
                                    CodeStatistics* stats) {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(function, code, optimized);
  return code.raw();
}
16419
// As above, but names the code with an explicit C string instead of a
// Function (used for code that has no Dart function, e.g. stubs).
CodePtr Code::FinalizeCodeAndNotify(const char* name,
                                    FlowGraphCompiler* compiler,
                                    compiler::Assembler* assembler,
                                    PoolAttachment pool_attachment,
                                    bool optimized,
                                    CodeStatistics* stats) {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(name, code, optimized);
  return code.raw();
}
16432
16433#if defined(DART_PRECOMPILER)
16434DECLARE_FLAG(charp, write_v8_snapshot_profile_to);
16435DECLARE_FLAG(charp, trace_precompiler_to);
16436#endif // defined(DART_PRECOMPILER)
16437
// Turns the assembler's buffer into a live Code/Instructions pair: allocates
// both objects, copies and fixes up the instruction bytes, records embedded
// pointer offsets, write-protects the instructions, links the two objects,
// and attaches the object pool. The ordering of allocations and the
// NoSafepointScope below are load-bearing — see the inline comments.
CodePtr Code::FinalizeCode(FlowGraphCompiler* compiler,
                           compiler::Assembler* assembler,
                           PoolAttachment pool_attachment,
                           bool optimized,
                           CodeStatistics* stats /* = nullptr */) {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());

  ASSERT(assembler != NULL);
  ObjectPool& object_pool = ObjectPool::Handle();

  if (pool_attachment == PoolAttachment::kAttachPool) {
    if (assembler->HasObjectPoolBuilder()) {
      object_pool =
          ObjectPool::NewFromBuilder(assembler->object_pool_builder());
    } else {
      object_pool = ObjectPool::empty_object_pool().raw();
    }
  } else {
#if defined(DART_PRECOMPILER)
    const bool needs_pool = (FLAG_write_v8_snapshot_profile_to != nullptr) ||
                            (FLAG_trace_precompiler_to != nullptr);
    if (needs_pool && assembler->HasObjectPoolBuilder() &&
        assembler->object_pool_builder().HasParent()) {
      // We are not going to write this pool into snapshot, but we will use
      // it to emit references from this code object to other objects in the
      // snapshot that it uses.
      object_pool =
          ObjectPool::NewFromBuilder(assembler->object_pool_builder());
    }
#endif  // defined(DART_PRECOMPILER)
  }

  // Allocate the Code and Instructions objects. Code is allocated first
  // because a GC during allocation of the code will leave the instruction
  // pages read-only.
  intptr_t pointer_offset_count = assembler->CountPointerOffsets();
  Code& code = Code::ZoneHandle(Code::New(pointer_offset_count));
#ifdef TARGET_ARCH_IA32
  assembler->GetSelfHandle() = code.raw();
#endif
  Instructions& instrs = Instructions::ZoneHandle(Instructions::New(
      assembler->CodeSize(), assembler->has_monomorphic_entry()));

  {
    // Important: if GC is triggerred at any point between Instructions::New
    // and here it would write protect instructions object that we are trying
    // to fill in.
    NoSafepointScope no_safepoint;

    // Copy the instructions into the instruction area and apply all fixups.
    // Embedded pointers are still in handles at this point.
    MemoryRegion region(reinterpret_cast<void*>(instrs.PayloadStart()),
                        instrs.Size());
    assembler->FinalizeInstructions(region);

    const auto& pointer_offsets = assembler->GetPointerOffsets();
    ASSERT(pointer_offsets.length() == pointer_offset_count);
    ASSERT(code.pointer_offsets_length() == pointer_offsets.length());

    // Set pointer offsets list in Code object and resolve all handles in
    // the instruction stream to raw objects.
    Thread* thread = Thread::Current();
    for (intptr_t i = 0; i < pointer_offsets.length(); i++) {
      intptr_t offset_in_instrs = pointer_offsets[i];
      code.SetPointerOffsetAt(i, offset_in_instrs);
      uword addr = region.start() + offset_in_instrs;
      ASSERT(instrs.PayloadStart() <= addr);
      ASSERT((instrs.PayloadStart() + instrs.Size()) > addr);
      const Object* object = LoadUnaligned(reinterpret_cast<Object**>(addr));
      ASSERT(object->IsOld());
      // N.B. The pointer is embedded in the Instructions object, but visited
      // through the Code object.
      code.raw()->ptr()->StorePointerUnaligned(
          reinterpret_cast<ObjectPtr*>(addr), object->raw(), thread);
    }

    // Write protect instructions and, if supported by OS, use dual mapping
    // for execution.
    if (FLAG_write_protect_code) {
      uword address = ObjectLayout::ToAddr(instrs.raw());
      // Check if a dual mapping exists.
      instrs = Instructions::RawCast(OldPage::ToExecutable(instrs.raw()));
      uword exec_address = ObjectLayout::ToAddr(instrs.raw());
      const bool use_dual_mapping = exec_address != address;
      ASSERT(use_dual_mapping == FLAG_dual_map_code);

      // When dual mapping is enabled the executable mapping is RX from the
      // point of allocation and never changes protection.
      // Yet the writable mapping is still turned back from RW to R.
      if (use_dual_mapping) {
        VirtualMemory::Protect(reinterpret_cast<void*>(address),
                               instrs.raw()->ptr()->HeapSize(),
                               VirtualMemory::kReadOnly);
        address = exec_address;
      } else {
        // If dual mapping is disabled and we write protect then we have to
        // change the single mapping from RW -> RX.
        VirtualMemory::Protect(reinterpret_cast<void*>(address),
                               instrs.raw()->ptr()->HeapSize(),
                               VirtualMemory::kReadExecute);
      }
    }

    // Hook up Code and Instructions objects.
    const uword unchecked_offset = assembler->UncheckedEntryOffset();
    code.SetActiveInstructions(instrs, unchecked_offset);
    code.set_instructions(instrs);
    NOT_IN_PRECOMPILED(code.set_unchecked_offset(unchecked_offset));
    code.set_is_alive(true);

    // Set object pool in Instructions object.
    if (!object_pool.IsNull()) {
      code.set_object_pool(object_pool.raw());
    }

#if defined(DART_PRECOMPILER)
    if (stats != nullptr) {
      stats->Finalize();
      instrs.set_stats(stats);
    }
#endif

    CPU::FlushICache(instrs.PayloadStart(), instrs.Size());
  }

#ifndef PRODUCT
  code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
  code.set_comments(CreateCommentsFrom(assembler));
  if (assembler->prologue_offset() >= 0) {
    code.SetPrologueOffset(assembler->prologue_offset());
  } else {
    // No prologue was ever entered, optimistically assume nothing was ever
    // pushed onto the stack.
    code.SetPrologueOffset(assembler->CodeSize());
  }
#endif
  return code.raw();
}
16576
16577void Code::NotifyCodeObservers(const Code& code, bool optimized) {
16578#if !defined(PRODUCT)
16579 ASSERT(!Thread::Current()->IsAtSafepoint());
16580 if (CodeObservers::AreActive()) {
16581 if (code.IsFunctionCode()) {
16582 const auto& function = Function::Handle(code.function());
16583 if (!function.IsNull()) {
16584 return NotifyCodeObservers(function, code, optimized);
16585 }
16586 }
16587 NotifyCodeObservers(code.Name(), code, optimized);
16588 }
16589#endif
16590}
16591
16592void Code::NotifyCodeObservers(const Function& function,
16593 const Code& code,
16594 bool optimized) {
16595#if !defined(PRODUCT)
16596 ASSERT(!function.IsNull());
16597 ASSERT(!Thread::Current()->IsAtSafepoint());
16598 // Calling ToLibNamePrefixedQualifiedCString is very expensive,
16599 // try to avoid it.
16600 if (CodeObservers::AreActive()) {
16601 const char* name = function.ToLibNamePrefixedQualifiedCString();
16602 NotifyCodeObservers(name, code, optimized);
16603 }
16604#endif
16605}
16606
// Notifies all active code observers about |code| under the given |name|,
// passing along its instructions range, prologue offset, and comments.
void Code::NotifyCodeObservers(const char* name,
                               const Code& code,
                               bool optimized) {
#if !defined(PRODUCT)
  ASSERT(name != nullptr);
  ASSERT(!code.IsNull());
  ASSERT(!Thread::Current()->IsAtSafepoint());
  if (CodeObservers::AreActive()) {
    const auto& instrs = Instructions::Handle(code.instructions());
    CodeCommentsWrapper comments_wrapper(code.comments());
    CodeObservers::NotifyAll(name, instrs.PayloadStart(),
                             code.GetPrologueOffset(), instrs.Size(), optimized,
                             &comments_wrapper);
  }
#endif
}
16623#endif // !defined(DART_PRECOMPILED_RUNTIME)
16624
// Heap-walk predicate: matches the code object containing pc_.
bool Code::SlowFindRawCodeVisitor::FindObject(ObjectPtr raw_obj) const {
  return CodeLayout::ContainsPC(raw_obj, pc_);
}
16628
16629CodePtr Code::LookupCodeInIsolate(Isolate* isolate, uword pc) {
16630 ASSERT((isolate == Isolate::Current()) || (isolate == Dart::vm_isolate()));
16631 if (isolate->heap() == NULL) {
16632 return Code::null();
16633 }
16634 HeapIterationScope heap_iteration_scope(Thread::Current());
16635 SlowFindRawCodeVisitor visitor(pc);
16636 ObjectPtr needle = isolate->heap()->FindOldObject(&visitor);
16637 if (needle != Code::null()) {
16638 return static_cast<CodePtr>(needle);
16639 }
16640 return Code::null();
16641}
16642
// Convenience wrapper: lookup |pc| in the current isolate's heap.
CodePtr Code::LookupCode(uword pc) {
  return LookupCodeInIsolate(Isolate::Current(), pc);
}
16646
// Convenience wrapper: lookup |pc| in the VM isolate's heap.
CodePtr Code::LookupCodeInVmIsolate(uword pc) {
  return LookupCodeInIsolate(Dart::vm_isolate(), pc);
}
16650
16651// Given a pc and a timestamp, lookup the code.
16652CodePtr Code::FindCode(uword pc, int64_t timestamp) {
16653 Code& code = Code::Handle(Code::LookupCode(pc));
16654 if (!code.IsNull() && (code.compile_timestamp() == timestamp) &&
16655 (code.PayloadStart() == pc)) {
16656 // Found code in isolate.
16657 return code.raw();
16658 }
16659 code = Code::LookupCodeInVmIsolate(pc);
16660 if (!code.IsNull() && (code.compile_timestamp() == timestamp) &&
16661 (code.PayloadStart() == pc)) {
16662 // Found code in VM isolate.
16663 return code.raw();
16664 }
16665 return Code::null();
16666}
16667
// Returns the token position recorded in the pc descriptors for |pc|, or
// kNoSource when no descriptor has exactly that pc offset.
TokenPosition Code::GetTokenIndexOfPC(uword pc) const {
  uword pc_offset = pc - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return iter.TokenPos();
    }
  }
  return TokenPosition::kNoSource;
}
16679
// Returns the absolute pc of the descriptor of kind |kind| with the given
// deopt id, or 0 when no such descriptor exists.
uword Code::GetPcForDeoptId(intptr_t deopt_id,
                            PcDescriptorsLayout::Kind kind) const {
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, kind);
  while (iter.MoveNext()) {
    if (iter.DeoptId() == deopt_id) {
      uword pc_offset = iter.PcOffset();
      uword pc = PayloadStart() + pc_offset;
      ASSERT(ContainsInstructionAt(pc));
      return pc;
    }
  }
  return 0;
}
16694
// Returns the deopt id of the OSR-entry descriptor at |pc|, or
// DeoptId::kNone when |pc| is not an OSR entry.
intptr_t Code::GetDeoptIdForOsr(uword pc) const {
  uword pc_offset = pc - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kOsrEntry);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return iter.DeoptId();
    }
  }
  return DeoptId::kNone;
}
16706
// Debug string: "Code(<qualified name>)", zone-allocated.
const char* Code::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "Code(%s)",
                     QualifiedName(NameFormattingParams(
                         kScrubbedName, NameDisambiguation::kYes)));
}
16712
// Zone-allocated display name for this code object, dispatching on the
// owner: null owner -> VM stub, Class -> allocation stub, AbstractType ->
// type testing stub, Function (or an un-unwrappable WSR) -> Dart function.
const char* Code::Name() const {
  Zone* zone = Thread::Current()->zone();
  if (IsStubCode()) {
    // Regular stub.
    const char* name = StubCode::NameOfStub(EntryPoint());
    if (name == NULL) {
      return "[unknown stub]";  // Not yet recorded.
    }
    return OS::SCreate(zone, "[Stub] %s", name);
  }
  const auto& obj =
      Object::Handle(zone, WeakSerializationReference::UnwrapIfTarget(owner()));
  if (obj.IsClass()) {
    // Allocation stub.
    return OS::SCreate(zone, "[Stub] Allocate %s",
                       Class::Cast(obj).ScrubbedNameCString());
  } else if (obj.IsAbstractType()) {
    // Type test stub.
    return OS::SCreate(zone, "[Stub] Type Test %s",
                       AbstractType::Cast(obj).ToCString());
  } else {
    ASSERT(IsFunctionCode());
    // Dart function.
    const char* opt = is_optimized() ? "[Optimized]" : "[Unoptimized]";
    // When the function itself was dropped by the precompiler, only the
    // WSR's description remains available for the name.
    const char* function_name =
        obj.IsFunction()
            ? String::Handle(zone, Function::Cast(obj).UserVisibleName())
                  .ToCString()
            : WeakSerializationReference::Cast(obj).ToCString();
    return OS::SCreate(zone, "%s %s", opt, function_name);
  }
}
16745
// Like Name(), but for function code produces a fully qualified name
// formatted per |params|; other owners fall back to Name().
const char* Code::QualifiedName(const NameFormattingParams& params) const {
  Zone* zone = Thread::Current()->zone();
  const Object& obj =
      Object::Handle(zone, WeakSerializationReference::UnwrapIfTarget(owner()));
  if (obj.IsFunction()) {
    ZoneTextBuffer printer(zone);
    printer.AddString(is_optimized() ? "[Optimized] " : "[Unoptimized] ");
    Function::Cast(obj).PrintName(params, &printer);
    return printer.buffer();
  }
  return Name();
}
16758
// A VM stub is identified by a null owner.
bool Code::IsStubCode() const {
  // We should _not_ unwrap any possible WSRs here, as the null value is never
  // wrapped by a WSR.
  return owner() == Object::null();
}
16764
// Allocation stubs are owned by a Class.
bool Code::IsAllocationStubCode() const {
  return OwnerClassId() == kClassCid;
}
16768
16769bool Code::IsTypeTestStubCode() const {
16770 auto const cid = OwnerClassId();
16771 return cid == kAbstractTypeCid || cid == kTypeCid || cid == kTypeRefCid ||
16772 cid == kTypeParameterCid;
16773}
16774
// Regular Dart code is owned by a Function.
bool Code::IsFunctionCode() const {
  return OwnerClassId() == kFunctionCid;
}
16778
// Redirects callers of this function code to the FixCallersTarget stub so
// the next call re-resolves the target (e.g. after deoptimization).
void Code::DisableDartCode() const {
  DEBUG_ASSERT(IsMutatorOrAtSafepoint());
  ASSERT(IsFunctionCode());
  ASSERT(instructions() == active_instructions());
  const Code& new_code = StubCode::FixCallersTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
16787
// Redirects callers of this allocation stub to the FixAllocationStubTarget
// stub so the next call re-resolves the stub.
void Code::DisableStubCode() const {
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(IsAllocationStubCode());
  ASSERT(instructions() == active_instructions());
  const Code& new_code = StubCode::FixAllocationStubTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
16796
// Recomputes the four cached entry points on |code| from |instructions|:
// the normal and monomorphic entries, each in checked and (offset by
// |unchecked_offset|) unchecked flavors. Operates on raw pointers, hence
// the NoSafepointScope.
void Code::InitializeCachedEntryPointsFrom(CodePtr code,
                                           InstructionsPtr instructions,
                                           uint32_t unchecked_offset) {
  NoSafepointScope _;
  const uword entry_point = Instructions::EntryPoint(instructions);
  const uword monomorphic_entry_point =
      Instructions::MonomorphicEntryPoint(instructions);
  code->ptr()->entry_point_ = entry_point;
  code->ptr()->monomorphic_entry_point_ = monomorphic_entry_point;
  code->ptr()->unchecked_entry_point_ = entry_point + unchecked_offset;
  code->ptr()->monomorphic_unchecked_entry_point_ =
      monomorphic_entry_point + unchecked_offset;
}
16810
// Makes |instructions| the instructions that calls through this code
// actually execute, refreshing the cached entry points accordingly (see
// DisableDartCode / ResetActiveInstructions).
void Code::SetActiveInstructions(const Instructions& instructions,
                                 uint32_t unchecked_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  DEBUG_ASSERT(IsMutatorOrAtSafepoint() || !is_alive());
  // RawInstructions are never allocated in New space and hence a
  // store buffer update is not needed here.
  StorePointer(&raw_ptr()->active_instructions_, instructions.raw());
  Code::InitializeCachedEntryPointsFrom(raw(), instructions.raw(),
                                        unchecked_offset);
#endif
}
16824
// Undoes a redirection installed by DisableDartCode/DisableStubCode by
// restoring this code's own instructions as the active ones.
void Code::ResetActiveInstructions() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  SetActiveInstructions(Instructions::Handle(instructions()),
                        raw_ptr()->unchecked_offset_);
#endif
}
16833
// Fills |functions| and |token_positions| with the inlining chain active at
// |pc_offset|, as recorded in the code source map. A no-op for code without
// a source map (VM stubs, allocation stubs, type testing stubs).
void Code::GetInlinedFunctionsAtInstruction(
    intptr_t pc_offset,
    GrowableArray<const Function*>* functions,
    GrowableArray<TokenPosition>* token_positions) const {
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  if (map.IsNull()) {
    ASSERT(!IsFunctionCode());
    return;  // VM stub, allocation stub, or type testing stub.
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.GetInlinedFunctionsAt(pc_offset, functions, token_positions);
}
16848
16849#ifndef PRODUCT
// Emits the inlining intervals of this (optimized) code as JSON.
// NOTE(review): unlike DumpInlineIntervals below, this does not guard
// against a null code_source_map() — presumably optimized code always has
// one; confirm before relying on this with stub code.
void Code::PrintJSONInlineIntervals(JSONObject* jsobj) const {
  if (!is_optimized()) {
    return;  // No inlining.
  }
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.PrintJSONInlineIntervals(jsobj);
}
16860#endif
16861
16862void Code::DumpInlineIntervals() const {
16863 const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
16864 if (map.IsNull()) {
16865 // Stub code.
16866 return;
16867 }
16868 const Array& id_map = Array::Handle(inlined_id_to_function());
16869 const Function& root = Function::Handle(function());
16870 CodeSourceMapReader reader(map, id_map, root);
16871 reader.DumpInlineIntervals(PayloadStart());
16872}
16873
16874void Code::DumpSourcePositions(bool relative_addresses) const {
16875 const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
16876 if (map.IsNull()) {
16877 // Stub code.
16878 return;
16879 }
16880 const Array& id_map = Array::Handle(inlined_id_to_function());
16881 const Function& root = Function::Handle(function());
16882 CodeSourceMapReader reader(map, id_map, root);
16883 reader.DumpSourcePositions(relative_addresses ? 0 : PayloadStart());
16884}
16885
16886void Bytecode::Disassemble(DisassemblyFormatter* formatter) const {
16887#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
16888#if !defined(DART_PRECOMPILED_RUNTIME)
16889 if (!FLAG_support_disassembler) {
16890 return;
16891 }
16892 uword start = PayloadStart();
16893 intptr_t size = Size();
16894 if (formatter == NULL) {
16895 KernelBytecodeDisassembler::Disassemble(start, start + size, *this);
16896 } else {
16897 KernelBytecodeDisassembler::Disassemble(start, start + size, formatter,
16898 *this);
16899 }
16900#endif // !defined(DART_PRECOMPILED_RUNTIME)
16901#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
16902}
16903
// Allocates a new Bytecode object in old space describing the instruction
// payload at |instructions| of |instructions_size| bytes, located at
// |instructions_offset| within the kernel binary.
// Source-position and local-variable offsets start out unset (0); callers
// fill them in after reading the corresponding metadata.
BytecodePtr Bytecode::New(uword instructions,
                          intptr_t instructions_size,
                          intptr_t instructions_offset,
                          const ObjectPool& object_pool) {
  ASSERT(Object::bytecode_class() != Class::null());
  Bytecode& result = Bytecode::Handle();
  {
    uword size = Bytecode::InstanceSize();
    ObjectPtr raw = Object::Allocate(Bytecode::kClassId, size, Heap::kOld);
    // Keep the GC away until all fields of the raw object are initialized.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.set_instructions(instructions);
    result.set_instructions_size(instructions_size);
    result.set_object_pool(object_pool);
    result.set_pc_descriptors(Object::empty_descriptors());
    result.set_instructions_binary_offset(instructions_offset);
    result.set_source_positions_binary_offset(0);
    result.set_local_variables_binary_offset(0);
  }
  return result.raw();
}
16925
16926ExternalTypedDataPtr Bytecode::GetBinary(Zone* zone) const {
16927 const Function& func = Function::Handle(zone, function());
16928 if (func.IsNull()) {
16929 return ExternalTypedData::null();
16930 }
16931 const Script& script = Script::Handle(zone, func.script());
16932 const KernelProgramInfo& info =
16933 KernelProgramInfo::Handle(zone, script.kernel_program_info());
16934 return info.metadata_payloads();
16935}
16936
// Maps |return_address| back to a source token position by scanning the
// bytecode's source-position table. Returns the position of the last entry
// whose pc lies strictly before the given address, or kNoSource when no
// source positions are available.
TokenPosition Bytecode::GetTokenIndexOfPC(uword return_address) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  if (!HasSourcePositions()) {
    return TokenPosition::kNoSource;
  }
  uword pc_offset = return_address - PayloadStart();
  // pc_offset could equal to bytecode size if the last instruction is Throw.
  ASSERT(pc_offset <= static_cast<uword>(Size()));
  kernel::BytecodeSourcePositionsIterator iter(Thread::Current()->zone(),
                                               *this);
  TokenPosition token_pos = TokenPosition::kNoSource;
  // Entries are visited in ascending pc order; remember the last one that
  // starts before pc_offset.
  while (iter.MoveNext()) {
    if (pc_offset <= iter.PcOffset()) {
      break;
    }
    token_pos = iter.TokenPos();
  }
  return token_pos;
#endif
}
16959
// Returns the try-block index covering |return_address|, or -1 when the pc
// is not inside any try block. Relies on the paired begin/end descriptor
// encoding produced by the bytecode reader.
intptr_t Bytecode::GetTryIndexAtPc(uword return_address) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  intptr_t try_index = -1;
  const uword pc_offset = return_address - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
  while (iter.MoveNext()) {
    // PC descriptors for try blocks in bytecode are generated in pairs,
    // marking start and end of a try block.
    // See BytecodeReaderHelper::ReadExceptionsTable for details.
    const intptr_t current_try_index = iter.TryIndex();
    const uword start_pc = iter.PcOffset();
    if (pc_offset < start_pc) {
      // Later pairs only start further out; nothing else can match.
      break;
    }
    const bool has_next = iter.MoveNext();
    ASSERT(has_next);
    const uword end_pc = iter.PcOffset();
    if (start_pc <= pc_offset && pc_offset < end_pc) {
      // Inner (more deeply nested) try blocks come later and win.
      ASSERT(try_index < current_try_index);
      try_index = current_try_index;
    }
  }
  return try_index;
#endif
}
16988
16989uword Bytecode::GetFirstDebugCheckOpcodePc() const {
16990#if defined(DART_PRECOMPILED_RUNTIME)
16991 UNREACHABLE();
16992#else
16993 uword pc = PayloadStart();
16994 const uword end_pc = pc + Size();
16995 while (pc < end_pc) {
16996 if (KernelBytecode::IsDebugCheckOpcode(
16997 reinterpret_cast<const KBCInstr*>(pc))) {
16998 return pc;
16999 }
17000 pc = KernelBytecode::Next(pc);
17001 }
17002 return 0;
17003#endif
17004}
17005
// Scans the half-open payload range [from_offset, to_offset) for the first
// debug-checked opcode and returns the pc immediately after it (its
// 'return address'), or 0 if none is found. JIT only.
uword Bytecode::GetDebugCheckedOpcodeReturnAddress(uword from_offset,
                                                   uword to_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  uword pc = PayloadStart() + from_offset;
  const uword end_pc = pc + (to_offset - from_offset);
  while (pc < end_pc) {
    uword next_pc = KernelBytecode::Next(pc);
    if (KernelBytecode::IsDebugCheckedOpcode(
            reinterpret_cast<const KBCInstr*>(pc))) {
      // Return the pc after the opcode, i.e. its 'return address'.
      return next_pc;
    }
    pc = next_pc;
  }
  return 0;
#endif
}
17025
17026const char* Bytecode::ToCString() const {
17027 return Thread::Current()->zone()->PrintToString("Bytecode(%s)",
17028 QualifiedName());
17029}
17030
17031static const char* BytecodeStubName(const Bytecode& bytecode) {
17032 if (bytecode.raw() == Object::implicit_getter_bytecode().raw()) {
17033 return "[Bytecode Stub] VMInternal_ImplicitGetter";
17034 } else if (bytecode.raw() == Object::implicit_setter_bytecode().raw()) {
17035 return "[Bytecode Stub] VMInternal_ImplicitSetter";
17036 } else if (bytecode.raw() ==
17037 Object::implicit_static_getter_bytecode().raw()) {
17038 return "[Bytecode Stub] VMInternal_ImplicitStaticGetter";
17039 } else if (bytecode.raw() == Object::method_extractor_bytecode().raw()) {
17040 return "[Bytecode Stub] VMInternal_MethodExtractor";
17041 } else if (bytecode.raw() == Object::invoke_closure_bytecode().raw()) {
17042 return "[Bytecode Stub] VMInternal_InvokeClosure";
17043 } else if (bytecode.raw() == Object::invoke_field_bytecode().raw()) {
17044 return "[Bytecode Stub] VMInternal_InvokeField";
17045 }
17046 return "[unknown stub]";
17047}
17048
17049const char* Bytecode::Name() const {
17050 Zone* zone = Thread::Current()->zone();
17051 const Function& fun = Function::Handle(zone, function());
17052 if (fun.IsNull()) {
17053 return BytecodeStubName(*this);
17054 }
17055 const char* function_name =
17056 String::Handle(zone, fun.UserVisibleName()).ToCString();
17057 return zone->PrintToString("[Bytecode] %s", function_name);
17058}
17059
17060const char* Bytecode::QualifiedName() const {
17061 Zone* zone = Thread::Current()->zone();
17062 const Function& fun = Function::Handle(zone, function());
17063 if (fun.IsNull()) {
17064 return BytecodeStubName(*this);
17065 }
17066 const char* function_name =
17067 String::Handle(zone, fun.QualifiedScrubbedName()).ToCString();
17068 return zone->PrintToString("[Bytecode] %s", function_name);
17069}
17070
17071const char* Bytecode::FullyQualifiedName() const {
17072 Zone* zone = Thread::Current()->zone();
17073 const Function& fun = Function::Handle(zone, function());
17074 if (fun.IsNull()) {
17075 return BytecodeStubName(*this);
17076 }
17077 const char* function_name = fun.ToFullyQualifiedCString();
17078 return zone->PrintToString("[Bytecode] %s", function_name);
17079}
17080
// Heap-walk predicate: matches the Bytecode object whose payload contains
// the pc this visitor was constructed with.
bool Bytecode::SlowFindRawBytecodeVisitor::FindObject(ObjectPtr raw_obj) const {
  return BytecodeLayout::ContainsPC(raw_obj, pc_);
}
17084
17085BytecodePtr Bytecode::FindCode(uword pc) {
17086 Thread* thread = Thread::Current();
17087 HeapIterationScope heap_iteration_scope(thread);
17088 SlowFindRawBytecodeVisitor visitor(pc);
17089 ObjectPtr needle = thread->heap()->FindOldObject(&visitor);
17090 if (needle != Bytecode::null()) {
17091 return static_cast<BytecodePtr>(needle);
17092 }
17093 return Bytecode::null();
17094}
17095
// Returns the local variable descriptors for this bytecode, computing and
// caching them on first use. Debugger-only (JIT, non-product).
LocalVarDescriptorsPtr Bytecode::GetLocalVarDescriptors() const {
#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return LocalVarDescriptors::null();
#else
  Zone* zone = Thread::Current()->zone();
  auto& var_descs = LocalVarDescriptors::Handle(zone, var_descriptors());
  if (var_descs.IsNull()) {
    // Lazily compute from the kernel metadata and cache on this object.
    const auto& func = Function::Handle(zone, function());
    ASSERT(!func.IsNull());
    var_descs =
        kernel::BytecodeReader::ComputeLocalVarDescriptors(zone, func, *this);
    ASSERT(!var_descs.IsNull());
    set_var_descriptors(var_descs);
  }
  return var_descs.raw();
#endif
}
17114
17115intptr_t Context::GetLevel() const {
17116 intptr_t level = 0;
17117 Context& parent_ctx = Context::Handle(parent());
17118 while (!parent_ctx.IsNull()) {
17119 level++;
17120 parent_ctx = parent_ctx.parent();
17121 }
17122 return level;
17123}
17124
// Allocates a new Context with room for |num_variables| slots in |space|.
// Invalid lengths are a fatal error; length validation is expected to have
// happened at the call site.
ContextPtr Context::New(intptr_t num_variables, Heap::Space space) {
  ASSERT(num_variables >= 0);
  ASSERT(Object::context_class() != Class::null());

  if (!IsValidLength(num_variables)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in Context::New: invalid num_variables %" Pd "\n",
           num_variables);
  }
  Context& result = Context::Handle();
  {
    ObjectPtr raw = Object::Allocate(
        Context::kClassId, Context::InstanceSize(num_variables), space);
    // Keep the GC away until the length field is initialized.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.set_num_variables(num_variables);
  }
  return result.raw();
}
17144
17145const char* Context::ToCString() const {
17146 if (IsNull()) {
17147 return "Context: null";
17148 }
17149 Zone* zone = Thread::Current()->zone();
17150 const Context& parent_ctx = Context::Handle(parent());
17151 if (parent_ctx.IsNull()) {
17152 return zone->PrintToString("Context num_variables: %" Pd "",
17153 num_variables());
17154 } else {
17155 const char* parent_str = parent_ctx.ToCString();
17156 return zone->PrintToString("Context num_variables: %" Pd " parent:{ %s }",
17157 num_variables(), parent_str);
17158 }
17159}
17160
17161static void IndentN(int count) {
17162 for (int i = 0; i < count; i++) {
17163 THR_Print(" ");
17164 }
17165}
17166
// Debug-prints this context's variables (truncating long values) and then
// recursively dumps the parent chain, indenting each level by two spaces.
void Context::Dump(int indent) const {
  if (IsNull()) {
    IndentN(indent);
    THR_Print("Context@null\n");
    return;
  }

  IndentN(indent);
  THR_Print("Context vars(%" Pd ") {\n", num_variables());
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < num_variables(); i++) {
    IndentN(indent + 2);
    obj = At(i);
    const char* s = obj.ToCString();
    // Keep the dump readable by truncating long value descriptions.
    if (strlen(s) > 50) {
      THR_Print("[%" Pd "] = [first 50 chars:] %.50s...\n", i, s);
    } else {
      THR_Print("[%" Pd "] = %s\n", i, s);
    }
  }

  const Context& parent_ctx = Context::Handle(parent());
  if (!parent_ctx.IsNull()) {
    parent_ctx.Dump(indent + 2);
  }
  IndentN(indent);
  THR_Print("}\n");
}
17195
// Allocates a new ContextScope in old space with |num_variables| variable
// descriptors. |is_implicit| marks scopes created for implicit closures.
ContextScopePtr ContextScope::New(intptr_t num_variables, bool is_implicit) {
  ASSERT(Object::context_scope_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in ContextScope::New: invalid num_variables %" Pd "\n",
           num_variables);
  }
  intptr_t size = ContextScope::InstanceSize(num_variables);
  ContextScope& result = ContextScope::Handle();
  {
    ObjectPtr raw = Object::Allocate(ContextScope::kClassId, size, Heap::kOld);
    // Keep the GC away until the header fields are initialized.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.set_num_variables(num_variables);
    result.set_is_implicit(is_implicit);
  }
  return result.raw();
}
17214
// Returns the token position of the variable at |scope_index|.
TokenPosition ContextScope::TokenIndexAt(intptr_t scope_index) const {
  return TokenPosition(Smi::Value(VariableDescAddr(scope_index)->token_pos));
}
17218
// Sets the token position of the variable at |scope_index|.
void ContextScope::SetTokenIndexAt(intptr_t scope_index,
                                   TokenPosition token_pos) const {
  StoreSmi(&VariableDescAddr(scope_index)->token_pos,
           Smi::New(token_pos.value()));
}
17224
// Returns the declaration token position of the variable at |scope_index|.
TokenPosition ContextScope::DeclarationTokenIndexAt(
    intptr_t scope_index) const {
  return TokenPosition(
      Smi::Value(VariableDescAddr(scope_index)->declaration_token_pos));
}
17230
// Sets the declaration token position of the variable at |scope_index|.
void ContextScope::SetDeclarationTokenIndexAt(
    intptr_t scope_index,
    TokenPosition declaration_token_pos) const {
  StoreSmi(&VariableDescAddr(scope_index)->declaration_token_pos,
           Smi::New(declaration_token_pos.value()));
}
17237
// Returns the name of the variable at |scope_index|.
StringPtr ContextScope::NameAt(intptr_t scope_index) const {
  return VariableDescAddr(scope_index)->name;
}
17241
// Sets the name of the variable at |scope_index|.
void ContextScope::SetNameAt(intptr_t scope_index, const String& name) const {
  StorePointer(&(VariableDescAddr(scope_index)->name), name.raw());
}
17245
// Clears all flag bits of the variable at |scope_index|.
void ContextScope::ClearFlagsAt(intptr_t scope_index) const {
  StoreSmi(&(VariableDescAddr(scope_index)->flags), 0);
}
17249
// Tests whether any bit of |mask| is set in the variable's flags.
bool ContextScope::GetFlagAt(intptr_t scope_index, intptr_t mask) const {
  return (Smi::Value(VariableDescAddr(scope_index)->flags) & mask) != 0;
}
17253
// Sets (|value| true) or clears (|value| false) the |mask| bits in the
// variable's flags.
void ContextScope::SetFlagAt(intptr_t scope_index,
                             intptr_t mask,
                             bool value) const {
  intptr_t flags = Smi::Value(VariableDescAddr(scope_index)->flags);
  StoreSmi(&(VariableDescAddr(scope_index)->flags),
           Smi::New(value ? flags | mask : flags & ~mask));
}
17261
// Whether the variable at |scope_index| is declared final.
bool ContextScope::IsFinalAt(intptr_t scope_index) const {
  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsFinal);
}
17265
// Marks the variable at |scope_index| as final (or not).
void ContextScope::SetIsFinalAt(intptr_t scope_index, bool is_final) const {
  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsFinal, is_final);
}
17269
// Whether the variable at |scope_index| is declared late.
bool ContextScope::IsLateAt(intptr_t scope_index) const {
  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsLate);
}
17273
// Marks the variable at |scope_index| as late (or not).
void ContextScope::SetIsLateAt(intptr_t scope_index, bool is_late) const {
  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsLate, is_late);
}
17277
// Whether the variable at |scope_index| is a compile-time constant.
bool ContextScope::IsConstAt(intptr_t scope_index) const {
  return GetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsConst);
}
17281
// Marks the variable at |scope_index| as const (or not).
void ContextScope::SetIsConstAt(intptr_t scope_index, bool is_const) const {
  SetFlagAt(scope_index, ContextScopeLayout::VariableDesc::kIsConst, is_const);
}
17285
// Returns the late-initializer offset of the variable at |scope_index|.
intptr_t ContextScope::LateInitOffsetAt(intptr_t scope_index) const {
  return Smi::Value(VariableDescAddr(scope_index)->late_init_offset);
}
17289
// Sets the late-initializer offset of the variable at |scope_index|.
void ContextScope::SetLateInitOffsetAt(intptr_t scope_index,
                                       intptr_t late_init_offset) const {
  StoreSmi(&(VariableDescAddr(scope_index)->late_init_offset),
           Smi::New(late_init_offset));
}
17295
// Returns the static type of the (non-const) variable at |scope_index|.
// The type slot overlaps the const-value slot, hence the assertion.
AbstractTypePtr ContextScope::TypeAt(intptr_t scope_index) const {
  ASSERT(!IsConstAt(scope_index));
  return VariableDescAddr(scope_index)->type;
}
17300
// Sets the static type of the variable at |scope_index|.
void ContextScope::SetTypeAt(intptr_t scope_index,
                             const AbstractType& type) const {
  StorePointer(&(VariableDescAddr(scope_index)->type), type.raw());
}
17305
// Returns the constant value of the const variable at |scope_index|.
// Only valid for const variables (the slot overlaps the type slot).
InstancePtr ContextScope::ConstValueAt(intptr_t scope_index) const {
  ASSERT(IsConstAt(scope_index));
  return VariableDescAddr(scope_index)->value;
}
17310
// Sets the constant value of the const variable at |scope_index|.
void ContextScope::SetConstValueAt(intptr_t scope_index,
                                   const Instance& value) const {
  ASSERT(IsConstAt(scope_index));
  StorePointer(&(VariableDescAddr(scope_index)->value), value.raw());
}
17316
// Returns the context slot index of the variable at |scope_index|.
intptr_t ContextScope::ContextIndexAt(intptr_t scope_index) const {
  return Smi::Value(VariableDescAddr(scope_index)->context_index);
}
17320
// Sets the context slot index of the variable at |scope_index|.
void ContextScope::SetContextIndexAt(intptr_t scope_index,
                                     intptr_t context_index) const {
  StoreSmi(&(VariableDescAddr(scope_index)->context_index),
           Smi::New(context_index));
}
17326
// Returns the context nesting level of the variable at |scope_index|.
intptr_t ContextScope::ContextLevelAt(intptr_t scope_index) const {
  return Smi::Value(VariableDescAddr(scope_index)->context_level);
}
17330
// Sets the context nesting level of the variable at |scope_index|.
void ContextScope::SetContextLevelAt(intptr_t scope_index,
                                     intptr_t context_level) const {
  StoreSmi(&(VariableDescAddr(scope_index)->context_level),
           Smi::New(context_level));
}
17336
17337const char* ContextScope::ToCString() const {
17338 const char* prev_cstr = "ContextScope:";
17339 String& name = String::Handle();
17340 for (int i = 0; i < num_variables(); i++) {
17341 name = NameAt(i);
17342 const char* cname = name.ToCString();
17343 TokenPosition pos = TokenIndexAt(i);
17344 intptr_t idx = ContextIndexAt(i);
17345 intptr_t lvl = ContextLevelAt(i);
17346 char* chars =
17347 OS::SCreate(Thread::Current()->zone(),
17348 "%s\nvar %s token-pos %s ctx lvl %" Pd " index %" Pd "",
17349 prev_cstr, cname, pos.ToCString(), lvl, idx);
17350 prev_cstr = chars;
17351 }
17352 return prev_cstr;
17353}
17354
// Returns the backing array of (class id, target) entry pairs.
ArrayPtr MegamorphicCache::buckets() const {
  return raw_ptr()->buckets_;
}
17358
// Installs a new backing array of entry pairs.
void MegamorphicCache::set_buckets(const Array& buckets) const {
  StorePointer(&raw_ptr()->buckets_, buckets.raw());
}
17362
17363// Class IDs in the table are smi-tagged, so we use a smi-tagged mask
17364// and target class ID to avoid untagging (on each iteration of the
17365// test loop) in generated code.
// Returns the probe mask (capacity - 1); see the smi-tagging note above.
intptr_t MegamorphicCache::mask() const {
  return Smi::Value(raw_ptr()->mask_);
}
17369
// Sets the probe mask (stored smi-tagged for generated-code access).
void MegamorphicCache::set_mask(intptr_t mask) const {
  StoreSmi(&raw_ptr()->mask_, Smi::New(mask));
}
17373
// Returns the number of occupied entries in the cache.
intptr_t MegamorphicCache::filled_entry_count() const {
  return raw_ptr()->filled_entry_count_;
}
17377
// Sets the number of occupied entries in the cache.
void MegamorphicCache::set_filled_entry_count(intptr_t count) const {
  StoreNonPointer(&raw_ptr()->filled_entry_count_, count);
}
17381
// Allocates an empty MegamorphicCache in old space. Buckets, mask, target
// name, and arguments descriptor are left unset; used by the snapshot
// reader and similar callers that fill the fields in afterwards.
MegamorphicCachePtr MegamorphicCache::New() {
  MegamorphicCache& result = MegamorphicCache::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(MegamorphicCache::kClassId,
                         MegamorphicCache::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_filled_entry_count(0);
  return result.raw();
}
17394
// Allocates a fully initialized MegamorphicCache for calls to |target_name|
// with |arguments_descriptor|. Every bucket starts out as an empty entry
// (illegal class id sentinel, null handler).
MegamorphicCachePtr MegamorphicCache::New(const String& target_name,
                                          const Array& arguments_descriptor) {
  MegamorphicCache& result = MegamorphicCache::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(MegamorphicCache::kClassId,
                         MegamorphicCache::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  const intptr_t capacity = kInitialCapacity;
  const Array& buckets =
      Array::Handle(Array::New(kEntryLength * capacity, Heap::kOld));
  const Object& handler = Object::Handle();
  for (intptr_t i = 0; i < capacity; ++i) {
    SetEntry(buckets, i, smi_illegal_cid(), handler);
  }
  result.set_buckets(buckets);
  result.set_mask(capacity - 1);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  result.set_filled_entry_count(0);
  return result.raw();
}
17419
// Inserts a (class id -> target) entry, growing the cache first if needed.
// Serialized via the isolate-wide megamorphic mutex.
void MegamorphicCache::Insert(const Smi& class_id, const Object& target) const {
  SafepointMutexLocker ml(Isolate::Current()->megamorphic_mutex());
  EnsureCapacityLocked();
  InsertLocked(class_id, target);
}
17425
// Grows the bucket array (doubling capacity) when adding one more entry
// would exceed the load factor, then rehashes all existing entries into the
// new array. Caller must hold the megamorphic mutex.
void MegamorphicCache::EnsureCapacityLocked() const {
  ASSERT(Isolate::Current()->megamorphic_mutex()->IsOwnedByCurrentThread());
  intptr_t old_capacity = mask() + 1;
  double load_limit = kLoadFactor * static_cast<double>(old_capacity);
  if (static_cast<double>(filled_entry_count() + 1) > load_limit) {
    const Array& old_buckets = Array::Handle(buckets());
    intptr_t new_capacity = old_capacity * 2;
    const Array& new_buckets =
        Array::Handle(Array::New(kEntryLength * new_capacity));

    // Initialize every new bucket to the empty sentinel.
    auto& target = Object::Handle();
    for (intptr_t i = 0; i < new_capacity; ++i) {
      SetEntry(new_buckets, i, smi_illegal_cid(), target);
    }
    set_buckets(new_buckets);
    set_mask(new_capacity - 1);
    set_filled_entry_count(0);

    // Rehash the valid entries.
    Smi& class_id = Smi::Handle();
    for (intptr_t i = 0; i < old_capacity; ++i) {
      class_id ^= GetClassId(old_buckets, i);
      if (class_id.Value() != kIllegalCid) {
        target = GetTargetFunction(old_buckets, i);
        InsertLocked(class_id, target);
      }
    }
  }
}
17455
// Inserts a (class id -> target) entry using open addressing with linear
// probing from the hashed start index. Caller must hold the megamorphic
// mutex and must have ensured there is a free slot (see
// EnsureCapacityLocked), so the probe loop always finds one.
void MegamorphicCache::InsertLocked(const Smi& class_id,
                                    const Object& target) const {
  ASSERT(Isolate::Current()->megamorphic_mutex()->IsOwnedByCurrentThread());
  ASSERT(Thread::Current()->IsMutatorThread());
  ASSERT(static_cast<double>(filled_entry_count() + 1) <=
         (kLoadFactor * static_cast<double>(mask() + 1)));
  const Array& backing_array = Array::Handle(buckets());
  intptr_t id_mask = mask();
  intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
  intptr_t i = index;
  do {
    if (Smi::Value(Smi::RawCast(GetClassId(backing_array, i))) == kIllegalCid) {
      SetEntry(backing_array, i, class_id, target);
      set_filled_entry_count(filled_entry_count() + 1);
      return;
    }
    i = (i + 1) & id_mask;
  } while (i != index);
  // Guaranteed unreachable: capacity was ensured before insertion.
  UNREACHABLE();
}
17476
17477const char* MegamorphicCache::ToCString() const {
17478 const String& name = String::Handle(target_name());
17479 return OS::SCreate(Thread::Current()->zone(), "MegamorphicCache(%s)",
17480 name.ToCString());
17481}
17482
// Rewrites each Function entry in the buckets into a smi-encoded entry
// point address of that function's current code, for the bare-instructions
// AOT mode. Entries that are already smis or null are left untouched.
void MegamorphicCache::SwitchToBareInstructions() {
  NoSafepointScope no_safepoint_scope;

  intptr_t capacity = mask() + 1;
  for (intptr_t i = 0; i < capacity; ++i) {
    const intptr_t target_index = i * kEntryLength + kTargetFunctionIndex;
    ObjectPtr* slot = &Array::DataOf(buckets())[target_index];
    const intptr_t cid = (*slot)->GetClassIdMayBeSmi();
    if (cid == kFunctionCid) {
      CodePtr code = Function::CurrentCodeOf(Function::RawCast(*slot));
      *slot = Smi::FromAlignedAddress(Code::EntryPointOf(code));
    } else {
      ASSERT(cid == kSmiCid || cid == kNullCid);
    }
  }
}
17499
// VM startup: allocates the shared empty backing array (one entry group,
// all null) that every fresh SubtypeTestCache starts with.
void SubtypeTestCache::Init() {
  cached_array_ = Array::New(kTestEntryLength, Heap::kOld);
}
17503
17504void SubtypeTestCache::Cleanup() {
17505 cached_array_ = NULL;
17506}
17507
// Allocates a new, empty SubtypeTestCache sharing the canonical empty
// backing array (see Init()); the array is replaced on first AddCheck.
SubtypeTestCachePtr SubtypeTestCache::New() {
  ASSERT(Object::subtypetestcache_class() != Class::null());
  SubtypeTestCache& result = SubtypeTestCache::Handle();
  {
    // SubtypeTestCache objects are long living objects, allocate them in the
    // old generation.
    ObjectPtr raw =
        Object::Allocate(SubtypeTestCache::kClassId,
                         SubtypeTestCache::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_cache(Array::Handle(cached_array_));
  return result.raw();
}
17523
// Returns the backing array of cache entries.
ArrayPtr SubtypeTestCache::cache() const {
  // We rely on the fact that any loads from the array are dependent loads and
  // avoid the load-acquire barrier here.
  return raw_ptr()->cache_;
}
17529
// Publishes a new backing array to concurrently running readers.
void SubtypeTestCache::set_cache(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  StorePointer<ArrayPtr, std::memory_order_release>(&raw_ptr()->cache_,
                                                    value.raw());
}
17537
// Returns how many filled entry groups the cache currently holds.
intptr_t SubtypeTestCache::NumberOfChecks() const {
  NoSafepointScope no_safepoint;
  // Do not count the sentinel entry group at the end.
  return (Smi::Value(cache()->ptr()->length_) / kTestEntryLength) - 1;
}
17543
// Appends one test-result entry to the cache. The backing array is grown
// (copy-on-grow) and only published at the end via a store-release, so
// concurrent readers either see the old array or the fully written new one.
// Caller must hold the isolate group's subtype test cache mutex.
void SubtypeTestCache::AddCheck(
    const Object& instance_class_id_or_function,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments,
    const Bool& test_result) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());

  intptr_t old_num = NumberOfChecks();
  Array& data = Array::Handle(cache());
  intptr_t new_len = data.Length() + kTestEntryLength;
  data = Array::Grow(data, new_len);

  SubtypeTestCacheTable entries(data);
  auto entry = entries[old_num];
  // The previous sentinel slot becomes the new entry.
  ASSERT(entry.Get<kInstanceClassIdOrFunction>() == Object::null());
  entry.Set<kInstanceClassIdOrFunction>(instance_class_id_or_function);
  entry.Set<kInstanceTypeArguments>(instance_type_arguments);
  entry.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
  entry.Set<kFunctionTypeArguments>(function_type_arguments);
  entry.Set<kInstanceParentFunctionTypeArguments>(
      instance_parent_function_type_arguments);
  entry.Set<kInstanceDelayedFunctionTypeArguments>(
      instance_delayed_type_arguments);
  entry.Set<kTestResult>(test_result);

  // We let any concurrently running mutator thread now see the new entry (the
  // `set_cache()` uses a store-release barrier).
  set_cache(data);
}
17579
// Reads the entry at index |ix| into the supplied out-handles.
// Caller must hold the isolate group's subtype test cache mutex.
void SubtypeTestCache::GetCheck(
    intptr_t ix,
    Object* instance_class_id_or_function,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  ASSERT(Thread::Current()
             ->isolate_group()
             ->subtype_test_cache_mutex()
             ->IsOwnedByCurrentThread());

  Array& data = Array::Handle(cache());
  SubtypeTestCacheTable entries(data);
  auto entry = entries[ix];
  *instance_class_id_or_function = entry.Get<kInstanceClassIdOrFunction>();
  *instance_type_arguments = entry.Get<kInstanceTypeArguments>();
  *instantiator_type_arguments = entry.Get<kInstantiatorTypeArguments>();
  *function_type_arguments = entry.Get<kFunctionTypeArguments>();
  *instance_parent_function_type_arguments =
      entry.Get<kInstanceParentFunctionTypeArguments>();
  *instance_delayed_type_arguments =
      entry.Get<kInstanceDelayedFunctionTypeArguments>();
  // ^= performs the checked cast of the stored Object into a Bool handle.
  *test_result ^= entry.Get<kTestResult>();
}
17607
// Discards all entries by re-installing the shared empty backing array.
void SubtypeTestCache::Reset() const {
  set_cache(Array::Handle(cached_array_));
}
17611
// Static description; caches carry no identifying name of their own.
const char* SubtypeTestCache::ToCString() const {
  return "SubtypeTestCache";
}
17615
// Allocates a new LoadingUnit in old space in its initial state: no id
// assigned yet, not loaded, and no load in flight.
LoadingUnitPtr LoadingUnit::New() {
  ASSERT(Object::loadingunit_class() != Class::null());
  LoadingUnit& result = LoadingUnit::Handle();
  {
    // LoadingUnit objects are long living objects, allocate them in the
    // old generation.
    ObjectPtr raw = Object::Allocate(LoadingUnit::kClassId,
                                     LoadingUnit::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_id(kIllegalId);
  result.set_loaded(false);
  result.set_load_outstanding(false);
  return result.raw();
}
17632
// Returns the parent loading unit (null for the root unit).
LoadingUnitPtr LoadingUnit::parent() const {
  return raw_ptr()->parent_;
}
// Sets the parent loading unit.
void LoadingUnit::set_parent(const LoadingUnit& value) const {
  StorePointer(&raw_ptr()->parent_, value.raw());
}
17639
// Returns the array of objects this unit's snapshot is linked against.
ArrayPtr LoadingUnit::base_objects() const {
  return raw_ptr()->base_objects_;
}
// Sets the array of base objects for this unit.
void LoadingUnit::set_base_objects(const Array& value) const {
  StorePointer(&raw_ptr()->base_objects_, value.raw());
}
17646
// Static description; loading units carry no identifying name of their own.
const char* LoadingUnit::ToCString() const {
  return "LoadingUnit";
}
17650
// Starts an asynchronous load of this unit by invoking the embedder's
// deferred-load handler. Marks the load as outstanding first; CompleteLoad
// is expected to clear it.
ObjectPtr LoadingUnit::IssueLoad() const {
  ASSERT(!loaded());
  ASSERT(!load_outstanding());
  set_load_outstanding(true);
  return Isolate::Current()->CallDeferredLoadHandler(id());
}
17657
// Finishes an outstanding load: records success (null |error_message|) or
// failure, then notifies dart:core's _completeLoads so pending Dart futures
// for this unit resolve. |transient_error| indicates the failure may
// succeed if retried. Only an unwind error from the Dart call is tolerated
// (stashed as the sticky error); any other error is unexpected.
void LoadingUnit::CompleteLoad(const String& error_message,
                               bool transient_error) const {
  ASSERT(!loaded());
  ASSERT(load_outstanding());
  set_loaded(error_message.IsNull());
  set_load_outstanding(false);

  const Library& lib = Library::Handle(Library::CoreLibrary());
  const String& sel = String::Handle(String::New("_completeLoads"));
  const Function& func = Function::Handle(lib.LookupFunctionAllowPrivate(sel));
  ASSERT(!func.IsNull());
  const Array& args = Array::Handle(Array::New(3));
  args.SetAt(0, Smi::Handle(Smi::New(id())));
  args.SetAt(1, error_message);
  args.SetAt(2, Bool::Get(transient_error));
  const Object& result = Object::Handle(DartEntry::InvokeFunction(func, args));
  if (result.IsUnwindError()) {
    Thread::Current()->set_sticky_error(Error::Cast(result));
  } else if (result.IsError()) {
    UNREACHABLE();
  }
}
17680
// Error is an abstract class; only the null handle case is reachable.
const char* Error::ToErrorCString() const {
  if (IsNull()) {
    return "Error: null";
  }
  UNREACHABLE();
  return "Error";
}
17688
// Description for a null Error handle; subclasses override for real errors.
const char* Error::ToCString() const {
  if (IsNull()) {
    return "Error: null";
  }
  // Error is an abstract class. We should never reach here.
  UNREACHABLE();
  return "Error";
}
17697
// Allocates an ApiError with no message (used when constructing the error
// before its message is available).
ApiErrorPtr ApiError::New() {
  ASSERT(Object::api_error_class() != Class::null());
  ObjectPtr raw = Object::Allocate(ApiError::kClassId, ApiError::InstanceSize(),
                                   Heap::kOld);
  return static_cast<ApiErrorPtr>(raw);
}
17704
// Allocates an ApiError carrying |message| in |space|. With
// --print-stacktrace-at-api-error (non-product builds) also dumps the
// native stack to aid debugging embedder API misuse.
ApiErrorPtr ApiError::New(const String& message, Heap::Space space) {
#ifndef PRODUCT
  if (FLAG_print_stacktrace_at_api_error) {
    OS::PrintErr("ApiError: %s\n", message.ToCString());
    Profiler::DumpStackTrace(false /* for_crash */);
  }
#endif  // !PRODUCT

  ASSERT(Object::api_error_class() != Class::null());
  ApiError& result = ApiError::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(ApiError::kClassId, ApiError::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_message(message);
  return result.raw();
}
17724
// Sets the error message string.
void ApiError::set_message(const String& message) const {
  StorePointer(&raw_ptr()->message_, message.raw());
}
17728
17729const char* ApiError::ToErrorCString() const {
17730 const String& msg_str = String::Handle(message());
17731 return msg_str.ToCString();
17732}
17733
// Static class-name description; use ToErrorCString() for the message.
const char* ApiError::ToCString() const {
  return "ApiError";
}
17737
// Allocates a LanguageError with all fields unset (used when the error is
// constructed before its details are available).
LanguageErrorPtr LanguageError::New() {
  ASSERT(Object::language_error_class() != Class::null());
  ObjectPtr raw = Object::Allocate(LanguageError::kClassId,
                                   LanguageError::InstanceSize(), Heap::kOld);
  return static_cast<LanguageErrorPtr>(raw);
}
17744
// Creates a LanguageError whose message is printf-style formatted from
// 'format'/'args', recording the originating script location and any
// previous (chained) error. 'kind' selects the report category
// (e.g. error vs. warning) used later when formatting the full message.
LanguageErrorPtr LanguageError::NewFormattedV(const Error& prev_error,
                                              const Script& script,
                                              TokenPosition token_pos,
                                              bool report_after_token,
                                              Report::Kind kind,
                                              Heap::Space space,
                                              const char* format,
                                              va_list args) {
  ASSERT(Object::language_error_class() != Class::null());
  LanguageError& result = LanguageError::Handle();
  {
    ObjectPtr raw = Object::Allocate(LanguageError::kClassId,
                                     LanguageError::InstanceSize(), space);
    // Assign the raw pointer into a handle without an intervening safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_previous_error(prev_error);
  result.set_script(script);
  result.set_token_pos(token_pos);
  result.set_report_after_token(report_after_token);
  result.set_kind(kind);
  result.set_message(
      String::Handle(String::NewFormattedV(format, args, space)));
  return result.raw();
}
17770
// Varargs convenience wrapper around NewFormattedV.
LanguageErrorPtr LanguageError::NewFormatted(const Error& prev_error,
                                             const Script& script,
                                             TokenPosition token_pos,
                                             bool report_after_token,
                                             Report::Kind kind,
                                             Heap::Space space,
                                             const char* format,
                                             ...) {
  va_list args;
  va_start(args, format);
  LanguageErrorPtr result = LanguageError::NewFormattedV(
      prev_error, script, token_pos, report_after_token, kind, space, format,
      args);
  // 'result' is a raw pointer; forbid safepoints (and hence GC moves) until
  // it is returned.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
17788
// Creates a LanguageError from an already-formatted message; no script or
// token position is recorded, so FormatMessage will return the message
// verbatim.
LanguageErrorPtr LanguageError::New(const String& formatted_message,
                                    Report::Kind kind,
                                    Heap::Space space) {
  ASSERT(Object::language_error_class() != Class::null());
  LanguageError& result = LanguageError::Handle();
  {
    ObjectPtr raw = Object::Allocate(LanguageError::kClassId,
                                     LanguageError::InstanceSize(), space);
    // Assign the raw pointer into a handle without an intervening safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_formatted_message(formatted_message);
  result.set_kind(kind);
  return result.raw();
}
17804
// Links an earlier error so its message can be prepended (see
// FormatMessage). Uses the GC write barrier.
void LanguageError::set_previous_error(const Error& value) const {
  StorePointer(&raw_ptr()->previous_error_, value.raw());
}
17808
// Records the script in which the error occurred (barrier-protected store).
void LanguageError::set_script(const Script& value) const {
  StorePointer(&raw_ptr()->script_, value.raw());
}
17812
// Records the source position of the error. Classifying (synthetic)
// positions are not valid error locations.
void LanguageError::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
17817
// When true, the error snippet points just past the token rather than at it.
void LanguageError::set_report_after_token(bool value) {
  StoreNonPointer(&raw_ptr()->report_after_token_, value);
}
17821
// Stores the Report::Kind (narrowed to uint8_t) of this error.
void LanguageError::set_kind(uint8_t value) const {
  StoreNonPointer(&raw_ptr()->kind_, value);
}
17825
// Stores the raw (unformatted) message text (barrier-protected store).
void LanguageError::set_message(const String& value) const {
  StorePointer(&raw_ptr()->message_, value.raw());
}
17829
// Caches the fully formatted message computed by FormatMessage.
void LanguageError::set_formatted_message(const String& value) const {
  StorePointer(&raw_ptr()->formatted_message_, value.raw());
}
17833
// Returns the human-readable message, including the source snippet and any
// chained previous-error text. The result is computed lazily on first call
// and cached in formatted_message_.
StringPtr LanguageError::FormatMessage() const {
  // Fast path: already formatted and cached.
  if (formatted_message() != String::null()) {
    return formatted_message();
  }
  String& result = String::Handle(
      Report::PrependSnippet(kind(), Script::Handle(script()), token_pos(),
                             report_after_token(), String::Handle(message())));
  // Prepend previous error message.
  const Error& prev_error = Error::Handle(previous_error());
  if (!prev_error.IsNull()) {
    result = String::Concat(
        String::Handle(String::New(prev_error.ToErrorCString())), result);
  }
  set_formatted_message(result);
  return result.raw();
}
17850
// Returns the formatted message as a C string. Reload is disabled while
// formatting — presumably because formatting may run Dart/VM code that must
// not race with an isolate reload (NOTE(review): confirm against
// NoReloadScope's contract).
const char* LanguageError::ToErrorCString() const {
  Thread* thread = Thread::Current();
  NoReloadScope no_reload_scope(thread->isolate(), thread);
  const String& msg_str = String::Handle(FormatMessage());
  return msg_str.ToCString();
}
17857
17858const char* LanguageError::ToCString() const {
17859 return "LanguageError";
17860}
17861
// Creates an UnhandledException wrapping the thrown 'exception' object and
// its captured 'stacktrace'.
UnhandledExceptionPtr UnhandledException::New(const Instance& exception,
                                              const Instance& stacktrace,
                                              Heap::Space space) {
  ASSERT(Object::unhandled_exception_class() != Class::null());
  UnhandledException& result = UnhandledException::Handle();
  {
    ObjectPtr raw = Object::Allocate(UnhandledException::kClassId,
                                     UnhandledException::InstanceSize(), space);
    // Assign the raw pointer into a handle without an intervening safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_exception(exception);
  result.set_stacktrace(stacktrace);
  return result.raw();
}
17877
// Creates an UnhandledException with a null exception and a null stack
// trace; fields may be filled in by the caller.
UnhandledExceptionPtr UnhandledException::New(Heap::Space space) {
  ASSERT(Object::unhandled_exception_class() != Class::null());
  UnhandledException& result = UnhandledException::Handle();
  {
    ObjectPtr raw = Object::Allocate(UnhandledException::kClassId,
                                     UnhandledException::InstanceSize(), space);
    // Assign the raw pointer into a handle without an intervening safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_exception(Object::null_instance());
  result.set_stacktrace(StackTrace::Handle());
  return result.raw();
}
17891
// Stores the thrown exception object (barrier-protected store).
void UnhandledException::set_exception(const Instance& exception) const {
  StorePointer(&raw_ptr()->exception_, exception.raw());
}
17895
// Stores the captured stack trace (barrier-protected store).
void UnhandledException::set_stacktrace(const Instance& stacktrace) const {
  StorePointer(&raw_ptr()->stacktrace_, stacktrace.raw());
}
17899
// Renders "Unhandled exception:\n<exception>\n<stacktrace>". The
// preallocated out-of-memory and stack-overflow sentinels get fixed text,
// since calling back into Dart toString() is not possible or safe for them;
// all other exceptions (and the stack trace) are stringified by invoking
// Dart's toString, falling back to a placeholder if that itself errors.
const char* UnhandledException::ToErrorCString() const {
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  NoReloadScope no_reload_scope(isolate, thread);
  HANDLESCOPE(thread);
  Object& strtmp = Object::Handle();
  const char* exc_str;
  if (exception() == isolate->object_store()->out_of_memory()) {
    exc_str = "Out of Memory";
  } else if (exception() == isolate->object_store()->stack_overflow()) {
    exc_str = "Stack Overflow";
  } else {
    const Instance& exc = Instance::Handle(exception());
    strtmp = DartLibraryCalls::ToString(exc);
    if (!strtmp.IsError()) {
      exc_str = strtmp.ToCString();
    } else {
      exc_str = "<Received error while converting exception to string>";
    }
  }
  const Instance& stack = Instance::Handle(stacktrace());
  strtmp = DartLibraryCalls::ToString(stack);
  const char* stack_str =
      "<Received error while converting stack trace to string>";
  if (!strtmp.IsError()) {
    stack_str = strtmp.ToCString();
  }
  return OS::SCreate(thread->zone(), "Unhandled exception:\n%s\n%s", exc_str,
                     stack_str);
}
17930
17931const char* UnhandledException::ToCString() const {
17932 return "UnhandledException";
17933}
17934
// Creates an UnwindError with the given message. The user-initiated flag
// defaults to false; callers flip it via set_is_user_initiated.
UnwindErrorPtr UnwindError::New(const String& message, Heap::Space space) {
  ASSERT(Object::unwind_error_class() != Class::null());
  UnwindError& result = UnwindError::Handle();
  {
    ObjectPtr raw = Object::Allocate(UnwindError::kClassId,
                                     UnwindError::InstanceSize(), space);
    // Assign the raw pointer into a handle without an intervening safepoint.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_message(message);
  result.set_is_user_initiated(false);
  return result.raw();
}
17948
// Stores the unwind message (barrier-protected store).
void UnwindError::set_message(const String& message) const {
  StorePointer(&raw_ptr()->message_, message.raw());
}
17952
// Marks whether the unwind was requested by the embedder/user rather than
// generated internally by the VM.
void UnwindError::set_is_user_initiated(bool value) const {
  StoreNonPointer(&raw_ptr()->is_user_initiated_, value);
}
17956
// Returns the stored message text as a C string.
const char* UnwindError::ToErrorCString() const {
  const String& msg_str = String::Handle(message());
  return msg_str.ToCString();
}
17961
17962const char* UnwindError::ToCString() const {
17963 return "UnwindError";
17964}
17965
// Reflectively reads the property 'getter_name' on this instance:
// resolves "get:<name>" dynamically, optionally verifying entry-point
// pragmas, and falls back to closurizing a method of the same name when
// lazy dispatchers (method extractors) are disabled. Returns the getter's
// result, an implicit closure, or an Error object on failure.
ObjectPtr Instance::InvokeGetter(const String& getter_name,
                                 bool respect_reflectable,
                                 bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  // Instance type arguments are only present on generic classes.
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  const String& internal_getter_name =
      String::Handle(zone, Field::GetterName(getter_name));
  Function& function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_getter_name));

  if (!function.IsNull() && check_is_entrypoint) {
    // The getter must correspond to either an entry-point field or a getter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (function.kind() == FunctionLayout::kImplicitGetter) {
      field = function.accessor_field();
    }
    if (!field.IsNull()) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kGetterOnly));
    } else {
      CHECK_ERROR(function.VerifyCallEntryPoint());
    }
  }

  // Check for method extraction when method extractors are not created.
  if (function.IsNull() && !FLAG_lazy_dispatchers) {
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);

    if (!function.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(function.VerifyClosurizedEntryPoint());
    }

    if (!function.IsNull() && function.SafeToClosurize()) {
      // Tear off the method as an implicit instance closure.
      const Function& closure_function =
          Function::Handle(zone, function.ImplicitClosureFunction());
      return closure_function.ImplicitInstanceClosure(*this);
    }
  }

  // Getter call: receiver is the only argument; no type arguments.
  const int kTypeArgsLen = 0;
  const int kNumArgs = 1;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(*this, function, internal_getter_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}
18025
// Reflectively writes 'value' to the property 'setter_name' on this
// instance: resolves "set:<name>" dynamically, optionally verifying
// entry-point pragmas, then invokes the setter with (receiver, value).
// Returns the setter's result or an Error object on failure.
ObjectPtr Instance::InvokeSetter(const String& setter_name,
                                 const Instance& value,
                                 bool respect_reflectable,
                                 bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  // Instance type arguments are only present on generic classes.
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));
  const Function& setter = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_setter_name));

  if (check_is_entrypoint) {
    // The setter must correspond to either an entry-point field or a setter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (setter.kind() == FunctionLayout::kImplicitSetter) {
      field = setter.accessor_field();
    }
    if (!field.IsNull()) {
      CHECK_ERROR(field.VerifyEntryPoint(EntryPointPragma::kSetterOnly));
    } else if (!setter.IsNull()) {
      CHECK_ERROR(setter.VerifyCallEntryPoint());
    }
  }

  // Setter call: receiver plus the new value; no type arguments.
  const int kTypeArgsLen = 0;
  const int kNumArgs = 2;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  args.SetAt(1, value);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(*this, setter, internal_setter_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}
18072
// Reflectively invokes the method 'function_name' on this instance with the
// given positional 'args' and named-argument names 'arg_names'. If no method
// is found, falls back to the getter of the same name and call-invokes its
// result ("getter-call" protocol). 'args' must already contain the receiver
// at index 0. Returns the call result or an Error object on failure.
ObjectPtr Instance::Invoke(const String& function_name,
                           const Array& args,
                           const Array& arg_names,
                           bool respect_reflectable,
                           bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));

  Function& function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, function_name));

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));

  // Instance type arguments are only present on generic classes.
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          ? TypeArguments::Handle(zone, GetTypeArguments())
          : Object::null_type_arguments();

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const String& getter_name =
        String::Handle(zone, Field::GetterName(function_name));
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);
    if (!function.IsNull()) {
      if (check_is_entrypoint) {
        CHECK_ERROR(EntryPointFieldInvocationError(function_name));
      }
      ASSERT(function.kind() != FunctionLayout::kMethodExtractor);
      // Invoke the getter.
      const int kNumArgs = 1;
      const Array& getter_args = Array::Handle(zone, Array::New(kNumArgs));
      getter_args.SetAt(0, *this);
      const Array& getter_args_descriptor = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(
                    kTypeArgsLen, getter_args.Length(), Heap::kNew));
      const Object& getter_result = Object::Handle(
          zone, InvokeInstanceFunction(*this, function, getter_name,
                                       getter_args, getter_args_descriptor,
                                       respect_reflectable, inst_type_args));
      if (getter_result.IsError()) {
        return getter_result.raw();
      }
      // Replace the closure as the receiver in the arguments list.
      args.SetAt(0, getter_result);
      return InvokeCallableWithChecks(zone, args, args_descriptor);
    }
  }

  // Found an ordinary method.
  return InvokeInstanceFunction(*this, function, function_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}
18137
// Evaluates a kernel-compiled expression with this instance prepended as the
// receiver (argument 0). 'method_cls' supplies the library/class context in
// which the expression was compiled.
ObjectPtr Instance::EvaluateCompiledExpression(
    const Class& method_cls,
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  // Build [this, arguments...].
  const Array& arguments_with_receiver =
      Array::Handle(Array::New(1 + arguments.Length()));
  PassiveObject& param = PassiveObject::Handle();
  arguments_with_receiver.SetAt(0, *this);
  for (intptr_t i = 0; i < arguments.Length(); i++) {
    param = arguments.At(i);
    arguments_with_receiver.SetAt(i + 1, param);
  }

  return EvaluateCompiledExpressionHelper(
      kernel_buffer, type_definitions,
      String::Handle(Library::Handle(method_cls.library()).url()),
      String::Handle(method_cls.UserVisibleName()), arguments_with_receiver,
      type_arguments);
}
18159
// Computes the Dart-level hashCode by calling into the core library.
ObjectPtr Instance::HashCode() const {
  // TODO(koda): Optimize for all builtin classes and all classes
  // that do not override hashCode.
  return DartLibraryCalls::HashCode(*this);
}
18165
// Computes the identity hash (independent of any hashCode override) via the
// core library.
ObjectPtr Instance::IdentityHashCode() const {
  return DartLibraryCalls::IdentityHashCode(*this);
}
18169
// Structural equality used by canonicalization: identical raw pointers, or
// same class and bit-identical field words. Fields are assumed to already be
// canonical, so pointer comparison of each word suffices.
bool Instance::CanonicalizeEquals(const Instance& other) const {
  if (this->raw() == other.raw()) {
    return true;  // "===".
  }

  if (other.IsNull() || (this->clazz() != other.clazz())) {
    return false;
  }

  {
    // No safepoint: we are about to read raw object memory directly.
    NoSafepointScope no_safepoint;
    // Raw bits compare.
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const intptr_t other_instance_size = other.SizeFromClass();
    ASSERT(other_instance_size != 0);
    if (instance_size != other_instance_size) {
      return false;
    }
    uword this_addr = reinterpret_cast<uword>(this->raw_ptr());
    uword other_addr = reinterpret_cast<uword>(other.raw_ptr());
    // Compare all field words after the header.
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
         offset += kWordSize) {
      if ((*reinterpret_cast<ObjectPtr*>(this_addr + offset)) !=
          (*reinterpret_cast<ObjectPtr*>(other_addr + offset))) {
        return false;
      }
    }
  }
  return true;
}
18201
// Computes (and caches in the heap's canonical-hash table) a structural hash
// consistent with CanonicalizeEquals: combines the hashes of all fields,
// treating unboxed fields as raw 32-bit chunks.
uint32_t Instance::CanonicalizeHash() const {
  if (GetClassId() == kNullCid) {
    return 2011;  // Matches null_patch.dart.
  }
  Thread* thread = Thread::Current();
  // Fast path: hash previously computed and cached for this object.
  uint32_t hash = thread->heap()->GetCanonicalHash(raw());
  if (hash != 0) {
    return hash;
  }
  const Class& cls = Class::Handle(clazz());
  NoSafepointScope no_safepoint(thread);
  const intptr_t instance_size = SizeFromClass();
  ASSERT(instance_size != 0);
  // Seed with the word count so differently-sized instances differ.
  hash = instance_size / kWordSize;
  uword this_addr = reinterpret_cast<uword>(this->raw_ptr());
  Instance& member = Instance::Handle();

  // Bitmap telling which field words hold unboxed (non-pointer) data.
  const auto unboxed_fields_bitmap =
      thread->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
          GetClassId());

  for (intptr_t offset = Instance::NextFieldOffset();
       offset < cls.host_next_field_offset(); offset += kWordSize) {
    if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
      if (kWordSize == 8) {
        // Fold a 64-bit unboxed word as two 32-bit halves.
        hash = CombineHashes(hash,
                             *reinterpret_cast<uint32_t*>(this_addr + offset));
        hash = CombineHashes(
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset + 4));
      } else {
        hash = CombineHashes(hash,
                             *reinterpret_cast<uint32_t*>(this_addr + offset));
      }
    } else {
      // Pointer field: recurse into the member's canonical hash.
      member ^= *reinterpret_cast<ObjectPtr*>(this_addr + offset);
      hash = CombineHashes(hash, member.CanonicalizeHash());
    }
  }
  hash = FinalizeHash(hash, String::kHashBits);
  thread->heap()->SetCanonicalHash(raw(), hash);
  return hash;
}
18244
18245#if defined(DEBUG)
18246class CheckForPointers : public ObjectPointerVisitor {
18247 public:
18248 explicit CheckForPointers(IsolateGroup* isolate_group)
18249 : ObjectPointerVisitor(isolate_group), has_pointers_(false) {}
18250
18251 bool has_pointers() const { return has_pointers_; }
18252
18253 void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
18254 if (first != last) {
18255 has_pointers_ = true;
18256 }
18257 }
18258
18259 private:
18260 bool has_pointers_;
18261
18262 DISALLOW_COPY_AND_ASSIGN(CheckForPointers);
18263};
18264#endif // DEBUG
18265
// Ensures every pointer field of this instance refers to a canonical object:
// numbers and strings are canonicalized in place; any other non-canonical
// instance is an error (reported through *error_str, returning false).
// Instances of predefined cids are assumed to have no fields needing this
// treatment (asserted in DEBUG builds).
bool Instance::CheckAndCanonicalizeFields(Thread* thread,
                                          const char** error_str) const {
  ASSERT(error_str != NULL);
  ASSERT(*error_str == NULL);
  const intptr_t class_id = GetClassId();
  if (class_id >= kNumPredefinedCids) {
    // Iterate over all fields, canonicalize numbers and strings, expect all
    // other instances to be canonical otherwise report error (return false).
    Zone* zone = thread->zone();
    Object& obj = Object::Handle(zone);
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    // Skip unboxed (non-pointer) field words.
    const auto unboxed_fields_bitmap =
        thread->isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt(
            GetClassId());
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
         offset += kWordSize) {
      if (unboxed_fields_bitmap.Get(offset / kWordSize)) {
        continue;
      }
      obj = *this->FieldAddrAtOffset(offset);
      if (obj.IsInstance() && !obj.IsSmi() && !obj.IsCanonical()) {
        if (obj.IsNumber() || obj.IsString()) {
          obj = Instance::Cast(obj).CheckAndCanonicalize(thread, error_str);
          if (*error_str != NULL) {
            return false;
          }
          ASSERT(!obj.IsNull());
          this->SetFieldAtOffset(offset, obj);
        } else {
          // Non-canonicalizable field value: report which field/owner failed.
          char* chars = OS::SCreate(zone, "field: %s, owner: %s\n",
                                    obj.ToCString(), ToCString());
          *error_str = chars;
          return false;
        }
      }
    }
  } else {
#if defined(DEBUG)
    // Make sure that we are not missing any fields.
    CheckForPointers has_pointers(Isolate::Current()->group());
    this->raw()->ptr()->VisitPointers(&has_pointers);
    ASSERT(!has_pointers.has_pointers());
#endif  // DEBUG
  }
  return true;
}
18313
// Makes a shallow clone of this instance in old space. Note: the 'thread'
// parameter is currently unused by this implementation.
InstancePtr Instance::CopyShallowToOldSpace(Thread* thread) const {
  return Instance::RawCast(Object::Clone(*this, Heap::kOld));
}
18317
// Returns the canonical representative of this instance, canonicalizing its
// fields first. Looks up (and if necessary inserts) the value in the class'
// canonical-constant table under the canonicalization mutex. Returns null
// with *error_str set if a field cannot be canonicalized.
InstancePtr Instance::CheckAndCanonicalize(Thread* thread,
                                           const char** error_str) const {
  ASSERT(error_str != NULL);
  ASSERT(*error_str == NULL);
  ASSERT(!IsNull());
  // Fast path: already marked canonical.
  if (this->IsCanonical()) {
    return this->raw();
  }
  if (!CheckAndCanonicalizeFields(thread, error_str)) {
    return Instance::null();
  }
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  Instance& result = Instance::Handle(zone);
  const Class& cls = Class::Handle(zone, this->clazz());
  {
    // Serialize lookup+insert so two threads cannot canonicalize duplicates.
    SafepointMutexLocker ml(isolate->constant_canonicalization_mutex());
    result = cls.LookupCanonicalInstance(zone, *this);
    if (!result.IsNull()) {
      return result.raw();
    }
    if (IsNew()) {
      ASSERT((isolate == Dart::vm_isolate()) || !InVMIsolateHeap());
      // Create a canonical object in old space.
      result ^= Object::Clone(*this, Heap::kOld);
    } else {
      result = this->raw();
    }
    ASSERT(result.IsOld());
    result.SetCanonical();
    return cls.InsertCanonicalConstant(zone, result);
  }
}
18351
18352#if defined(DEBUG)
// Debug helper: true iff this instance is the entry stored in its class'
// canonical-constant table.
bool Instance::CheckIsCanonical(Thread* thread) const {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  Instance& result = Instance::Handle(zone);
  const Class& cls = Class::Handle(zone, this->clazz());
  SafepointMutexLocker ml(isolate->constant_canonicalization_mutex());
  result ^= cls.LookupCanonicalInstance(zone, *this);
  return (result.raw() == this->raw());
}
18362#endif // DEBUG
18363
// Reads 'field' from this instance. In precompiled (AOT) mode, fields chosen
// for unboxing store raw double/SIMD/int64 bits in the field slot, which
// must be re-boxed here; otherwise the slot holds an ordinary object pointer.
ObjectPtr Instance::GetField(const Field& field) const {
  if (FLAG_precompiled_mode && field.is_unboxing_candidate()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        return Double::New(*reinterpret_cast<double_t*>(FieldAddr(field)));
      case kFloat32x4Cid:
        return Float32x4::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      case kFloat64x2Cid:
        return Float64x2::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      default:
        if (field.is_non_nullable_integer()) {
          return Integer::New(*reinterpret_cast<int64_t*>(FieldAddr(field)));
        } else {
          UNREACHABLE();
          return nullptr;
        }
    }
  } else {
    return *FieldAddr(field);
  }
}
18387
// Writes 'value' into 'field' of this instance. In precompiled (AOT) mode,
// unboxed fields receive the raw double/SIMD/int64 bits (no write barrier
// needed); otherwise the store goes through the field guard (RecordStore)
// and a barrier-protected pointer store.
void Instance::SetField(const Field& field, const Object& value) const {
  if (FLAG_precompiled_mode && field.is_unboxing_candidate()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        StoreNonPointer(reinterpret_cast<double_t*>(FieldAddr(field)),
                        Double::Cast(value).value());
        break;
      case kFloat32x4Cid:
        StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
                        Float32x4::Cast(value).value());
        break;
      case kFloat64x2Cid:
        StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
                        Float64x2::Cast(value).value());
        break;
      default:
        if (field.is_non_nullable_integer()) {
          StoreNonPointer(reinterpret_cast<int64_t*>(FieldAddr(field)),
                          Integer::Cast(value).AsInt64Value());
        } else {
          UNREACHABLE();
        }
        break;
    }
  } else {
    field.RecordStore(value);
    const Object* stored_value = field.CloneForUnboxed(value);
    StorePointer(FieldAddr(field), stored_value->raw());
  }
}
18418
// Returns the runtime type of this instance as a finalized, canonical type:
// Null for the null instance, the instantiated signature type for closures,
// and otherwise a (possibly generic) Type built from the class and this
// instance's type arguments.
AbstractTypePtr Instance::GetType(Heap::Space space) const {
  if (IsNull()) {
    return Type::NullType();
  }
  const Class& cls = Class::Handle(clazz());
  if (!cls.is_finalized()) {
    // Various predefined classes can be instantiated by the VM or
    // Dart_NewString/Integer/TypedData/... before the class is finalized.
    ASSERT(cls.is_prefinalized());
    cls.EnsureDeclarationLoaded();
  }
  if (cls.IsClosureClass()) {
    // A closure's type is its instantiated function signature type.
    Function& signature =
        Function::Handle(Closure::Cast(*this).GetInstantiatedSignature(
            Thread::Current()->zone()));
    Type& type = Type::Handle(signature.SignatureType());
    if (!type.IsFinalized()) {
      type.SetIsFinalized();
    }
    type ^= type.Canonicalize();
    return type.raw();
  }
  Type& type = Type::Handle();
  if (!cls.IsGeneric()) {
    // Non-generic classes can reuse the cached declaration type.
    type = cls.DeclarationType();
  }
  if (type.IsNull()) {
    TypeArguments& type_arguments = TypeArguments::Handle();
    if (cls.NumTypeArguments() > 0) {
      type_arguments = GetTypeArguments();
    }
    type = Type::New(cls, type_arguments, TokenPosition::kNoSource,
                     Nullability::kNonNullable, space);
    type.SetIsFinalized();
    type ^= type.Canonicalize();
  }
  return type.raw();
}
18457
// Reads this instance's type-argument vector from the class-specified field
// offset. Only valid for classes that declare a type-arguments field.
TypeArgumentsPtr Instance::GetTypeArguments() const {
  ASSERT(!IsType());
  const Class& cls = Class::Handle(clazz());
  intptr_t field_offset = cls.host_type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  TypeArguments& type_arguments = TypeArguments::Handle();
  type_arguments ^= *FieldAddrAtOffset(field_offset);
  return type_arguments.raw();
}
18467
// Stores this instance's type-argument vector; the vector must be null or
// already canonical. Only valid for classes with a type-arguments field.
void Instance::SetTypeArguments(const TypeArguments& value) const {
  ASSERT(!IsType());
  ASSERT(value.IsNull() || value.IsCanonical());
  const Class& cls = Class::Handle(clazz());
  intptr_t field_offset = cls.host_type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  SetFieldAtOffset(field_offset, value);
}
18476
18477/*
18478Specification of instance checks (e is T) and casts (e as T), where e evaluates
18479to a value v and v has runtime type S:
18480
18481Instance checks (e is T) in weak checking mode in a legacy or opted-in library:
18482 If v == null and T is a legacy type
18483 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
18484 If v == null and T is not a legacy type, return NNBD_SUBTYPE(Null, T)
18485 Otherwise return LEGACY_SUBTYPE(S, T)
18486
18487Instance checks (e is T) in strong checking mode in a legacy or opted-in lib:
18488 If v == null and T is a legacy type
18489 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
18490 Otherwise return NNBD_SUBTYPE(S, T)
18491
18492Casts (e as T) in weak checking mode in a legacy or opted-in library:
18493 If LEGACY_SUBTYPE(S, T) then e as T evaluates to v.
18494 Otherwise a CastError is thrown.
18495
18496Casts (e as T) in strong checking mode in a legacy or opted-in library:
18497 If NNBD_SUBTYPE(S, T) then e as T evaluates to v.
18498 Otherwise a CastError is thrown.
18499*/
18500
// Implements "e is T" (see the specification comment above): delegates to
// NullIsInstanceOf for the null receiver, otherwise to the mode-appropriate
// runtime-type subtype check.
bool Instance::IsInstanceOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  if (IsNull()) {
    return Instance::NullIsInstanceOf(other, other_instantiator_type_arguments,
                                      other_function_type_arguments);
  }
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}
18515
// Implements "e as T" (see the specification comment above). The null
// receiver only reaches here under null safety; weak-mode null casts are
// handled in inlined code before this point.
bool Instance::IsAssignableTo(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  // In weak mode type casts, whether in legacy or opted-in libraries, the null
  // instance is detected and handled in inlined code and therefore cannot be
  // encountered here as a Dart null receiver.
  ASSERT(Isolate::Current()->null_safety() || !IsNull());
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}
18530
18531// If 'other' type (once instantiated) is a legacy type:
18532// return LEGACY_SUBTYPE(other, Null) || LEGACY_SUBTYPE(Object, other).
18533// Otherwise return NNBD_SUBTYPE(Null, T).
18534// Ignore value of strong flag value.
// Decides "null is T": true for nullable types (including top types) and for
// FutureOr whose type argument accepts null; type parameters are
// instantiated first; otherwise only legacy Object*/Never* accept null.
bool Instance::NullIsInstanceOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) {
  ASSERT(other.IsFinalized());
  ASSERT(!other.IsTypeRef());  // Must be dereferenced at compile time.
  if (other.IsNullable()) {
    // This case includes top types (void, dynamic, Object?).
    // The uninstantiated nullable type will remain nullable after
    // instantiation.
    return true;
  }
  if (other.IsFutureOrType()) {
    // FutureOr<T> accepts null iff T does.
    const auto& type = AbstractType::Handle(other.UnwrapFutureOr());
    return NullIsInstanceOf(type, other_instantiator_type_arguments,
                            other_function_type_arguments);
  }
  // No need to instantiate type, unless it is a type parameter.
  // Note that a typeref cannot refer to a type parameter.
  if (other.IsTypeParameter()) {
    auto& type = AbstractType::Handle(other.InstantiateFrom(
        other_instantiator_type_arguments, other_function_type_arguments,
        kAllFree, Heap::kOld));
    if (type.IsTypeRef()) {
      type = TypeRef::Cast(type).type();
    }
    // Retry with the instantiated type; no further free variables remain.
    return Instance::NullIsInstanceOf(type, Object::null_type_arguments(),
                                      Object::null_type_arguments());
  }
  return other.IsLegacy() && (other.IsObjectType() || other.IsNeverType());
}
18566
// Decides whether null may be assigned to a location of type 'other'.
// Always true in weak mode; under null safety, true for legacy or nullable
// destination types and for FutureOr thereof ("Left Null" rule).
bool Instance::NullIsAssignableTo(const AbstractType& other) {
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();

  // In weak mode, Null is a bottom type (according to LEGACY_SUBTYPE).
  if (!isolate->null_safety()) {
    return true;
  }
  // "Left Null" rule: null is assignable when destination type is either
  // legacy or nullable. Otherwise it is not assignable or we cannot tell
  // without instantiating type parameter.
  if (other.IsLegacy() || other.IsNullable()) {
    return true;
  }
  if (other.IsFutureOrType()) {
    // FutureOr<T> accepts null iff T does.
    return NullIsAssignableTo(
        AbstractType::Handle(zone, other.UnwrapFutureOr()));
  }
  return false;
}
18588
// Returns whether the runtime type of this instance is a subtype of [other].
// If [other] is not yet instantiated, it is first instantiated using the
// given instantiator/function type argument vectors (with all remaining free
// type variables treated as instantiated, kAllFree).
bool Instance::RuntimeTypeIsSubtypeOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(other.IsFinalized());
  ASSERT(!other.IsTypeRef());  // Must be dereferenced at compile time.
  ASSERT(raw() != Object::sentinel().raw());
  // Instance may not have runtimeType dynamic, void, or Never.
  if (other.IsTopTypeForSubtyping()) {
    return true;
  }
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  // In weak testing mode, Null type is a subtype of any type.
  if (IsNull() && !isolate->null_safety()) {
    return true;
  }
  const Class& cls = Class::Handle(zone, clazz());
  // Closures require a function-type subtype check on the (instantiated)
  // closure signature rather than a class-based check.
  if (cls.IsClosureClass()) {
    if (other.IsDartFunctionType() || other.IsDartClosureType() ||
        other.IsObjectType()) {
      return true;
    }
    AbstractType& instantiated_other = AbstractType::Handle(zone, other.raw());
    if (!other.IsInstantiated()) {
      instantiated_other = other.InstantiateFrom(
          other_instantiator_type_arguments, other_function_type_arguments,
          kAllFree, Heap::kOld);
      // Instantiation may yield a TypeRef; dereference it before testing.
      if (instantiated_other.IsTypeRef()) {
        instantiated_other = TypeRef::Cast(instantiated_other).type();
      }
      if (instantiated_other.IsTopTypeForSubtyping() ||
          instantiated_other.IsDartFunctionType()) {
        return true;
      }
    }
    // FutureOr<T> on the right is handled by its own set of rules.
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    if (!instantiated_other.IsFunctionType()) {
      return false;
    }
    Function& other_signature =
        Function::Handle(zone, Type::Cast(instantiated_other).signature());
    const Function& sig_fun =
        Function::Handle(Closure::Cast(*this).GetInstantiatedSignature(zone));
    return sig_fun.IsSubtypeOf(other_signature, Heap::kOld);
  }
  TypeArguments& type_arguments = TypeArguments::Handle(zone);
  if (cls.NumTypeArguments() > 0) {
    type_arguments = GetTypeArguments();
    ASSERT(type_arguments.IsNull() || type_arguments.IsCanonical());
    // The number of type arguments in the instance must be greater or equal to
    // the number of type arguments expected by the instance class.
    // A discrepancy is allowed for closures, which borrow the type argument
    // vector of their instantiator, which may be of a subclass of the class
    // defining the closure. Truncating the vector to the correct length on
    // instantiation is unnecessary. The vector may therefore be longer.
    // Also, an optimization reuses the type argument vector of the instantiator
    // of generic instances when its layout is compatible.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.Length() >= cls.NumTypeArguments()));
  }
  AbstractType& instantiated_other = AbstractType::Handle(zone, other.raw());
  if (!other.IsInstantiated()) {
    instantiated_other = other.InstantiateFrom(
        other_instantiator_type_arguments, other_function_type_arguments,
        kAllFree, Heap::kOld);
    // Instantiation may yield a TypeRef; dereference it before testing.
    if (instantiated_other.IsTypeRef()) {
      instantiated_other = TypeRef::Cast(instantiated_other).type();
    }
    if (instantiated_other.IsTopTypeForSubtyping()) {
      return true;
    }
  }
  if (!instantiated_other.IsType()) {
    return false;
  }
  // In strong mode (weak mode returned early above), null only conforms to
  // Null, FutureOr-wrappings thereof, and nullable/legacy types.
  if (IsNull()) {
    ASSERT(isolate->null_safety());
    if (instantiated_other.IsNullType()) {
      return true;
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    return !instantiated_other.IsNonNullable();
  }
  // RuntimeType of non-null instance is non-nullable, so there is no need to
  // check nullability of other type.
  return Class::IsSubtypeOf(cls, type_arguments, Nullability::kNonNullable,
                            instantiated_other, Heap::kOld);
}
18683
// Applies the FutureOr subtyping rules when [other] is FutureOr<T>:
// the runtime type of this instance is a subtype of FutureOr<T> when
// (a) T is a top type, (b) this is a Future<S> with S <: T, or
// (c) the runtime type is a subtype of T itself.
// Returns false if [other] is not a FutureOr type.
bool Instance::RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
                                              const AbstractType& other) const {
  if (other.IsFutureOrType()) {
    const TypeArguments& other_type_arguments =
        TypeArguments::Handle(zone, other.arguments());
    const AbstractType& other_type_arg =
        AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
    // FutureOr<top> accepts everything.
    if (other_type_arg.IsTopTypeForSubtyping()) {
      return true;
    }
    // If this is a Future<S>, check S against T.
    if (Class::Handle(zone, clazz()).IsFutureClass()) {
      const TypeArguments& type_arguments =
          TypeArguments::Handle(zone, GetTypeArguments());
      const AbstractType& type_arg =
          AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
      if (type_arg.IsSubtypeOf(other_type_arg, Heap::kOld)) {
        return true;
      }
    }
    // Retry RuntimeTypeIsSubtypeOf after unwrapping type arg of FutureOr.
    if (RuntimeTypeIsSubtypeOf(other_type_arg, Object::null_type_arguments(),
                               Object::null_type_arguments())) {
      return true;
    }
  }
  return false;
}
18711
18712bool Instance::OperatorEquals(const Instance& other) const {
18713 // TODO(koda): Optimize for all builtin classes and all classes
18714 // that do not override operator==.
18715 return DartLibraryCalls::Equals(*this, other) == Object::bool_true().raw();
18716}
18717
18718bool Instance::IsIdenticalTo(const Instance& other) const {
18719 if (raw() == other.raw()) return true;
18720 if (IsInteger() && other.IsInteger()) {
18721 return Integer::Cast(*this).Equals(other);
18722 }
18723 if (IsDouble() && other.IsDouble()) {
18724 double other_value = Double::Cast(other).value();
18725 return Double::Cast(*this).BitwiseEqualsToDouble(other_value);
18726 }
18727 return false;
18728}
18729
18730intptr_t* Instance::NativeFieldsDataAddr() const {
18731 ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
18732 TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
18733 if (native_fields == TypedData::null()) {
18734 return NULL;
18735 }
18736 return reinterpret_cast<intptr_t*>(native_fields->ptr()->data());
18737}
18738
// Stores [value] into native field [index], lazily allocating the backing
// TypedData storage (one intptr_t slot per declared native field) on first
// use. StorePointer is used so the GC write barrier sees the new storage.
void Instance::SetNativeField(int index, intptr_t value) const {
  ASSERT(IsValidNativeIndex(index));
  Object& native_fields = Object::Handle(*NativeFieldsAddr());
  if (native_fields.IsNull()) {
    // Allocate backing storage for the native fields.
    native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
    StorePointer(NativeFieldsAddr(), native_fields.raw());
  }
  intptr_t byte_offset = index * sizeof(intptr_t);
  TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
}
18750
18751void Instance::SetNativeFields(uint16_t num_native_fields,
18752 const intptr_t* field_values) const {
18753 ASSERT(num_native_fields == NumNativeFields());
18754 ASSERT(field_values != NULL);
18755 Object& native_fields = Object::Handle(*NativeFieldsAddr());
18756 if (native_fields.IsNull()) {
18757 // Allocate backing storage for the native fields.
18758 native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
18759 StorePointer(NativeFieldsAddr(), native_fields.raw());
18760 }
18761 for (uint16_t i = 0; i < num_native_fields; i++) {
18762 intptr_t byte_offset = i * sizeof(intptr_t);
18763 TypedData::Cast(native_fields).SetIntPtr(byte_offset, field_values[i]);
18764 }
18765}
18766
18767bool Instance::IsCallable(Function* function) const {
18768 Class& cls = Class::Handle(clazz());
18769 if (cls.IsClosureClass()) {
18770 if (function != nullptr) {
18771 *function = Closure::Cast(*this).function();
18772 }
18773 return true;
18774 }
18775 // Try to resolve a "call" method.
18776 Zone* zone = Thread::Current()->zone();
18777 Function& call_function = Function::Handle(
18778 zone, Resolver::ResolveDynamicAnyArgs(zone, cls, Symbols::Call(),
18779 /*allow_add=*/false));
18780 if (call_function.IsNull()) {
18781 return false;
18782 }
18783 if (function != nullptr) {
18784 *function = call_function.raw();
18785 }
18786 return true;
18787}
18788
18789InstancePtr Instance::New(const Class& cls, Heap::Space space) {
18790 Thread* thread = Thread::Current();
18791 if (cls.EnsureIsFinalized(thread) != Error::null()) {
18792 return Instance::null();
18793 }
18794 intptr_t instance_size = cls.host_instance_size();
18795 ASSERT(instance_size > 0);
18796 ObjectPtr raw = Object::Allocate(cls.id(), instance_size, space);
18797 return static_cast<InstancePtr>(raw);
18798}
18799
18800InstancePtr Instance::NewFromCidAndSize(SharedClassTable* shared_class_table,
18801 classid_t cid,
18802 Heap::Space heap) {
18803 const intptr_t instance_size = shared_class_table->SizeAt(cid);
18804 ASSERT(instance_size > 0);
18805 ObjectPtr raw = Object::Allocate(cid, instance_size, heap);
18806 return static_cast<InstancePtr>(raw);
18807}
18808
// Returns whether [offset] denotes a word-aligned slot inside this
// instance's object layout. Uses the thread's reusable Class handle to
// avoid a Zone allocation.
bool Instance::IsValidFieldOffset(intptr_t offset) const {
  Thread* thread = Thread::Current();
  REUSABLE_CLASS_HANDLESCOPE(thread);
  Class& cls = thread->ClassHandle();
  cls = clazz();
  return (offset >= 0 && offset <= (cls.host_instance_size() - kWordSize));
}
18816
18817intptr_t Instance::ElementSizeFor(intptr_t cid) {
18818 if (IsExternalTypedDataClassId(cid) || IsTypedDataClassId(cid) ||
18819 IsTypedDataViewClassId(cid)) {
18820 return TypedDataBase::ElementSizeInBytes(cid);
18821 }
18822 switch (cid) {
18823 case kArrayCid:
18824 case kImmutableArrayCid:
18825 return Array::kBytesPerElement;
18826 case kOneByteStringCid:
18827 return OneByteString::kBytesPerElement;
18828 case kTwoByteStringCid:
18829 return TwoByteString::kBytesPerElement;
18830 case kExternalOneByteStringCid:
18831 return ExternalOneByteString::kBytesPerElement;
18832 case kExternalTwoByteStringCid:
18833 return ExternalTwoByteString::kBytesPerElement;
18834 default:
18835 UNIMPLEMENTED();
18836 return 0;
18837 }
18838}
18839
18840intptr_t Instance::DataOffsetFor(intptr_t cid) {
18841 if (IsExternalTypedDataClassId(cid) || IsExternalStringClassId(cid)) {
18842 // Elements start at offset 0 of the external data.
18843 return 0;
18844 }
18845 if (IsTypedDataClassId(cid)) {
18846 return TypedData::data_offset();
18847 }
18848 switch (cid) {
18849 case kArrayCid:
18850 case kImmutableArrayCid:
18851 return Array::data_offset();
18852 case kOneByteStringCid:
18853 return OneByteString::data_offset();
18854 case kTwoByteStringCid:
18855 return TwoByteString::data_offset();
18856 default:
18857 UNIMPLEMENTED();
18858 return Array::data_offset();
18859 }
18860}
18861
// Returns a zone-allocated, human-readable description of this instance.
// Special VM sentinel values get fixed strings; closures print themselves;
// everything else prints as "Instance of '<type>'".
const char* Instance::ToCString() const {
  if (IsNull()) {
    return "null";
  } else if (raw() == Object::sentinel().raw()) {
    return "sentinel";
  } else if (raw() == Object::transition_sentinel().raw()) {
    return "transition_sentinel";
  } else if (raw() == Object::unknown_constant().raw()) {
    return "unknown_constant";
  } else if (raw() == Object::non_constant().raw()) {
    return "non_constant";
  } else if (Thread::Current()->no_safepoint_scope_depth() > 0) {
    // Can occur when running disassembler.
    // GetType() below may allocate, which is forbidden here, so fall back to
    // a generic label.
    return "Instance";
  } else {
    if (IsClosure()) {
      return Closure::Cast(*this).ToCString();
    }
    // Background compiler disassembly of instructions referring to pool objects
    // calls this function and requires allocation of Type in old space.
    const AbstractType& type = AbstractType::Handle(GetType(Heap::kOld));
    const String& type_name = String::Handle(type.UserVisibleName());
    return OS::SCreate(Thread::Current()->zone(), "Instance of '%s'",
                       type_name.ToCString());
  }
}
18888
// The following accessors are abstract-base stubs: AbstractType itself is
// never instantiated, and concrete subtypes (Type, TypeRef, TypeParameter)
// provide the real implementations. Each stub aborts via UNREACHABLE() and
// returns a dummy value only to satisfy the compiler.
classid_t AbstractType::type_class_id() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return kIllegalCid;
}

ClassPtr AbstractType::type_class() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return Class::null();
}

TypeArgumentsPtr AbstractType::arguments() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return NULL;
}

void AbstractType::set_arguments(const TypeArguments& value) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
}

TokenPosition AbstractType::token_pos() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return TokenPosition::kNoSource;
}

Nullability AbstractType::nullability() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return Nullability::kNullable;
}
18923
18924bool AbstractType::IsStrictlyNonNullable() const {
18925 // Null can be assigned to legacy and nullable types.
18926 if (!IsNonNullable()) {
18927 return false;
18928 }
18929
18930 Thread* thread = Thread::Current();
18931 Zone* zone = thread->zone();
18932
18933 // In weak mode null can be assigned to any type.
18934 if (!thread->isolate()->null_safety()) {
18935 return false;
18936 }
18937
18938 if (IsTypeParameter()) {
18939 const auto& bound =
18940 AbstractType::Handle(zone, TypeParameter::Cast(*this).bound());
18941 ASSERT(!bound.IsNull());
18942 return bound.IsStrictlyNonNullable();
18943 }
18944 if (IsFutureOrType()) {
18945 return AbstractType::Handle(zone, UnwrapFutureOr()).IsStrictlyNonNullable();
18946 }
18947 return true;
18948}
18949
// Returns this type (the instantiation result 'arg') with its nullability
// combined with that of the type parameter 'var' it was substituted for,
// per the table below. Returns raw() unchanged when no adjustment is needed.
AbstractTypePtr AbstractType::SetInstantiatedNullability(
    const TypeParameter& type_param,
    Heap::Space space) const {
  Nullability result_nullability;
  const Nullability arg_nullability = nullability();
  const Nullability var_nullability = type_param.nullability();
  // Adjust nullability of result 'arg' instantiated from 'var'.
  // arg/var ! ? *
  //  !      ! ? *
  //  ?      ? ? ?
  //  *      * ? *
  if (var_nullability == Nullability::kNullable ||
      arg_nullability == Nullability::kNullable) {
    result_nullability = Nullability::kNullable;
  } else if (var_nullability == Nullability::kLegacy ||
             arg_nullability == Nullability::kLegacy) {
    result_nullability = Nullability::kLegacy;
  } else {
    // Keep arg nullability.
    return raw();
  }
  if (arg_nullability == result_nullability) {
    return raw();
  }
  if (IsType()) {
    return Type::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsTypeParameter()) {
    return TypeParameter::Cast(*this).ToNullability(result_nullability, space);
  }
  // TODO(regis): TypeRefs are problematic, since changing the nullability of
  // a type by cloning it may break the graph of a recursive type.
  ASSERT(IsTypeRef());
  // Delegate to the referenced type instead of cloning the TypeRef.
  return AbstractType::Handle(TypeRef::Cast(*this).type())
      .SetInstantiatedNullability(type_param, space);
}
18986
// Applies the NNBD normalization rules for FutureOr:
//   FutureOr<dynamic|void>   -> dynamic|void
//   FutureOr<Object...>      -> Object with combined nullability
//   FutureOr<Never>          -> Future<Never>
//   FutureOr<Null>           -> Future<Null>?
//   FutureOr<T?>?            -> FutureOr<T?> (drop redundant outer '?')
// Non-FutureOr types (and unmatched cases) are returned unchanged.
AbstractTypePtr AbstractType::NormalizeFutureOrType(Heap::Space space) const {
  if (IsFutureOrType()) {
    Zone* zone = Thread::Current()->zone();
    const AbstractType& unwrapped_type =
        AbstractType::Handle(zone, UnwrapFutureOr());
    const classid_t cid = unwrapped_type.type_class_id();
    if (cid == kDynamicCid || cid == kVoidCid) {
      return unwrapped_type.raw();
    }
    if (cid == kInstanceCid) {
      // FutureOr<Object...>: the result is Object whose nullability combines
      // the FutureOr's and the argument's.
      if (IsNonNullable()) {
        return unwrapped_type.raw();
      }
      if (IsNullable() || unwrapped_type.IsNullable()) {
        return Type::Cast(unwrapped_type)
            .ToNullability(Nullability::kNullable, space);
      }
      return Type::Cast(unwrapped_type)
          .ToNullability(Nullability::kLegacy, space);
    }
    if (cid == kNeverCid && unwrapped_type.IsNonNullable()) {
      // FutureOr<Never> normalizes to Future<Never> (canonical, cached).
      ObjectStore* object_store = Isolate::Current()->object_store();
      const Type& future_never_type =
          Type::Handle(zone, object_store->non_nullable_future_never_type());
      ASSERT(!future_never_type.IsNull());
      return future_never_type.ToNullability(nullability(), space);
    }
    if (cid == kNullCid) {
      // FutureOr<Null> normalizes to Future<Null>? (canonical, cached).
      ObjectStore* object_store = Isolate::Current()->object_store();
      ASSERT(object_store->nullable_future_null_type() != Type::null());
      return object_store->nullable_future_null_type();
    }
    if (IsNullable() && unwrapped_type.IsNullable()) {
      // The argument is already nullable; the outer '?' is redundant.
      return Type::Cast(*this).ToNullability(Nullability::kNonNullable, space);
    }
  }
  return raw();
}
19025
// More abstract-base stubs (see the accessor stubs above): concrete
// subclasses override these; reaching any of them indicates a VM bug.
bool AbstractType::IsInstantiated(Genericity genericity,
                                  intptr_t num_free_fun_type_params,
                                  TrailPtr trail) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

bool AbstractType::IsFinalized() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

void AbstractType::SetIsFinalized() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
}

bool AbstractType::IsBeingFinalized() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

void AbstractType::SetIsBeingFinalized() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
}

bool AbstractType::IsEquivalent(const Instance& other,
                                TypeEquality kind,
                                TrailPtr trail) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

bool AbstractType::IsRecursive() const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return false;
}

AbstractTypePtr AbstractType::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    TrailPtr trail) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return NULL;
}

AbstractTypePtr AbstractType::Canonicalize(TrailPtr trail) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
  return NULL;
}

void AbstractType::EnumerateURIs(URIs* uris) const {
  // AbstractType is an abstract class.
  UNREACHABLE();
}
19091
// Trail helpers: a "trail" records types already visited while traversing a
// (possibly recursive) type graph, so traversals terminate. Pair-trails store
// (type, buddy) pairs at consecutive even/odd indices.

// Returns the buddy paired with this type in [trail], or null if this type
// is not on the trail.
AbstractTypePtr AbstractType::OnlyBuddyInTrail(TrailPtr trail) const {
  if (trail == NULL) {
    return AbstractType::null();
  }
  const intptr_t len = trail->length();
  ASSERT((len % 2) == 0);
  for (intptr_t i = 0; i < len; i += 2) {
    ASSERT(trail->At(i).IsZoneHandle());
    ASSERT(trail->At(i + 1).IsZoneHandle());
    if (trail->At(i).raw() == this->raw()) {
      ASSERT(!trail->At(i + 1).IsNull());
      return trail->At(i + 1).raw();
    }
  }
  return AbstractType::null();
}

// Adds the pair (this, buddy) to the trail, allocating the trail lazily.
// Asserts this type was not already paired (hence "only buddy").
void AbstractType::AddOnlyBuddyToTrail(TrailPtr* trail,
                                       const AbstractType& buddy) const {
  if (*trail == NULL) {
    *trail = new Trail(Thread::Current()->zone(), 4);
  } else {
    ASSERT(OnlyBuddyInTrail(*trail) == AbstractType::null());
  }
  (*trail)->Add(*this);
  (*trail)->Add(buddy);
}

// Returns true if this type is already on the (flat) trail; otherwise adds
// it and returns false. The trail is allocated lazily.
bool AbstractType::TestAndAddToTrail(TrailPtr* trail) const {
  if (*trail == NULL) {
    *trail = new Trail(Thread::Current()->zone(), 4);
  } else {
    const intptr_t len = (*trail)->length();
    for (intptr_t i = 0; i < len; i++) {
      if ((*trail)->At(i).raw() == this->raw()) {
        return true;
      }
    }
  }
  (*trail)->Add(*this);
  return false;
}

// Returns true if the pair (this, buddy) is already on the pair-trail,
// matching by identity or — when the counterpart is a TypeRef — by
// structural equality; otherwise adds the pair and returns false.
bool AbstractType::TestAndAddBuddyToTrail(TrailPtr* trail,
                                          const AbstractType& buddy) const {
  if (*trail == NULL) {
    *trail = new Trail(Thread::Current()->zone(), 4);
  } else {
    const intptr_t len = (*trail)->length();
    ASSERT((len % 2) == 0);
    const bool this_is_typeref = IsTypeRef();
    const bool buddy_is_typeref = buddy.IsTypeRef();
    // Note that at least one of 'this' and 'buddy' should be a typeref, with
    // one exception, when the class of the 'this' type implements the 'call'
    // method, thereby possibly creating a recursive type (see regress_29405).
    for (intptr_t i = 0; i < len; i += 2) {
      if ((((*trail)->At(i).raw() == this->raw()) ||
           (buddy_is_typeref && (*trail)->At(i).Equals(*this))) &&
          (((*trail)->At(i + 1).raw() == buddy.raw()) ||
           (this_is_typeref && (*trail)->At(i + 1).Equals(buddy)))) {
        return true;
      }
    }
  }
  (*trail)->Add(*this);
  (*trail)->Add(buddy);
  return false;
}
19160
19161void AbstractType::AddURI(URIs* uris, const String& name, const String& uri) {
19162 ASSERT(uris != NULL);
19163 const intptr_t len = uris->length();
19164 ASSERT((len % 3) == 0);
19165 bool print_uri = false;
19166 for (intptr_t i = 0; i < len; i += 3) {
19167 if (uris->At(i).Equals(name)) {
19168 if (uris->At(i + 1).Equals(uri)) {
19169 // Same name and same URI: no need to add this already listed URI.
19170 return; // No state change is possible.
19171 } else {
19172 // Same name and different URI: the name is ambiguous, print both URIs.
19173 print_uri = true;
19174 uris->SetAt(i + 2, Symbols::print());
19175 }
19176 }
19177 }
19178 uris->Add(name);
19179 uris->Add(uri);
19180 if (print_uri) {
19181 uris->Add(Symbols::print());
19182 } else {
19183 uris->Add(Symbols::Empty());
19184 }
19185}
19186
19187StringPtr AbstractType::PrintURIs(URIs* uris) {
19188 ASSERT(uris != NULL);
19189 Thread* thread = Thread::Current();
19190 Zone* zone = thread->zone();
19191 const intptr_t len = uris->length();
19192 ASSERT((len % 3) == 0);
19193 GrowableHandlePtrArray<const String> pieces(zone, 5 * (len / 3));
19194 for (intptr_t i = 0; i < len; i += 3) {
19195 // Only print URIs that have been marked.
19196 if (uris->At(i + 2).raw() == Symbols::print().raw()) {
19197 pieces.Add(Symbols::TwoSpaces());
19198 pieces.Add(uris->At(i));
19199 pieces.Add(Symbols::SpaceIsFromSpace());
19200 pieces.Add(uris->At(i + 1));
19201 pieces.Add(Symbols::NewLine());
19202 }
19203 }
19204 return Symbols::FromConcatAll(thread, pieces);
19205}
19206
// Returns the suffix to print after this type's name: "?" for nullable,
// "*" for legacy (internal names only), "" otherwise. dynamic, void and
// Null never carry a suffix.
const char* AbstractType::NullabilitySuffix(
    NameVisibility name_visibility) const {
  if (IsDynamicType() || IsVoidType() || IsNullType()) {
    // Hide nullable suffix.
    return "";
  }
  // Keep in sync with Nullability enum in runtime/vm/object.h.
  switch (nullability()) {
    case Nullability::kNullable:
      return "?";
    case Nullability::kNonNullable:
      return "";
    case Nullability::kLegacy:
      // The legacy star is internal detail; hide it from user-visible names
      // unless explicitly requested via the flag.
      return (FLAG_show_internal_names || name_visibility == kInternalName)
                 ? "*"
                 : "";
    default:
      UNREACHABLE();
  }
}
19227
// Returns this type's internal name (includes private/internal class names
// and legacy '*' suffixes) as a symbol.
StringPtr AbstractType::Name() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(kInternalName, &printer);
  return Symbols::New(thread, printer.buffer());
}

// Returns this type's user-visible name (internal classes mapped to their
// public interfaces) as a symbol.
StringPtr AbstractType::UserVisibleName() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  PrintName(kUserVisibleName, &printer);
  return Symbols::New(thread, printer.buffer());
}
19241
// Prints this type's name into [printer]: type parameters print their
// (optionally owner-qualified) name; function types print their signature
// (or typedef name); class types print the class name followed by the
// declared type arguments, if any; all followed by the nullability suffix.
void AbstractType::PrintName(
    NameVisibility name_visibility,
    BaseTextBuffer* printer,
    NameDisambiguation name_disambiguation /* = NameDisambiguation::kNo */)
    const {
  ASSERT(name_visibility != kScrubbedName);
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& cls = Class::Handle(zone);
  String& name_str = String::Handle(zone);
  if (IsTypeParameter()) {
    const TypeParameter& param = TypeParameter::Cast(*this);

    // Type parameters might have the same name but be owned by different
    // entities. If we want to disambiguate them we need to prefix
    // type parameter name with the name of its owner.
    if (name_disambiguation == NameDisambiguation::kYes) {
      cls = param.parameterized_class();
      if (cls.raw() != Class::null()) {
        printer->AddString(cls.NameCString(name_visibility));
        printer->AddString("::");
      } else if (param.parameterized_function() != Function::null()) {
        const Function& func =
            Function::Handle(zone, param.parameterized_function());
        func.PrintName(
            NameFormattingParams(name_visibility, name_disambiguation),
            printer);
        printer->AddString("::");
      }
    }

    name_str = param.name();
    printer->AddString(name_str.ToCString());
    printer->AddString(NullabilitySuffix(name_visibility));
    return;
  }
  const TypeArguments& args = TypeArguments::Handle(zone, arguments());
  const intptr_t num_args = args.IsNull() ? 0 : args.Length();
  intptr_t first_type_param_index;
  intptr_t num_type_params;  // Number of type parameters to print.
  cls = type_class();
  if (IsFunctionType()) {
    const Function& signature_function =
        Function::Handle(zone, Type::Cast(*this).signature());
    if (!cls.IsTypedefClass()) {
      // A non-typedef function type prints its full signature; a non-empty
      // nullability suffix requires parenthesizing it.
      const char* suffix = NullabilitySuffix(name_visibility);
      if (suffix[0] != '\0') {
        printer->AddString("(");
      }
      signature_function.PrintSignature(name_visibility, printer);
      if (suffix[0] != '\0') {
        printer->AddString(")");
        printer->AddString(suffix);
      }
      return;
    }
    // Instead of printing the actual signature, use the typedef name with
    // its type arguments, if any.
    name_str = cls.Name();  // Typedef name.
    if (!IsFinalized() || IsBeingFinalized()) {
      // TODO(regis): Check if this is dead code.
      printer->AddString(name_str.ToCString());
      printer->AddString(NullabilitySuffix(name_visibility));
      return;
    }
    // Print the name of a typedef as a regular, possibly parameterized, class.
  }
  // Do not print the full vector, but only the declared type parameters.
  num_type_params = cls.NumTypeParameters();
  if (name_visibility == kInternalName) {
    name_str = cls.Name();
    printer->AddString(name_str.ToCString());
  } else {
    ASSERT(name_visibility == kUserVisibleName);
    // Map internal types to their corresponding public interfaces.
    printer->AddString(cls.UserVisibleNameCString());
  }
  if (num_type_params > num_args) {
    first_type_param_index = 0;
    if (!IsFinalized() || IsBeingFinalized()) {
      // TODO(regis): Check if this is dead code.
      num_type_params = num_args;
    } else {
      ASSERT(num_args == 0);  // Type is raw.
    }
  } else {
    // The actual type argument vector can be longer than necessary, because
    // of type optimizations.
    if (IsFinalized() && cls.is_type_finalized()) {
      first_type_param_index = cls.NumTypeArguments() - num_type_params;
    } else {
      first_type_param_index = num_args - num_type_params;
    }
  }
  if (num_type_params == 0) {
    // Do nothing.
  } else {
    args.PrintSubvectorName(first_type_param_index, num_type_params,
                            name_visibility, printer, name_disambiguation);
  }
  printer->AddString(NullabilitySuffix(name_visibility));
  // The name is only used for type checking and debugging purposes.
  // Unless profiling data shows otherwise, it is not worth caching the name in
  // the type.
}
19347
// Returns the name of this type's class. Not valid for function types.
StringPtr AbstractType::ClassName() const {
  ASSERT(!IsFunctionType());
  return Class::Handle(type_class()).Name();
}

// Returns whether this is a TypeRef whose referenced type has not been set
// yet (only occurs while a recursive type is under construction).
bool AbstractType::IsNullTypeRef() const {
  return IsTypeRef() && (TypeRef::Cast(*this).type() == AbstractType::null());
}

// Returns whether this type is the Null type.
bool AbstractType::IsNullType() const {
  return type_class_id() == kNullCid;
}

// Returns whether this type is the Never type.
bool AbstractType::IsNeverType() const {
  return type_class_id() == kNeverCid;
}
19364
19365bool AbstractType::IsTopTypeForInstanceOf() const {
19366 const classid_t cid = type_class_id();
19367 if (cid == kDynamicCid || cid == kVoidCid) {
19368 return true;
19369 }
19370 if (cid == kInstanceCid) { // Object type.
19371 return !IsNonNullable(); // kLegacy or kNullable.
19372 }
19373 if (cid == kFutureOrCid) {
19374 // FutureOr<T> where T is a top type behaves as a top type.
19375 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForInstanceOf();
19376 }
19377 return false;
19378}
19379
19380bool AbstractType::IsTopTypeForSubtyping() const {
19381 const classid_t cid = type_class_id();
19382 if (cid == kDynamicCid || cid == kVoidCid) {
19383 return true;
19384 }
19385 if (cid == kInstanceCid) { // Object type.
19386 // NNBD weak mode uses LEGACY_SUBTYPE for assignability / 'as' tests,
19387 // and non-nullable Object is a top type according to LEGACY_SUBTYPE.
19388 return !IsNonNullable() || !Isolate::Current()->null_safety();
19389 }
19390 if (cid == kFutureOrCid) {
19391 // FutureOr<T> where T is a top type behaves as a top type.
19392 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForSubtyping();
19393 }
19394 return false;
19395}
19396
// The predicates below identify well-known core types by comparing type
// classes against the canonical core-library types (class ids of private
// implementation classes cannot be used directly).

// Returns whether this type is 'int'.
bool AbstractType::IsIntType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::IntType()).type_class());
}

// Returns whether this type is 'double'.
bool AbstractType::IsDoubleType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Double()).type_class());
}

// Returns whether this type is 'Float32x4'.
bool AbstractType::IsFloat32x4Type() const {
  // kFloat32x4Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Float32x4()).type_class());
}

// Returns whether this type is 'Float64x2'.
bool AbstractType::IsFloat64x2Type() const {
  // kFloat64x2Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Float64x2()).type_class());
}

// Returns whether this type is 'Int32x4'.
bool AbstractType::IsInt32x4Type() const {
  // kInt32x4Cid refers to the private class and cannot be used here.
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::Int32x4()).type_class());
}

// Returns whether this type is 'String'.
bool AbstractType::IsStringType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::StringType()).type_class());
}

// Returns whether this type is the 'Function' type from dart:core.
bool AbstractType::IsDartFunctionType() const {
  return HasTypeClass() &&
         (type_class() == Type::Handle(Type::DartFunctionType()).type_class());
}

// Returns whether this type is the internal '_Closure' class type (and not
// a function type that merely uses '_Closure' as its type class).
bool AbstractType::IsDartClosureType() const {
  // Non-typedef function types have '_Closure' class as type class, but are not
  // the Dart '_Closure' type.
  return !IsFunctionType() && (type_class_id() == kClosureCid);
}

// Returns whether this type is 'Pointer' from dart:ffi.
bool AbstractType::IsFfiPointerType() const {
  return HasTypeClass() && type_class_id() == kFfiPointerCid;
}
19444
// Recursively unwraps FutureOr: for FutureOr<FutureOr<...<T>>> returns T.
// A raw (argument-less) FutureOr unwraps to 'dynamic'. Non-FutureOr types
// are returned unchanged. Uses the thread's reusable handles to avoid Zone
// allocations in this hot path.
AbstractTypePtr AbstractType::UnwrapFutureOr() const {
  if (!IsFutureOrType()) {
    return raw();
  }
  if (arguments() == TypeArguments::null()) {
    // Raw FutureOr: the implicit type argument is dynamic.
    return Type::dynamic_type().raw();
  }
  Thread* thread = Thread::Current();
  REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread);
  TypeArguments& type_args = thread->TypeArgumentsHandle();
  type_args = arguments();
  REUSABLE_ABSTRACT_TYPE_HANDLESCOPE(thread);
  AbstractType& type_arg = thread->AbstractTypeHandle();
  type_arg = type_args.TypeAt(0);
  // Keep unwrapping nested FutureOr layers.
  while (type_arg.IsFutureOrType()) {
    if (type_arg.arguments() == TypeArguments::null()) {
      return Type::dynamic_type().raw();
    }
    type_args = type_arg.arguments();
    type_arg = type_args.TypeAt(0);
  }
  return type_arg.raw();
}
19468
// Returns true if this type is a subtype of [other], as far as it can be
// decided here: both types must be finalized, and a false result may also
// mean "not decidable at compile time", deferring the check to run time (see
// the comment on type parameters below). [trail] breaks cycles through
// F-bounded type parameters; [space] is the heap space used for any
// intermediate allocations during the test.
bool AbstractType::IsSubtypeOf(const AbstractType& other,
                               Heap::Space space,
                               TrailPtr trail) const {
  ASSERT(IsFinalized());
  ASSERT(other.IsFinalized());
  // Reflexivity.
  if (raw() == other.raw()) {
    return true;
  }
  // Right top type.
  if (other.IsTopTypeForSubtyping()) {
    return true;
  }
  // Left bottom type.
  // Any form of Never in weak mode maps to Null and Null is a bottom type in
  // weak mode. In strong mode, Never and Never* are bottom types. Therefore,
  // Never and Never* are bottom types regardless of weak/strong mode.
  // Note that we cannot encounter Never?, as it is normalized to Null.
  if (IsNeverType()) {
    ASSERT(!IsNullable());
    return true;
  }
  // Left top type.
  if (IsDynamicType() || IsVoidType()) {
    return false;
  }
  // Left Null type.
  if (IsNullType()) {
    return Instance::NullIsAssignableTo(other);
  }
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  // Type parameters cannot be handled by Class::IsSubtypeOf().
  // When comparing two uninstantiated function types, one returning type
  // parameter K, the other returning type parameter V, we cannot assume that
  // K is a subtype of V, or vice versa. We only return true if K equals V, as
  // defined by TypeParameter::Equals.
  // The same rule applies when checking the upper bound of a still
  // uninstantiated type at compile time. Returning false will defer the test
  // to run time.
  // There are however some cases that can be decided at compile time.
  // For example, with class A<K, V extends K>, new A<T, T> called from within
  // a class B<T> will never require a run time bound check, even if T is
  // uninstantiated at compile time.
  if (IsTypeParameter()) {
    const TypeParameter& type_param = TypeParameter::Cast(*this);
    if (other.IsTypeParameter()) {
      const TypeParameter& other_type_param = TypeParameter::Cast(other);
      if (type_param.IsEquivalent(other_type_param,
                                  TypeEquality::kInSubtypeTest)) {
        return true;
      }
    }
    const AbstractType& bound = AbstractType::Handle(zone, type_param.bound());
    ASSERT(bound.IsFinalized());
    // Avoid cycles with F-bounded types.
    if (TestAndAddBuddyToTrail(&trail, other)) {
      return true;
    }
    // A type parameter is a subtype of anything its bound is a subtype of.
    if (bound.IsSubtypeOf(other, space, trail)) {
      return true;
    }
    // Apply additional subtyping rules if 'other' is 'FutureOr'.
    if (IsSubtypeOfFutureOr(zone, other, space, trail)) {
      return true;
    }
    return false;
  }
  if (other.IsTypeParameter()) {
    return false;
  }
  const Class& type_cls = Class::Handle(zone, type_class());
  const Class& other_type_cls = Class::Handle(zone, other.type_class());
  // Function types cannot be handled by Class::IsSubtypeOf().
  const bool other_is_dart_function_type = other.IsDartFunctionType();
  if (other_is_dart_function_type || other.IsFunctionType()) {
    if (IsFunctionType()) {
      // With null safety, a nullable function type is not a subtype of a
      // non-nullable one.
      if (isolate->null_safety() && IsNullable() && other.IsNonNullable()) {
        return false;
      }
      // Every function type is a subtype of the plain 'Function' type.
      if (other_is_dart_function_type) {
        return true;
      }
      const Function& other_fun =
          Function::Handle(zone, Type::Cast(other).signature());
      // Check for two function types.
      const Function& fun =
          Function::Handle(zone, Type::Cast(*this).signature());
      return fun.IsSubtypeOf(other_fun, space);
    }
    if (other.IsFunctionType() && !other_type_cls.IsTypedefClass()) {
      // [this] is not a function type. Therefore, non-function type [this]
      // cannot be a subtype of function type [other], unless [other] is not
      // only a function type, but also a named typedef.
      // Indeed a typedef also behaves as a regular class-based type (with
      // type arguments when generic).
      // This check is needed to avoid falling through to class-based type
      // tests, which yield incorrect result if [this] = _Closure class,
      // and [other] is a function type, because class of a function type is
      // also _Closure (unless [other] is a typedef).
      return false;
    }
  }
  if (IsFunctionType()) {
    // Apply additional subtyping rules if 'other' is 'FutureOr'.
    if (IsSubtypeOfFutureOr(zone, other, space, trail)) {
      return true;
    }
    return false;
  }
  // Remaining cases are class-based types; delegate to Class::IsSubtypeOf().
  return Class::IsSubtypeOf(type_cls, TypeArguments::Handle(zone, arguments()),
                            nullability(), other, space, trail);
}
19583
19584bool AbstractType::IsSubtypeOfFutureOr(Zone* zone,
19585 const AbstractType& other,
19586 Heap::Space space,
19587 TrailPtr trail) const {
19588 if (other.IsFutureOrType()) {
19589 // This function is only called with a receiver that is either a function
19590 // type or an uninstantiated type parameter, therefore, it cannot be of
19591 // class Future and we can spare the check.
19592 ASSERT(IsFunctionType() || IsTypeParameter());
19593 const TypeArguments& other_type_arguments =
19594 TypeArguments::Handle(zone, other.arguments());
19595 const AbstractType& other_type_arg =
19596 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
19597 if (other_type_arg.IsTopTypeForSubtyping()) {
19598 return true;
19599 }
19600 // Retry the IsSubtypeOf check after unwrapping type arg of FutureOr.
19601 if (IsSubtypeOf(other_type_arg, space, trail)) {
19602 return true;
19603 }
19604 }
19605 return false;
19606}
19607
intptr_t AbstractType::Hash() const {
  // AbstractType is an abstract class; concrete subclasses are expected to
  // override Hash(), so this must never be reached.
  UNREACHABLE();
  return 0;
}
19613
const char* AbstractType::ToCString() const {
  // Tolerate the null handle so it can still be printed for debugging.
  if (IsNull()) {
    return "AbstractType: null";
  }
  // AbstractType is an abstract class; concrete subclasses are expected to
  // override ToCString(), so this must never be reached.
  UNREACHABLE();
  return "AbstractType";
}
19622
19623void AbstractType::SetTypeTestingStub(const Code& stub) const {
19624 if (stub.IsNull()) {
19625 // This only happens during bootstrapping when creating Type objects before
19626 // we have the instructions.
19627 ASSERT(type_class_id() == kDynamicCid || type_class_id() == kVoidCid);
19628 StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, 0);
19629 } else {
19630 StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, stub.EntryPoint());
19631 }
19632 StorePointer(&raw_ptr()->type_test_stub_, stub.raw());
19633}
19634
// Shorthand accessors for frequently used types. Most return the canonical
// instance cached in the current isolate's object store; dynamic and void are
// accessed via Object's well-known read-only handles.
TypePtr Type::NullType() {
  return Isolate::Current()->object_store()->null_type();
}

TypePtr Type::DynamicType() {
  return Object::dynamic_type().raw();
}

TypePtr Type::VoidType() {
  return Object::void_type().raw();
}

TypePtr Type::NeverType() {
  return Isolate::Current()->object_store()->never_type();
}

TypePtr Type::ObjectType() {
  return Isolate::Current()->object_store()->object_type();
}

TypePtr Type::BoolType() {
  return Isolate::Current()->object_store()->bool_type();
}

TypePtr Type::IntType() {
  return Isolate::Current()->object_store()->int_type();
}

TypePtr Type::NullableIntType() {
  return Isolate::Current()->object_store()->nullable_int_type();
}

TypePtr Type::SmiType() {
  return Isolate::Current()->object_store()->smi_type();
}

TypePtr Type::MintType() {
  return Isolate::Current()->object_store()->mint_type();
}

TypePtr Type::Double() {
  return Isolate::Current()->object_store()->double_type();
}

TypePtr Type::NullableDouble() {
  return Isolate::Current()->object_store()->nullable_double_type();
}

TypePtr Type::Float32x4() {
  return Isolate::Current()->object_store()->float32x4_type();
}

TypePtr Type::Float64x2() {
  return Isolate::Current()->object_store()->float64x2_type();
}

TypePtr Type::Int32x4() {
  return Isolate::Current()->object_store()->int32x4_type();
}

TypePtr Type::Number() {
  return Isolate::Current()->object_store()->number_type();
}

TypePtr Type::StringType() {
  return Isolate::Current()->object_store()->string_type();
}

TypePtr Type::ArrayType() {
  return Isolate::Current()->object_store()->array_type();
}

TypePtr Type::DartFunctionType() {
  return Isolate::Current()->object_store()->function_type();
}

TypePtr Type::DartTypeType() {
  return Isolate::Current()->object_store()->type_type();
}
19714
// Returns the finalized, canonical type of the non-generic [type_class].
// Null, dynamic and void map to their well-known types; for all other classes
// the declaration type is lazily created, canonicalized, and cached on the
// class itself.
TypePtr Type::NewNonParameterizedType(const Class& type_class) {
  ASSERT(type_class.NumTypeArguments() == 0);
  if (type_class.IsNullClass()) {
    return Type::NullType();
  }
  if (type_class.IsDynamicClass()) {
    return Type::DynamicType();
  }
  if (type_class.IsVoidClass()) {
    return Type::VoidType();
  }
  // It is too early to use the class finalizer, as type_class may not be named
  // yet, so do not call DeclarationType().
  Type& type = Type::Handle(type_class.declaration_type());
  if (type.IsNull()) {
    type = Type::New(Class::Handle(type_class.raw()),
                     Object::null_type_arguments(), TokenPosition::kNoSource,
                     Nullability::kNonNullable);
    type.SetIsFinalized();
    type ^= type.Canonicalize();
    type_class.set_declaration_type(type);
  }
  ASSERT(type.IsFinalized());
  return type.raw();
}
19740
19741void Type::SetIsFinalized() const {
19742 ASSERT(!IsFinalized());
19743 if (IsInstantiated()) {
19744 set_type_state(TypeLayout::kFinalizedInstantiated);
19745 } else {
19746 set_type_state(TypeLayout::kFinalizedUninstantiated);
19747 }
19748}
19749
// Recomputes the finalized state of an already finalized type. The type is
// temporarily moved back to being-finalized so that SetIsFinalized() passes
// its !IsFinalized() assertion and re-derives the (un)instantiated state.
void Type::ResetIsFinalized() const {
  ASSERT(IsFinalized());
  set_type_state(TypeLayout::kBeingFinalized);
  SetIsFinalized();
}
19755
// Transitions this type into the being-finalized state; it must not already
// be finalized or in the process of being finalized.
void Type::SetIsBeingFinalized() const {
  ASSERT(!IsFinalized() && !IsBeingFinalized());
  set_type_state(TypeLayout::kBeingFinalized);
}
19760
// Returns a version of this type carrying the requested nullability, cloning
// the type (and re-canonicalizing the clone if the receiver was canonical)
// when a change is required. dynamic, void and Null are returned unchanged;
// Never? is normalized to Null.
TypePtr Type::ToNullability(Nullability value, Heap::Space space) const {
  if (nullability() == value) {
    return raw();
  }
  // Type parameter instantiation may request a nullability change, which should
  // be ignored for types dynamic and void. Type Null cannot be the result of
  // instantiating a non-nullable type parameter (TypeError thrown).
  const classid_t cid = type_class_id();
  if (cid == kDynamicCid || cid == kVoidCid || cid == kNullCid) {
    return raw();
  }
  if (cid == kNeverCid && value == Nullability::kNullable) {
    // Normalize Never? to Null.
    return Type::NullType();
  }
  // Clone type and set new nullability.
  Type& type = Type::Handle();
  // Always cloning in old space and removing space parameter would not satisfy
  // currently existing requests for type instantiation in new space.
  type ^= Object::Clone(*this, space);
  type.set_nullability(value);
  // The hash incorporates the nullability (see Type::ComputeHash), so flush
  // the cached value.
  type.SetHash(0);
  type.SetTypeTestingStub(
      Code::Handle(TypeTestingStubGenerator::DefaultCodeForType(type)));
  if (IsCanonical()) {
    // Object::Clone does not clone canonical bit.
    ASSERT(!type.IsCanonical());
    type ^= type.Canonicalize();
  }
  return type.raw();
}
19792
19793FunctionPtr Type::signature() const {
19794 intptr_t cid = raw_ptr()->signature_->GetClassId();
19795 if (cid == kNullCid) {
19796 return Function::null();
19797 }
19798 ASSERT(cid == kFunctionCid);
19799 return Function::RawCast(raw_ptr()->signature_);
19800}
19801
// Sets the signature function of this type, making it a function type.
void Type::set_signature(const Function& value) const {
  StorePointer(&raw_ptr()->signature_, value.raw());
}
19805
// Returns the class id of this type's class; it is stored as a Smi in the
// object layout.
classid_t Type::type_class_id() const {
  return Smi::Value(raw_ptr()->type_class_id_);
}
19809
// Resolves the stored class id to the class via the current isolate's class
// table.
ClassPtr Type::type_class() const {
  return Isolate::Current()->class_table()->At(type_class_id());
}
19813
// Returns true if this type has no free type parameters, with respect to
// [genericity] and [num_free_fun_type_params]. For function types, the
// signature is checked in addition to the type arguments. [trail] guards
// against cycles in recursive types.
bool Type::IsInstantiated(Genericity genericity,
                          intptr_t num_free_fun_type_params,
                          TrailPtr trail) const {
  // Fast paths based on the cached finalization state (only conclusive for
  // the fully general query).
  if (raw_ptr()->type_state_ == TypeLayout::kFinalizedInstantiated) {
    return true;
  }
  if ((genericity == kAny) && (num_free_fun_type_params == kAllFree) &&
      (raw_ptr()->type_state_ == TypeLayout::kFinalizedUninstantiated)) {
    return false;
  }
  if (IsFunctionType()) {
    const Function& sig_fun = Function::Handle(signature());
    if (!sig_fun.HasInstantiatedSignature(genericity, num_free_fun_type_params,
                                          trail)) {
      return false;
    }
    // Because a generic typedef with an instantiated signature is considered
    // uninstantiated, we still need to check the type arguments, even if the
    // signature is instantiated.
  }
  if (arguments() == TypeArguments::null()) {
    return true;
  }
  const TypeArguments& args = TypeArguments::Handle(arguments());
  intptr_t num_type_args = args.Length();
  intptr_t len = num_type_args;  // Check the full vector of type args.
  ASSERT(num_type_args > 0);
  // This type is not instantiated if it refers to type parameters.
  const Class& cls = Class::Handle(type_class());
  len = cls.NumTypeParameters();  // Check the type parameters only.
  if (len > num_type_args) {
    // This type has the wrong number of arguments and is not finalized yet.
    // Type arguments are reset to null when finalizing such a type.
    ASSERT(!IsFinalized());
    len = num_type_args;
  }
  // Only the suffix of the vector corresponding to the class's own type
  // parameters needs to be inspected.
  return (len == 0) ||
         args.IsSubvectorInstantiated(num_type_args - len, len, genericity,
                                      num_free_fun_type_params, trail);
}
19854
// Returns a new type obtained by substituting [instantiator_type_arguments]
// and [function_type_arguments] for the free type parameters of this
// uninstantiated type. The receiver is not modified; the result is allocated
// in [space]. Returns Type::null() if instantiation fails in dead code (see
// comments below). The result is normalized with respect to FutureOr but not
// canonicalized.
AbstractTypePtr Type::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    TrailPtr trail) const {
  Zone* zone = Thread::Current()->zone();
  ASSERT(IsFinalized() || IsBeingFinalized());
  ASSERT(!IsInstantiated());
  // Note that the type class has to be resolved at this time, but not
  // necessarily finalized yet. We may be checking bounds at compile time or
  // finalizing the type argument vector of a recursive type.
  const Class& cls = Class::Handle(zone, type_class());
  TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
  Function& sig_fun = Function::Handle(zone, signature());
  if (!type_arguments.IsNull() &&
      (sig_fun.IsNull() || !type_arguments.IsInstantiated())) {
    // This type is uninstantiated because either its type arguments or its
    // signature, or both are uninstantiated.
    // Note that the type arguments of a function type merely document the
    // parameterization of a generic typedef. They are otherwise ignored.
    ASSERT(type_arguments.Length() == cls.NumTypeArguments());
    type_arguments = type_arguments.InstantiateFrom(
        instantiator_type_arguments, function_type_arguments,
        num_free_fun_type_params, space, trail);
    // A returned empty_type_arguments indicates a failed instantiation in dead
    // code that must be propagated up to the caller, the optimizing compiler.
    if (type_arguments.raw() == Object::empty_type_arguments().raw()) {
      return Type::null();
    }
  }
  // This uninstantiated type is not modified, as it can be instantiated
  // with different instantiators. Allocate a new instantiated version of it.
  const Type& instantiated_type = Type::Handle(
      zone, Type::New(cls, type_arguments, token_pos(), nullability(), space));
  // For a function type, possibly instantiate and set its signature.
  if (!sig_fun.IsNull()) {
    // If we are finalizing a typedef, do not yet instantiate its signature,
    // since it gets instantiated just before the type is marked as finalized.
    // Other function types should never get instantiated while unfinalized,
    // even while checking bounds of recursive types.
    if (IsFinalized()) {
      // A generic typedef may actually declare an instantiated signature.
      if (!sig_fun.HasInstantiatedSignature(kAny, num_free_fun_type_params)) {
        sig_fun = sig_fun.InstantiateSignatureFrom(
            instantiator_type_arguments, function_type_arguments,
            num_free_fun_type_params, space);
        // A returned null signature indicates a failed instantiation in dead
        // code that must be propagated up to the caller, the optimizing
        // compiler.
        if (sig_fun.IsNull()) {
          return Type::null();
        }
      }
    } else {
      // The Kernel frontend does not keep the information that a function type
      // is a typedef, so we cannot assert that cls.IsTypedefClass().
    }
    instantiated_type.set_signature(sig_fun);
  }
  // Propagate the finalization state of the receiver to the new type.
  if (IsFinalized()) {
    instantiated_type.SetIsFinalized();
  } else {
    if (IsBeingFinalized()) {
      instantiated_type.SetIsBeingFinalized();
    }
  }
  // Canonicalization is not part of instantiation.
  return instantiated_type.NormalizeFutureOrType(space);
}
19925
// Returns true if this type and [other] are equivalent under [kind]:
// canonical equality, syntactical equality (where legacy nullability is
// identified with non-nullable), or the relaxed comparison used inside
// subtype tests. [trail] guards against cycles through recursive type
// arguments. TypeRefs on the right-hand side are unfolded first.
bool Type::IsEquivalent(const Instance& other,
                        TypeEquality kind,
                        TrailPtr trail) const {
  ASSERT(!IsNull());
  if (raw() == other.raw()) {
    return true;
  }
  if (other.IsTypeRef()) {
    // Unfold right hand type. Divergence is controlled by left hand type.
    const AbstractType& other_ref_type =
        AbstractType::Handle(TypeRef::Cast(other).type());
    ASSERT(!other_ref_type.IsTypeRef());
    return IsEquivalent(other_ref_type, kind, trail);
  }
  if (!other.IsType()) {
    return false;
  }
  const Type& other_type = Type::Cast(other);
  if (IsFunctionType() != other_type.IsFunctionType()) {
    return false;
  }
  if (type_class_id() != other_type.type_class_id()) {
    return false;
  }
  // Compare nullability, subject to the requested kind of equality.
  Nullability this_type_nullability = nullability();
  Nullability other_type_nullability = other_type.nullability();
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  if (kind == TypeEquality::kInSubtypeTest) {
    // In a subtype test, only nullable vs non-nullable matters (and only
    // with null safety enabled).
    if (isolate->null_safety() &&
        this_type_nullability == Nullability::kNullable &&
        other_type_nullability == Nullability::kNonNullable) {
      return false;
    }
  } else {
    if (kind == TypeEquality::kSyntactical) {
      // Syntactically, legacy types compare equal to non-nullable ones.
      if (this_type_nullability == Nullability::kLegacy) {
        this_type_nullability = Nullability::kNonNullable;
      }
      if (other_type_nullability == Nullability::kLegacy) {
        other_type_nullability = Nullability::kNonNullable;
      }
    } else {
      ASSERT(kind == TypeEquality::kCanonical);
    }
    if (this_type_nullability != other_type_nullability) {
      return false;
    }
  }
  if (!IsFinalized() || !other_type.IsFinalized()) {
    return false;  // Too early to decide if equal.
  }
  if ((arguments() == other_type.arguments()) &&
      (signature() == other_type.signature())) {
    return true;
  }
  if (arguments() != other_type.arguments()) {
    const Class& cls = Class::Handle(zone, type_class());
    const intptr_t num_type_params = cls.NumTypeParameters(thread);
    // Shortcut unnecessary handle allocation below if non-generic.
    if (num_type_params > 0) {
      const intptr_t num_type_args = cls.NumTypeArguments();
      const intptr_t from_index = num_type_args - num_type_params;
      const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
      const TypeArguments& other_type_args =
          TypeArguments::Handle(zone, other_type.arguments());
      if (type_args.IsNull()) {
        // Ignore from_index.
        if (!other_type_args.IsRaw(0, num_type_args)) {
          return false;
        }
      } else if (other_type_args.IsNull()) {
        // Ignore from_index.
        if (!type_args.IsRaw(0, num_type_args)) {
          return false;
        }
      } else if (!type_args.IsSubvectorEquivalent(other_type_args, from_index,
                                                  num_type_params, kind,
                                                  trail)) {
        return false;
      }
#ifdef DEBUG
      if ((from_index > 0) && !type_args.IsNull() &&
          !other_type_args.IsNull()) {
        // Verify that the type arguments of the super class match, since they
        // depend solely on the type parameters that were just verified to
        // match.
        ASSERT(type_args.Length() >= (from_index + num_type_params));
        ASSERT(other_type_args.Length() >= (from_index + num_type_params));
        AbstractType& type_arg = AbstractType::Handle(zone);
        AbstractType& other_type_arg = AbstractType::Handle(zone);
        for (intptr_t i = 0; i < from_index; i++) {
          type_arg = type_args.TypeAt(i);
          other_type_arg = other_type_args.TypeAt(i);
          ASSERT(type_arg.IsEquivalent(other_type_arg, kind, trail));
        }
      }
#endif
    }
  }
  if (!IsFunctionType()) {
    return true;
  }
  ASSERT(Type::Cast(other).IsFunctionType());
  // Equal function types must have equal signature types and equal optional
  // named arguments.
  if (signature() == other_type.signature()) {
    return true;
  }
  const Function& sig_fun = Function::Handle(zone, signature());
  const Function& other_sig_fun =
      Function::Handle(zone, other_type.signature());

  // Compare function type parameters and their bounds.
  // Check the type parameters and bounds of generic functions.
  if (!sig_fun.HasSameTypeParametersAndBounds(other_sig_fun, kind)) {
    return false;
  }

  // Compare number of function parameters.
  const intptr_t num_fixed_params = sig_fun.num_fixed_parameters();
  const intptr_t other_num_fixed_params = other_sig_fun.num_fixed_parameters();
  if (num_fixed_params != other_num_fixed_params) {
    return false;
  }
  const intptr_t num_opt_pos_params = sig_fun.NumOptionalPositionalParameters();
  const intptr_t other_num_opt_pos_params =
      other_sig_fun.NumOptionalPositionalParameters();
  if (num_opt_pos_params != other_num_opt_pos_params) {
    return false;
  }
  const intptr_t num_opt_named_params = sig_fun.NumOptionalNamedParameters();
  const intptr_t other_num_opt_named_params =
      other_sig_fun.NumOptionalNamedParameters();
  if (num_opt_named_params != other_num_opt_named_params) {
    return false;
  }
  const intptr_t num_ignored_params = sig_fun.NumImplicitParameters();
  const intptr_t other_num_ignored_params =
      other_sig_fun.NumImplicitParameters();
  if (num_ignored_params != other_num_ignored_params) {
    return false;
  }
  AbstractType& param_type = Type::Handle(zone);
  AbstractType& other_param_type = Type::Handle(zone);
  // Check the result type.
  param_type = sig_fun.result_type();
  other_param_type = other_sig_fun.result_type();
  if (!param_type.IsEquivalent(other_param_type, kind)) {
    return false;
  }
  // Check the types of all parameters.
  const intptr_t num_params = sig_fun.NumParameters();
  ASSERT(other_sig_fun.NumParameters() == num_params);
  for (intptr_t i = 0; i < num_params; i++) {
    param_type = sig_fun.ParameterTypeAt(i);
    other_param_type = other_sig_fun.ParameterTypeAt(i);
    // Use contravariant order in case we test for subtyping.
    if (!other_param_type.IsEquivalent(param_type, kind)) {
      return false;
    }
  }
  // Check the names and types of optional named parameters.
  if (num_opt_named_params == 0) {
    return true;
  }
  for (intptr_t i = num_fixed_params; i < num_params; i++) {
    if (sig_fun.ParameterNameAt(i) != other_sig_fun.ParameterNameAt(i)) {
      return false;
    }
    // The 'required' flag of named parameters must match as well.
    if (sig_fun.IsRequiredAt(i) != other_sig_fun.IsRequiredAt(i)) {
      return false;
    }
  }
  return true;
}
20103
20104bool Type::IsRecursive() const {
20105 return TypeArguments::Handle(arguments()).IsRecursive();
20106}
20107
20108bool Type::IsDeclarationTypeOf(const Class& cls) const {
20109 ASSERT(type_class() == cls.raw());
20110 if (cls.IsNullClass()) {
20111 return true;
20112 }
20113 if (cls.IsGeneric() || cls.IsClosureClass() || cls.IsTypedefClass()) {
20114 return false;
20115 }
20116 return nullability() == Nullability::kNonNullable;
20117}
20118
// Returns the canonical representative of this finalized type. dynamic and
// void have well-known canonical instances; simple declaration types are
// cached directly on their class; all other types are looked up in (and, if
// absent, inserted into) the isolate's canonical type table under the type
// canonicalization mutex. [trail] is forwarded when canonicalizing the type
// arguments of recursive types.
AbstractTypePtr Type::Canonicalize(TrailPtr trail) const {
  ASSERT(IsFinalized());
  if (IsCanonical()) {
    ASSERT(TypeArguments::Handle(arguments()).IsOld());
    return this->raw();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

  const classid_t cid = type_class_id();
  if (cid == kDynamicCid) {
    ASSERT(Object::dynamic_type().IsCanonical());
    return Object::dynamic_type().raw();
  }

  if (cid == kVoidCid) {
    ASSERT(Object::void_type().IsCanonical());
    return Object::void_type().raw();
  }

  const Class& cls = Class::Handle(zone, type_class());

  // Fast canonical lookup/registry for simple types.
  if (IsDeclarationTypeOf(cls)) {
    ASSERT(!IsFunctionType());
    ASSERT(!cls.IsNullClass() || IsNullable());
    Type& type = Type::Handle(zone, cls.declaration_type());
    if (type.IsNull()) {
      ASSERT(!cls.raw()->ptr()->InVMIsolateHeap() ||
             (isolate == Dart::vm_isolate()));
      // Canonicalize the type arguments of the supertype, if any.
      TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
      type_args = type_args.Canonicalize(trail);
      if (IsCanonical()) {
        // Canonicalizing type_args canonicalized this type.
        ASSERT(IsRecursive());
        return this->raw();
      }
      set_arguments(type_args);
      type = cls.declaration_type();
      // May be set while canonicalizing type args.
      if (type.IsNull()) {
        // Double-checked locking: re-read under the mutex before publishing
        // a new canonical declaration type.
        SafepointMutexLocker ml(
            isolate->group()->type_canonicalization_mutex());
        // Recheck if type exists.
        type = cls.declaration_type();
        if (type.IsNull()) {
          // Canonical objects must live in old space.
          if (this->IsNew()) {
            type ^= Object::Clone(*this, Heap::kOld);
          } else {
            type = this->raw();
          }
          ASSERT(type.IsOld());
          type.ComputeHash();
          type.SetCanonical();
          cls.set_declaration_type(type);
          return type.raw();
        }
      }
    }
    ASSERT(this->Equals(type));
    ASSERT(type.IsCanonical());
    ASSERT(type.IsOld());
    return type.raw();
  }

  AbstractType& type = Type::Handle(zone);
  ObjectStore* object_store = isolate->object_store();
  {
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeSet table(zone, object_store->canonical_types());
    type ^= table.GetOrNull(CanonicalTypeKey(*this));
    ASSERT(object_store->canonical_types() == table.Release().raw());
  }
  if (type.IsNull()) {
    // The type was not found in the table. It is not canonical yet.

    // Canonicalize the type arguments.
    TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
    // In case the type is first canonicalized at runtime, its type argument
    // vector may be longer than necessary. If so, reallocate a vector of the
    // exact size to prevent multiple "canonical" types.
    if (!type_args.IsNull()) {
      const intptr_t num_type_args = cls.NumTypeArguments();
      ASSERT(type_args.Length() >= num_type_args);
      if (type_args.Length() > num_type_args) {
        TypeArguments& new_type_args =
            TypeArguments::Handle(zone, TypeArguments::New(num_type_args));
        AbstractType& type_arg = AbstractType::Handle(zone);
        for (intptr_t i = 0; i < num_type_args; i++) {
          type_arg = type_args.TypeAt(i);
          new_type_args.SetTypeAt(i, type_arg);
        }
        type_args = new_type_args.raw();
        set_arguments(type_args);
        SetHash(0);  // Flush cached hash value.
      }
    }
    type_args = type_args.Canonicalize(trail);
    if (IsCanonical()) {
      // Canonicalizing type_args canonicalized this type as a side effect.
      ASSERT(IsRecursive());
      // Cycles via typedefs are detected and disallowed, but a function type
      // can be recursive due to a cycle in its type arguments.
      return this->raw();
    }
    set_arguments(type_args);
    ASSERT(type_args.IsNull() || type_args.IsOld());

    // In case of a function type, the signature has already been canonicalized
    // when finalizing the type and passing kCanonicalize as finalization.
    // Therefore, we do not canonicalize the signature here, which would have no
    // effect on selecting the canonical type anyway, because the function
    // object is not replaced when canonicalizing the signature.

    // Check to see if the type got added to canonical list as part of the
    // type arguments canonicalization.
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeSet table(zone, object_store->canonical_types());
    type ^= table.GetOrNull(CanonicalTypeKey(*this));
    if (type.IsNull()) {
      // Add this Type into the canonical list of types.
      if (this->IsNew()) {
        type ^= Object::Clone(*this, Heap::kOld);
      } else {
        type = this->raw();
      }
      ASSERT(type.IsOld());
      type.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(type);
      ASSERT(!present);
    }
    object_store->set_canonical_types(table.Release());
  }
  return type.raw();
}
20256
#if defined(DEBUG)
// Debug-only check that this type is its own canonical representative,
// repeating the lookups performed by Type::Canonicalize(). Recursive types
// are accepted without checking.
bool Type::CheckIsCanonical(Thread* thread) const {
  if (IsRecursive()) {
    return true;
  }
  const classid_t cid = type_class_id();
  if (cid == kDynamicCid) {
    return (raw() == Object::dynamic_type().raw());
  }
  if (cid == kVoidCid) {
    return (raw() == Object::void_type().raw());
  }
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  AbstractType& type = Type::Handle(zone);
  const Class& cls = Class::Handle(zone, type_class());

  // Fast canonical lookup/registry for simple types.
  if (IsDeclarationTypeOf(cls)) {
    ASSERT(!IsFunctionType());
    type = cls.declaration_type();
    ASSERT(type.IsCanonical());
    return (raw() == type.raw());
  }

  // Otherwise, the canonical instance must be the one in the isolate's
  // canonical type table.
  ObjectStore* object_store = isolate->object_store();
  {
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeSet table(zone, object_store->canonical_types());
    type ^= table.GetOrNull(CanonicalTypeKey(*this));
    object_store->set_canonical_types(table.Release());
  }
  return (raw() == type.raw());
}
#endif  // DEBUG
20292
// Recursively collects, via AddURI(), the user-visible class name and library
// URI of this type and of the types it embeds (function parameter/result
// types, or class type arguments). dynamic, void and Never contribute
// nothing.
void Type::EnumerateURIs(URIs* uris) const {
  if (IsDynamicType() || IsVoidType() || IsNeverType()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  if (IsFunctionType()) {
    // The scope class and type arguments do not appear explicitly in the user
    // visible name. The type arguments were used to instantiate the function
    // type prior to this call.
    const Function& sig_fun = Function::Handle(zone, signature());
    AbstractType& type = AbstractType::Handle(zone);
    const intptr_t num_params = sig_fun.NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      type = sig_fun.ParameterTypeAt(i);
      type.EnumerateURIs(uris);
    }
    // Handle result type last, since it appears last in the user visible name.
    type = sig_fun.result_type();
    type.EnumerateURIs(uris);
  } else {
    const Class& cls = Class::Handle(zone, type_class());
    const String& name = String::Handle(zone, cls.UserVisibleName());
    const Library& library = Library::Handle(zone, cls.library());
    const String& uri = String::Handle(zone, library.url());
    AddURI(uris, name, uri);
    const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
    type_args.EnumerateURIs(uris);
  }
}
20323
// Computes, caches (via SetHash) and returns the hash of this finalized type.
// The hash must be consistent with type equality as defined in Dart code:
// types that compare equal must hash equal.
intptr_t Type::ComputeHash() const {
  ASSERT(IsFinalized());
  uint32_t result = 0;
  result = CombineHashes(result, type_class_id());
  // A legacy type should have the same hash as its non-nullable version to be
  // consistent with the definition of type equality in Dart code.
  Nullability type_nullability = nullability();
  if (type_nullability == Nullability::kLegacy) {
    type_nullability = Nullability::kNonNullable;
  }
  result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
  // A null type argument vector hashes like an all-dynamic vector.
  uint32_t type_args_hash = TypeArguments::kAllDynamicHash;
  if (arguments() != TypeArguments::null()) {
    // Only include hashes of type arguments corresponding to type parameters.
    // This prevents obtaining different hashes depending on the location of
    // TypeRefs in the super class type argument vector.
    const TypeArguments& type_args = TypeArguments::Handle(arguments());
    const Class& cls = Class::Handle(type_class());
    const intptr_t num_type_params = cls.NumTypeParameters();
    if (num_type_params > 0) {
      const intptr_t from_index = cls.NumTypeArguments() - num_type_params;
      type_args_hash = type_args.HashForRange(from_index, num_type_params);
    }
  }
  result = CombineHashes(result, type_args_hash);
  if (IsFunctionType()) {
    // For function types, also mix in the signature: type parameter bounds,
    // result type, parameter types, and the names of optional named
    // parameters.
    AbstractType& type = AbstractType::Handle();
    const Function& sig_fun = Function::Handle(signature());
    const intptr_t num_type_params = sig_fun.NumTypeParameters();
    if (num_type_params > 0) {
      const TypeArguments& type_params =
          TypeArguments::Handle(sig_fun.type_parameters());
      for (intptr_t i = 0; i < num_type_params; i++) {
        type = type_params.TypeAt(i);
        type = TypeParameter::Cast(type).bound();
        result = CombineHashes(result, type.Hash());
      }
    }
    type = sig_fun.result_type();
    result = CombineHashes(result, type.Hash());
    result = CombineHashes(result, sig_fun.NumOptionalPositionalParameters());
    const intptr_t num_params = sig_fun.NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      type = sig_fun.ParameterTypeAt(i);
      result = CombineHashes(result, type.Hash());
    }
    if (sig_fun.NumOptionalNamedParameters() > 0) {
      String& param_name = String::Handle();
      for (intptr_t i = sig_fun.num_fixed_parameters(); i < num_params; i++) {
        param_name = sig_fun.ParameterNameAt(i);
        result = CombineHashes(result, param_name.Hash());
      }
      // Required flag is not hashed, see comment above.
    }
  }
  result = FinalizeHash(result, kHashBits);
  SetHash(result);
  return result;
}
20383
// Stores the class of this type as a Smi-encoded class id.
void Type::set_type_class(const Class& value) const {
  ASSERT(!value.IsNull());
  StorePointer(&raw_ptr()->type_class_id_, Smi::New(value.id()));
}
20388
// Stores the type argument vector. A canonical type is immutable, hence the
// assert.
void Type::set_arguments(const TypeArguments& value) const {
  ASSERT(!IsCanonical());
  StorePointer(&raw_ptr()->arguments_, value.raw());
}
20393
// Allocates an uninitialized Type in the given heap space.
TypePtr Type::New(Heap::Space space) {
  ObjectPtr raw = Object::Allocate(Type::kClassId, Type::InstanceSize(), space);
  return static_cast<TypePtr>(raw);
}
20398
20399TypePtr Type::New(const Class& clazz,
20400 const TypeArguments& arguments,
20401 TokenPosition token_pos,
20402 Nullability nullability,
20403 Heap::Space space) {
20404 Zone* Z = Thread::Current()->zone();
20405 const Type& result = Type::Handle(Z, Type::New(space));
20406 result.set_type_class(clazz);
20407 result.set_arguments(arguments);
20408 result.SetHash(0);
20409 result.set_token_pos(token_pos);
20410 result.StoreNonPointer(&result.raw_ptr()->type_state_,
20411 TypeLayout::kAllocated);
20412 result.set_nullability(nullability);
20413
20414 result.SetTypeTestingStub(
20415 Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
20416 return result.raw();
20417}
20418
// Stores the source position of this type; classifying (synthetic) positions
// are not allowed here.
void Type::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
20423
// Stores the finalization state, which must be within the valid range
// [kAllocated, kFinalizedUninstantiated].
void Type::set_type_state(int8_t state) const {
  ASSERT((state >= TypeLayout::kAllocated) &&
         (state <= TypeLayout::kFinalizedUninstantiated));
  StoreNonPointer(&raw_ptr()->type_state_, state);
}
20429
// Returns a zone-allocated debug string for this type, including its type
// arguments, nullability suffix and, for finalized recursive types, its hash.
const char* Type::ToCString() const {
  if (IsNull()) {
    return "Type: null";
  }
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer args(zone);
  const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
  const char* args_cstr = "";
  if (!type_args.IsNull()) {
    type_args.PrintSubvectorName(0, type_args.Length(), kInternalName, &args,
                                 NameDisambiguation::kYes);
    args_cstr = args.buffer();
  }
  const Class& cls = Class::Handle(zone, type_class());
  const char* class_name;
  const String& name = String::Handle(zone, cls.Name());
  class_name = name.IsNull() ? "<null>" : name.ToCString();
  const char* suffix = NullabilitySuffix(kInternalName);
  if (IsFunctionType()) {
    const Function& sig_fun = Function::Handle(zone, signature());
    ZoneTextBuffer sig(zone);
    // A non-empty nullability suffix requires parenthesizing the signature.
    if (suffix[0] != '\0') {
      sig.AddString("(");
    }
    sig_fun.PrintSignature(kInternalName, &sig);
    if (suffix[0] != '\0') {
      sig.AddString(")");
      sig.AddString(suffix);
    }
    if (cls.IsClosureClass()) {
      ASSERT(type_args.IsNull());
      return OS::SCreate(zone, "Function Type: %s", sig.buffer());
    }
    return OS::SCreate(zone, "Function Type: %s (%s%s%s)", sig.buffer(),
                       class_name, args_cstr, suffix);
  }
  if (IsFinalized() && IsRecursive()) {
    // Include the hash so distinct recursive types are distinguishable.
    const intptr_t hash = Hash();
    return OS::SCreate(zone, "Type: (H%" Px ") %s%s%s", hash, class_name,
                       args_cstr, suffix);
  } else {
    return OS::SCreate(zone, "Type: %s%s%s", class_name, args_cstr, suffix);
  }
}
20474
20475bool TypeRef::IsInstantiated(Genericity genericity,
20476 intptr_t num_free_fun_type_params,
20477 TrailPtr trail) const {
20478 if (TestAndAddToTrail(&trail)) {
20479 return true;
20480 }
20481 const AbstractType& ref_type = AbstractType::Handle(type());
20482 return !ref_type.IsNull() &&
20483 ref_type.IsInstantiated(genericity, num_free_fun_type_params, trail);
20484}
20485
// A TypeRef is equivalent to another type iff its referenced type is.
// The buddy trail breaks cycles between mutually recursive types.
bool TypeRef::IsEquivalent(const Instance& other,
                           TypeEquality kind,
                           TrailPtr trail) const {
  if (raw() == other.raw()) {
    return true;
  }
  if (!other.IsAbstractType()) {
    return false;
  }
  if (TestAndAddBuddyToTrail(&trail, AbstractType::Cast(other))) {
    // Already being compared against this buddy: assume equivalent to break
    // the cycle; the outer comparison decides the final answer.
    return true;
  }
  const AbstractType& ref_type = AbstractType::Handle(type());
  return !ref_type.IsNull() && ref_type.IsEquivalent(other, kind, trail);
}
20501
// Instantiates the referenced type, preserving the recursive structure:
// a fresh TypeRef is registered on the trail BEFORE recursing so that nested
// occurrences of this TypeRef map to the same instantiated TypeRef.
AbstractTypePtr TypeRef::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    TrailPtr trail) const {
  TypeRef& instantiated_type_ref = TypeRef::Handle();
  instantiated_type_ref ^= OnlyBuddyInTrail(trail);
  if (!instantiated_type_ref.IsNull()) {
    // Already instantiated higher up the recursion; reuse it.
    return instantiated_type_ref.raw();
  }
  instantiated_type_ref = TypeRef::New();
  AddOnlyBuddyToTrail(&trail, instantiated_type_ref);

  AbstractType& ref_type = AbstractType::Handle(type());
  ASSERT(!ref_type.IsNull() && !ref_type.IsTypeRef());
  AbstractType& instantiated_ref_type = AbstractType::Handle();
  instantiated_ref_type = ref_type.InstantiateFrom(
      instantiator_type_arguments, function_type_arguments,
      num_free_fun_type_params, space, trail);
  // A returned null type indicates a failed instantiation in dead code that
  // must be propagated up to the caller, the optimizing compiler.
  if (instantiated_ref_type.IsNull()) {
    return TypeRef::null();
  }
  ASSERT(!instantiated_ref_type.IsTypeRef());
  instantiated_type_ref.set_type(instantiated_ref_type);

  instantiated_type_ref.SetTypeTestingStub(Code::Handle(
      TypeTestingStubGenerator::DefaultCodeForType(instantiated_type_ref)));
  return instantiated_type_ref.raw();
}
20534
// Stores the referenced type. A TypeRef may not refer to another TypeRef.
void TypeRef::set_type(const AbstractType& value) const {
  ASSERT(value.IsNull() || value.IsFunctionType() || value.HasTypeClass());
  ASSERT(!value.IsTypeRef());
  StorePointer(&raw_ptr()->type_, value.raw());
}
20540
// A TypeRef cannot be canonical by definition. Only its referenced type can be.
// Consider the type Derived, where class Derived extends Base<Derived>.
// The first type argument of its flattened type argument vector is Derived,
// represented by a TypeRef pointing to itself.
AbstractTypePtr TypeRef::Canonicalize(TrailPtr trail) const {
  if (TestAndAddToTrail(&trail)) {
    // Already visited on this trail; stop the recursion.
    return raw();
  }
  // TODO(regis): Try to reduce the number of nodes required to represent the
  // referenced recursive type.
  AbstractType& ref_type = AbstractType::Handle(type());
  ASSERT(!ref_type.IsNull());
  ref_type = ref_type.Canonicalize(trail);
  set_type(ref_type);
  return raw();
}
20557
20558#if defined(DEBUG)
// DEBUG-only: a TypeRef is considered canonical iff its referenced type is.
bool TypeRef::CheckIsCanonical(Thread* thread) const {
  AbstractType& ref_type = AbstractType::Handle(type());
  ASSERT(!ref_type.IsNull());
  return ref_type.CheckIsCanonical(thread);
}
20564#endif // DEBUG
20565
// Records the library URI of the referenced type's class into |uris|.
// Type arguments are deliberately not visited, breaking the reference cycle.
void TypeRef::EnumerateURIs(URIs* uris) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const AbstractType& ref_type = AbstractType::Handle(zone, type());
  ASSERT(!ref_type.IsDynamicType() && !ref_type.IsVoidType() &&
         !ref_type.IsNeverType());
  const Class& cls = Class::Handle(zone, ref_type.type_class());
  const String& name = String::Handle(zone, cls.UserVisibleName());
  const Library& library = Library::Handle(zone, cls.library());
  const String& uri = String::Handle(zone, library.url());
  AddURI(uris, name, uri);
  // Break cycle by not printing type arguments.
}
20579
20580intptr_t TypeRef::Hash() const {
20581 // Do not use hash of the referenced type because
20582 // - we could be in process of calculating it (as TypeRef is used to
20583 // represent recursive references to types).
20584 // - referenced type might be incomplete (e.g. not all its
20585 // type arguments are set).
20586 const AbstractType& ref_type = AbstractType::Handle(type());
20587 ASSERT(!ref_type.IsNull());
20588 uint32_t result = Class::Handle(ref_type.type_class()).id();
20589 result = CombineHashes(result, static_cast<uint32_t>(ref_type.nullability()));
20590 return FinalizeHash(result, kHashBits);
20591}
20592
// Allocates an uninitialized TypeRef in old space.
TypeRefPtr TypeRef::New() {
  ObjectPtr raw =
      Object::Allocate(TypeRef::kClassId, TypeRef::InstanceSize(), Heap::kOld);
  return static_cast<TypeRefPtr>(raw);
}
20598
// Allocates a TypeRef referring to |type| and attaches a default type
// testing stub.
TypeRefPtr TypeRef::New(const AbstractType& type) {
  Zone* Z = Thread::Current()->zone();
  const TypeRef& result = TypeRef::Handle(Z, TypeRef::New());
  result.set_type(type);

  result.SetTypeTestingStub(
      Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
  return result.raw();
}
20608
// Returns a zone-allocated debug string for this TypeRef, including the
// referenced type's hash if it is finalized.
const char* TypeRef::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  AbstractType& ref_type = AbstractType::Handle(zone, type());
  if (ref_type.IsNull()) {
    return "TypeRef: null";
  }
  ZoneTextBuffer printer(zone);
  printer.AddString("TypeRef: ");
  ref_type.PrintName(kInternalName, &printer);
  if (ref_type.IsFinalized()) {
    const intptr_t hash = ref_type.Hash();
    printer.Printf(" (H%" Px ")", hash);
  }
  return printer.buffer();
}
20624
// Marks this type parameter as finalized (one-way transition).
void TypeParameter::SetIsFinalized() const {
  ASSERT(!IsFinalized());
  set_flags(TypeParameterLayout::FinalizedBit::update(true, raw_ptr()->flags_));
}
20629
// Sets or clears the generic-covariant-impl bit in the flags field.
void TypeParameter::SetGenericCovariantImpl(bool value) const {
  set_flags(TypeParameterLayout::GenericCovariantImplBit::update(
      value, raw_ptr()->flags_));
}
20634
// Sets or clears the declaration bit (true for the type parameter as declared
// on its class/function, false for derived copies such as nullability clones).
void TypeParameter::SetDeclaration(bool value) const {
  set_flags(
      TypeParameterLayout::DeclarationBit::update(value, raw_ptr()->flags_));
}
20639
// Stores the nullability as its int8_t representation.
void TypeParameter::set_nullability(Nullability value) const {
  StoreNonPointer(&raw_ptr()->nullability_, static_cast<int8_t>(value));
}
20643
// Returns a version of this type parameter with the given nullability,
// cloning (and, if this one is canonical and finalized, re-canonicalizing)
// only when the nullability actually differs.
TypeParameterPtr TypeParameter::ToNullability(Nullability value,
                                              Heap::Space space) const {
  if (nullability() == value) {
    return raw();
  }
  // Clone type parameter and set new nullability.
  TypeParameter& type_parameter = TypeParameter::Handle();
  type_parameter ^= Object::Clone(*this, space);
  type_parameter.set_nullability(value);
  // The clone is no longer the declaration instance and its cached hash,
  // which covers nullability, is stale.
  type_parameter.SetDeclaration(false);
  type_parameter.SetHash(0);
  type_parameter.SetTypeTestingStub(Code::Handle(
      TypeTestingStubGenerator::DefaultCodeForType(type_parameter)));
  if (IsCanonical()) {
    // Object::Clone does not clone canonical bit.
    ASSERT(!type_parameter.IsCanonical());
    if (IsFinalized()) {
      type_parameter ^= type_parameter.Canonicalize();
    }
  }
  return type_parameter.raw();
}
20666
// Returns true if this type parameter counts as instantiated for the given
// genericity and the given number of free function type parameters.
bool TypeParameter::IsInstantiated(Genericity genericity,
                                   intptr_t num_free_fun_type_params,
                                   TrailPtr trail) const {
  // Bounds of class type parameters are ignored in the VM.
  if (IsClassTypeParameter()) {
    return genericity == kFunctions;
  }
  ASSERT(IsFunctionTypeParameter());
  ASSERT(IsFinalized());
  // A function type parameter whose index falls among the free function type
  // parameters is itself uninstantiated.
  if ((genericity != kCurrentClass) && (index() < num_free_fun_type_params)) {
    return false;
  }
  // Although the type parameter is instantiated, its bound may not be.
  const AbstractType& upper_bound = AbstractType::Handle(bound());
  if (upper_bound.IsTypeParameter() ||
      upper_bound.arguments() != TypeArguments::null()) {
    // Use trail to break cycles created by bound referring to type parameter.
    if (!TestAndAddToTrail(&trail) &&
        !upper_bound.IsInstantiated(genericity, num_free_fun_type_params,
                                    trail)) {
      return false;
    }
  }
  return true;
}
20692
// Compares this type parameter against |other| under the given equality kind
// (subtype test, syntactical, or canonical), using |trail| to break cycles.
bool TypeParameter::IsEquivalent(const Instance& other,
                                 TypeEquality kind,
                                 TrailPtr trail) const {
  if (raw() == other.raw()) {
    return true;
  }
  if (other.IsTypeRef()) {
    // Unfold right hand type. Divergence is controlled by left hand type.
    const AbstractType& other_ref_type =
        AbstractType::Handle(TypeRef::Cast(other).type());
    ASSERT(!other_ref_type.IsTypeRef());
    return IsEquivalent(other_ref_type, kind, trail);
  }
  if (!other.IsTypeParameter()) {
    return false;
  }
  const TypeParameter& other_type_param = TypeParameter::Cast(other);
  // Class type parameters must parameterize the same class to be equivalent.
  // Note that this check will also reject a class type parameter being compared
  // to a function type parameter.
  if (parameterized_class_id() != other_type_param.parameterized_class_id()) {
    return false;
  }
  // The function does not matter in type tests or when comparing types with
  // syntactical equality, but it does matter in canonicalization.
  if (kind == TypeEquality::kCanonical &&
      parameterized_function() != other_type_param.parameterized_function()) {
    return false;
  }
  Nullability this_type_param_nullability = nullability();
  Nullability other_type_param_nullability = other_type_param.nullability();
  if (kind == TypeEquality::kInSubtypeTest) {
    // In a subtype test with null safety, a nullable type parameter cannot
    // stand in for a non-nullable one.
    if (Isolate::Current()->null_safety() &&
        (this_type_param_nullability == Nullability::kNullable) &&
        (other_type_param_nullability == Nullability::kNonNullable)) {
      return false;
    }
  } else {
    if (kind == TypeEquality::kSyntactical) {
      // Syntactically, legacy and non-nullable are interchangeable.
      if (this_type_param_nullability == Nullability::kLegacy) {
        this_type_param_nullability = Nullability::kNonNullable;
      }
      if (other_type_param_nullability == Nullability::kLegacy) {
        other_type_param_nullability = Nullability::kNonNullable;
      }
    } else {
      ASSERT(kind == TypeEquality::kCanonical);
    }
    if (this_type_param_nullability != other_type_param_nullability) {
      return false;
    }
  }
  if (kind == TypeEquality::kInSubtypeTest) {
    if (IsFunctionTypeParameter() && IsFinalized() &&
        other_type_param.IsFinalized()) {
      ASSERT(other_type_param.IsFunctionTypeParameter());  // Checked above.
      // To be equivalent, the function type parameters should be declared
      // at the same position in the generic function. Their index therefore
      // needs adjustment before comparison.
      // Example: 'foo<F>(bar<B>(B b)) { }' and 'baz<Z>(Z z) { }', baz can
      // be assigned to bar, although B has index 1 and Z index 0.
      const Function& sig_fun = Function::Handle(parameterized_function());
      const Function& other_sig_fun =
          Function::Handle(other_type_param.parameterized_function());
      const int offset = sig_fun.NumParentTypeParameters();
      const int other_offset = other_sig_fun.NumParentTypeParameters();
      return index() - offset == other_type_param.index() - other_offset;
    } else if (IsFinalized() == other_type_param.IsFinalized()) {
      return index() == other_type_param.index();
    }
    return false;
  }
  if (IsFinalized() == other_type_param.IsFinalized()) {
    return index() == other_type_param.index();
  }
  // One finalized, one not: fall back to comparing names.
  return name() == other_type_param.name();
}
20770
// Stores the id of the class that declares this type parameter. A null class
// stores kFunctionCid, the sentinel denoting a function type parameter.
void TypeParameter::set_parameterized_class(const Class& value) const {
  // Set value may be null.
  classid_t cid = kFunctionCid;  // Denotes a function type parameter.
  if (!value.IsNull()) {
    cid = value.id();
  }
  StoreNonPointer(&raw_ptr()->parameterized_class_id_, cid);
}
20779
// Returns the declaring class id, or kFunctionCid for a function type
// parameter (see set_parameterized_class).
classid_t TypeParameter::parameterized_class_id() const {
  return raw_ptr()->parameterized_class_id_;
}
20783
20784ClassPtr TypeParameter::parameterized_class() const {
20785 classid_t cid = parameterized_class_id();
20786 if (cid == kFunctionCid) {
20787 return Class::null();
20788 }
20789 return Isolate::Current()->class_table()->At(cid);
20790}
20791
// Stores the function that declares this type parameter (may be null for a
// class type parameter).
void TypeParameter::set_parameterized_function(const Function& value) const {
  StorePointer(&raw_ptr()->parameterized_function_, value.raw());
}
20795
// Stores the declaration index; must be non-negative and fit in 16 bits.
void TypeParameter::set_index(intptr_t value) const {
  ASSERT(value >= 0);
  ASSERT(Utils::IsInt(16, value));
  StoreNonPointer(&raw_ptr()->index_, value);
}
20801
// Stores the name, which must already be a canonical symbol.
void TypeParameter::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  StorePointer(&raw_ptr()->name_, value.raw());
}
20806
// Stores the upper bound of this type parameter.
void TypeParameter::set_bound(const AbstractType& value) const {
  StorePointer(&raw_ptr()->bound_, value.raw());
}
20810
20811AbstractTypePtr TypeParameter::GetFromTypeArguments(
20812 const TypeArguments& instantiator_type_arguments,
20813 const TypeArguments& function_type_arguments) const {
20814 ASSERT(IsFinalized());
20815 const TypeArguments& type_args = IsFunctionTypeParameter()
20816 ? function_type_arguments
20817 : instantiator_type_arguments;
20818 return type_args.TypeAtNullSafe(index());
20819}
20820
// Substitutes this type parameter using the given instantiator/function type
// argument vectors. May return null to signal a failed instantiation in
// dynamically dead code (see comment below).
AbstractTypePtr TypeParameter::InstantiateFrom(
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    intptr_t num_free_fun_type_params,
    Heap::Space space,
    TrailPtr trail) const {
  ASSERT(IsFinalized());
  AbstractType& result = AbstractType::Handle();
  if (IsFunctionTypeParameter()) {
    if (index() >= num_free_fun_type_params) {
      // Do not instantiate the function type parameter, but possibly its bound.
      result = raw();
      AbstractType& upper_bound = AbstractType::Handle(bound());
      if (!upper_bound.IsInstantiated(kAny, num_free_fun_type_params,
                                      nullptr)) {
        // Use trail to break cycles created by bound referring to type param.
        if (OnlyBuddyInTrail(trail) == Object::null()) {
          AddOnlyBuddyToTrail(&trail, *this);
          upper_bound = upper_bound.InstantiateFrom(
              instantiator_type_arguments, function_type_arguments,
              num_free_fun_type_params, space, trail);
          if (upper_bound.raw() == Type::NeverType()) {
            // Normalize 'X extends Never' to 'Never'.
            result = Type::NeverType();
          } else if (upper_bound.raw() != bound()) {
            // The bound changed: clone this type parameter and attach the
            // instantiated bound to the clone.
            result ^= Object::Clone(result, space);
            TypeParameter::Cast(result).set_bound(upper_bound);
          }
        }
      }
    } else if (function_type_arguments.IsNull()) {
      // A null function type argument vector instantiates to dynamic.
      return Type::DynamicType();
    } else {
      result = function_type_arguments.TypeAt(index());
      ASSERT(!result.IsTypeParameter());
    }
  } else {
    ASSERT(IsClassTypeParameter());
    if (instantiator_type_arguments.IsNull()) {
      return Type::DynamicType();
    }
    if (instantiator_type_arguments.Length() <= index()) {
      // InstantiateFrom can be invoked from a compilation pipeline with
      // mismatching type arguments vector. This can only happen for
      // a dynamically unreachable code - which compiler can't remove
      // statically for some reason.
      // To prevent crashes we return AbstractType::null(), understood by caller
      // (see AssertAssignableInstr::Canonicalize).
      return AbstractType::null();
    }
    result = instantiator_type_arguments.TypeAt(index());
    // Instantiating a class type parameter cannot result in a
    // function type parameter.
    // Bounds of class type parameters are ignored in the VM.
  }
  result = result.SetInstantiatedNullability(*this, space);
  // Canonicalization is not part of instantiation.
  return result.NormalizeFutureOrType(space);
}
20880
// Returns the canonical version of this type parameter: either the declared
// type parameter of its class/function (when nullabilities match) or an entry
// of the isolate-wide canonical type parameter table.
AbstractTypePtr TypeParameter::Canonicalize(TrailPtr trail) const {
  ASSERT(IsFinalized());
  if (IsCanonical()) {
    return this->raw();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

  // Locate the declared type parameter at the same index in the declaring
  // class or function.
  const Class& cls = Class::Handle(zone, parameterized_class());
  const Function& function = Function::Handle(
      zone, cls.IsNull() ? parameterized_function() : Function::null());
  const TypeArguments& type_params = TypeArguments::Handle(
      zone, cls.IsNull() ? function.type_parameters() : cls.type_parameters());
  // index() is absolute; subtract the parent/superclass contribution to get
  // the position within the declaring entity's own type parameter vector.
  const intptr_t offset =
      cls.IsNull() ? function.NumParentTypeParameters()
                   : (cls.NumTypeArguments() - cls.NumTypeParameters());
  TypeParameter& type_parameter = TypeParameter::Handle(zone);
  type_parameter ^= type_params.TypeAt(index() - offset);
  ASSERT(!type_parameter.IsNull());
  if (type_parameter.nullability() == nullability()) {
    // The declaration itself is the canonical representative.
    ASSERT(this->Equals(type_parameter));
    ASSERT(type_parameter.IsCanonical());
    ASSERT(type_parameter.IsDeclaration());
    ASSERT(type_parameter.IsOld());
    return type_parameter.raw();
  }

  // Different nullability: canonicalize via the isolate-wide table.
  ObjectStore* object_store = isolate->object_store();
  {
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeParameterSet table(zone,
                                    object_store->canonical_type_parameters());
    type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
    if (type_parameter.IsNull()) {
      // The type parameter was not found in the table. It is not canonical yet.
      // Add this type parameter into the canonical list of type parameters.
      if (this->IsNew()) {
        type_parameter ^= Object::Clone(*this, Heap::kOld);
      } else {
        type_parameter = this->raw();
      }
      ASSERT(type_parameter.IsOld());
      type_parameter.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(type_parameter);
      ASSERT(!present);
    }
    object_store->set_canonical_type_parameters(table.Release());
  }
  return type_parameter.raw();
}
20932
20933#if defined(DEBUG)
// DEBUG-only: verifies this type parameter is the canonical representative,
// mirroring the lookup logic of Canonicalize above.
bool TypeParameter::CheckIsCanonical(Thread* thread) const {
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

  const Class& cls = Class::Handle(zone, parameterized_class());
  const Function& function = Function::Handle(
      zone, cls.IsNull() ? parameterized_function() : Function::null());
  const TypeArguments& type_params = TypeArguments::Handle(
      zone, cls.IsNull() ? function.type_parameters() : cls.type_parameters());
  const intptr_t offset =
      cls.IsNull() ? function.NumParentTypeParameters()
                   : (cls.NumTypeArguments() - cls.NumTypeParameters());
  TypeParameter& type_parameter = TypeParameter::Handle(zone);
  type_parameter ^= type_params.TypeAt(index() - offset);
  ASSERT(!type_parameter.IsNull());
  if (type_parameter.nullability() == nullability()) {
    // Must be the declared (and canonical) type parameter itself.
    ASSERT(type_parameter.IsCanonical());
    return (raw() == type_parameter.raw());
  }

  // Otherwise it must be the entry in the canonical table.
  ObjectStore* object_store = isolate->object_store();
  {
    SafepointMutexLocker ml(isolate->group()->type_canonicalization_mutex());
    CanonicalTypeParameterSet table(zone,
                                    object_store->canonical_type_parameters());
    type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
    object_store->set_canonical_type_parameters(table.Release());
  }
  return (raw() == type_parameter.raw());
}
20964#endif // DEBUG
20965
// Records into |uris| the library URI of the class that (directly or via the
// declaring function's owner) declares this type parameter, keyed by a name
// of the form "T of C" (or "T of f of C").
void TypeParameter::EnumerateURIs(URIs* uris) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  GrowableHandlePtrArray<const String> pieces(zone, 4);
  pieces.Add(String::Handle(zone, name()));
  Class& cls = Class::Handle(zone, parameterized_class());
  if (cls.IsNull()) {
    // Function type parameter: name it via its declaring function and fall
    // back to that function's owning class for the URI.
    const Function& fun = Function::Handle(zone, parameterized_function());
    pieces.Add(Symbols::SpaceOfSpace());
    pieces.Add(String::Handle(zone, fun.UserVisibleName()));
    cls = fun.Owner();  // May be null.
    // TODO(regis): Should we keep the function owner for better error messages?
  }
  if (!cls.IsNull()) {
    pieces.Add(Symbols::SpaceOfSpace());
    pieces.Add(String::Handle(zone, cls.UserVisibleName()));
    const String& name =
        String::Handle(zone, Symbols::FromConcatAll(thread, pieces));
    const Library& library = Library::Handle(zone, cls.library());
    const String& uri = String::Handle(zone, library.url());
    AddURI(uris, name, uri);
  }
}
20989
// Computes, caches (via SetHash) and returns the hash of this finalized type
// parameter, derived from its declarer, index and nullability.
intptr_t TypeParameter::ComputeHash() const {
  ASSERT(IsFinalized());
  uint32_t result;
  if (IsClassTypeParameter()) {
    result = parameterized_class_id();
  } else {
    result = Function::Handle(parameterized_function()).Hash();
  }
  // No need to include the hash of the bound, since the type parameter is fully
  // identified by its class and index.
  result = CombineHashes(result, index());
  // A legacy type should have the same hash as its non-nullable version to be
  // consistent with the definition of type equality in Dart code.
  Nullability type_param_nullability = nullability();
  if (type_param_nullability == Nullability::kLegacy) {
    type_param_nullability = Nullability::kNonNullable;
  }
  result = CombineHashes(result, static_cast<uint32_t>(type_param_nullability));
  result = FinalizeHash(result, kHashBits);
  SetHash(result);
  return result;
}
21012
// Allocates an uninitialized TypeParameter in old space.
TypeParameterPtr TypeParameter::New() {
  ObjectPtr raw = Object::Allocate(TypeParameter::kClassId,
                                   TypeParameter::InstanceSize(), Heap::kOld);
  return static_cast<TypeParameterPtr>(raw);
}
21018
// Allocates and initializes a TypeParameter declared by exactly one of
// |parameterized_class| or |parameterized_function| (never both). The hash is
// reset to 0 and a default type testing stub is attached.
TypeParameterPtr TypeParameter::New(const Class& parameterized_class,
                                    const Function& parameterized_function,
                                    intptr_t index,
                                    const String& name,
                                    const AbstractType& bound,
                                    bool is_generic_covariant_impl,
                                    Nullability nullability,
                                    TokenPosition token_pos) {
  ASSERT(parameterized_class.IsNull() != parameterized_function.IsNull());
  Zone* Z = Thread::Current()->zone();
  const TypeParameter& result = TypeParameter::Handle(Z, TypeParameter::New());
  result.set_parameterized_class(parameterized_class);
  result.set_parameterized_function(parameterized_function);
  result.set_index(index);
  result.set_name(name);
  result.set_bound(bound);
  result.set_flags(0);
  result.set_nullability(nullability);
  result.SetGenericCovariantImpl(is_generic_covariant_impl);
  result.SetDeclaration(false);
  result.SetHash(0);
  result.set_token_pos(token_pos);

  result.SetTypeTestingStub(
      Code::Handle(Z, TypeTestingStubGenerator::DefaultCodeForType(result)));
  return result.raw();
}
21046
// Stores the source position of this type parameter; classifying (synthetic)
// positions are not allowed here.
void TypeParameter::set_token_pos(TokenPosition token_pos) const {
  ASSERT(!token_pos.IsClassifying());
  StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
}
21051
// Stores the raw flags byte (finalized / generic-covariant-impl / declaration
// bits).
void TypeParameter::set_flags(uint8_t flags) const {
  StoreNonPointer(&raw_ptr()->flags_, flags);
}
21055
// Returns a zone-allocated debug string showing name, nullability, index,
// declarer (function or class) and bound of this type parameter.
const char* TypeParameter::ToCString() const {
  Thread* thread = Thread::Current();
  ZoneTextBuffer printer(thread->zone());
  auto& name_str = String::Handle(thread->zone(), name());
  printer.Printf("TypeParameter: name ");
  printer.AddString(name_str.ToCString());
  printer.AddString(NullabilitySuffix(kInternalName));
  printer.Printf("; index: %" Pd ";", index());
  if (IsFunctionTypeParameter()) {
    const Function& function = Function::Handle(parameterized_function());
    printer.Printf(" function: ");
    name_str = function.name();
    printer.AddString(name_str.ToCString());
  } else {
    const Class& cls = Class::Handle(parameterized_class());
    printer.Printf(" class: ");
    name_str = cls.Name();
    printer.AddString(name_str.ToCString());
  }
  printer.Printf("; bound: ");
  const AbstractType& upper_bound = AbstractType::Handle(bound());
  if (upper_bound.IsNull()) {
    printer.AddString("<null>");
  } else {
    upper_bound.PrintName(kInternalName, &printer);
  }
  return printer.buffer();
}
21084
// Returns the canonical instance equal to this number. Smis are immediate
// values and thus canonical by construction; Mints and Doubles go through
// their per-class canonical caches. |thread| and |error_str| are unused here:
// canonicalizing a number cannot fail.
InstancePtr Number::CheckAndCanonicalize(Thread* thread,
                                         const char** error_str) const {
  intptr_t cid = GetClassId();
  switch (cid) {
    case kSmiCid:
      // The raw value IS the Smi pointer representation.
      return static_cast<SmiPtr>(raw_value());
    case kMintCid:
      return Mint::NewCanonical(Mint::Cast(*this).value());
    case kDoubleCid:
      return Double::NewCanonical(Double::Cast(*this).value());
    default:
      UNREACHABLE();
  }
  return Instance::null();
}
21100
21101#if defined(DEBUG)
// DEBUG-only: verifies this number is the canonical instance for its value,
// by looking it up in the owning class's canonical constant cache.
bool Number::CheckIsCanonical(Thread* thread) const {
  intptr_t cid = GetClassId();
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, this->clazz());
  switch (cid) {
    case kSmiCid:
      // Smis are immediate values, always canonical.
      return true;
    case kMintCid: {
      Mint& result = Mint::Handle(zone);
      result ^= cls.LookupCanonicalMint(zone, Mint::Cast(*this).value());
      return (result.raw() == this->raw());
    }
    case kDoubleCid: {
      Double& dbl = Double::Handle(zone);
      dbl ^= cls.LookupCanonicalDouble(zone, Double::Cast(*this).value());
      return (dbl.raw() == this->raw());
    }
    default:
      UNREACHABLE();
  }
  return false;
}
21124#endif // DEBUG
21125
// Never called at runtime: Number is abstract with no direct instances.
const char* Number::ToCString() const {
  // Number is an interface. No instances of Number should exist.
  UNREACHABLE();
  return "Number";
}
21131
// Only reachable for the null handle: Integer itself has no direct instances.
const char* Integer::ToCString() const {
  // Integer is an interface. No instances of Integer should exist except null.
  ASSERT(IsNull());
  return "NULL Integer";
}
21137
// Parses a one-byte string as a signed 64-bit integer and allocates it in
// |space|. Returns Integer::null() for a null/empty string or an
// out-of-range value.
IntegerPtr Integer::New(const String& str, Heap::Space space) {
  // We are not supposed to have integers represented as two byte strings.
  ASSERT(str.IsOneByteString());
  if (str.IsNull() || (str.Length() == 0)) {
    return Integer::null();
  }
  int64_t value = 0;
  const char* cstr = str.ToCString();
  if (!OS::StringToInt64(cstr, &value)) {
    // Out of range.
    return Integer::null();
  }
  return Integer::New(value, space);
}
21152
21153IntegerPtr Integer::NewCanonical(const String& str) {
21154 // We are not supposed to have integers represented as two byte strings.
21155 ASSERT(str.IsOneByteString());
21156 int64_t value = 0;
21157 const char* cstr = str.ToCString();
21158 if (!OS::StringToInt64(cstr, &value)) {
21159 // Out of range.
21160 return Integer::null();
21161 }
21162 return NewCanonical(value);
21163}
21164
21165IntegerPtr Integer::NewCanonical(int64_t value) {
21166 if (Smi::IsValid(value)) {
21167 return Smi::New(static_cast<intptr_t>(value));
21168 }
21169 return Mint::NewCanonical(value);
21170}
21171
21172IntegerPtr Integer::New(int64_t value, Heap::Space space) {
21173 const bool is_smi = Smi::IsValid(value);
21174 if (is_smi) {
21175 return Smi::New(static_cast<intptr_t>(value));
21176 }
21177 return Mint::New(value, space);
21178}
21179
21180IntegerPtr Integer::NewFromUint64(uint64_t value, Heap::Space space) {
21181 return Integer::New(static_cast<int64_t>(value), space);
21182}
21183
21184bool Integer::IsValueInRange(uint64_t value) {
21185 return (value <= static_cast<uint64_t>(Mint::kMaxValue));
21186}
21187
21188bool Integer::Equals(const Instance& other) const {
21189 // Integer is an abstract class.
21190 UNREACHABLE();
21191 return false;
21192}
21193
21194bool Integer::IsZero() const {
21195 // Integer is an abstract class.
21196 UNREACHABLE();
21197 return false;
21198}
21199
21200bool Integer::IsNegative() const {
21201 // Integer is an abstract class.
21202 UNREACHABLE();
21203 return false;
21204}
21205
21206double Integer::AsDoubleValue() const {
21207 // Integer is an abstract class.
21208 UNREACHABLE();
21209 return 0.0;
21210}
21211
21212int64_t Integer::AsInt64Value() const {
21213 // Integer is an abstract class.
21214 UNREACHABLE();
21215 return 0;
21216}
21217
21218uint32_t Integer::AsTruncatedUint32Value() const {
21219 // Integer is an abstract class.
21220 UNREACHABLE();
21221 return 0;
21222}
21223
21224bool Integer::FitsIntoSmi() const {
21225 // Integer is an abstract class.
21226 UNREACHABLE();
21227 return false;
21228}
21229
21230int Integer::CompareWith(const Integer& other) const {
21231 // Integer is an abstract class.
21232 UNREACHABLE();
21233 return 0;
21234}
21235
21236IntegerPtr Integer::AsValidInteger() const {
21237 if (IsSmi()) return raw();
21238 if (IsMint()) {
21239 Mint& mint = Mint::Handle();
21240 mint ^= raw();
21241 if (Smi::IsValid(mint.value())) {
21242 return Smi::New(static_cast<intptr_t>(mint.value()));
21243 } else {
21244 return raw();
21245 }
21246 }
21247 return raw();
21248}
21249
21250const char* Integer::ToHexCString(Zone* zone) const {
21251 ASSERT(IsSmi() || IsMint());
21252 int64_t value = AsInt64Value();
21253 if (value < 0) {
21254 return OS::SCreate(zone, "-0x%" PX64, -static_cast<uint64_t>(value));
21255 } else {
21256 return OS::SCreate(zone, "0x%" PX64, static_cast<uint64_t>(value));
21257 }
21258}
21259
// Performs |operation| on this integer and |other| with Dart's fixed-width
// 64-bit wrap-around semantics, allocating any boxed result in |space|.
IntegerPtr Integer::ArithmeticOp(Token::Kind operation,
                                 const Integer& other,
                                 Heap::Space space) const {
  // In 32-bit mode, the result of any operation between two Smis will fit in a
  // 32-bit signed result, except the product of two Smis, which will be 64-bit.
  // In 64-bit mode, the result of any operation between two Smis will fit in a
  // 64-bit signed result, except the product of two Smis (see below).
  if (IsSmi() && other.IsSmi()) {
    const intptr_t left_value = Smi::Value(Smi::RawCast(raw()));
    const intptr_t right_value = Smi::Value(Smi::RawCast(other.raw()));
    switch (operation) {
      case Token::kADD:
        return Integer::New(left_value + right_value, space);
      case Token::kSUB:
        return Integer::New(left_value - right_value, space);
      case Token::kMUL:
        // Multiply in 64 bits with explicit wrap-around: the product is the
        // one Smi x Smi result that can exceed the word size.
        return Integer::New(
            Utils::MulWithWrapAround(static_cast<int64_t>(left_value),
                                     static_cast<int64_t>(right_value)),
            space);
      case Token::kTRUNCDIV:
        return Integer::New(left_value / right_value, space);
      case Token::kMOD: {
        // Adjust the C++ remainder (which takes the dividend's sign) so the
        // result is always non-negative.
        const intptr_t remainder = left_value % right_value;
        if (remainder < 0) {
          if (right_value < 0) {
            return Integer::New(remainder - right_value, space);
          } else {
            return Integer::New(remainder + right_value, space);
          }
        }
        return Integer::New(remainder, space);
      }
      default:
        UNIMPLEMENTED();
    }
  }
  // At least one operand is a Mint: compute in 64 bits using the wrap-around
  // helpers so signed overflow (UB in C++) never occurs.
  const int64_t left_value = AsInt64Value();
  const int64_t right_value = other.AsInt64Value();
  switch (operation) {
    case Token::kADD:
      return Integer::New(Utils::AddWithWrapAround(left_value, right_value),
                          space);

    case Token::kSUB:
      return Integer::New(Utils::SubWithWrapAround(left_value, right_value),
                          space);

    case Token::kMUL:
      return Integer::New(Utils::MulWithWrapAround(left_value, right_value),
                          space);

    case Token::kTRUNCDIV:
      if ((left_value == Mint::kMinValue) && (right_value == -1)) {
        // Division special case: overflow in int64_t.
        // MIN_VALUE / -1 = (MAX_VALUE + 1), which wraps around to MIN_VALUE
        return Integer::New(Mint::kMinValue, space);
      }
      return Integer::New(left_value / right_value, space);

    case Token::kMOD: {
      if ((left_value == Mint::kMinValue) && (right_value == -1)) {
        // Modulo special case: overflow in int64_t.
        // MIN_VALUE % -1 = 0 for reason given above.
        return Integer::New(0, space);
      }
      // Same non-negative adjustment as the Smi path above.
      const int64_t remainder = left_value % right_value;
      if (remainder < 0) {
        if (right_value < 0) {
          return Integer::New(remainder - right_value, space);
        } else {
          return Integer::New(remainder + right_value, space);
        }
      }
      return Integer::New(remainder, space);
    }
    default:
      UNIMPLEMENTED();
      return Integer::null();
  }
}
21341
21342IntegerPtr Integer::BitOp(Token::Kind kind,
21343 const Integer& other,
21344 Heap::Space space) const {
21345 if (IsSmi() && other.IsSmi()) {
21346 intptr_t op1_value = Smi::Value(Smi::RawCast(raw()));
21347 intptr_t op2_value = Smi::Value(Smi::RawCast(other.raw()));
21348 intptr_t result = 0;
21349 switch (kind) {
21350 case Token::kBIT_AND:
21351 result = op1_value & op2_value;
21352 break;
21353 case Token::kBIT_OR:
21354 result = op1_value | op2_value;
21355 break;
21356 case Token::kBIT_XOR:
21357 result = op1_value ^ op2_value;
21358 break;
21359 default:
21360 UNIMPLEMENTED();
21361 }
21362 ASSERT(Smi::IsValid(result));
21363 return Smi::New(result);
21364 } else {
21365 int64_t a = AsInt64Value();
21366 int64_t b = other.AsInt64Value();
21367 switch (kind) {
21368 case Token::kBIT_AND:
21369 return Integer::New(a & b, space);
21370 case Token::kBIT_OR:
21371 return Integer::New(a | b, space);
21372 case Token::kBIT_XOR:
21373 return Integer::New(a ^ b, space);
21374 default:
21375 UNIMPLEMENTED();
21376 return Integer::null();
21377 }
21378 }
21379}
21380
// Performs the shift |kind| of this integer by |other| (must be >= 0),
// allocating any boxed result in |space|.
IntegerPtr Integer::ShiftOp(Token::Kind kind,
                            const Integer& other,
                            Heap::Space space) const {
  int64_t a = AsInt64Value();
  int64_t b = other.AsInt64Value();
  // Negative shift counts must be rejected by the caller.
  ASSERT(b >= 0);
  switch (kind) {
    case Token::kSHL:
      // Left shift truncates to 64 bits (wrap-around semantics).
      return Integer::New(Utils::ShiftLeftWithTruncation(a, b), space);
    case Token::kSHR:
      // Arithmetic right shift; the count is clamped to Mint::kBits so an
      // oversized shift amount stays within the 64-bit width.
      return Integer::New(a >> Utils::Minimum<int64_t>(b, Mint::kBits), space);
    default:
      UNIMPLEMENTED();
      return Integer::null();
  }
}
21397
21398bool Smi::Equals(const Instance& other) const {
21399 if (other.IsNull() || !other.IsSmi()) {
21400 return false;
21401 }
21402 return (this->Value() == Smi::Cast(other).Value());
21403}
21404
21405double Smi::AsDoubleValue() const {
21406 return static_cast<double>(this->Value());
21407}
21408
21409int64_t Smi::AsInt64Value() const {
21410 return this->Value();
21411}
21412
21413uint32_t Smi::AsTruncatedUint32Value() const {
21414 return this->Value() & 0xFFFFFFFF;
21415}
21416
// Three-way comparison of this Smi against |other| (a Smi or a Mint):
// returns -1, 0 or 1.
int Smi::CompareWith(const Integer& other) const {
  // Fast path: both Smi — compare values directly.
  if (other.IsSmi()) {
    const Smi& other_smi = Smi::Cast(other);
    if (this->Value() < other_smi.Value()) {
      return -1;
    } else if (this->Value() > other_smi.Value()) {
      return 1;
    } else {
      return 0;
    }
  }
  ASSERT(!other.FitsIntoSmi());
  if (other.IsMint()) {
    // |other| lies outside the Smi range, so only its sign matters: a
    // negative Mint is below every Smi, a positive Mint is above every Smi.
    if (this->IsNegative() == other.IsNegative()) {
      return this->IsNegative() ? 1 : -1;
    }
    return this->IsNegative() ? -1 : 1;
  }
  UNREACHABLE();
  return 0;
}
21438
21439const char* Smi::ToCString() const {
21440 return OS::SCreate(Thread::Current()->zone(), "%" Pd "", Value());
21441}
21442
21443ClassPtr Smi::Class() {
21444 return Isolate::Current()->object_store()->smi_class();
21445}
21446
// Writes the boxed 64-bit payload; a non-pointer store needs no GC write
// barrier.
void Mint::set_value(int64_t value) const {
  StoreNonPointer(&raw_ptr()->value_, value);
}
21450
// Allocates a new heap-boxed 64-bit integer holding |val| in |space|.
// |val| must not fit in a Smi; callers wanting automatic selection use
// Integer::New.
MintPtr Mint::New(int64_t val, Heap::Space space) {
  // Do not allocate a Mint if Smi would do.
  ASSERT(!Smi::IsValid(val));
  ASSERT(Isolate::Current()->object_store()->mint_class() != Class::null());
  Mint& result = Mint::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Mint::kClassId, Mint::InstanceSize(), space);
    // Keep safepoints out until the raw pointer is safely in a handle.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(val);
  return result.raw();
}
21465
// Returns the unique canonical Mint holding |value|, creating it and
// inserting it into the Mint class's canonical-constants table if absent.
MintPtr Mint::NewCanonical(int64_t value) {
  // Do not allocate a Mint if Smi would do.
  ASSERT(!Smi::IsValid(value));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  const Class& cls = Class::Handle(zone, isolate->object_store()->mint_class());
  // Unlocked fast-path lookup.
  Mint& canonical_value = Mint::Handle(zone);
  canonical_value = cls.LookupCanonicalMint(zone, value);
  if (!canonical_value.IsNull()) {
    return canonical_value.raw();
  }
  {
    SafepointMutexLocker ml(isolate->constant_canonicalization_mutex());
    // Retry lookup under the lock (double-checked locking): another thread
    // may have inserted the value while we were waiting.
    {
      canonical_value = cls.LookupCanonicalMint(zone, value);
      if (!canonical_value.IsNull()) {
        return canonical_value.raw();
      }
    }
    // Canonical constants live in old space.
    canonical_value = Mint::New(value, Heap::kOld);
    canonical_value.SetCanonical();
    // The value needs to be added to the constants list. Grow the list if
    // it is full.
    cls.InsertCanonicalMint(zone, canonical_value);
    return canonical_value.raw();
  }
}
21495
21496bool Mint::Equals(const Instance& other) const {
21497 if (this->raw() == other.raw()) {
21498 // Both handles point to the same raw instance.
21499 return true;
21500 }
21501 if (!other.IsMint() || other.IsNull()) {
21502 return false;
21503 }
21504 return value() == Mint::Cast(other).value();
21505}
21506
21507double Mint::AsDoubleValue() const {
21508 return static_cast<double>(this->value());
21509}
21510
21511int64_t Mint::AsInt64Value() const {
21512 return this->value();
21513}
21514
21515uint32_t Mint::AsTruncatedUint32Value() const {
21516 return this->value() & 0xFFFFFFFF;
21517}
21518
21519bool Mint::FitsIntoSmi() const {
21520 return Smi::IsValid(AsInt64Value());
21521}
21522
21523int Mint::CompareWith(const Integer& other) const {
21524 ASSERT(!FitsIntoSmi());
21525 ASSERT(other.IsMint() || other.IsSmi());
21526 int64_t a = AsInt64Value();
21527 int64_t b = other.AsInt64Value();
21528 if (a < b) {
21529 return -1;
21530 } else if (a > b) {
21531 return 1;
21532 } else {
21533 return 0;
21534 }
21535}
21536
21537const char* Mint::ToCString() const {
21538 return OS::SCreate(Thread::Current()->zone(), "%" Pd64 "", value());
21539}
21540
// Writes the boxed double payload; a non-pointer store needs no GC write
// barrier.
void Double::set_value(double value) const {
  StoreNonPointer(&raw_ptr()->value_, value);
}
21544
// Compares the stored double to |value| bit-for-bit (memcmp of the raw
// IEEE-754 representation): identical NaNs compare equal, while 0.0 and
// -0.0 compare unequal. Used by canonicalization (see CanonicalizeEquals).
bool Double::BitwiseEqualsToDouble(double value) const {
  intptr_t value_offset = Double::value_offset();
  void* this_addr = reinterpret_cast<void*>(
      reinterpret_cast<uword>(this->raw_ptr()) + value_offset);
  void* other_addr = reinterpret_cast<void*>(&value);
  return (memcmp(this_addr, other_addr, sizeof(value)) == 0);
}
21552
21553bool Double::OperatorEquals(const Instance& other) const {
21554 if (this->IsNull() || other.IsNull()) {
21555 return (this->IsNull() && other.IsNull());
21556 }
21557 if (!other.IsDouble()) {
21558 return false;
21559 }
21560 return this->value() == Double::Cast(other).value();
21561}
21562
21563bool Double::CanonicalizeEquals(const Instance& other) const {
21564 if (this->raw() == other.raw()) {
21565 return true; // "===".
21566 }
21567 if (other.IsNull() || !other.IsDouble()) {
21568 return false;
21569 }
21570 return BitwiseEqualsToDouble(Double::Cast(other).value());
21571}
21572
// Hashes the raw IEEE-754 bit pattern so the hash agrees with the bitwise
// equality used by CanonicalizeEquals (e.g. 0.0 and -0.0 hash differently).
uint32_t Double::CanonicalizeHash() const {
  return Hash64To32(bit_cast<uint64_t>(value()));
}
21576
// Allocates a new heap-boxed double holding |d| in |space|.
DoublePtr Double::New(double d, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->double_class() != Class::null());
  Double& result = Double::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Double::kClassId, Double::InstanceSize(), space);
    // Keep safepoints out until the raw pointer is safely in a handle.
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(d);
  return result.raw();
}
21589
21590DoublePtr Double::New(const String& str, Heap::Space space) {
21591 double double_value;
21592 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
21593 return Double::Handle().raw();
21594 }
21595 return New(double_value, space);
21596}
21597
21598DoublePtr Double::NewCanonical(double value) {
21599 Thread* thread = Thread::Current();
21600 Zone* zone = thread->zone();
21601 Isolate* isolate = thread->isolate();
21602 const Class& cls = Class::Handle(isolate->object_store()->double_class());
21603 // Linear search to see whether this value is already present in the
21604 // list of canonicalized constants.
21605 Double& canonical_value = Double::Handle(zone);
21606
21607 canonical_value = cls.LookupCanonicalDouble(zone, value);
21608 if (!canonical_value.IsNull()) {
21609 return canonical_value.raw();
21610 }
21611 {
21612 SafepointMutexLocker ml(isolate->constant_canonicalization_mutex());
21613 // Retry lookup.
21614 {
21615 canonical_value = cls.LookupCanonicalDouble(zone, value);
21616 if (!canonical_value.IsNull()) {
21617 return canonical_value.raw();
21618 }
21619 }
21620 canonical_value = Double::New(value, Heap::kOld);
21621 canonical_value.SetCanonical();
21622 // The value needs to be added to the constants list.
21623 cls.InsertCanonicalDouble(zone, canonical_value);
21624 return canonical_value.raw();
21625 }
21626}
21627
21628DoublePtr Double::NewCanonical(const String& str) {
21629 double double_value;
21630 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
21631 return Double::Handle().raw();
21632 }
21633 return NewCanonical(double_value);
21634}
21635
// Converts this number's textual form (via ToCString) into a VM String
// allocated in |space|.
StringPtr Number::ToString(Heap::Space space) const {
  // Refactoring can avoid Zone::Alloc and strlen, but gains are insignificant.
  const char* cstr = ToCString();
  intptr_t len = strlen(cstr);
// Resulting string is ASCII ...
#ifdef DEBUG
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(static_cast<uint8_t>(cstr[i]) < 128);
  }
#endif  // DEBUG
  // ... which is a subset of Latin-1, so the bytes can be used unchanged.
  return String::FromLatin1(reinterpret_cast<const uint8_t*>(cstr), len, space);
}
21649
21650const char* Double::ToCString() const {
21651 if (isnan(value())) {
21652 return "NaN";
21653 }
21654 if (isinf(value())) {
21655 return value() < 0 ? "-Infinity" : "Infinity";
21656 }
21657 const int kBufferSize = 128;
21658 char* buffer = Thread::Current()->zone()->Alloc<char>(kBufferSize);
21659 buffer[kBufferSize - 1] = '\0';
21660 DoubleToCString(value(), buffer, kBufferSize);
21661 return buffer;
21662}
21663
// Synchronize with implementation in compiler (intrinsifier).
// Incrementally combines character values into a string hash; Finalize()
// produces the truncated, non-zero result.
class StringHasher : ValueObject {
 public:
  StringHasher() : hash_(0) {}
  // Mixes a single character value into the running hash.
  void Add(int32_t ch) { hash_ = CombineHashes(hash_, ch); }
  // Mixes in |len| characters of |str| starting at |begin_index|.
  void Add(const String& str, intptr_t begin_index, intptr_t len);

  // Return a non-zero hash of at most 'bits' bits.
  intptr_t Finalize(int bits) {
    ASSERT(1 <= bits && bits <= (kBitsPerWord - 1));
    hash_ = FinalizeHash(hash_, bits);
    ASSERT(hash_ <= static_cast<uint32_t>(kMaxInt32));
    return hash_;
  }

 private:
  uint32_t hash_;  // Running hash accumulator.
};
21682
// Mixes |len| characters of |str|, starting at |begin_index|, into the hash.
void StringHasher::Add(const String& str, intptr_t begin_index, intptr_t len) {
  ASSERT(begin_index >= 0);
  ASSERT(len >= 0);
  ASSERT((begin_index + len) <= str.Length());
  if (len == 0) {
    return;
  }
  if (str.IsOneByteString()) {
    // One-byte strings: hash the Latin-1 code units straight from memory
    // (no safepoints while holding the raw interior pointer).
    NoSafepointScope no_safepoint;
    uint8_t* str_addr = OneByteString::CharAddr(str, begin_index);
    for (intptr_t i = 0; i < len; i++) {
      Add(*str_addr);
      str_addr++;
    }
  } else {
    // Other representations: iterate at the code-point level so surrogate
    // pairs contribute a single combined value.
    String::CodePointIterator it(str, begin_index, len);
    while (it.Next()) {
      Add(it.Current());
    }
  }
}
21704
21705intptr_t String::Hash(const String& str, intptr_t begin_index, intptr_t len) {
21706 StringHasher hasher;
21707 hasher.Add(str, begin_index, len);
21708 return hasher.Finalize(kHashBits);
21709}
21710
21711intptr_t String::HashConcat(const String& str1, const String& str2) {
21712 intptr_t len1 = str1.Length();
21713 // Since String::Hash works at the code point (rune) level, a surrogate pair
21714 // that crosses the boundary between str1 and str2 must be composed.
21715 if (str1.IsTwoByteString() && Utf16::IsLeadSurrogate(str1.CharAt(len1 - 1))) {
21716 const String& temp = String::Handle(String::Concat(str1, str2));
21717 return temp.Hash();
21718 } else {
21719 StringHasher hasher;
21720 hasher.Add(str1, 0, len1);
21721 hasher.Add(str2, 0, str2.Length());
21722 return hasher.Finalize(kHashBits);
21723 }
21724}
21725
21726template <typename T>
21727static intptr_t HashImpl(const T* characters, intptr_t len) {
21728 ASSERT(len >= 0);
21729 StringHasher hasher;
21730 for (intptr_t i = 0; i < len; i++) {
21731 hasher.Add(characters[i]);
21732 }
21733 return hasher.Finalize(String::kHashBits);
21734}
21735
21736intptr_t String::Hash(StringPtr raw) {
21737 StringHasher hasher;
21738 uword length = Smi::Value(raw->ptr()->length_);
21739 if (raw->IsOneByteString() || raw->IsExternalOneByteString()) {
21740 const uint8_t* data;
21741 if (raw->IsOneByteString()) {
21742 data = static_cast<OneByteStringPtr>(raw)->ptr()->data();
21743 } else {
21744 ASSERT(raw->IsExternalOneByteString());
21745 ExternalOneByteStringPtr str = static_cast<ExternalOneByteStringPtr>(raw);
21746 data = str->ptr()->external_data_;
21747 }
21748 return String::Hash(data, length);
21749 } else {
21750 const uint16_t* data;
21751 if (raw->IsTwoByteString()) {
21752 data = static_cast<TwoByteStringPtr>(raw)->ptr()->data();
21753 } else {
21754 ASSERT(raw->IsExternalTwoByteString());
21755 ExternalTwoByteStringPtr str = static_cast<ExternalTwoByteStringPtr>(raw);
21756 data = str->ptr()->external_data_;
21757 }
21758 return String::Hash(data, length);
21759 }
21760}
21761
// Hashes |len| bytes, one hash contribution per byte.
intptr_t String::Hash(const char* characters, intptr_t len) {
  return HashImpl(characters, len);
}
21765
// Hashes |len| Latin-1 code units, one hash contribution per unit.
intptr_t String::Hash(const uint8_t* characters, intptr_t len) {
  return HashImpl(characters, len);
}
21769
// Hashes UTF-16 input at the code-point level: Utf16::Next composes
// surrogate pairs, matching the code-point iteration in StringHasher::Add.
intptr_t String::Hash(const uint16_t* characters, intptr_t len) {
  StringHasher hasher;
  intptr_t i = 0;
  while (i < len) {
    hasher.Add(Utf16::Next(characters, &i, len));
  }
  return hasher.Finalize(kHashBits);
}
21778
// Hashes |len| UTF-32 code points, one hash contribution per code point.
intptr_t String::Hash(const int32_t* characters, intptr_t len) {
  return HashImpl(characters, len);
}
21782
21783intptr_t String::CharSize() const {
21784 intptr_t class_id = raw()->GetClassId();
21785 if (class_id == kOneByteStringCid || class_id == kExternalOneByteStringCid) {
21786 return kOneByteChar;
21787 }
21788 ASSERT(class_id == kTwoByteStringCid ||
21789 class_id == kExternalTwoByteStringCid);
21790 return kTwoByteChar;
21791}
21792
21793void* String::GetPeer() const {
21794 intptr_t class_id = raw()->GetClassId();
21795 if (class_id == kExternalOneByteStringCid) {
21796 return ExternalOneByteString::GetPeer(*this);
21797 }
21798 ASSERT(class_id == kExternalTwoByteStringCid);
21799 return ExternalTwoByteString::GetPeer(*this);
21800}
21801
21802bool String::Equals(const Instance& other) const {
21803 if (this->raw() == other.raw()) {
21804 // Both handles point to the same raw instance.
21805 return true;
21806 }
21807
21808 if (!other.IsString()) {
21809 return false;
21810 }
21811
21812 const String& other_string = String::Cast(other);
21813 return Equals(other_string);
21814}
21815
21816bool String::Equals(const String& str,
21817 intptr_t begin_index,
21818 intptr_t len) const {
21819 ASSERT(begin_index >= 0);
21820 ASSERT((begin_index == 0) || (begin_index < str.Length()));
21821 ASSERT(len >= 0);
21822 ASSERT(len <= str.Length());
21823 if (len != this->Length()) {
21824 return false; // Lengths don't match.
21825 }
21826
21827 for (intptr_t i = 0; i < len; i++) {
21828 if (CharAt(i) != str.CharAt(begin_index + i)) {
21829 return false;
21830 }
21831 }
21832
21833 return true;
21834}
21835
// Compares this string, code point by code point, against the
// NUL-terminated UTF-8 text in |cstr|.
bool String::Equals(const char* cstr) const {
  ASSERT(cstr != NULL);
  CodePointIterator it(*this);
  intptr_t len = strlen(cstr);
  while (it.Next()) {
    if (*cstr == '\0') {
      // Lengths don't match.
      return false;
    }
    int32_t ch;
    intptr_t consumed =
        Utf8::Decode(reinterpret_cast<const uint8_t*>(cstr), len, &ch);
    // A malformed UTF-8 sequence (consumed == 0) never matches.
    if (consumed == 0 || it.Current() != ch) {
      return false;
    }
    cstr += consumed;
    len -= consumed;
  }
  // Equal only if the C string is exhausted at the same time as this string.
  return *cstr == '\0';
}
21856
21857bool String::Equals(const uint8_t* latin1_array, intptr_t len) const {
21858 if (len != this->Length()) {
21859 // Lengths don't match.
21860 return false;
21861 }
21862
21863 for (intptr_t i = 0; i < len; i++) {
21864 if (this->CharAt(i) != latin1_array[i]) {
21865 return false;
21866 }
21867 }
21868 return true;
21869}
21870
21871bool String::Equals(const uint16_t* utf16_array, intptr_t len) const {
21872 if (len != this->Length()) {
21873 // Lengths don't match.
21874 return false;
21875 }
21876
21877 for (intptr_t i = 0; i < len; i++) {
21878 if (this->CharAt(i) != LoadUnaligned(&utf16_array[i])) {
21879 return false;
21880 }
21881 }
21882 return true;
21883}
21884
// Compares this string against |len| UTF-32 code points, expanding
// supplementary code points to surrogate pairs for the comparison.
bool String::Equals(const int32_t* utf32_array, intptr_t len) const {
  if (len < 0) return false;
  intptr_t j = 0;  // Index into this string's UTF-16 code units.
  for (intptr_t i = 0; i < len; ++i) {
    if (Utf::IsSupplementary(utf32_array[i])) {
      // One supplementary code point corresponds to two code units here.
      uint16_t encoded[2];
      Utf16::Encode(utf32_array[i], &encoded[0]);
      if (j + 1 >= Length()) return false;
      if (CharAt(j++) != encoded[0]) return false;
      if (CharAt(j++) != encoded[1]) return false;
    } else {
      if (j >= Length()) return false;
      if (CharAt(j++) != utf32_array[i]) return false;
    }
  }
  // All code points matched and this string has no trailing code units.
  return j == Length();
}
21902
21903bool String::EqualsConcat(const String& str1, const String& str2) const {
21904 return (Length() == str1.Length() + str2.Length()) &&
21905 str1.Equals(*this, 0, str1.Length()) &&
21906 str2.Equals(*this, str1.Length(), str2.Length());
21907}
21908
21909intptr_t String::CompareTo(const String& other) const {
21910 const intptr_t this_len = this->Length();
21911 const intptr_t other_len = other.IsNull() ? 0 : other.Length();
21912 const intptr_t len = (this_len < other_len) ? this_len : other_len;
21913 for (intptr_t i = 0; i < len; i++) {
21914 uint16_t this_code_unit = this->CharAt(i);
21915 uint16_t other_code_unit = other.CharAt(i);
21916 if (this_code_unit < other_code_unit) {
21917 return -1;
21918 }
21919 if (this_code_unit > other_code_unit) {
21920 return 1;
21921 }
21922 }
21923 if (this_len < other_len) return -1;
21924 if (this_len > other_len) return 1;
21925 return 0;
21926}
21927
21928bool String::StartsWith(StringPtr str, StringPtr prefix) {
21929 if (prefix == String::null()) return false;
21930
21931 const intptr_t length = String::LengthOf(str);
21932 const intptr_t prefix_length = String::LengthOf(prefix);
21933 if (prefix_length > length) return false;
21934
21935 for (intptr_t i = 0; i < prefix_length; i++) {
21936 if (String::CharAt(str, i) != String::CharAt(prefix, i)) {
21937 return false;
21938 }
21939 }
21940 return true;
21941}
21942
21943bool String::EndsWith(const String& other) const {
21944 if (other.IsNull()) {
21945 return false;
21946 }
21947 const intptr_t len = this->Length();
21948 const intptr_t other_len = other.Length();
21949 const intptr_t offset = len - other_len;
21950
21951 if ((other_len == 0) || (other_len > len)) {
21952 return false;
21953 }
21954 for (int i = offset; i < len; i++) {
21955 if (this->CharAt(i) != other.CharAt(i - offset)) {
21956 return false;
21957 }
21958 }
21959 return true;
21960}
21961
21962InstancePtr String::CheckAndCanonicalize(Thread* thread,
21963 const char** error_str) const {
21964 if (IsCanonical()) {
21965 return this->raw();
21966 }
21967 return Symbols::New(Thread::Current(), *this);
21968}
21969
#if defined(DEBUG)
// Debug-only invariant check: a canonical string must be the instance
// registered in the symbol table.
bool String::CheckIsCanonical(Thread* thread) const {
  Zone* zone = thread->zone();
  const String& str = String::Handle(zone, Symbols::Lookup(thread, *this));
  return (str.raw() == this->raw());
}
#endif  // DEBUG
21977
21978StringPtr String::New(const char* cstr, Heap::Space space) {
21979 ASSERT(cstr != NULL);
21980 intptr_t array_len = strlen(cstr);
21981 const uint8_t* utf8_array = reinterpret_cast<const uint8_t*>(cstr);
21982 return String::FromUTF8(utf8_array, array_len, space);
21983}
21984
// Decodes |array_len| bytes of UTF-8 into the smallest representation:
// one-byte (Latin-1) when every code point fits, otherwise two-byte
// (UTF-16).  Returns String::null() on malformed input.
StringPtr String::FromUTF8(const uint8_t* utf8_array,
                           intptr_t array_len,
                           Heap::Space space) {
  Utf8::Type type;
  // Pre-scan once to learn the decoded length and the narrowest encoding.
  intptr_t len = Utf8::CodeUnitCount(utf8_array, array_len, &type);
  if (type == Utf8::kLatin1) {
    const String& strobj = String::Handle(OneByteString::New(len, space));
    if (len > 0) {
      NoSafepointScope no_safepoint;
      if (!Utf8::DecodeToLatin1(utf8_array, array_len,
                                OneByteString::DataStart(strobj), len)) {
        Utf8::ReportInvalidByte(utf8_array, array_len, len);
        return String::null();
      }
    }
    return strobj.raw();
  }
  ASSERT((type == Utf8::kBMP) || (type == Utf8::kSupplementary));
  const String& strobj = String::Handle(TwoByteString::New(len, space));
  NoSafepointScope no_safepoint;
  if (!Utf8::DecodeToUTF16(utf8_array, array_len,
                           TwoByteString::DataStart(strobj), len)) {
    Utf8::ReportInvalidByte(utf8_array, array_len, len);
    return String::null();
  }
  return strobj.raw();
}
22012
// Latin-1 maps 1:1 onto the one-byte representation, so no decoding is
// needed.
StringPtr String::FromLatin1(const uint8_t* latin1_array,
                             intptr_t array_len,
                             Heap::Space space) {
  return OneByteString::New(latin1_array, array_len, space);
}
22018
22019StringPtr String::FromUTF16(const uint16_t* utf16_array,
22020 intptr_t array_len,
22021 Heap::Space space) {
22022 bool is_one_byte_string = true;
22023 for (intptr_t i = 0; i < array_len; ++i) {
22024 if (!Utf::IsLatin1(LoadUnaligned(&utf16_array[i]))) {
22025 is_one_byte_string = false;
22026 break;
22027 }
22028 }
22029 if (is_one_byte_string) {
22030 return OneByteString::New(utf16_array, array_len, space);
22031 }
22032 return TwoByteString::New(utf16_array, array_len, space);
22033}
22034
22035StringPtr String::FromUTF32(const int32_t* utf32_array,
22036 intptr_t array_len,
22037 Heap::Space space) {
22038 bool is_one_byte_string = true;
22039 intptr_t utf16_len = array_len;
22040 for (intptr_t i = 0; i < array_len; ++i) {
22041 if (!Utf::IsLatin1(utf32_array[i])) {
22042 is_one_byte_string = false;
22043 if (Utf::IsSupplementary(utf32_array[i])) {
22044 utf16_len += 1;
22045 }
22046 }
22047 }
22048 if (is_one_byte_string) {
22049 return OneByteString::New(utf32_array, array_len, space);
22050 }
22051 return TwoByteString::New(utf16_len, utf32_array, array_len, space);
22052}
22053
22054StringPtr String::New(const String& str, Heap::Space space) {
22055 // Currently this just creates a copy of the string in the correct space.
22056 // Once we have external string support, this will also create a heap copy of
22057 // the string if necessary. Some optimizations are possible, such as not
22058 // copying internal strings into the same space.
22059 intptr_t len = str.Length();
22060 String& result = String::Handle();
22061 intptr_t char_size = str.CharSize();
22062 if (char_size == kOneByteChar) {
22063 result = OneByteString::New(len, space);
22064 } else {
22065 ASSERT(char_size == kTwoByteChar);
22066 result = TwoByteString::New(len, space);
22067 }
22068 String::Copy(result, 0, str, 0, len);
22069 return result.raw();
22070}
22071
// Wraps embedder-owned one-byte character data (no copy); |callback| is
// invoked when the string is reclaimed.
StringPtr String::NewExternal(const uint8_t* characters,
                              intptr_t len,
                              void* peer,
                              intptr_t external_allocation_size,
                              Dart_WeakPersistentHandleFinalizer callback,
                              Heap::Space space) {
  return ExternalOneByteString::New(characters, len, peer,
                                    external_allocation_size, callback, space);
}
22081
// Wraps embedder-owned two-byte character data (no copy); |callback| is
// invoked when the string is reclaimed.
StringPtr String::NewExternal(const uint16_t* characters,
                              intptr_t len,
                              void* peer,
                              intptr_t external_allocation_size,
                              Dart_WeakPersistentHandleFinalizer callback,
                              Heap::Space space) {
  return ExternalTwoByteString::New(characters, len, peer,
                                    external_allocation_size, callback, space);
}
22091
// Copies |len| one-byte characters into |dst| starting at |dst_offset|.
// A one-byte destination takes a straight memmove; a two-byte destination
// widens each byte to a 16-bit code unit.
// NOTE(review): external-string destinations match neither branch and are
// silently skipped — confirm callers only pass internal strings.
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint8_t* characters,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    if (len > 0) {
      memmove(OneByteString::CharAddr(dst, dst_offset), characters, len);
    }
  } else if (dst.IsTwoByteString()) {
    for (intptr_t i = 0; i < len; ++i) {
      *TwoByteString::CharAddr(dst, i + dst_offset) = characters[i];
    }
  }
}
22110
// Copies |array_len| UTF-16 code units into |dst| starting at |dst_offset|.
// A one-byte destination requires every code unit to be Latin-1.
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint16_t* utf16_array,
                  intptr_t array_len) {
  ASSERT(dst_offset >= 0);
  ASSERT(array_len >= 0);
  ASSERT(array_len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < array_len; ++i) {
      // Narrowing store: the code unit must fit in one byte.
      ASSERT(Utf::IsLatin1(LoadUnaligned(&utf16_array[i])));
      *OneByteString::CharAddr(dst, i + dst_offset) = utf16_array[i];
    }
  } else {
    ASSERT(dst.IsTwoByteString());
    NoSafepointScope no_safepoint;
    if (array_len > 0) {
      // Same width on both sides: block copy (2 bytes per code unit).
      memmove(TwoByteString::CharAddr(dst, dst_offset), utf16_array,
              array_len * 2);
    }
  }
}
22133
// Copies |len| characters from |src| (starting at |src_offset|) into |dst|
// (starting at |dst_offset|), dispatching on the source representation and
// delegating to the raw-buffer Copy overloads above.
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const String& src,
                  intptr_t src_offset,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(src_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  ASSERT(len <= (src.Length() - src_offset));
  if (len > 0) {
    intptr_t char_size = src.CharSize();
    if (char_size == kOneByteChar) {
      if (src.IsOneByteString()) {
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset, OneByteString::CharAddr(src, src_offset),
                     len);
      } else {
        ASSERT(src.IsExternalOneByteString());
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset,
                     ExternalOneByteString::CharAddr(src, src_offset), len);
      }
    } else {
      ASSERT(char_size == kTwoByteChar);
      if (src.IsTwoByteString()) {
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset, TwoByteString::CharAddr(src, src_offset),
                     len);
      } else {
        ASSERT(src.IsExternalTwoByteString());
        NoSafepointScope no_safepoint;
        String::Copy(dst, dst_offset,
                     ExternalTwoByteString::CharAddr(src, src_offset), len);
      }
    }
  }
}
22172
22173StringPtr String::EscapeSpecialCharacters(const String& str) {
22174 if (str.IsOneByteString()) {
22175 return OneByteString::EscapeSpecialCharacters(str);
22176 }
22177 if (str.IsTwoByteString()) {
22178 return TwoByteString::EscapeSpecialCharacters(str);
22179 }
22180 if (str.IsExternalOneByteString()) {
22181 return ExternalOneByteString::EscapeSpecialCharacters(str);
22182 }
22183 ASSERT(str.IsExternalTwoByteString());
22184 // If EscapeSpecialCharacters is frequently called on external two byte
22185 // strings, we should implement it directly on ExternalTwoByteString rather
22186 // than first converting to a TwoByteString.
22187 return TwoByteString::EscapeSpecialCharacters(
22188 String::Handle(TwoByteString::New(str, Heap::kNew)));
22189}
22190
// Returns true iff |c| is the ASCII percent sign, the IRI escape marker.
static bool IsPercent(int32_t c) {
  return c == 0x25;  // '%'
}
22194
// Returns true for the digits produced by GetHexCharacter: '0'-'9' and the
// uppercase letters 'A'-'F'. Lowercase hex digits are deliberately not
// accepted (GetHexValue cannot decode them).
static bool IsHexCharacter(int32_t c) {
  return (('0' <= c) && (c <= '9')) || (('A' <= c) && (c <= 'F'));
}
22204
// Returns true for characters that may appear unescaped in an encoded IRI:
// ASCII alphanumerics plus the four unreserved punctuation marks.
static bool IsURISafeCharacter(int32_t c) {
  if ((('0' <= c) && (c <= '9')) || (('a' <= c) && (c <= 'z')) ||
      (('A' <= c) && (c <= 'Z'))) {
    return true;
  }
  switch (c) {
    case '-':
    case '_':
    case '.':
    case '~':
      return true;
    default:
      return false;
  }
}
22217
22218static int32_t GetHexCharacter(int32_t c) {
22219 ASSERT(c >= 0);
22220 ASSERT(c < 16);
22221 const char* hex = "0123456789ABCDEF";
22222 return hex[c];
22223}
22224
22225static int32_t GetHexValue(int32_t c) {
22226 if (c >= '0' && c <= '9') {
22227 return c - '0';
22228 }
22229 if (c >= 'A' && c <= 'F') {
22230 return c - 'A' + 10;
22231 }
22232 UNREACHABLE();
22233 return 0;
22234}
22235
22236static int32_t MergeHexCharacters(int32_t c1, int32_t c2) {
22237 return GetHexValue(c1) << 4 | GetHexValue(c2);
22238}
22239
22240const char* String::EncodeIRI(const String& str) {
22241 const intptr_t len = Utf8::Length(str);
22242 Zone* zone = Thread::Current()->zone();
22243 uint8_t* utf8 = zone->Alloc<uint8_t>(len);
22244 str.ToUTF8(utf8, len);
22245 intptr_t num_escapes = 0;
22246 for (int i = 0; i < len; ++i) {
22247 uint8_t byte = utf8[i];
22248 if (!IsURISafeCharacter(byte)) {
22249 num_escapes += 2;
22250 }
22251 }
22252 intptr_t cstr_len = len + num_escapes + 1;
22253 char* cstr = zone->Alloc<char>(cstr_len);
22254 intptr_t index = 0;
22255 for (int i = 0; i < len; ++i) {
22256 uint8_t byte = utf8[i];
22257 if (!IsURISafeCharacter(byte)) {
22258 cstr[index++] = '%';
22259 cstr[index++] = GetHexCharacter(byte >> 4);
22260 cstr[index++] = GetHexCharacter(byte & 0xF);
22261 } else {
22262 ASSERT(byte <= 127);
22263 cstr[index++] = byte;
22264 }
22265 }
22266 cstr[index] = '\0';
22267 return cstr;
22268}
22269
22270StringPtr String::DecodeIRI(const String& str) {
22271 CodePointIterator cpi(str);
22272 intptr_t num_escapes = 0;
22273 intptr_t len = str.Length();
22274 {
22275 CodePointIterator cpi(str);
22276 while (cpi.Next()) {
22277 int32_t code_point = cpi.Current();
22278 if (IsPercent(code_point)) {
22279 // Verify that the two characters following the % are hex digits.
22280 if (!cpi.Next()) {
22281 return String::null();
22282 }
22283 int32_t code_point = cpi.Current();
22284 if (!IsHexCharacter(code_point)) {
22285 return String::null();
22286 }
22287 if (!cpi.Next()) {
22288 return String::null();
22289 }
22290 code_point = cpi.Current();
22291 if (!IsHexCharacter(code_point)) {
22292 return String::null();
22293 }
22294 num_escapes += 2;
22295 }
22296 }
22297 }
22298 intptr_t utf8_len = len - num_escapes;
22299 ASSERT(utf8_len >= 0);
22300 Zone* zone = Thread::Current()->zone();
22301 uint8_t* utf8 = zone->Alloc<uint8_t>(utf8_len);
22302 {
22303 intptr_t index = 0;
22304 CodePointIterator cpi(str);
22305 while (cpi.Next()) {
22306 ASSERT(index < utf8_len);
22307 int32_t code_point = cpi.Current();
22308 if (IsPercent(code_point)) {
22309 cpi.Next();
22310 int32_t ch1 = cpi.Current();
22311 cpi.Next();
22312 int32_t ch2 = cpi.Current();
22313 int32_t merged = MergeHexCharacters(ch1, ch2);
22314 ASSERT(merged >= 0 && merged < 256);
22315 utf8[index] = static_cast<uint8_t>(merged);
22316 } else {
22317 ASSERT(code_point >= 0 && code_point < 256);
22318 utf8[index] = static_cast<uint8_t>(code_point);
22319 }
22320 index++;
22321 }
22322 }
22323 return FromUTF8(utf8, utf8_len);
22324}
22325
// Builds a new-space string from a printf-style format. See NewFormattedV.
StringPtr String::NewFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  StringPtr result = NewFormattedV(format, args);
  // |result| is a raw pointer; no safepoint (and thus no GC) may occur
  // between obtaining it and returning it to the caller.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
22334
// Like NewFormatted above, but allocates the result in |space|.
StringPtr String::NewFormatted(Heap::Space space, const char* format, ...) {
  va_list args;
  va_start(args, format);
  StringPtr result = NewFormattedV(format, args, space);
  // |result| is a raw pointer; no safepoint (and thus no GC) may occur
  // between obtaining it and returning it to the caller.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
22343
22344StringPtr String::NewFormattedV(const char* format,
22345 va_list args,
22346 Heap::Space space) {
22347 va_list args_copy;
22348 va_copy(args_copy, args);
22349 intptr_t len = Utils::VSNPrint(NULL, 0, format, args_copy);
22350 va_end(args_copy);
22351
22352 Zone* zone = Thread::Current()->zone();
22353 char* buffer = zone->Alloc<char>(len + 1);
22354 Utils::VSNPrint(buffer, (len + 1), format, args);
22355
22356 return String::New(buffer, space);
22357}
22358
22359StringPtr String::Concat(const String& str1,
22360 const String& str2,
22361 Heap::Space space) {
22362 ASSERT(!str1.IsNull() && !str2.IsNull());
22363 intptr_t char_size = Utils::Maximum(str1.CharSize(), str2.CharSize());
22364 if (char_size == kTwoByteChar) {
22365 return TwoByteString::Concat(str1, str2, space);
22366 }
22367 return OneByteString::Concat(str1, str2, space);
22368}
22369
// Concatenates every string in |strings|; convenience wrapper around
// ConcatAllRange over the whole array.
StringPtr String::ConcatAll(const Array& strings, Heap::Space space) {
  return ConcatAllRange(strings, 0, strings.Length(), space);
}
22373
22374StringPtr String::ConcatAllRange(const Array& strings,
22375 intptr_t start,
22376 intptr_t end,
22377 Heap::Space space) {
22378 ASSERT(!strings.IsNull());
22379 ASSERT(start >= 0);
22380 ASSERT(end <= strings.Length());
22381 intptr_t result_len = 0;
22382 String& str = String::Handle();
22383 intptr_t char_size = kOneByteChar;
22384 // Compute 'char_size' and 'result_len'.
22385 for (intptr_t i = start; i < end; i++) {
22386 str ^= strings.At(i);
22387 const intptr_t str_len = str.Length();
22388 if ((kMaxElements - result_len) < str_len) {
22389 Exceptions::ThrowOOM();
22390 UNREACHABLE();
22391 }
22392 result_len += str_len;
22393 char_size = Utils::Maximum(char_size, str.CharSize());
22394 }
22395 if (char_size == kOneByteChar) {
22396 return OneByteString::ConcatAll(strings, start, end, result_len, space);
22397 }
22398 ASSERT(char_size == kTwoByteChar);
22399 return TwoByteString::ConcatAll(strings, start, end, result_len, space);
22400}
22401
22402StringPtr String::SubString(const String& str,
22403 intptr_t begin_index,
22404 Heap::Space space) {
22405 ASSERT(!str.IsNull());
22406 if (begin_index >= str.Length()) {
22407 return String::null();
22408 }
22409 return String::SubString(str, begin_index, (str.Length() - begin_index),
22410 space);
22411}
22412
// Returns str[begin_index, begin_index + length) allocated in |space|.
// Returns the canonical empty string for length == 0 and null when
// begin_index is out of range. A two-byte source is narrowed to a one-byte
// result when the selected range is entirely Latin-1.
StringPtr String::SubString(Thread* thread,
                            const String& str,
                            intptr_t begin_index,
                            intptr_t length,
                            Heap::Space space) {
  ASSERT(!str.IsNull());
  ASSERT(begin_index >= 0);
  ASSERT(length >= 0);
  if (begin_index <= str.Length() && length == 0) {
    return Symbols::Empty().raw();
  }
  if (begin_index > str.Length()) {
    return String::null();
  }
  // Check whether the requested range fits in a one-byte string.
  bool is_one_byte_string = true;
  intptr_t char_size = str.CharSize();
  if (char_size == kTwoByteChar) {
    for (intptr_t i = begin_index; i < begin_index + length; ++i) {
      if (!Utf::IsLatin1(str.CharAt(i))) {
        is_one_byte_string = false;
        break;
      }
    }
  }
  // Reuse the thread-local string handle rather than allocating a new one.
  REUSABLE_STRING_HANDLESCOPE(thread);
  String& result = thread->StringHandle();
  if (is_one_byte_string) {
    result = OneByteString::New(length, space);
  } else {
    result = TwoByteString::New(length, space);
  }
  String::Copy(result, 0, str, begin_index, length);
  return result.raw();
}
22447
22448const char* String::ToCString() const {
22449 const intptr_t len = Utf8::Length(*this);
22450 Zone* zone = Thread::Current()->zone();
22451 uint8_t* result = zone->Alloc<uint8_t>(len + 1);
22452 ToUTF8(result, len);
22453 result[len] = 0;
22454 return reinterpret_cast<const char*>(result);
22455}
22456
22457char* String::ToMallocCString() const {
22458 const intptr_t len = Utf8::Length(*this);
22459 uint8_t* result = reinterpret_cast<uint8_t*>(malloc(len + 1));
22460 ToUTF8(result, len);
22461 result[len] = 0;
22462 return reinterpret_cast<char*>(result);
22463}
22464
// Encodes this string as UTF-8 into |utf8_array|, which must hold at least
// Utf8::Length(*this) bytes. Callers that need a C string add their own
// terminating NUL (see ToCString / ToMallocCString).
void String::ToUTF8(uint8_t* utf8_array, intptr_t array_len) const {
  ASSERT(array_len >= Utf8::Length(*this));
  Utf8::Encode(*this, reinterpret_cast<char*>(utf8_array), array_len);
}
22469
// Attaches a weak-persistent finalizer to |referent|: |callback| runs with
// |peer| when the object becomes unreachable. |external_size| is reported
// to the GC for external-allocation accounting. The handle is created with
// auto_delete=true, so it cleans itself up after the callback fires.
static FinalizablePersistentHandle* AddFinalizer(
    const Object& referent,
    void* peer,
    Dart_WeakPersistentHandleFinalizer callback,
    intptr_t external_size) {
  ASSERT(callback != NULL);
  return FinalizablePersistentHandle::New(Isolate::Current(), referent, peer,
                                          callback, external_size,
                                          /*auto_delete=*/true);
}
22480
22481StringPtr String::Transform(int32_t (*mapping)(int32_t ch),
22482 const String& str,
22483 Heap::Space space) {
22484 ASSERT(!str.IsNull());
22485 bool has_mapping = false;
22486 int32_t dst_max = 0;
22487 CodePointIterator it(str);
22488 while (it.Next()) {
22489 int32_t src = it.Current();
22490 int32_t dst = mapping(src);
22491 if (src != dst) {
22492 has_mapping = true;
22493 }
22494 dst_max = Utils::Maximum(dst_max, dst);
22495 }
22496 if (!has_mapping) {
22497 return str.raw();
22498 }
22499 if (Utf::IsLatin1(dst_max)) {
22500 return OneByteString::Transform(mapping, str, space);
22501 }
22502 ASSERT(Utf::IsBmp(dst_max) || Utf::IsSupplementary(dst_max));
22503 return TwoByteString::Transform(mapping, str, space);
22504}
22505
// Upper-cases |str| via the generic Transform; returns |str| unchanged when
// no character maps to a different one.
StringPtr String::ToUpperCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToUpper, str, space);
}
22510
// Lower-cases |str| via the generic Transform; returns |str| unchanged when
// no character maps to a different one.
StringPtr String::ToLowerCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToLower, str, space);
}
22515
// Parses str[start, end) as a double into |result|. Returns false when the
// range contains non-ASCII characters or is not a valid double numeral.
bool String::ParseDouble(const String& str,
                         intptr_t start,
                         intptr_t end,
                         double* result) {
  ASSERT(0 <= start);
  ASSERT(start <= end);
  ASSERT(end <= str.Length());
  intptr_t length = end - start;
  // Raw character pointers are held below, so safepoints must stay disabled
  // until parsing is complete.
  NoSafepointScope no_safepoint;
  const uint8_t* startChar;
  if (str.IsOneByteString()) {
    startChar = OneByteString::CharAddr(str, start);
  } else if (str.IsExternalOneByteString()) {
    startChar = ExternalOneByteString::CharAddr(str, start);
  } else {
    // Two-byte string: copy the range into a zone buffer, rejecting any
    // character outside ASCII (such a string cannot be a valid double).
    uint8_t* chars = Thread::Current()->zone()->Alloc<uint8_t>(length);
    for (intptr_t i = 0; i < length; i++) {
      int32_t ch = str.CharAt(start + i);
      if (ch < 128) {
        chars[i] = ch;
      } else {
        return false;  // Not ASCII, so definitely not valid double numeral.
      }
    }
    startChar = chars;
  }
  return CStringToDouble(reinterpret_cast<const char*>(startChar), length,
                         result);
}
22545
22546// Check to see if 'str1' matches 'str2' as is or
22547// once the private key separator is stripped from str2.
22548//
22549// Things are made more complicated by the fact that constructors are
22550// added *after* the private suffix, so "foo@123.named" should match
22551// "foo.named".
22552//
22553// Also, the private suffix can occur more than once in the name, as in:
22554//
22555// _ReceivePortImpl@6be832b._internal@6be832b
22556//
// See the comment above: matches |str1| (possibly containing private-key
// suffixes) against |str2| (without them). T1/T2 select the CharAt
// implementation for the concrete classes of str1 and str2 respectively.
template <typename T1, typename T2>
static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
  intptr_t len = str1.Length();
  intptr_t str2_len = str2.Length();
  if (len == str2_len) {
    // Equal lengths: there can be no private key to skip, so compare
    // character by character.
    for (intptr_t i = 0; i < len; i++) {
      if (T1::CharAt(str1, i) != T2::CharAt(str2, i)) {
        return false;
      }
    }
    return true;
  }
  if (len < str2_len) {
    return false;  // No way they can match.
  }
  // Walk both strings with independent cursors, skipping key suffixes in
  // str1 as they are encountered.
  intptr_t pos = 0;
  intptr_t str2_pos = 0;
  while (pos < len) {
    int32_t ch = T1::CharAt(str1, pos);
    pos++;

    if (ch == Library::kPrivateKeySeparator) {
      // Consume a private key separator.
      while ((pos < len) && (T1::CharAt(str1, pos) != '.') &&
             (T1::CharAt(str1, pos) != '&')) {
        pos++;
      }
      // Resume matching characters.
      continue;
    }
    if ((str2_pos == str2_len) || (ch != T2::CharAt(str2, str2_pos))) {
      return false;
    }
    str2_pos++;
  }

  // We have reached the end of mangled_name string.
  ASSERT(pos == len);
  return (str2_pos == str2_len);
}
22597
// Helper for String::EqualsIgnoringPrivateKey below: dispatches on
// |class_id| (the concrete string class of str2), with str1's concrete
// class fixed to |type|. Falls through to UNREACHABLE() for any non-string
// class id.
#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2)                \
  switch (class_id) {                                                          \
    case kOneByteStringCid:                                                    \
      return dart::EqualsIgnoringPrivateKey<type, OneByteString>(str1, str2);  \
    case kTwoByteStringCid:                                                    \
      return dart::EqualsIgnoringPrivateKey<type, TwoByteString>(str1, str2);  \
    case kExternalOneByteStringCid:                                            \
      return dart::EqualsIgnoringPrivateKey<type, ExternalOneByteString>(      \
          str1, str2);                                                         \
    case kExternalTwoByteStringCid:                                            \
      return dart::EqualsIgnoringPrivateKey<type, ExternalTwoByteString>(      \
          str1, str2);                                                         \
  }                                                                            \
  UNREACHABLE();
22612
// Compares |str1| against |str2|, treating private-key suffixes in |str1|
// as invisible. Dispatches over all 4x4 concrete class combinations via the
// EQUALS_IGNORING_PRIVATE_KEY macro above.
bool String::EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
  if (str1.raw() == str2.raw()) {
    return true;  // Both handles point to the same raw instance.
  }
  // Raw object pointers are inspected below; keep the GC out.
  NoSafepointScope no_safepoint;
  intptr_t str1_class_id = str1.raw()->GetClassId();
  intptr_t str2_class_id = str2.raw()->GetClassId();
  switch (str1_class_id) {
    case kOneByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, OneByteString, str1, str2);
      break;
    case kTwoByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, TwoByteString, str1, str2);
      break;
    case kExternalOneByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, ExternalOneByteString, str1,
                                  str2);
      break;
    case kExternalTwoByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, ExternalTwoByteString, str1,
                                  str2);
      break;
  }
  // The macro returns for every valid string class id.
  UNREACHABLE();
  return false;
}
22639
// Advances to the next Unicode code point; returns false once the end of
// the string is reached. A valid lead/trail surrogate pair is decoded into
// one supplementary code point; an unpaired surrogate is returned as-is.
bool String::CodePointIterator::Next() {
  ASSERT(index_ >= -1);
  // Step over the previous code point (one or two UTF-16 code units).
  intptr_t length = Utf16::Length(ch_);
  if (index_ < (end_ - length)) {
    index_ += length;
    ch_ = str_.CharAt(index_);
    if (Utf16::IsLeadSurrogate(ch_) && (index_ < (end_ - 1))) {
      int32_t ch2 = str_.CharAt(index_ + 1);
      if (Utf16::IsTrailSurrogate(ch2)) {
        ch_ = Utf16::Decode(ch_, ch2);
      }
    }
    return true;
  }
  index_ = end_;
  return false;
}
22657
22658OneByteStringPtr OneByteString::EscapeSpecialCharacters(const String& str) {
22659 intptr_t len = str.Length();
22660 if (len > 0) {
22661 intptr_t num_escapes = 0;
22662 for (intptr_t i = 0; i < len; i++) {
22663 num_escapes += EscapeOverhead(CharAt(str, i));
22664 }
22665 const String& dststr =
22666 String::Handle(OneByteString::New(len + num_escapes, Heap::kNew));
22667 intptr_t index = 0;
22668 for (intptr_t i = 0; i < len; i++) {
22669 uint8_t ch = CharAt(str, i);
22670 if (IsSpecialCharacter(ch)) {
22671 SetCharAt(dststr, index, '\\');
22672 SetCharAt(dststr, index + 1, SpecialCharacter(ch));
22673 index += 2;
22674 } else if (IsAsciiNonprintable(ch)) {
22675 SetCharAt(dststr, index, '\\');
22676 SetCharAt(dststr, index + 1, 'x');
22677 SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
22678 SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
22679 index += 4;
22680 } else {
22681 SetCharAt(dststr, index, ch);
22682 index += 1;
22683 }
22684 }
22685 return OneByteString::raw(dststr);
22686 }
22687 return OneByteString::raw(Symbols::Empty());
22688}
22689
// Same escaping as OneByteString::EscapeSpecialCharacters, but reads the
// characters from an external one-byte string. The result is always a
// regular (non-external) OneByteString.
OneByteStringPtr ExternalOneByteString::EscapeSpecialCharacters(
    const String& str) {
  intptr_t len = str.Length();
  if (len > 0) {
    // First pass: compute the extra space escaping will need.
    intptr_t num_escapes = 0;
    for (intptr_t i = 0; i < len; i++) {
      num_escapes += EscapeOverhead(CharAt(str, i));
    }
    const String& dststr =
        String::Handle(OneByteString::New(len + num_escapes, Heap::kNew));
    // Second pass: write the escaped characters.
    intptr_t index = 0;
    for (intptr_t i = 0; i < len; i++) {
      uint8_t ch = CharAt(str, i);
      if (IsSpecialCharacter(ch)) {
        OneByteString::SetCharAt(dststr, index, '\\');
        OneByteString::SetCharAt(dststr, index + 1, SpecialCharacter(ch));
        index += 2;
      } else if (IsAsciiNonprintable(ch)) {
        OneByteString::SetCharAt(dststr, index, '\\');
        OneByteString::SetCharAt(dststr, index + 1, 'x');
        OneByteString::SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
        OneByteString::SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
        index += 4;
      } else {
        OneByteString::SetCharAt(dststr, index, ch);
        index += 1;
      }
    }
    return OneByteString::raw(dststr);
  }
  return OneByteString::raw(Symbols::Empty());
}
22722
// Allocates a OneByteString of |len| characters in |space|. The length (and,
// when the hash does not live in the object header, the hash field) is
// initialized; the character payload is left uninitialized for the caller.
OneByteStringPtr OneByteString::New(intptr_t len, Heap::Space space) {
  // The one-byte string class must already be loaded, except in the VM
  // isolate where it is always available.
  ASSERT((Isolate::Current() == Dart::vm_isolate()) ||
         ((Isolate::Current()->object_store() != NULL) &&
          (Isolate::Current()->object_store()->one_byte_string_class() !=
           Class::null())));
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in OneByteString::New: invalid len %" Pd "\n", len);
  }
  {
    ObjectPtr raw = Object::Allocate(OneByteString::kClassId,
                                     OneByteString::InstanceSize(len), space);
    // Stores below go through the raw pointer, so the object must not move:
    // no safepoint until we return.
    NoSafepointScope no_safepoint;
    OneByteStringPtr result = static_cast<OneByteStringPtr>(raw);
    result->ptr()->StoreSmi(&(result->ptr()->length_), Smi::New(len));
#if !defined(HASH_IN_OBJECT_HEADER)
    result->ptr()->StoreSmi(&(result->ptr()->hash_), Smi::New(0));
#endif
    return result;
  }
}
22744
// Creates a OneByteString from |len| Latin-1 bytes.
OneByteStringPtr OneByteString::New(const uint8_t* characters,
                                    intptr_t len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  if (len > 0) {
    // Raw payload pointer in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(DataStart(result), characters, len);
  }
  return OneByteString::raw(result);
}
22755
// Creates a OneByteString from |len| UTF-16 code units; every code unit
// must already be in the Latin-1 range.
OneByteStringPtr OneByteString::New(const uint16_t* characters,
                                    intptr_t len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  // Raw payload pointer in use: no safepoint until the copy is done.
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(Utf::IsLatin1(characters[i]));
    *CharAddr(result, i) = characters[i];
  }
  return OneByteString::raw(result);
}
22767
// Creates a OneByteString from |len| UTF-32 code points; every code point
// must already be in the Latin-1 range.
OneByteStringPtr OneByteString::New(const int32_t* characters,
                                    intptr_t len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(len, space));
  // Raw payload pointer in use: no safepoint until the copy is done.
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    ASSERT(Utf::IsLatin1(characters[i]));
    *CharAddr(result, i) = characters[i];
  }
  return OneByteString::raw(result);
}
22779
22780OneByteStringPtr OneByteString::New(const String& str, Heap::Space space) {
22781 intptr_t len = str.Length();
22782 const String& result = String::Handle(OneByteString::New(len, space));
22783 String::Copy(result, 0, str, 0, len);
22784 return OneByteString::raw(result);
22785}
22786
// Copies |other_len| characters of |other_one_byte_string| starting at
// |other_start_index| into a fresh OneByteString.
OneByteStringPtr OneByteString::New(const String& other_one_byte_string,
                                    intptr_t other_start_index,
                                    intptr_t other_len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(other_len, space));
  ASSERT(other_one_byte_string.IsOneByteString());
  if (other_len > 0) {
    // Raw payload pointers in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(OneByteString::DataStart(result),
            OneByteString::CharAddr(other_one_byte_string, other_start_index),
            other_len);
  }
  return OneByteString::raw(result);
}
22801
// Copies |other_len| bytes from |other_typed_data| (which must have
// one-byte elements) starting at |other_start_index| into a fresh
// OneByteString.
OneByteStringPtr OneByteString::New(const TypedData& other_typed_data,
                                    intptr_t other_start_index,
                                    intptr_t other_len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(other_len, space));
  ASSERT(other_typed_data.ElementSizeInBytes() == 1);
  if (other_len > 0) {
    // Raw payload pointers in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(OneByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index), other_len);
  }
  return OneByteString::raw(result);
}
22815
// Same as the TypedData overload above, but reads from externally-backed
// typed data.
OneByteStringPtr OneByteString::New(const ExternalTypedData& other_typed_data,
                                    intptr_t other_start_index,
                                    intptr_t other_len,
                                    Heap::Space space) {
  const String& result = String::Handle(OneByteString::New(other_len, space));
  ASSERT(other_typed_data.ElementSizeInBytes() == 1);
  if (other_len > 0) {
    // Raw payload pointers in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(OneByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index), other_len);
  }
  return OneByteString::raw(result);
}
22829
22830OneByteStringPtr OneByteString::Concat(const String& str1,
22831 const String& str2,
22832 Heap::Space space) {
22833 intptr_t len1 = str1.Length();
22834 intptr_t len2 = str2.Length();
22835 intptr_t len = len1 + len2;
22836 const String& result = String::Handle(OneByteString::New(len, space));
22837 String::Copy(result, 0, str1, 0, len1);
22838 String::Copy(result, len1, str2, 0, len2);
22839 return OneByteString::raw(result);
22840}
22841
22842OneByteStringPtr OneByteString::ConcatAll(const Array& strings,
22843 intptr_t start,
22844 intptr_t end,
22845 intptr_t len,
22846 Heap::Space space) {
22847 ASSERT(!strings.IsNull());
22848 ASSERT(start >= 0);
22849 ASSERT(end <= strings.Length());
22850 const String& result = String::Handle(OneByteString::New(len, space));
22851 String& str = String::Handle();
22852 intptr_t pos = 0;
22853 for (intptr_t i = start; i < end; i++) {
22854 str ^= strings.At(i);
22855 const intptr_t str_len = str.Length();
22856 String::Copy(result, pos, str, 0, str_len);
22857 ASSERT((kMaxElements - pos) >= str_len);
22858 pos += str_len;
22859 }
22860 return OneByteString::raw(result);
22861}
22862
// Maps every character of |str| through |mapping| into a new OneByteString.
// Every mapped value must fit in Latin-1 (asserted below); callers
// guarantee this via the width scan in String::Transform.
OneByteStringPtr OneByteString::Transform(int32_t (*mapping)(int32_t ch),
                                          const String& str,
                                          Heap::Space space) {
  ASSERT(!str.IsNull());
  intptr_t len = str.Length();
  const String& result = String::Handle(OneByteString::New(len, space));
  // Raw payload pointer in use: no safepoint until the loop finishes.
  NoSafepointScope no_safepoint;
  for (intptr_t i = 0; i < len; ++i) {
    int32_t ch = mapping(str.CharAt(i));
    ASSERT(Utf::IsLatin1(ch));
    *CharAddr(result, i) = ch;
  }
  return OneByteString::raw(result);
}
22877
// Returns str[begin_index, begin_index + length) as a new OneByteString.
// "Unchecked": the caller guarantees the range is valid; bounds are only
// ASSERTed here. Returns the canonical empty string for length == 0.
OneByteStringPtr OneByteString::SubStringUnchecked(const String& str,
                                                   intptr_t begin_index,
                                                   intptr_t length,
                                                   Heap::Space space) {
  ASSERT(!str.IsNull() && str.IsOneByteString());
  ASSERT(begin_index >= 0);
  ASSERT(length >= 0);
  if (begin_index <= str.Length() && length == 0) {
    return OneByteString::raw(Symbols::Empty());
  }
  ASSERT(begin_index < str.Length());
  OneByteStringPtr result = OneByteString::New(length, space);
  // Raw payload pointers in use below: no safepoint until the copy is done.
  NoSafepointScope no_safepoint;
  if (length > 0) {
    uint8_t* dest = &result->ptr()->data()[0];
    const uint8_t* src = &raw_ptr(str)->data()[begin_index];
    memmove(dest, src, length);
  }
  return result;
}
22898
// Two-byte analogue of OneByteString::EscapeSpecialCharacters. Note that an
// empty input returns a newly allocated empty TwoByteString rather than a
// canonical symbol (symbols are one-byte here).
TwoByteStringPtr TwoByteString::EscapeSpecialCharacters(const String& str) {
  intptr_t len = str.Length();
  if (len > 0) {
    // First pass: compute the extra space escaping will need.
    intptr_t num_escapes = 0;
    for (intptr_t i = 0; i < len; i++) {
      num_escapes += EscapeOverhead(CharAt(str, i));
    }
    const String& dststr =
        String::Handle(TwoByteString::New(len + num_escapes, Heap::kNew));
    // Second pass: write the escaped characters.
    intptr_t index = 0;
    for (intptr_t i = 0; i < len; i++) {
      uint16_t ch = CharAt(str, i);
      if (IsSpecialCharacter(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, SpecialCharacter(ch));
        index += 2;
      } else if (IsAsciiNonprintable(ch)) {
        SetCharAt(dststr, index, '\\');
        SetCharAt(dststr, index + 1, 'x');
        SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
        SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
        index += 4;
      } else {
        SetCharAt(dststr, index, ch);
        index += 1;
      }
    }
    return TwoByteString::raw(dststr);
  }
  return TwoByteString::New(0, Heap::kNew);
}
22930
// Allocates a TwoByteString of |len| characters in |space|. Length and hash
// are initialized; the character payload is left uninitialized.
TwoByteStringPtr TwoByteString::New(intptr_t len, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->two_byte_string_class() !=
         nullptr);
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in TwoByteString::New: invalid len %" Pd "\n", len);
  }
  String& result = String::Handle();
  {
    ObjectPtr raw = Object::Allocate(TwoByteString::kClassId,
                                     TwoByteString::InstanceSize(len), space);
    // No safepoint between allocation and initialization of the header
    // fields.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    result.SetHash(0);
  }
  return TwoByteString::raw(result);
}
22949
// Creates a TwoByteString from |array_len| UTF-16 code units; |array_len|
// must be positive.
TwoByteStringPtr TwoByteString::New(const uint16_t* utf16_array,
                                    intptr_t array_len,
                                    Heap::Space space) {
  ASSERT(array_len > 0);
  const String& result = String::Handle(TwoByteString::New(array_len, space));
  {
    // Raw payload pointer in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(DataStart(result), utf16_array, (array_len * 2));
  }
  return TwoByteString::raw(result);
}
22961
// Creates a TwoByteString of |utf16_len| code units from |array_len| UTF-32
// code points. Supplementary code points are encoded as surrogate pairs;
// the caller precomputed |utf16_len| accordingly.
TwoByteStringPtr TwoByteString::New(intptr_t utf16_len,
                                    const int32_t* utf32_array,
                                    intptr_t array_len,
                                    Heap::Space space) {
  ASSERT((array_len > 0) && (utf16_len >= array_len));
  const String& result = String::Handle(TwoByteString::New(utf16_len, space));
  {
    // Raw payload pointers in use: no safepoint until encoding is done.
    NoSafepointScope no_safepoint;
    intptr_t j = 0;
    for (intptr_t i = 0; i < array_len; ++i) {
      if (Utf::IsSupplementary(utf32_array[i])) {
        // Supplementary code points take two UTF-16 code units.
        ASSERT(j < (utf16_len - 1));
        Utf16::Encode(utf32_array[i], CharAddr(result, j));
        j += 2;
      } else {
        ASSERT(j < utf16_len);
        *CharAddr(result, j) = utf32_array[i];
        j += 1;
      }
    }
  }
  return TwoByteString::raw(result);
}
22985
22986TwoByteStringPtr TwoByteString::New(const String& str, Heap::Space space) {
22987 intptr_t len = str.Length();
22988 const String& result = String::Handle(TwoByteString::New(len, space));
22989 String::Copy(result, 0, str, 0, len);
22990 return TwoByteString::raw(result);
22991}
22992
// Copies |other_len| two-byte elements from |other_typed_data| starting at
// |other_start_index| into a fresh TwoByteString.
TwoByteStringPtr TwoByteString::New(const TypedData& other_typed_data,
                                    intptr_t other_start_index,
                                    intptr_t other_len,
                                    Heap::Space space) {
  const String& result = String::Handle(TwoByteString::New(other_len, space));
  if (other_len > 0) {
    // Raw payload pointers in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(TwoByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index),
            other_len * sizeof(uint16_t));
  }
  return TwoByteString::raw(result);
}
23006
// Same as the TypedData overload above, but reads from externally-backed
// typed data.
TwoByteStringPtr TwoByteString::New(const ExternalTypedData& other_typed_data,
                                    intptr_t other_start_index,
                                    intptr_t other_len,
                                    Heap::Space space) {
  const String& result = String::Handle(TwoByteString::New(other_len, space));
  if (other_len > 0) {
    // Raw payload pointers in use: no safepoint until the copy is done.
    NoSafepointScope no_safepoint;
    memmove(TwoByteString::DataStart(result),
            other_typed_data.DataAddr(other_start_index),
            other_len * sizeof(uint16_t));
  }
  return TwoByteString::raw(result);
}
23020
23021TwoByteStringPtr TwoByteString::Concat(const String& str1,
23022 const String& str2,
23023 Heap::Space space) {
23024 intptr_t len1 = str1.Length();
23025 intptr_t len2 = str2.Length();
23026 intptr_t len = len1 + len2;
23027 const String& result = String::Handle(TwoByteString::New(len, space));
23028 String::Copy(result, 0, str1, 0, len1);
23029 String::Copy(result, len1, str2, 0, len2);
23030 return TwoByteString::raw(result);
23031}
23032
23033TwoByteStringPtr TwoByteString::ConcatAll(const Array& strings,
23034 intptr_t start,
23035 intptr_t end,
23036 intptr_t len,
23037 Heap::Space space) {
23038 ASSERT(!strings.IsNull());
23039 ASSERT(start >= 0);
23040 ASSERT(end <= strings.Length());
23041 const String& result = String::Handle(TwoByteString::New(len, space));
23042 String& str = String::Handle();
23043 intptr_t pos = 0;
23044 for (intptr_t i = start; i < end; i++) {
23045 str ^= strings.At(i);
23046 const intptr_t str_len = str.Length();
23047 String::Copy(result, pos, str, 0, str_len);
23048 ASSERT((kMaxElements - pos) >= str_len);
23049 pos += str_len;
23050 }
23051 return TwoByteString::raw(result);
23052}
23053
23054TwoByteStringPtr TwoByteString::Transform(int32_t (*mapping)(int32_t ch),
23055 const String& str,
23056 Heap::Space space) {
23057 ASSERT(!str.IsNull());
23058 intptr_t len = str.Length();
23059 const String& result = String::Handle(TwoByteString::New(len, space));
23060 String::CodePointIterator it(str);
23061 intptr_t i = 0;
23062 NoSafepointScope no_safepoint;
23063 while (it.Next()) {
23064 int32_t src = it.Current();
23065 int32_t dst = mapping(src);
23066 ASSERT(dst >= 0 && dst <= 0x10FFFF);
23067 intptr_t len = Utf16::Length(dst);
23068 if (len == 1) {
23069 *CharAddr(result, i) = dst;
23070 } else {
23071 ASSERT(len == 2);
23072 Utf16::Encode(dst, CharAddr(result, i));
23073 }
23074 i += len;
23075 }
23076 return TwoByteString::raw(result);
23077}
23078
// Creates an external one-byte string wrapping |data| without copying it.
// |callback| is registered as a weak finalizer (with |peer|) so the backing
// store can be released when the string dies; |external_allocation_size| is
// reported to the GC for accounting.
ExternalOneByteStringPtr ExternalOneByteString::New(
    const uint8_t* data,
    intptr_t len,
    void* peer,
    intptr_t external_allocation_size,
    Dart_WeakPersistentHandleFinalizer callback,
    Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->external_one_byte_string_class() !=
         Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in ExternalOneByteString::New: invalid len %" Pd "\n",
           len);
  }
  String& result = String::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(ExternalOneByteString::kClassId,
                         ExternalOneByteString::InstanceSize(), space);
    // No safepoint between allocation and initialization of the fields.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    result.SetHash(0);
    SetExternalData(result, data, peer);
  }
  AddFinalizer(result, peer, callback, external_allocation_size);
  return ExternalOneByteString::raw(result);
}
23107
// Two-byte analogue of ExternalOneByteString::New: wraps |data| without
// copying and registers |callback|/|peer| as the finalizer for the backing
// store.
ExternalTwoByteStringPtr ExternalTwoByteString::New(
    const uint16_t* data,
    intptr_t len,
    void* peer,
    intptr_t external_allocation_size,
    Dart_WeakPersistentHandleFinalizer callback,
    Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->external_two_byte_string_class() !=
         Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in ExternalTwoByteString::New: invalid len %" Pd "\n",
           len);
  }
  String& result = String::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(ExternalTwoByteString::kClassId,
                         ExternalTwoByteString::InstanceSize(), space);
    // No safepoint between allocation and initialization of the fields.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    result.SetHash(0);
    SetExternalData(result, data, peer);
  }
  AddFinalizer(result, peer, callback, external_allocation_size);
  return ExternalTwoByteString::raw(result);
}
23136
// Allocates one of the two canonical bool instances. Bools are singletons,
// so they are allocated in old space and marked canonical.
BoolPtr Bool::New(bool value) {
  ASSERT(Isolate::Current()->object_store()->bool_class() != Class::null());
  Bool& result = Bool::Handle();
  {
    // Since the two boolean instances are singletons we allocate them straight
    // in the old generation.
    ObjectPtr raw =
        Object::Allocate(Bool::kClassId, Bool::InstanceSize(), Heap::kOld);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(value);
  result.SetCanonical();
  return result.raw();
}
23152
23153const char* Bool::ToCString() const {
23154 return value() ? "true" : "false";
23155}
23156
23157bool Array::CanonicalizeEquals(const Instance& other) const {
23158 if (this->raw() == other.raw()) {
23159 // Both handles point to the same raw instance.
23160 return true;
23161 }
23162
23163 // An Array may be compared to an ImmutableArray.
23164 if (!other.IsArray() || other.IsNull()) {
23165 return false;
23166 }
23167
23168 // First check if both arrays have the same length and elements.
23169 const Array& other_arr = Array::Cast(other);
23170
23171 intptr_t len = this->Length();
23172 if (len != other_arr.Length()) {
23173 return false;
23174 }
23175
23176 for (intptr_t i = 0; i < len; i++) {
23177 if (this->At(i) != other_arr.At(i)) {
23178 return false;
23179 }
23180 }
23181
23182 // Now check if both arrays have the same type arguments.
23183 if (GetTypeArguments() == other.GetTypeArguments()) {
23184 return true;
23185 }
23186 const TypeArguments& type_args = TypeArguments::Handle(GetTypeArguments());
23187 const TypeArguments& other_type_args =
23188 TypeArguments::Handle(other.GetTypeArguments());
23189 if (!type_args.Equals(other_type_args)) {
23190 return false;
23191 }
23192 return true;
23193}
23194
// Computes a hash consistent with CanonicalizeEquals: combines the length,
// the type arguments' hash, and every element's canonical hash. The result
// is cached in the heap's canonical-hash table to avoid re-hashing.
uint32_t Array::CanonicalizeHash() const {
  intptr_t len = Length();
  if (len == 0) {
    return 1;  // Canonical hash for an empty array.
  }
  Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(raw());
  if (hash != 0) {
    return hash;  // Cached from a previous computation.
  }
  hash = len;
  // Fold in the type arguments first, then each element.
  Instance& member = Instance::Handle(GetTypeArguments());
  hash = CombineHashes(hash, member.CanonicalizeHash());
  for (intptr_t i = 0; i < len; i++) {
    member ^= At(i);
    hash = CombineHashes(hash, member.CanonicalizeHash());
  }
  hash = FinalizeHash(hash, kHashBits);
  thread->heap()->SetCanonicalHash(raw(), hash);
  return hash;
}
23216
// Allocates a regular (mutable) Array of |len| elements. Large arrays use
// card-marking for write barriers, which requires old-space allocation.
ArrayPtr Array::New(intptr_t len, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->array_class() != Class::null());
  ArrayPtr result = New(kClassId, len, space);
  if (UseCardMarkingForAllocation(len)) {
    ASSERT(result->IsOldObject());
    result->ptr()->SetCardRememberedBitUnsynchronized();
  }
  return result;
}
23226
23227ArrayPtr Array::New(intptr_t len,
23228 const AbstractType& element_type,
23229 Heap::Space space) {
23230 const Array& result = Array::Handle(Array::New(len, space));
23231 if (!element_type.IsDynamicType()) {
23232 TypeArguments& type_args = TypeArguments::Handle(TypeArguments::New(1));
23233 type_args.SetTypeAt(0, element_type);
23234 type_args = type_args.Canonicalize();
23235 result.SetTypeArguments(type_args);
23236 }
23237 return result.raw();
23238}
23239
// Shared allocation path for Array and ImmutableArray (selected by
// |class_id|). Only the length field is initialized here; elements are
// null-initialized by Object::Allocate.
ArrayPtr Array::New(intptr_t class_id, intptr_t len, Heap::Space space) {
  if (!IsValidLength(len)) {
    // This should be caught before we reach here.
    FATAL1("Fatal error in Array::New: invalid len %" Pd "\n", len);
  }
  {
    ArrayPtr raw = static_cast<ArrayPtr>(
        Object::Allocate(class_id, Array::InstanceSize(len), space));
    // Set the length before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    raw->ptr()->StoreSmi(&(raw->ptr()->length_), Smi::New(len));
    return raw;
  }
}
23253
// Returns a new Array containing |count| elements copied from this array
// starting at |start|. Type arguments are copied only when
// |with_type_argument| is set.
ArrayPtr Array::Slice(intptr_t start,
                      intptr_t count,
                      bool with_type_argument) const {
  // TODO(vegorov) introduce an array allocation method that fills newly
  // allocated array with values from the given source array instead of
  // null-initializing all elements.
  Array& dest = Array::Handle(Array::New(count));
  // Bulk pointer copy with the proper write barriers.
  dest.StoreArrayPointers(dest.ObjectAddr(0), ObjectAddr(start), count);

  if (with_type_argument) {
    dest.SetTypeArguments(TypeArguments::Handle(GetTypeArguments()));
  }

  return dest.raw();
}
23269
// In-place conversion of this array to an ImmutableArray by rewriting the
// class id in the header tags. Uses a CAS retry loop because other header
// bits may be mutated concurrently (e.g. by the GC).
void Array::MakeImmutable() const {
  if (IsImmutable()) return;  // Already converted; nothing to do.
  ASSERT(!IsCanonical());
  NoSafepointScope no_safepoint;
  uint32_t tags = raw_ptr()->tags_;
  uint32_t old_tags;
  do {
    old_tags = tags;
    uint32_t new_tags =
        ObjectLayout::ClassIdTag::update(kImmutableArrayCid, old_tags);
    // Retry until the swap succeeds against a stable tags value.
    tags = CompareAndSwapTags(old_tags, new_tags);
  } while (tags != old_tags);
}
23283
23284const char* Array::ToCString() const {
23285 if (IsNull()) {
23286 return IsImmutable() ? "_ImmutableList NULL" : "_List NULL";
23287 }
23288 Zone* zone = Thread::Current()->zone();
23289 const char* format =
23290 IsImmutable() ? "_ImmutableList len:%" Pd : "_List len:%" Pd;
23291 return zone->PrintToString(format, Length());
23292}
23293
// Returns a new array of |new_length| containing all elements of |source|
// (which may be null, treated as empty) followed by nulls. The new length
// must be strictly greater than the source length.
ArrayPtr Array::Grow(const Array& source,
                     intptr_t new_length,
                     Heap::Space space) {
  Zone* zone = Thread::Current()->zone();
  const Array& result = Array::Handle(zone, Array::New(new_length, space));
  intptr_t len = 0;
  if (!source.IsNull()) {
    len = source.Length();
    // Preserve the source's type arguments on the grown array.
    result.SetTypeArguments(
        TypeArguments::Handle(zone, source.GetTypeArguments()));
  }
  ASSERT(new_length >= len);  // Cannot copy 'source' into new array.
  ASSERT(new_length != len);  // Unnecessary copying of array.
  PassiveObject& obj = PassiveObject::Handle(zone);
  for (int i = 0; i < len; i++) {
    obj = source.At(i);
    result.SetAt(i, obj);
  }
  return result.raw();
}
23314
// Shrinks this array in place to |new_len| elements, converting the freed
// tail of the object into a filler object so the heap remains walkable.
// The header size tag and the length field are updated with careful
// ordering; see the inline comments for the required memory ordering.
void Array::Truncate(intptr_t new_len) const {
  if (IsNull()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Array& array = Array::Handle(zone, this->raw());

  intptr_t old_len = array.Length();
  ASSERT(new_len <= old_len);
  if (old_len == new_len) {
    return;  // Nothing to truncate.
  }
  intptr_t old_size = Array::InstanceSize(old_len);
  intptr_t new_size = Array::InstanceSize(new_len);

  NoSafepointScope no_safepoint;

  // If there is any left over space fill it with either an Array object or
  // just a plain object (depending on the amount of left over space) so
  // that it can be traversed over successfully during garbage collection.
  Object::MakeUnusedSpaceTraversable(array, old_size, new_size);

  // For the heap to remain walkable by the sweeper, it must observe the
  // creation of the filler object no later than the new length of the array.
  std::atomic_thread_fence(std::memory_order_release);

  // Update the size in the header field and length of the array object.
  uint32_t tags = array.raw_ptr()->tags_;
  ASSERT(kArrayCid == ObjectLayout::ClassIdTag::decode(tags));
  uint32_t old_tags;
  do {
    old_tags = tags;
    uint32_t new_tags = ObjectLayout::SizeTag::update(new_size, old_tags);
    // CAS retry loop: other header bits may change concurrently.
    tags = CompareAndSwapTags(old_tags, new_tags);
  } while (tags != old_tags);

  // Between the CAS of the header above and the SetLength below, the array is
  // temporarily in an inconsistent state. The header is considered the
  // overriding source of object size by ObjectLayout::Size, but the ASSERTs in
  // ObjectLayout::SizeFromClass must handle this special case.
  array.SetLengthIgnoreRace(new_len);
}
23358
// Converts |growable_array| into a fixed-length Array by stealing its
// backing store, truncating it to the used length, and resetting the
// growable array to empty. When |unique| is set, a shared empty array is
// never returned for the zero-length case.
ArrayPtr Array::MakeFixedLength(const GrowableObjectArray& growable_array,
                                bool unique) {
  ASSERT(!growable_array.IsNull());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  intptr_t used_len = growable_array.Length();
  // Get the type arguments and prepare to copy them.
  const TypeArguments& type_arguments =
      TypeArguments::Handle(growable_array.GetTypeArguments());
  if (used_len == 0) {
    if (type_arguments.IsNull() && !unique) {
      // This is a raw List (as in no type arguments), so we can return the
      // simple empty array.
      return Object::empty_array().raw();
    }

    // The backing array may be a shared instance, or may not have correct
    // type parameters. Create a new empty array.
    Heap::Space space = thread->IsMutatorThread() ? Heap::kNew : Heap::kOld;
    Array& array = Array::Handle(zone, Array::New(0, space));
    array.SetTypeArguments(type_arguments);
    return array.raw();
  }
  const Array& array = Array::Handle(zone, growable_array.data());
  ASSERT(array.IsArray());
  array.SetTypeArguments(type_arguments);

  // Null the GrowableObjectArray, we are removing its backing array.
  growable_array.SetLength(0);
  growable_array.SetData(Object::empty_array());

  // Truncate the old backing array and return it.
  array.Truncate(used_len);
  return array.raw();
}
23394
// Prepares this array for canonicalization: canonicalizes number and string
// elements in place, and fails (returning false with |*error_str| set) when
// any other element is not already canonical.
bool Array::CheckAndCanonicalizeFields(Thread* thread,
                                       const char** error_str) const {
  ASSERT(error_str != NULL);
  ASSERT(*error_str == NULL);
  intptr_t len = Length();
  if (len > 0) {
    Zone* zone = thread->zone();
    Object& obj = Object::Handle(zone);
    // Iterate over all elements, canonicalize numbers and strings, expect all
    // other instances to be canonical otherwise report error (return false).
    for (intptr_t i = 0; i < len; i++) {
      obj = At(i);
      // Smis are canonical by construction; skip already-canonical objects.
      if (obj.IsInstance() && !obj.IsSmi() && !obj.IsCanonical()) {
        if (obj.IsNumber() || obj.IsString()) {
          obj = Instance::Cast(obj).CheckAndCanonicalize(thread, error_str);
          if (*error_str != NULL) {
            return false;
          }
          ASSERT(!obj.IsNull());
          this->SetAt(i, obj);
        } else {
          // Report the offending element back to the caller.
          char* chars = OS::SCreate(zone, "element at index %" Pd ": %s\n", i,
                                    obj.ToCString());
          *error_str = chars;
          return false;
        }
      }
    }
  }
  return true;
}
23426
// Allocates an ImmutableArray via the shared Array allocation path,
// differing from Array::New only in the class id.
ImmutableArrayPtr ImmutableArray::New(intptr_t len, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->immutable_array_class() !=
         Class::null());
  return static_cast<ImmutableArrayPtr>(Array::New(kClassId, len, space));
}
23432
// Appends |value|, growing the backing array when full. Throws an OOM
// exception if the doubled capacity overflows.
void GrowableObjectArray::Add(const Object& value, Heap::Space space) const {
  ASSERT(!IsNull());
  if (Length() == Capacity()) {
    // Grow from 0 to 3, and then double + 1.
    intptr_t new_capacity = (Capacity() * 2) | 3;
    if (new_capacity <= Capacity()) {
      // Capacity arithmetic overflowed.
      Exceptions::ThrowOOM();
      UNREACHABLE();
    }
    Grow(new_capacity, space);
  }
  ASSERT(Length() < Capacity());
  intptr_t index = Length();
  // Bump the length before storing so the slot is in range.
  SetLength(index + 1);
  SetAt(index, value);
}
23449
// Replaces the backing array with a copy grown to |new_capacity|.
void GrowableObjectArray::Grow(intptr_t new_capacity, Heap::Space space) const {
  ASSERT(new_capacity > Capacity());
  const Array& contents = Array::Handle(data());
  const Array& new_contents =
      Array::Handle(Array::Grow(contents, new_capacity, space));
  StorePointer(&(raw_ptr()->data_), new_contents.raw());
}
23457
// Removes and returns the last element. The vacated slot is nulled so the
// backing array does not keep the element alive.
ObjectPtr GrowableObjectArray::RemoveLast() const {
  ASSERT(!IsNull());
  ASSERT(Length() > 0);
  intptr_t index = Length() - 1;
  const Array& contents = Array::Handle(data());
  const PassiveObject& obj = PassiveObject::Handle(contents.At(index));
  contents.SetAt(index, Object::null_object());
  SetLength(index);
  return obj.raw();
}
23468
23469GrowableObjectArrayPtr GrowableObjectArray::New(intptr_t capacity,
23470 Heap::Space space) {
23471 ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().raw()
23472 : Array::New(capacity, space);
23473 const Array& data = Array::Handle(raw_data);
23474 return New(data, space);
23475}
23476
// Allocates a growable array wrapping |array| as its backing store, with
// length zero (capacity comes from the backing array).
GrowableObjectArrayPtr GrowableObjectArray::New(const Array& array,
                                                Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->growable_object_array_class() !=
         Class::null());
  GrowableObjectArray& result = GrowableObjectArray::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(GrowableObjectArray::kClassId,
                         GrowableObjectArray::InstanceSize(), space);
    // Initialize fields before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(0);
    result.SetData(array);
  }
  return result.raw();
}
23493
23494const char* GrowableObjectArray::ToCString() const {
23495 if (IsNull()) {
23496 return "_GrowableList: null";
23497 }
23498 return OS::SCreate(Thread::Current()->zone(),
23499 "Instance(length:%" Pd ") of '_GrowableList'", Length());
23500}
23501
// Hash-table traits equivalent to Dart's operator "==" and hashCode,
// with null treated as matching only null (hash 0).
class DefaultHashTraits {
 public:
  static const char* Name() { return "DefaultHashTraits"; }
  static bool ReportStats() { return false; }

  // Two objects match when both are null or OperatorEquals holds.
  static bool IsMatch(const Object& a, const Object& b) {
    if (a.IsNull() || b.IsNull()) {
      return (a.IsNull() && b.IsNull());
    } else {
      return Instance::Cast(a).OperatorEquals(Instance::Cast(b));
    }
  }
  // Returns the Dart-level hashCode truncated to 32 bits; non-integer
  // hash codes fall back to 0.
  static uword Hash(const Object& obj) {
    if (obj.IsNull()) {
      return 0;
    }
    // TODO(koda): Ensure VM classes only produce Smi hash codes, and remove
    // non-Smi cases once Dart-side implementation is complete.
    Thread* thread = Thread::Current();
    REUSABLE_INSTANCE_HANDLESCOPE(thread);
    Instance& hash_code = thread->InstanceHandle();
    hash_code ^= Instance::Cast(obj).HashCode();
    if (hash_code.IsSmi()) {
      // May waste some bits on 64-bit, to ensure consistency with non-Smi case.
      return static_cast<uword>(Smi::Cast(hash_code).AsTruncatedUint32Value());
    } else if (hash_code.IsInteger()) {
      return static_cast<uword>(
          Integer::Cast(hash_code).AsTruncatedUint32Value());
    } else {
      // Non-integer hashCode (e.g. from user code); degrade gracefully.
      return 0;
    }
  }
};
23536
// Allocates an empty LinkedHashMap with default-sized data and index
// backing stores and the matching initial hash mask.
LinkedHashMapPtr LinkedHashMap::NewDefault(Heap::Space space) {
  const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
  const TypedData& index = TypedData::Handle(
      TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
  // On 32-bit, the top bits are wasted to avoid Mint allocation.
  static const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
  static const intptr_t kInitialHashMask =
      (1 << (kAvailableBits - kInitialIndexBits)) - 1;
  return LinkedHashMap::New(data, index, kInitialHashMask, 0, 0, space);
}
23547
// Allocates a LinkedHashMap with the given backing stores and bookkeeping
// counters already provided by the caller.
LinkedHashMapPtr LinkedHashMap::New(const Array& data,
                                    const TypedData& index,
                                    intptr_t hash_mask,
                                    intptr_t used_data,
                                    intptr_t deleted_keys,
                                    Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->linked_hash_map_class() !=
         Class::null());
  LinkedHashMap& result =
      LinkedHashMap::Handle(LinkedHashMap::NewUninitialized(space));
  result.SetData(data);
  result.SetIndex(index);
  result.SetHashMask(hash_mask);
  result.SetUsedData(used_data);
  result.SetDeletedKeys(deleted_keys);
  return result.raw();
}
23565
// Allocates a LinkedHashMap whose fields are left for the caller to set
// (used by LinkedHashMap::New and deserialization paths).
LinkedHashMapPtr LinkedHashMap::NewUninitialized(Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->linked_hash_map_class() !=
         Class::null());
  LinkedHashMap& result = LinkedHashMap::Handle();
  {
    ObjectPtr raw = Object::Allocate(LinkedHashMap::kClassId,
                                     LinkedHashMap::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  return result.raw();
}
23578
23579const char* LinkedHashMap::ToCString() const {
23580 Zone* zone = Thread::Current()->zone();
23581 return zone->PrintToString("_LinkedHashMap len:%" Pd, Length());
23582}
23583
// Never called: FutureOr is abstract and has no instances to describe.
const char* FutureOr::ToCString() const {
  // FutureOr is an abstract class.
  UNREACHABLE();
}
23588
// Allocates a Float32x4 box from four individual float lanes.
Float32x4Ptr Float32x4::New(float v0,
                            float v1,
                            float v2,
                            float v3,
                            Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->float32x4_class() !=
         Class::null());
  Float32x4& result = Float32x4::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Float32x4::kClassId, Float32x4::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_x(v0);
  result.set_y(v1);
  result.set_z(v2);
  result.set_w(v3);
  return result.raw();
}
23609
// Allocates a Float32x4 box from a packed 128-bit simd value.
Float32x4Ptr Float32x4::New(simd128_value_t value, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->float32x4_class() !=
         Class::null());
  Float32x4& result = Float32x4::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Float32x4::kClassId, Float32x4::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(value);
  return result.raw();
}
23623
// Reads all four lanes as one 128-bit value; the field may be unaligned.
simd128_value_t Float32x4::value() const {
  return LoadUnaligned(
      reinterpret_cast<const simd128_value_t*>(&raw_ptr()->value_));
}
23628
// Writes all four lanes as one 128-bit value; the field may be unaligned.
void Float32x4::set_value(simd128_value_t value) const {
  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&raw()->ptr()->value_),
                 value);
}
23633
// Sets lane 0 (x).
void Float32x4::set_x(float value) const {
  StoreNonPointer(&raw_ptr()->value_[0], value);
}
23637
// Sets lane 1 (y).
void Float32x4::set_y(float value) const {
  StoreNonPointer(&raw_ptr()->value_[1], value);
}
23641
// Sets lane 2 (z).
void Float32x4::set_z(float value) const {
  StoreNonPointer(&raw_ptr()->value_[2], value);
}
23645
// Sets lane 3 (w).
void Float32x4::set_w(float value) const {
  StoreNonPointer(&raw_ptr()->value_[3], value);
}
23649
// Returns lane 0 (x).
float Float32x4::x() const {
  return raw_ptr()->value_[0];
}
23653
// Returns lane 1 (y).
float Float32x4::y() const {
  return raw_ptr()->value_[1];
}
23657
// Returns lane 2 (z).
float Float32x4::z() const {
  return raw_ptr()->value_[2];
}
23661
// Returns lane 3 (w).
float Float32x4::w() const {
  return raw_ptr()->value_[3];
}
23665
23666const char* Float32x4::ToCString() const {
23667 float _x = x();
23668 float _y = y();
23669 float _z = z();
23670 float _w = w();
23671 return OS::SCreate(Thread::Current()->zone(), "[%f, %f, %f, %f]", _x, _y, _z,
23672 _w);
23673}
23674
// Allocates an Int32x4 box from four individual int32 lanes.
Int32x4Ptr Int32x4::New(int32_t v0,
                        int32_t v1,
                        int32_t v2,
                        int32_t v3,
                        Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->int32x4_class() != Class::null());
  Int32x4& result = Int32x4::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Int32x4::kClassId, Int32x4::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_x(v0);
  result.set_y(v1);
  result.set_z(v2);
  result.set_w(v3);
  return result.raw();
}
23694
// Allocates an Int32x4 box from a packed 128-bit simd value.
Int32x4Ptr Int32x4::New(simd128_value_t value, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->int32x4_class() != Class::null());
  Int32x4& result = Int32x4::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Int32x4::kClassId, Int32x4::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(value);
  return result.raw();
}
23707
// Sets lane 0 (x).
void Int32x4::set_x(int32_t value) const {
  StoreNonPointer(&raw_ptr()->value_[0], value);
}
23711
// Sets lane 1 (y).
void Int32x4::set_y(int32_t value) const {
  StoreNonPointer(&raw_ptr()->value_[1], value);
}
23715
// Sets lane 2 (z).
void Int32x4::set_z(int32_t value) const {
  StoreNonPointer(&raw_ptr()->value_[2], value);
}
23719
// Sets lane 3 (w).
void Int32x4::set_w(int32_t value) const {
  StoreNonPointer(&raw_ptr()->value_[3], value);
}
23723
// Returns lane 0 (x).
int32_t Int32x4::x() const {
  return raw_ptr()->value_[0];
}
23727
// Returns lane 1 (y).
int32_t Int32x4::y() const {
  return raw_ptr()->value_[1];
}
23731
// Returns lane 2 (z).
int32_t Int32x4::z() const {
  return raw_ptr()->value_[2];
}
23735
// Returns lane 3 (w).
int32_t Int32x4::w() const {
  return raw_ptr()->value_[3];
}
23739
// Reads all four lanes as one 128-bit value; the field may be unaligned.
simd128_value_t Int32x4::value() const {
  return LoadUnaligned(
      reinterpret_cast<const simd128_value_t*>(&raw_ptr()->value_));
}
23744
// Writes all four lanes as one 128-bit value; the field may be unaligned.
void Int32x4::set_value(simd128_value_t value) const {
  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&raw()->ptr()->value_),
                 value);
}
23749
23750const char* Int32x4::ToCString() const {
23751 int32_t _x = x();
23752 int32_t _y = y();
23753 int32_t _z = z();
23754 int32_t _w = w();
23755 return OS::SCreate(Thread::Current()->zone(), "[%08x, %08x, %08x, %08x]", _x,
23756 _y, _z, _w);
23757}
23758
// Allocates a Float64x2 box from two individual double lanes.
Float64x2Ptr Float64x2::New(double value0, double value1, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->float64x2_class() !=
         Class::null());
  Float64x2& result = Float64x2::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Float64x2::kClassId, Float64x2::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_x(value0);
  result.set_y(value1);
  return result.raw();
}
23773
// Allocates a Float64x2 box from a packed 128-bit simd value.
Float64x2Ptr Float64x2::New(simd128_value_t value, Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->float64x2_class() !=
         Class::null());
  Float64x2& result = Float64x2::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Float64x2::kClassId, Float64x2::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_value(value);
  return result.raw();
}
23787
// Returns lane 0 (x).
double Float64x2::x() const {
  return raw_ptr()->value_[0];
}
23791
// Returns lane 1 (y).
double Float64x2::y() const {
  return raw_ptr()->value_[1];
}
23795
// Sets lane 0 (x).
void Float64x2::set_x(double x) const {
  StoreNonPointer(&raw_ptr()->value_[0], x);
}
23799
// Sets lane 1 (y).
void Float64x2::set_y(double y) const {
  StoreNonPointer(&raw_ptr()->value_[1], y);
}
23803
// Reads both lanes as one 128-bit value.
simd128_value_t Float64x2::value() const {
  return simd128_value_t().readFrom(&raw_ptr()->value_[0]);
}
23807
// Writes both lanes as one 128-bit value.
void Float64x2::set_value(simd128_value_t value) const {
  StoreSimd128(&raw_ptr()->value_[0], value);
}
23811
23812const char* Float64x2::ToCString() const {
23813 double _x = x();
23814 double _y = y();
23815 return OS::SCreate(Thread::Current()->zone(), "[%f, %f]", _x, _y);
23816}
23817
// Element size in bytes for each typed data class, indexed in the class-id
// order shown in the per-entry comments.
const intptr_t
    TypedDataBase::element_size_table[TypedDataBase::kNumElementSizes] = {
        1,   // kTypedDataInt8ArrayCid.
        1,   // kTypedDataUint8ArrayCid.
        1,   // kTypedDataUint8ClampedArrayCid.
        2,   // kTypedDataInt16ArrayCid.
        2,   // kTypedDataUint16ArrayCid.
        4,   // kTypedDataInt32ArrayCid.
        4,   // kTypedDataUint32ArrayCid.
        8,   // kTypedDataInt64ArrayCid.
        8,   // kTypedDataUint64ArrayCid.
        4,   // kTypedDataFloat32ArrayCid.
        8,   // kTypedDataFloat64ArrayCid.
        16,  // kTypedDataFloat32x4ArrayCid.
        16,  // kTypedDataInt32x4ArrayCid.
        16,  // kTypedDataFloat64x2ArrayCid,
};
23835
// Structural equality for canonicalization: same raw object, or a TypedData
// with identical element type, byte length, and byte contents.
bool TypedData::CanonicalizeEquals(const Instance& other) const {
  if (this->raw() == other.raw()) {
    // Both handles point to the same raw instance.
    return true;
  }

  if (!other.IsTypedData() || other.IsNull()) {
    return false;
  }

  const TypedData& other_typed_data = TypedData::Cast(other);

  if (this->ElementType() != other_typed_data.ElementType()) {
    return false;
  }

  const intptr_t len = this->LengthInBytes();
  if (len != other_typed_data.LengthInBytes()) {
    return false;
  }
  // Raw memory compare; the data must not move while we read it.
  NoSafepointScope no_safepoint;
  return (len == 0) ||
         (memcmp(DataAddr(0), other_typed_data.DataAddr(0), len) == 0);
}
23860
23861uint32_t TypedData::CanonicalizeHash() const {
23862 const intptr_t len = this->LengthInBytes();
23863 if (len == 0) {
23864 return 1;
23865 }
23866 uint32_t hash = len;
23867 for (intptr_t i = 0; i < len; i++) {
23868 hash = CombineHashes(len, GetUint8(i));
23869 }
23870 return FinalizeHash(hash, kHashBits);
23871}
23872
// Allocates an internal (heap-backed) typed data list of |len| elements for
// the given typed data |class_id|.
TypedDataPtr TypedData::New(intptr_t class_id,
                            intptr_t len,
                            Heap::Space space) {
  if (len < 0 || len > TypedData::MaxElements(class_id)) {
    FATAL1("Fatal error in TypedData::New: invalid len %" Pd "\n", len);
  }
  TypedData& result = TypedData::Handle();
  {
    // MaxElements bounds |len|, keeping this multiplication from overflowing.
    const intptr_t length_in_bytes = len * ElementSizeInBytes(class_id);
    ObjectPtr raw = Object::Allocate(
        class_id, TypedData::InstanceSize(length_in_bytes), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    // Cache the interior data pointer after the length is in place.
    result.RecomputeDataField();
  }
  return result.raw();
}
23891
// Debug description: the concrete typed data class name, derived from the
// class id via the CLASS_LIST_TYPED_DATA macro.
const char* TypedData::ToCString() const {
  switch (GetClassId()) {
#define CASE_TYPED_DATA_CLASS(clazz)                                           \
  case kTypedData##clazz##Cid:                                                 \
    return #clazz;
    CLASS_LIST_TYPED_DATA(CASE_TYPED_DATA_CLASS);
#undef CASE_TYPED_DATA_CLASS
  }
  return "TypedData";
}
23902
// Registers a weak finalizer for this external typed data; delegates to the
// file-level dart::AddFinalizer helper.
FinalizablePersistentHandle* ExternalTypedData::AddFinalizer(
    void* peer,
    Dart_WeakPersistentHandleFinalizer callback,
    intptr_t external_size) const {
  return dart::AddFinalizer(*this, peer, callback, external_size);
}
23909
// Allocates an external typed data object wrapping caller-owned |data| of
// |len| elements without copying. The caller is responsible for keeping
// |data| alive (see AddFinalizer / NewFinalizeWithFree).
ExternalTypedDataPtr ExternalTypedData::New(
    intptr_t class_id,
    uint8_t* data,
    intptr_t len,
    Heap::Space space,
    bool perform_eager_msan_initialization_check) {
  if (len < 0 || len > ExternalTypedData::MaxElements(class_id)) {
    FATAL1("Fatal error in ExternalTypedData::New: invalid len %" Pd "\n", len);
  }

  if (perform_eager_msan_initialization_check) {
    // Once the TypedData is created, Dart might read this memory. Check for
    // initialization at construction to make it easier to track the source.
    MSAN_CHECK_INITIALIZED(data, len);
  }

  ExternalTypedData& result = ExternalTypedData::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(class_id, ExternalTypedData::InstanceSize(), space);
    // Initialize fields before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.SetLength(len);
    result.SetData(data);
  }
  return result.raw();
}
23937
// Wraps malloc'ed |data| as an external Uint8List that free()s the buffer
// when the object is garbage collected. Allocated in old space.
ExternalTypedDataPtr ExternalTypedData::NewFinalizeWithFree(uint8_t* data,
                                                            intptr_t len) {
  ExternalTypedData& result = ExternalTypedData::Handle(ExternalTypedData::New(
      kExternalTypedDataUint8ArrayCid, data, len, Heap::kOld));
  // The finalizer receives the buffer as its peer and releases it.
  result.AddFinalizer(
      data,
      [](void* isolate_callback_data, Dart_WeakPersistentHandle handle,
         void* data) { free(data); },
      len);
  return result.raw();
}
23949
// Allocates an uninitialized (cleared) typed data view of the given view
// class id; callers fill it in via InitializeWith.
TypedDataViewPtr TypedDataView::New(intptr_t class_id, Heap::Space space) {
  auto& result = TypedDataView::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(class_id, TypedDataView::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.Clear();
  }
  return result.raw();
}
23961
// Allocates a typed data view over |typed_data| covering |length| elements
// starting at |offset_in_bytes|.
TypedDataViewPtr TypedDataView::New(intptr_t class_id,
                                    const TypedDataBase& typed_data,
                                    intptr_t offset_in_bytes,
                                    intptr_t length,
                                    Heap::Space space) {
  auto& result = TypedDataView::Handle(TypedDataView::New(class_id, space));
  result.InitializeWith(typed_data, offset_in_bytes, length);
  return result.raw();
}
23971
// Never called: TypedDataBase is abstract and has no instances to describe.
const char* TypedDataBase::ToCString() const {
  // There are no instances of RawTypedDataBase.
  UNREACHABLE();
  return nullptr;
}
23977
23978const char* TypedDataView::ToCString() const {
23979 auto zone = Thread::Current()->zone();
23980 return OS::SCreate(zone, "TypedDataView(cid: %" Pd ")", GetClassId());
23981}
23982
// Debug description; the concrete element type is not included.
const char* ExternalTypedData::ToCString() const {
  return "ExternalTypedData";
}
23986
23987PointerPtr Pointer::New(const AbstractType& type_arg,
23988 uword native_address,
23989 Heap::Space space) {
23990 Thread* thread = Thread::Current();
23991 Zone* zone = thread->zone();
23992
23993 TypeArguments& type_args = TypeArguments::Handle(zone);
23994 type_args = TypeArguments::New(1);
23995 type_args.SetTypeAt(Pointer::kNativeTypeArgPos, type_arg);
23996 type_args = type_args.Canonicalize();
23997
23998 const Class& cls =
23999 Class::Handle(Isolate::Current()->class_table()->At(kFfiPointerCid));
24000 cls.EnsureIsFinalized(Thread::Current());
24001
24002 Pointer& result = Pointer::Handle(zone);
24003 result ^= Object::Allocate(kFfiPointerCid, Pointer::InstanceSize(), space);
24004 result.SetTypeArguments(type_args);
24005 result.SetNativeAddress(native_address);
24006
24007 return result.raw();
24008}
24009
// Debug description including the type arguments and the native address.
const char* Pointer::ToCString() const {
  TypeArguments& type_args = TypeArguments::Handle(GetTypeArguments());
  String& type_args_name = String::Handle(type_args.UserVisibleName());
  return OS::SCreate(Thread::Current()->zone(), "Pointer%s: address=0x%" Px,
                     type_args_name.ToCString(), NativeAddress());
}
24016
// Allocates an ffi DynamicLibrary object wrapping the native library
// |handle| (opaque to the VM).
DynamicLibraryPtr DynamicLibrary::New(void* handle, Heap::Space space) {
  DynamicLibrary& result = DynamicLibrary::Handle();
  result ^= Object::Allocate(kFfiDynamicLibraryCid,
                             DynamicLibrary::InstanceSize(), space);
  NoSafepointScope no_safepoint;
  result.SetHandle(handle);
  return result.raw();
}
24025
// True when |obj|'s class id belongs to the ffi Pointer class id range.
bool Pointer::IsPointer(const Instance& obj) {
  return IsFfiPointerClassId(obj.raw()->GetClassId());
}
24029
// Convenience forwarder to Pointer::IsPointer for this instance.
bool Instance::IsPointer() const {
  return Pointer::IsPointer(*this);
}
24033
24034const char* DynamicLibrary::ToCString() const {
24035 return OS::SCreate(Thread::Current()->zone(), "DynamicLibrary: handle=0x%" Px,
24036 reinterpret_cast<uintptr_t>(GetHandle()));
24037}
24038
// Allocates a Capability carrying the 64-bit identifier |id|.
CapabilityPtr Capability::New(uint64_t id, Heap::Space space) {
  Capability& result = Capability::Handle();
  {
    ObjectPtr raw = Object::Allocate(Capability::kClassId,
                                     Capability::InstanceSize(), space);
    // Initialize the id before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StoreNonPointer(&result.raw_ptr()->id_, id);
  }
  return result.raw();
}
24050
// Debug description; the capability id is not included.
const char* Capability::ToCString() const {
  return "Capability";
}
24054
// Allocates a ReceivePort for port |id|, along with its paired SendPort,
// and registers the port's state (live or control) with the PortMap.
ReceivePortPtr ReceivePort::New(Dart_Port id,
                                bool is_control_port,
                                Heap::Space space) {
  ASSERT(id != ILLEGAL_PORT);
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // The paired SendPort shares the isolate's origin id.
  const SendPort& send_port =
      SendPort::Handle(zone, SendPort::New(id, thread->isolate()->origin_id()));

  ReceivePort& result = ReceivePort::Handle(zone);
  {
    ObjectPtr raw = Object::Allocate(ReceivePort::kClassId,
                                     ReceivePort::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StorePointer(&result.raw_ptr()->send_port_, send_port.raw());
  }
  if (is_control_port) {
    PortMap::SetPortState(id, PortMap::kControlPort);
  } else {
    PortMap::SetPortState(id, PortMap::kLivePort);
  }
  return result.raw();
}
24079
// ReceivePorts print as just their type name.
const char* ReceivePort::ToCString() const {
  return "ReceivePort";
}
24083
// Creates a SendPort whose origin is the current isolate.
SendPortPtr SendPort::New(Dart_Port id, Heap::Space space) {
  return New(id, Isolate::Current()->origin_id(), space);
}
24087
// Allocates a SendPort targeting port |id| with the given |origin_id|.
SendPortPtr SendPort::New(Dart_Port id,
                          Dart_Port origin_id,
                          Heap::Space space) {
  ASSERT(id != ILLEGAL_PORT);
  SendPort& result = SendPort::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(SendPort::kClassId, SendPort::InstanceSize(), space);
    // No safepoint may occur while the unboxed id fields are written.
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StoreNonPointer(&result.raw_ptr()->id_, id);
    result.StoreNonPointer(&result.raw_ptr()->origin_id_, origin_id);
  }
  return result.raw();
}
24103
// SendPorts print as just their type name.
const char* SendPort::ToCString() const {
  return "SendPort";
}
24107
24108static void TransferableTypedDataFinalizer(void* isolate_callback_data,
24109 void* peer) {
24110 delete (reinterpret_cast<TransferableTypedDataPeer*>(peer));
24111}
24112
// Creates a TransferableTypedData wrapping |data| (of |length| bytes) via a
// native peer attached to the object. A weak persistent handle runs
// TransferableTypedDataFinalizer to delete the peer if the object is GCed
// before the data is transferred.
TransferableTypedDataPtr TransferableTypedData::New(uint8_t* data,
                                                    intptr_t length,
                                                    Heap::Space space) {
  TransferableTypedDataPeer* peer = new TransferableTypedDataPeer(data, length);

  Thread* thread = Thread::Current();
  TransferableTypedData& result = TransferableTypedData::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(TransferableTypedData::kClassId,
                         TransferableTypedData::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    // The peer is keyed by object address, so attach it before the object
    // could possibly move.
    thread->heap()->SetPeer(raw, peer);
    result ^= raw;
  }
  // Set up finalizer so it frees allocated memory if handle is
  // garbage-collected.
  peer->set_handle(FinalizablePersistentHandle::New(
      thread->isolate(), result, peer, &TransferableTypedDataFinalizer, length,
      /*auto_delete=*/true));

  return result.raw();
}
24136
// TransferableTypedData prints as just its type name.
const char* TransferableTypedData::ToCString() const {
  return "TransferableTypedData";
}
24140
24141intptr_t Closure::NumTypeParameters(Thread* thread) const {
24142 if (delayed_type_arguments() != Object::null_type_arguments().raw() &&
24143 delayed_type_arguments() != Object::empty_type_arguments().raw()) {
24144 return 0;
24145 } else {
24146 const auto& closure_function = Function::Handle(thread->zone(), function());
24147 return closure_function.NumTypeParameters(thread);
24148 }
24149}
24150
24151const char* Closure::ToCString() const {
24152 Zone* zone = Thread::Current()->zone();
24153 const Function& fun = Function::Handle(zone, function());
24154 const bool is_implicit_closure = fun.IsImplicitClosureFunction();
24155 const Function& sig_fun =
24156 Function::Handle(zone, GetInstantiatedSignature(zone));
24157 const char* fun_sig =
24158 String::Handle(zone, sig_fun.UserVisibleSignature()).ToCString();
24159 const char* from = is_implicit_closure ? " from " : "";
24160 const char* fun_desc = is_implicit_closure ? fun.ToCString() : "";
24161 return OS::SCreate(zone, "Closure: %s%s%s", fun_sig, from, fun_desc);
24162}
24163
// Computes the identity hash of a closure.
//
// Implicit instance closures (tear-offs of the same method on the same
// receiver) must hash equal, so their hash combines the function's closure
// hash with the receiver's identityHashCode. All other closures are unique
// objects, so their own identityHashCode suffices. May propagate an error
// (and not return) if computing an identityHashCode fails.
int64_t Closure::ComputeHash() const {
  Thread* thread = Thread::Current();
  DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
  Zone* zone = thread->zone();
  const Function& func = Function::Handle(zone, function());
  uint32_t result = 0;
  if (func.IsImplicitInstanceClosureFunction()) {
    // Implicit instance closures are not unique, so combine function's hash
    // code with identityHashCode of cached receiver.
    result = static_cast<uint32_t>(func.ComputeClosureHash());
    // The receiver is stored at slot 0 of the closure's context.
    const Context& context = Context::Handle(zone, this->context());
    const Instance& receiver =
        Instance::Handle(zone, Instance::RawCast(context.At(0)));
    const Object& receiverHash =
        Object::Handle(zone, receiver.IdentityHashCode());
    if (receiverHash.IsError()) {
      Exceptions::PropagateError(Error::Cast(receiverHash));
      UNREACHABLE();
    }
    result = CombineHashes(
        result, Integer::Cast(receiverHash).AsTruncatedUint32Value());
  } else {
    // Explicit closures and implicit static closures are unique,
    // so identityHashCode of closure object is good enough.
    const Object& identityHash = Object::Handle(zone, this->IdentityHashCode());
    if (identityHash.IsError()) {
      Exceptions::PropagateError(Error::Cast(identityHash));
      UNREACHABLE();
    }
    result = Integer::Cast(identityHash).AsTruncatedUint32Value();
  }
  return FinalizeHash(result, String::kHashBits);
}
24197
24198ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
24199 const TypeArguments& function_type_arguments,
24200 const Function& function,
24201 const Context& context,
24202 Heap::Space space) {
24203 return Closure::New(instantiator_type_arguments, function_type_arguments,
24204 function.IsGeneric() ? Object::empty_type_arguments()
24205 : Object::null_type_arguments(),
24206 function, context, space);
24207}
24208
// Allocates a Closure and initializes all of its pointer fields inside a
// single no-safepoint region.
ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
                        const TypeArguments& function_type_arguments,
                        const TypeArguments& delayed_type_arguments,
                        const Function& function,
                        const Context& context,
                        Heap::Space space) {
  Closure& result = Closure::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(Closure::kClassId, Closure::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.StorePointer(&result.raw_ptr()->instantiator_type_arguments_,
                        instantiator_type_arguments.raw());
    result.StorePointer(&result.raw_ptr()->function_type_arguments_,
                        function_type_arguments.raw());
    result.StorePointer(&result.raw_ptr()->delayed_type_arguments_,
                        delayed_type_arguments.raw());
    result.StorePointer(&result.raw_ptr()->function_, function.raw());
    result.StorePointer(&result.raw_ptr()->context_, context.raw());
  }
  return result.raw();
}
24232
// Allocates a bare Closure in old space; fields are left at their
// allocation defaults and must be filled in by the caller.
ClosurePtr Closure::New() {
  ObjectPtr raw =
      Object::Allocate(Closure::kClassId, Closure::InstanceSize(), Heap::kOld);
  return static_cast<ClosurePtr>(raw);
}
24238
// Returns this closure's signature function with the closure's own type
// arguments applied. For a partial tearoff type application (delayed type
// arguments present), the delayed arguments are first spliced into the
// function type argument vector; otherwise the signature is instantiated
// only if it is not already fully instantiated.
FunctionPtr Closure::GetInstantiatedSignature(Zone* zone) const {
  Function& sig_fun = Function::Handle(zone, function());
  TypeArguments& fn_type_args =
      TypeArguments::Handle(zone, function_type_arguments());
  const TypeArguments& delayed_type_args =
      TypeArguments::Handle(zone, delayed_type_arguments());
  const TypeArguments& inst_type_args =
      TypeArguments::Handle(zone, instantiator_type_arguments());

  // We detect the case of a partial tearoff type application and substitute the
  // type arguments for the type parameters of the function.
  intptr_t num_free_params;
  if (delayed_type_args.raw() != Object::empty_type_arguments().raw()) {
    num_free_params = kCurrentAndEnclosingFree;
    fn_type_args = delayed_type_args.Prepend(
        zone, fn_type_args, sig_fun.NumParentTypeParameters(),
        sig_fun.NumTypeParameters() + sig_fun.NumParentTypeParameters());
  } else {
    num_free_params = kAllFree;
  }
  if (num_free_params == kCurrentAndEnclosingFree ||
      !sig_fun.HasInstantiatedSignature(kAny)) {
    return sig_fun.InstantiateSignatureFrom(inst_type_args, fn_type_args,
                                            num_free_params, Heap::kOld);
  }
  // Already fully instantiated; return the signature as-is.
  return sig_fun.raw();
}
24266
// Whether frames at the start of the parent (async-link) stack should be
// skipped when printing this trace.
bool StackTrace::skip_sync_start_in_parent_stack() const {
  return raw_ptr()->skip_sync_start_in_parent_stack;
}
24270
// Setter for skip_sync_start_in_parent_stack (plain bool, non-pointer
// store).
void StackTrace::set_skip_sync_start_in_parent_stack(bool value) const {
  StoreNonPointer(&raw_ptr()->skip_sync_start_in_parent_stack, value);
}
24274
// Number of frames in this (non-async-linked) portion of the trace, as
// given by the length of the code array.
intptr_t StackTrace::Length() const {
  const Array& code_array = Array::Handle(raw_ptr()->code_array_);
  return code_array.Length();
}
24279
// Returns the code object (Code/Bytecode, a marker, or null for a gap)
// recorded for the given frame.
ObjectPtr StackTrace::CodeAtFrame(intptr_t frame_index) const {
  const Array& code_array = Array::Handle(raw_ptr()->code_array_);
  return code_array.At(frame_index);
}
24284
// Stores |code| as the code object for the given frame.
void StackTrace::SetCodeAtFrame(intptr_t frame_index,
                                const Object& code) const {
  const Array& code_array = Array::Handle(raw_ptr()->code_array_);
  code_array.SetAt(frame_index, code);
}
24290
// Returns the pc offset (as a Smi) recorded for the given frame.
SmiPtr StackTrace::PcOffsetAtFrame(intptr_t frame_index) const {
  const Array& pc_offset_array = Array::Handle(raw_ptr()->pc_offset_array_);
  return static_cast<SmiPtr>(pc_offset_array.At(frame_index));
}
24295
// Stores |pc_offset| for the given frame.
void StackTrace::SetPcOffsetAtFrame(intptr_t frame_index,
                                    const Smi& pc_offset) const {
  const Array& pc_offset_array = Array::Handle(raw_ptr()->pc_offset_array_);
  pc_offset_array.SetAt(frame_index, pc_offset);
}
24301
// Links this trace to the next (older) async portion of the stack.
void StackTrace::set_async_link(const StackTrace& async_link) const {
  StorePointer(&raw_ptr()->async_link_, async_link.raw());
}
24305
// Stores the per-frame code-object array.
void StackTrace::set_code_array(const Array& code_array) const {
  StorePointer(&raw_ptr()->code_array_, code_array.raw());
}
24309
// Stores the per-frame pc-offset array.
void StackTrace::set_pc_offset_array(const Array& pc_offset_array) const {
  StorePointer(&raw_ptr()->pc_offset_array_, pc_offset_array.raw());
}
24313
// Controls whether inlined frames are expanded when the trace is printed.
void StackTrace::set_expand_inlined(bool value) const {
  StoreNonPointer(&raw_ptr()->expand_inlined_, value);
}
24317
// Whether inlined frames are expanded when the trace is printed.
bool StackTrace::expand_inlined() const {
  return raw_ptr()->expand_inlined_;
}
24321
// Allocates a StackTrace from parallel code/pc-offset arrays, with no async
// link. Field initialization happens after the no-safepoint region since
// the setters themselves may create handles.
StackTracePtr StackTrace::New(const Array& code_array,
                              const Array& pc_offset_array,
                              Heap::Space space) {
  StackTrace& result = StackTrace::Handle();
  {
    ObjectPtr raw = Object::Allocate(StackTrace::kClassId,
                                     StackTrace::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_code_array(code_array);
  result.set_pc_offset_array(pc_offset_array);
  result.set_expand_inlined(true);  // default.
  result.set_skip_sync_start_in_parent_stack(false);
  return result.raw();
}
24338
// Allocates a StackTrace chained to |async_link| (the older async portion
// of the stack).
StackTracePtr StackTrace::New(const Array& code_array,
                              const Array& pc_offset_array,
                              const StackTrace& async_link,
                              bool skip_sync_start_in_parent_stack,
                              Heap::Space space) {
  StackTrace& result = StackTrace::Handle();
  {
    ObjectPtr raw = Object::Allocate(StackTrace::kClassId,
                                     StackTrace::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_async_link(async_link);
  result.set_code_array(code_array);
  result.set_pc_offset_array(pc_offset_array);
  result.set_expand_inlined(true);  // default.
  result.set_skip_sync_start_in_parent_stack(skip_sync_start_in_parent_stack);
  return result.raw();
}
24358
24359#if defined(DART_PRECOMPILED_RUNTIME)
24360// Prints the best representation(s) for the call address.
// Prints the best representation(s) for the call address.
//
// |call_addr| is classified against the isolate and VM instruction images:
// isolate addresses get a "virt" entry (when compiled to ELF) plus a
// symbol+offset entry, VM addresses get only symbol+offset, and anything
// else is flagged as invalid. Always terminates the line with '\n'.
static void PrintNonSymbolicStackFrameBody(BaseTextBuffer* buffer,
                                           uword call_addr,
                                           uword isolate_instructions,
                                           uword vm_instructions,
                                           uword isolate_relocated_address) {
  const Image vm_image(reinterpret_cast<const void*>(vm_instructions));
  const Image isolate_image(
      reinterpret_cast<const void*>(isolate_instructions));

  if (isolate_image.contains(call_addr)) {
    auto const symbol_name = kIsolateSnapshotInstructionsAsmSymbol;
    auto const offset = call_addr - isolate_instructions;
    // Only print the relocated address of the call when we know the saved
    // debugging information (if any) will have the same relocated address.
    if (isolate_image.compiled_to_elf()) {
      buffer->Printf(" virt %" Pp "", isolate_relocated_address + offset);
    }
    buffer->Printf(" %s+0x%" Px "", symbol_name, offset);
  } else if (vm_image.contains(call_addr)) {
    auto const offset = call_addr - vm_instructions;
    // We currently don't print 'virt' entries for vm addresses, even if
    // they were compiled to ELF, as we should never encounter these in
    // non-symbolic stack traces (since stub addresses are stripped).
    //
    // In case they leak due to code issues elsewhere, we still print them as
    // <vm symbol>+<offset>, just to distinguish from other cases.
    buffer->Printf(" %s+0x%" Px "", kVmSnapshotInstructionsAsmSymbol, offset);
  } else {
    // This case should never happen, since these are not addresses within the
    // VM or app isolate instructions sections, so make it easy to notice.
    buffer->Printf(" <invalid Dart instruction address>");
  }
  buffer->Printf("\n");
}
24395#endif
24396
// Prints the leading "#NN    " column of a symbolic stack frame.
static void PrintSymbolicStackFrameIndex(BaseTextBuffer* buffer,
                                         intptr_t frame_index) {
  buffer->Printf("#%-6" Pd "", frame_index);
}
24401
24402static void PrintSymbolicStackFrameBody(BaseTextBuffer* buffer,
24403 const char* function_name,
24404 const char* url,
24405 intptr_t line = -1,
24406 intptr_t column = -1) {
24407 buffer->Printf(" %s (%s", function_name, url);
24408 if (line >= 0) {
24409 buffer->Printf(":%" Pd "", line);
24410 if (column >= 0) {
24411 buffer->Printf(":%" Pd "", column);
24412 }
24413 }
24414 buffer->Printf(")\n");
24415}
24416
24417static void PrintSymbolicStackFrame(Zone* zone,
24418 BaseTextBuffer* buffer,
24419 const Function& function,
24420 TokenPosition token_pos,
24421 intptr_t frame_index) {
24422 ASSERT(!function.IsNull());
24423 const auto& script = Script::Handle(zone, function.script());
24424 auto& handle = String::Handle(zone, function.QualifiedUserVisibleName());
24425 auto const function_name = handle.ToCString();
24426 handle = script.IsNull() ? String::New("Kernel") : script.url();
24427 auto url = handle.ToCString();
24428
24429 // If the URI starts with "data:application/dart;" this is a URI encoded
24430 // script so we shouldn't print the entire URI because it could be very long.
24431 if (strstr(url, "data:application/dart;") == url) {
24432 url = "<data:application/dart>";
24433 }
24434
24435 intptr_t line = -1;
24436 intptr_t column = -1;
24437 if (FLAG_precompiled_mode) {
24438 line = token_pos.value();
24439 } else if (token_pos.IsSourcePosition()) {
24440 ASSERT(!script.IsNull());
24441 script.GetTokenLocation(token_pos.SourcePosition(), &line, &column);
24442 }
24443
24444 PrintSymbolicStackFrameIndex(buffer, frame_index);
24445 PrintSymbolicStackFrameBody(buffer, function_name, url, line, column);
24446}
24447
24448// Find the relocated base of the given instructions section.
24449uword InstructionsRelocatedAddress(uword instructions_start) {
24450 Image image(reinterpret_cast<const uint8_t*>(instructions_start));
24451 auto const bss_start =
24452 reinterpret_cast<const uword*>(instructions_start + image.bss_offset());
24453 auto const index =
24454 BSS::RelocationIndex(BSS::Relocation::InstructionsRelocatedAddress);
24455 return bss_start[index];
24456}
24457
// Renders this stack trace (and every async-linked parent trace) as text in
// the current thread's zone.
//
// Two output modes exist in a precompiled runtime: with
// --dwarf-stack-traces, frames are printed non-symbolically (abs/virt
// addresses plus snapshot-relative symbols, in a format modeled on
// Android's debuggerd, with a prologue carrying pid/tid and image bases);
// otherwise frames print symbolically as "#N function (url:line:column)".
const char* StackTrace::ToCString() const {
  auto const T = Thread::Current();
  auto const zone = T->zone();
  auto& stack_trace = StackTrace::Handle(zone, this->raw());
  auto& function = Function::Handle(zone);
  auto& code_object = Object::Handle(zone);
  auto& code = Code::Handle(zone);
  auto& bytecode = Bytecode::Handle(zone);

  GrowableArray<const Function*> inlined_functions;
  GrowableArray<TokenPosition> inlined_token_positions;
  ZoneTextBuffer buffer(zone, 1024);

#if defined(DART_PRECOMPILED_RUNTIME)
  auto const isolate_instructions = reinterpret_cast<uword>(
      T->isolate_group()->source()->snapshot_instructions);
  auto const vm_instructions = reinterpret_cast<uword>(
      Dart::vm_isolate()->group()->source()->snapshot_instructions);
  auto const vm_relocated_address =
      InstructionsRelocatedAddress(vm_instructions);
  auto const isolate_relocated_address =
      InstructionsRelocatedAddress(isolate_instructions);
  if (FLAG_dwarf_stack_traces_mode) {
    // The Dart standard requires the output of StackTrace.toString to include
    // all pending activations with precise source locations (i.e., to expand
    // inlined frames and provide line and column numbers).
    buffer.Printf(
        "Warning: This VM has been configured to produce stack traces "
        "that violate the Dart standard.\n");
    // This prologue imitates Android's debuggerd to make it possible to paste
    // the stack trace into ndk-stack.
    buffer.Printf(
        "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***\n");
    OSThread* thread = OSThread::Current();
    buffer.Printf("pid: %" Pd ", tid: %" Pd ", name %s\n", OS::ProcessId(),
                  OSThread::ThreadIdToIntPtr(thread->id()), thread->name());
    // Print the dso_base of the VM and isolate_instructions. We print both here
    // as the VM and isolate may be loaded from different snapshot images.
    buffer.Printf("isolate_dso_base: %" Px "",
                  isolate_instructions - isolate_relocated_address);
    buffer.Printf(", vm_dso_base: %" Px "\n",
                  vm_instructions - vm_relocated_address);
    buffer.Printf("isolate_instructions: %" Px "", isolate_instructions);
    buffer.Printf(", vm_instructions: %" Px "\n", vm_instructions);
  }
#endif

  // Iterate through the stack frames and create C string description
  // for each frame.
  intptr_t frame_index = 0;
  uint32_t frame_skip = 0;
  do {
    for (intptr_t i = frame_skip; i < stack_trace.Length(); i++) {
      code_object = stack_trace.CodeAtFrame(i);
      if (code_object.IsNull()) {
        // Check for a null function, which indicates a gap in a StackOverflow
        // or OutOfMemory trace.
        if ((i < (stack_trace.Length() - 1)) &&
            (stack_trace.CodeAtFrame(i + 1) != Code::null())) {
          buffer.AddString("...\n...\n");
          ASSERT(stack_trace.PcOffsetAtFrame(i) != Smi::null());
          // To account for gap frames.
          frame_index += Smi::Value(stack_trace.PcOffsetAtFrame(i));
        }
      } else if (code_object.raw() == StubCode::AsynchronousGapMarker().raw()) {
        buffer.AddString("<asynchronous suspension>\n");
      } else {
        intptr_t pc_offset = Smi::Value(stack_trace.PcOffsetAtFrame(i));
        if (code_object.IsCode()) {
          code ^= code_object.raw();
          ASSERT(code.IsFunctionCode());
          function = code.function();
          const uword pc = code.PayloadStart() + pc_offset;
#if defined(DART_PRECOMPILED_RUNTIME)
          // When printing non-symbolic frames, we normally print call
          // addresses, not return addresses, by subtracting one from the PC to
          // get an address within the preceding instruction.
          //
          // The one exception is a normal closure registered as a listener on a
          // future. In this case, the returned pc_offset is 0, as the closure
          // is invoked with the value of the resolved future. Thus, we must
          // report the return address, as returning a value before the closure
          // payload will cause failures to decode the frame using DWARF info.
          const bool is_future_listener = pc_offset == 0;
          const uword call_addr = is_future_listener ? pc : pc - 1;
          if (FLAG_dwarf_stack_traces_mode) {
            // If we have access to the owning function and it would be
            // invisible in a symbolic stack trace, don't show this frame.
            // (We can't do the same for inlined functions, though.)
            if (!FLAG_show_invisible_frames && !function.IsNull() &&
                !function.is_visible()) {
              continue;
            }
            // This output is formatted like Android's debuggerd. Note debuggerd
            // prints call addresses instead of return addresses.
            buffer.Printf("    #%02" Pd " abs %" Pp "", frame_index, call_addr);
            PrintNonSymbolicStackFrameBody(
                &buffer, call_addr, isolate_instructions, vm_instructions,
                isolate_relocated_address);
            frame_index++;
            continue;
          } else if (function.IsNull()) {
            // We can't print the symbolic information since the owner was not
            // retained, so instead print the static symbol + offset like the
            // non-symbolic stack traces.
            PrintSymbolicStackFrameIndex(&buffer, frame_index);
            PrintNonSymbolicStackFrameBody(
                &buffer, call_addr, isolate_instructions, vm_instructions,
                isolate_relocated_address);
            frame_index++;
            continue;
          }
#endif
          if (code.is_optimized() && stack_trace.expand_inlined()) {
            // Expand inlined frames: innermost inlined function last in the
            // returned arrays, so walk backwards.
            code.GetInlinedFunctionsAtReturnAddress(
                pc_offset, &inlined_functions, &inlined_token_positions);
            ASSERT(inlined_functions.length() >= 1);
            for (intptr_t j = inlined_functions.length() - 1; j >= 0; j--) {
              const auto& inlined = *inlined_functions[j];
              auto const pos = inlined_token_positions[j];
              // NOTE(review): the visibility test below uses the *outer*
              // |function|, not |inlined| — confirm that is intended.
              if (FLAG_show_invisible_frames || function.is_visible()) {
                PrintSymbolicStackFrame(zone, &buffer, inlined, pos,
                                        frame_index);
                frame_index++;
              }
            }
          } else if (FLAG_show_invisible_frames || function.is_visible()) {
            auto const pos = code.GetTokenIndexOfPC(pc);
            PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index);
            frame_index++;
          }
        } else {
          ASSERT(code_object.IsBytecode());
          bytecode ^= code_object.raw();
          function = bytecode.function();
          if (FLAG_show_invisible_frames || function.is_visible()) {
            uword pc = bytecode.PayloadStart() + pc_offset;
            auto const pos = bytecode.GetTokenIndexOfPC(pc);
            PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index);
            frame_index++;
          }
        }
      }
    }
    // Follow the link.
    frame_skip = stack_trace.skip_sync_start_in_parent_stack()
                     ? StackTrace::kSyncAsyncCroppedFrames
                     : 0;
    stack_trace = stack_trace.async_link();
  } while (!stack_trace.IsNull());

  return buffer.buffer();
}
24611
// Flag handler for --dwarf-stack-traces: records the mode and, in PRODUCT
// builds, additionally drops function objects (they are only needed for
// symbolic traces / debugging).
static void DwarfStackTracesHandler(bool value) {
  FLAG_dwarf_stack_traces_mode = value;

#if defined(PRODUCT)
  // We can safely remove function objects in precompiled snapshots if the
  // runtime will generate DWARF stack traces and we don't have runtime
  // debugging options like the observatory available.
  if (value) {
    FLAG_retain_function_objects = false;
  }
#endif
}
24624
// Registers DwarfStackTracesHandler to run whenever --dwarf-stack-traces is
// set.
DEFINE_FLAG_HANDLER(DwarfStackTracesHandler,
                    dwarf_stack_traces,
                    "Omit CodeSourceMaps in precompiled snapshots and don't "
                    "symbolize stack traces in the precompiled runtime.");
24629
// Stores the regexp's source pattern string.
void RegExp::set_pattern(const String& pattern) const {
  StorePointer(&raw_ptr()->pattern_, pattern.raw());
}
24633
// Stores the compiled irregexp function for the (cid, sticky)
// specialization slot.
void RegExp::set_function(intptr_t cid,
                          bool sticky,
                          const Function& value) const {
  StorePointer(FunctionAddr(cid, sticky), value.raw());
}
24639
24640void RegExp::set_bytecode(bool is_one_byte,
24641 bool sticky,
24642 const TypedData& bytecode) const {
24643 if (sticky) {
24644 if (is_one_byte) {
24645 StorePointer(&raw_ptr()->one_byte_sticky_.bytecode_, bytecode.raw());
24646 } else {
24647 StorePointer(&raw_ptr()->two_byte_sticky_.bytecode_, bytecode.raw());
24648 }
24649 } else {
24650 if (is_one_byte) {
24651 StorePointer(&raw_ptr()->one_byte_.bytecode_, bytecode.raw());
24652 } else {
24653 StorePointer(&raw_ptr()->two_byte_.bytecode_, bytecode.raw());
24654 }
24655 }
24656}
24657
// Stores the number of capture groups (as a Smi).
void RegExp::set_num_bracket_expressions(intptr_t value) const {
  StoreSmi(&raw_ptr()->num_bracket_expressions_, Smi::New(value));
}
24661
// Stores the named-capture-group map.
void RegExp::set_capture_name_map(const Array& array) const {
  StorePointer(&raw_ptr()->capture_name_map_, array.raw());
}
24665
// Allocates an uninitialized RegExp: type/flags cleared and both
// num_registers slots set to the -1 "not compiled yet" sentinel.
RegExpPtr RegExp::New(Heap::Space space) {
  RegExp& result = RegExp::Handle();
  {
    ObjectPtr raw =
        Object::Allocate(RegExp::kClassId, RegExp::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
    result.set_type(kUninitialized);
    result.set_flags(RegExpFlags());
    result.set_num_registers(/*is_one_byte=*/false, -1);
    result.set_num_registers(/*is_one_byte=*/true, -1);
  }
  return result.raw();
}
24680
24681const char* RegExpFlags::ToCString() const {
24682 switch (value_ & ~kGlobal) {
24683 case kIgnoreCase | kMultiLine | kDotAll | kUnicode:
24684 return "imsu";
24685 case kIgnoreCase | kMultiLine | kDotAll:
24686 return "ims";
24687 case kIgnoreCase | kMultiLine | kUnicode:
24688 return "imu";
24689 case kIgnoreCase | kUnicode | kDotAll:
24690 return "ius";
24691 case kMultiLine | kDotAll | kUnicode:
24692 return "msu";
24693 case kIgnoreCase | kMultiLine:
24694 return "im";
24695 case kIgnoreCase | kDotAll:
24696 return "is";
24697 case kIgnoreCase | kUnicode:
24698 return "iu";
24699 case kMultiLine | kDotAll:
24700 return "ms";
24701 case kMultiLine | kUnicode:
24702 return "mu";
24703 case kDotAll | kUnicode:
24704 return "su";
24705 case kIgnoreCase:
24706 return "i";
24707 case kMultiLine:
24708 return "m";
24709 case kDotAll:
24710 return "s";
24711 case kUnicode:
24712 return "u";
24713 default:
24714 break;
24715 }
24716 return "";
24717}
24718
24719bool RegExp::CanonicalizeEquals(const Instance& other) const {
24720 if (this->raw() == other.raw()) {
24721 return true; // "===".
24722 }
24723 if (other.IsNull() || !other.IsRegExp()) {
24724 return false;
24725 }
24726 const RegExp& other_js = RegExp::Cast(other);
24727 // Match the pattern.
24728 const String& str1 = String::Handle(pattern());
24729 const String& str2 = String::Handle(other_js.pattern());
24730 if (!str1.Equals(str2)) {
24731 return false;
24732 }
24733 // Match the flags.
24734 if (flags() != other_js.flags()) {
24735 return false;
24736 }
24737 return true;
24738}
24739
24740const char* RegExp::ToCString() const {
24741 const String& str = String::Handle(pattern());
24742 return OS::SCreate(Thread::Current()->zone(), "RegExp: pattern=%s flags=%s",
24743 str.ToCString(), flags().ToCString());
24744}
24745
// Allocates a WeakProperty. The _WeakProperty class must already be present
// in the object store.
WeakPropertyPtr WeakProperty::New(Heap::Space space) {
  ASSERT(Isolate::Current()->object_store()->weak_property_class() !=
         Class::null());
  ObjectPtr raw = Object::Allocate(WeakProperty::kClassId,
                                   WeakProperty::InstanceSize(), space);
  WeakPropertyPtr result = static_cast<WeakPropertyPtr>(raw);
  result->ptr()->next_ = 0;  // Init the list to NULL.
  return result;
}
24755
// WeakProperties print as just their (library-private) type name.
const char* WeakProperty::ToCString() const {
  return "_WeakProperty";
}
24759
24760AbstractTypePtr MirrorReference::GetAbstractTypeReferent() const {
24761 ASSERT(Object::Handle(referent()).IsAbstractType());
24762 return AbstractType::Cast(Object::Handle(referent())).raw();
24763}
24764
24765ClassPtr MirrorReference::GetClassReferent() const {
24766 ASSERT(Object::Handle(referent()).IsClass());
24767 return Class::Cast(Object::Handle(referent())).raw();
24768}
24769
24770FieldPtr MirrorReference::GetFieldReferent() const {
24771 ASSERT(Object::Handle(referent()).IsField());
24772 return Field::Cast(Object::Handle(referent())).raw();
24773}
24774
24775FunctionPtr MirrorReference::GetFunctionReferent() const {
24776 ASSERT(Object::Handle(referent()).IsFunction());
24777 return Function::Cast(Object::Handle(referent())).raw();
24778}
24779
24780LibraryPtr MirrorReference::GetLibraryReferent() const {
24781 ASSERT(Object::Handle(referent()).IsLibrary());
24782 return Library::Cast(Object::Handle(referent())).raw();
24783}
24784
24785TypeParameterPtr MirrorReference::GetTypeParameterReferent() const {
24786 ASSERT(Object::Handle(referent()).IsTypeParameter());
24787 return TypeParameter::Cast(Object::Handle(referent())).raw();
24788}
24789
// Allocates a MirrorReference wrapping |referent|.
MirrorReferencePtr MirrorReference::New(const Object& referent,
                                        Heap::Space space) {
  MirrorReference& result = MirrorReference::Handle();
  {
    ObjectPtr raw = Object::Allocate(MirrorReference::kClassId,
                                     MirrorReference::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  // The referent is stored after the no-safepoint region via the setter.
  result.set_referent(referent);
  return result.raw();
}
24802
// MirrorReferences print as just their (library-private) type name.
const char* MirrorReference::ToCString() const {
  return "_MirrorReference";
}
24806
// Installs this tag as the current isolate's current tag.
void UserTag::MakeActive() const {
  Isolate* isolate = Isolate::Current();
  ASSERT(isolate != NULL);
  isolate->set_current_tag(*this);
}
24812
// Returns the UserTag for |label|, canonicalized per isolate: an existing
// tag with the same label is reused; otherwise a new one is allocated and
// registered. Throws an UnsupportedError if the isolate's tag table is
// already at capacity.
UserTagPtr UserTag::New(const String& label, Heap::Space space) {
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  ASSERT(isolate->tag_table() != GrowableObjectArray::null());
  // Canonicalize by name.
  UserTag& result = UserTag::Handle(FindTagInIsolate(thread, label));
  if (!result.IsNull()) {
    // Tag already exists, return existing instance.
    return result.raw();
  }
  if (TagTableIsFull(thread)) {
    const String& error = String::Handle(String::NewFormatted(
        "UserTag instance limit (%" Pd ") reached.", UserTags::kMaxUserTags));
    const Array& args = Array::Handle(Array::New(1));
    args.SetAt(0, error);
    Exceptions::ThrowByType(Exceptions::kUnsupported, args);
  }
  // No tag with label exists, create and register with isolate tag table.
  {
    ObjectPtr raw =
        Object::Allocate(UserTag::kClassId, UserTag::InstanceSize(), space);
    NoSafepointScope no_safepoint;
    result ^= raw;
  }
  result.set_label(label);
  AddTagToIsolate(thread, result);
  return result.raw();
}
24841
24842UserTagPtr UserTag::DefaultTag() {
24843 Thread* thread = Thread::Current();
24844 Zone* zone = thread->zone();
24845 Isolate* isolate = thread->isolate();
24846 ASSERT(isolate != NULL);
24847 if (isolate->default_tag() != UserTag::null()) {
24848 // Already created.
24849 return isolate->default_tag();
24850 }
24851 // Create default tag.
24852 const UserTag& result =
24853 UserTag::Handle(zone, UserTag::New(Symbols::Default()));
24854 ASSERT(result.tag() == UserTags::kDefaultUserTag);
24855 isolate->set_default_tag(result);
24856 return result.raw();
24857}
24858
24859UserTagPtr UserTag::FindTagInIsolate(Thread* thread, const String& label) {
24860 Isolate* isolate = thread->isolate();
24861 Zone* zone = thread->zone();
24862 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
24863 const GrowableObjectArray& tag_table =
24864 GrowableObjectArray::Handle(zone, isolate->tag_table());
24865 UserTag& other = UserTag::Handle(zone);
24866 String& tag_label = String::Handle(zone);
24867 for (intptr_t i = 0; i < tag_table.Length(); i++) {
24868 other ^= tag_table.At(i);
24869 ASSERT(!other.IsNull());
24870 tag_label = other.label();
24871 ASSERT(!tag_label.IsNull());
24872 if (tag_label.Equals(label)) {
24873 return other.raw();
24874 }
24875 }
24876 return UserTag::null();
24877}
24878
// Assigns |tag| the next free tag id and appends it to the isolate's tag
// table. The table must not be full (callers check TagTableIsFull first).
void UserTag::AddTagToIsolate(Thread* thread, const UserTag& tag) {
  Isolate* isolate = thread->isolate();
  Zone* zone = thread->zone();
  ASSERT(isolate->tag_table() != GrowableObjectArray::null());
  const GrowableObjectArray& tag_table =
      GrowableObjectArray::Handle(zone, isolate->tag_table());
  ASSERT(!TagTableIsFull(thread));
#if defined(DEBUG)
  // Verify that no existing tag has the same tag id.
  UserTag& other = UserTag::Handle(thread->zone());
  for (intptr_t i = 0; i < tag_table.Length(); i++) {
    other ^= tag_table.At(i);
    ASSERT(!other.IsNull());
    ASSERT(tag.tag() != other.tag());
  }
#endif
  // Generate the UserTag tag id by taking the length of the isolate's
  // tag table + kUserTagIdOffset.
  uword tag_id = tag_table.Length() + UserTags::kUserTagIdOffset;
  ASSERT(tag_id >= UserTags::kUserTagIdOffset);
  ASSERT(tag_id < (UserTags::kUserTagIdOffset + UserTags::kMaxUserTags));
  tag.set_tag(tag_id);
  tag_table.Add(tag);
}
24903
24904bool UserTag::TagTableIsFull(Thread* thread) {
24905 Isolate* isolate = thread->isolate();
24906 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
24907 const GrowableObjectArray& tag_table =
24908 GrowableObjectArray::Handle(thread->zone(), isolate->tag_table());
24909 ASSERT(tag_table.Length() <= UserTags::kMaxUserTags);
24910 return tag_table.Length() == UserTags::kMaxUserTags;
24911}
24912
24913UserTagPtr UserTag::FindTagById(uword tag_id) {
24914 Thread* thread = Thread::Current();
24915 Zone* zone = thread->zone();
24916 Isolate* isolate = thread->isolate();
24917 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
24918 const GrowableObjectArray& tag_table =
24919 GrowableObjectArray::Handle(zone, isolate->tag_table());
24920 UserTag& tag = UserTag::Handle(zone);
24921 for (intptr_t i = 0; i < tag_table.Length(); i++) {
24922 tag ^= tag_table.At(i);
24923 if (tag.tag() == tag_id) {
24924 return tag.raw();
24925 }
24926 }
24927 return UserTag::null();
24928}
24929
24930const char* UserTag::ToCString() const {
24931 const String& tag_label = String::Handle(label());
24932 return tag_label.ToCString();
24933}
24934
24935void DumpTypeTable(Isolate* isolate) {
24936 OS::PrintErr("canonical types:\n");
24937 CanonicalTypeSet table(isolate->object_store()->canonical_types());
24938 table.Dump();
24939 table.Release();
24940}
24941
24942void DumpTypeParameterTable(Isolate* isolate) {
24943 OS::PrintErr("canonical type parameters (cloned from declarations):\n");
24944 CanonicalTypeParameterSet table(
24945 isolate->object_store()->canonical_type_parameters());
24946 table.Dump();
24947 table.Release();
24948}
24949
24950void DumpTypeArgumentsTable(Isolate* isolate) {
24951 OS::PrintErr("canonical type arguments:\n");
24952 CanonicalTypeArgumentsSet table(
24953 isolate->object_store()->canonical_type_arguments());
24954 table.Dump();
24955 table.Release();
24956}
24957
24958EntryPointPragma FindEntryPointPragma(Isolate* I,
24959 const Array& metadata,
24960 Field* reusable_field_handle,
24961 Object* pragma) {
24962 for (intptr_t i = 0; i < metadata.Length(); i++) {
24963 *pragma = metadata.At(i);
24964 if (pragma->clazz() != I->object_store()->pragma_class()) {
24965 continue;
24966 }
24967 *reusable_field_handle = I->object_store()->pragma_name();
24968 if (Instance::Cast(*pragma).GetField(*reusable_field_handle) !=
24969 Symbols::vm_entry_point().raw()) {
24970 continue;
24971 }
24972 *reusable_field_handle = I->object_store()->pragma_options();
24973 *pragma = Instance::Cast(*pragma).GetField(*reusable_field_handle);
24974 if (pragma->raw() == Bool::null() || pragma->raw() == Bool::True().raw()) {
24975 return EntryPointPragma::kAlways;
24976 break;
24977 }
24978 if (pragma->raw() == Symbols::Get().raw()) {
24979 return EntryPointPragma::kGetterOnly;
24980 }
24981 if (pragma->raw() == Symbols::Set().raw()) {
24982 return EntryPointPragma::kSetterOnly;
24983 }
24984 if (pragma->raw() == Symbols::Call().raw()) {
24985 return EntryPointPragma::kCallOnly;
24986 }
24987 }
24988 return EntryPointPragma::kNever;
24989}
24990
// Returns Error::null() if |member| may legally be accessed through the Dart
// C API, or an ApiError (also printed to stderr) when it lacks the required
// `pragma('vm:entry-point', ...)` annotation. |annotated| is the object that
// carries the annotation (e.g. the backing field for an implicit accessor;
// may be null for members that can never be annotated). |allowed_kinds| lists
// the pragma kinds acceptable for this access in addition to kAlways.
DART_WARN_UNUSED_RESULT
ErrorPtr VerifyEntryPoint(
    const Library& lib,
    const Object& member,
    const Object& annotated,
    std::initializer_list<EntryPointPragma> allowed_kinds) {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Annotations are discarded in the AOT snapshot, so we can't determine
  // precisely if this member was marked as an entry-point. Instead, we use
  // "has_pragma()" as a proxy, since that bit is usually retained.
  bool is_marked_entrypoint = true;
  if (annotated.IsClass() && !Class::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  } else if (annotated.IsField() && !Field::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  } else if (annotated.IsFunction() &&
             !Function::Cast(annotated).has_pragma()) {
    is_marked_entrypoint = false;
  }
#else
  // JIT mode: read the metadata off |annotated| and look for an actual
  // 'vm:entry-point' pragma with an acceptable options value.
  Object& metadata = Object::Handle(Object::empty_array().raw());
  if (!annotated.IsNull()) {
    metadata = lib.GetMetadata(annotated);
  }
  // Reading metadata can itself fail (e.g. a compile-time error in the
  // annotation expression); propagate that error to the caller.
  if (metadata.IsError()) return Error::RawCast(metadata.raw());
  ASSERT(!metadata.IsNull() && metadata.IsArray());
  EntryPointPragma pragma =
      FindEntryPointPragma(Isolate::Current(), Array::Cast(metadata),
                           &Field::Handle(), &Object::Handle());
  // kAlways permits any access; otherwise the pragma kind must be one of the
  // kinds the caller allows for this particular access.
  bool is_marked_entrypoint = pragma == EntryPointPragma::kAlways;
  if (!is_marked_entrypoint) {
    for (const auto allowed_kind : allowed_kinds) {
      if (pragma == allowed_kind) {
        is_marked_entrypoint = true;
        break;
      }
    }
  }
#endif
  if (!is_marked_entrypoint) {
    // Build a descriptive name for the offending member (functions include
    // their kind to aid diagnosis), print the error, and return an ApiError.
    const char* member_cstring =
        member.IsFunction()
            ? OS::SCreate(
                  Thread::Current()->zone(), "%s (kind %s)",
                  Function::Cast(member).ToLibNamePrefixedQualifiedCString(),
                  Function::KindToCString(Function::Cast(member).kind()))
            : member.ToCString();
    char const* error = OS::SCreate(
        Thread::Current()->zone(),
        "ERROR: It is illegal to access '%s' through Dart C API.\n"
        "ERROR: See "
        "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
        "aot/entry_point_pragma.md\n",
        member_cstring);
    OS::PrintErr("%s", error);
    return ApiError::New(String::Handle(String::New(error)));
  }
  return Error::null();
}
25050
25051DART_WARN_UNUSED_RESULT
25052ErrorPtr EntryPointFieldInvocationError(const String& getter_name) {
25053 if (!FLAG_verify_entry_points) return Error::null();
25054
25055 char const* error = OS::SCreate(
25056 Thread::Current()->zone(),
25057 "ERROR: Entry-points do not allow invoking fields "
25058 "(failure to resolve '%s')\n"
25059 "ERROR: See "
25060 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
25061 "aot/entry_point_pragma.md\n",
25062 getter_name.ToCString());
25063 OS::PrintErr("%s", error);
25064 return ApiError::New(String::Handle(String::New(error)));
25065}
25066
25067ErrorPtr Function::VerifyCallEntryPoint() const {
25068 if (!FLAG_verify_entry_points) return Error::null();
25069
25070 const Class& cls = Class::Handle(Owner());
25071 const Library& lib = Library::Handle(cls.library());
25072 switch (kind()) {
25073 case FunctionLayout::kRegularFunction:
25074 case FunctionLayout::kSetterFunction:
25075 case FunctionLayout::kConstructor:
25076 return dart::VerifyEntryPoint(lib, *this, *this,
25077 {EntryPointPragma::kCallOnly});
25078 break;
25079 case FunctionLayout::kGetterFunction:
25080 return dart::VerifyEntryPoint(
25081 lib, *this, *this,
25082 {EntryPointPragma::kCallOnly, EntryPointPragma::kGetterOnly});
25083 break;
25084 case FunctionLayout::kImplicitGetter:
25085 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
25086 {EntryPointPragma::kGetterOnly});
25087 break;
25088 case FunctionLayout::kImplicitSetter:
25089 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
25090 {EntryPointPragma::kSetterOnly});
25091 case FunctionLayout::kMethodExtractor:
25092 return Function::Handle(extracted_method_closure())
25093 .VerifyClosurizedEntryPoint();
25094 break;
25095 default:
25096 return dart::VerifyEntryPoint(lib, *this, Object::Handle(), {});
25097 break;
25098 }
25099}
25100
25101ErrorPtr Function::VerifyClosurizedEntryPoint() const {
25102 if (!FLAG_verify_entry_points) return Error::null();
25103
25104 const Class& cls = Class::Handle(Owner());
25105 const Library& lib = Library::Handle(cls.library());
25106 switch (kind()) {
25107 case FunctionLayout::kRegularFunction:
25108 case FunctionLayout::kImplicitClosureFunction:
25109 return dart::VerifyEntryPoint(lib, *this, *this,
25110 {EntryPointPragma::kGetterOnly});
25111 default:
25112 UNREACHABLE();
25113 }
25114}
25115
25116ErrorPtr Field::VerifyEntryPoint(EntryPointPragma pragma) const {
25117 if (!FLAG_verify_entry_points) return Error::null();
25118 const Class& cls = Class::Handle(Owner());
25119 const Library& lib = Library::Handle(cls.library());
25120 return dart::VerifyEntryPoint(lib, *this, *this, {pragma});
25121}
25122
25123ErrorPtr Class::VerifyEntryPoint() const {
25124 if (!FLAG_verify_entry_points) return Error::null();
25125 const Library& lib = Library::Handle(library());
25126 if (!lib.IsNull()) {
25127 return dart::VerifyEntryPoint(lib, *this, *this, {});
25128 } else {
25129 return Error::null();
25130 }
25131}
25132
25133} // namespace dart
25134