// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_OBJECT_H_
#define RUNTIME_VM_OBJECT_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif

#include <limits>
#include <tuple>

#include "include/dart_api.h"
#include "platform/assert.h"
#include "platform/atomic.h"
#include "platform/thread_sanitizer.h"
#include "platform/utils.h"
#include "vm/bitmap.h"
#include "vm/code_entry_kind.h"
#include "vm/compiler/assembler/object_pool_builder.h"
#include "vm/compiler/method_recognizer.h"
#include "vm/compiler/runtime_api.h"
#include "vm/dart.h"
#include "vm/flags.h"
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/handles.h"
#include "vm/heap/heap.h"
#include "vm/isolate.h"
#include "vm/json_stream.h"
#include "vm/os.h"
#include "vm/raw_object.h"
#include "vm/report.h"
#include "vm/static_type_exactness_state.h"
#include "vm/tags.h"
#include "vm/thread.h"
#include "vm/token_position.h"

namespace dart {

// Forward declarations.
namespace compiler {
class Assembler;
}

namespace kernel {
class Program;
class TreeNode;
}  // namespace kernel

#define DEFINE_FORWARD_DECLARATION(clazz) class clazz;
CLASS_LIST(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION
class Api;
class ArgumentsDescriptor;
class Closure;
class Code;
class DeoptInstr;
class DisassemblyFormatter;
class FinalizablePersistentHandle;
class FlowGraphCompiler;
class HierarchyInfo;
class LocalScope;
class CodeStatistics;
class IsolateGroupReloadContext;

#define REUSABLE_FORWARD_DECLARATION(name) class Reusable##name##HandleScope;
REUSABLE_HANDLE_LIST(REUSABLE_FORWARD_DECLARATION)
#undef REUSABLE_FORWARD_DECLARATION

class Symbols;
class BaseTextBuffer;

#if defined(DEBUG)
#define CHECK_HANDLE() CheckHandle();
#else
#define CHECK_HANDLE()
#endif

#define BASE_OBJECT_IMPLEMENTATION(object, super) \
 public: /* NOLINT */ \
  using ObjectLayoutType = dart::object##Layout; \
  using ObjectPtrType = dart::object##Ptr; \
  object##Ptr raw() const { return static_cast<object##Ptr>(raw_); } \
  bool Is##object() const { return true; } \
  DART_NOINLINE static object& Handle() { \
    return HandleImpl(Thread::Current()->zone(), object::null()); \
  } \
  DART_NOINLINE static object& Handle(Zone* zone) { \
    return HandleImpl(zone, object::null()); \
  } \
  DART_NOINLINE static object& Handle(object##Ptr raw_ptr) { \
    return HandleImpl(Thread::Current()->zone(), raw_ptr); \
  } \
  DART_NOINLINE static object& Handle(Zone* zone, object##Ptr raw_ptr) { \
    return HandleImpl(zone, raw_ptr); \
  } \
  DART_NOINLINE static object& ZoneHandle() { \
    return ZoneHandleImpl(Thread::Current()->zone(), object::null()); \
  } \
  DART_NOINLINE static object& ZoneHandle(Zone* zone) { \
    return ZoneHandleImpl(zone, object::null()); \
  } \
  DART_NOINLINE static object& ZoneHandle(object##Ptr raw_ptr) { \
    return ZoneHandleImpl(Thread::Current()->zone(), raw_ptr); \
  } \
  DART_NOINLINE static object& ZoneHandle(Zone* zone, object##Ptr raw_ptr) { \
    return ZoneHandleImpl(zone, raw_ptr); \
  } \
  DART_NOINLINE static object* ReadOnlyHandle() { \
    object* obj = reinterpret_cast<object*>(Dart::AllocateReadOnlyHandle()); \
    initializeHandle(obj, object::null()); \
    return obj; \
  } \
  DART_NOINLINE static object& CheckedHandle(Zone* zone, ObjectPtr raw_ptr) { \
    object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone)); \
    initializeHandle(obj, raw_ptr); \
    if (!obj->Is##object()) { \
      FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(), \
             #object); \
    } \
    return *obj; \
  } \
  DART_NOINLINE static object& CheckedZoneHandle(Zone* zone, \
                                                 ObjectPtr raw_ptr) { \
    object* obj = \
        reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone)); \
    initializeHandle(obj, raw_ptr); \
    if (!obj->Is##object()) { \
      FATAL2("Handle check failed: saw %s expected %s", obj->ToCString(), \
             #object); \
    } \
    return *obj; \
  } \
  DART_NOINLINE static object& CheckedZoneHandle(ObjectPtr raw_ptr) { \
    return CheckedZoneHandle(Thread::Current()->zone(), raw_ptr); \
  } \
  /* T::Cast cannot be applied to a null Object, because the object vtable */ \
  /* is not set up for type T, although some methods are supposed to work */ \
  /* with null, for example Instance::Equals(). */ \
  static const object& Cast(const Object& obj) { \
    ASSERT(obj.Is##object()); \
    return reinterpret_cast<const object&>(obj); \
  } \
  static object##Ptr RawCast(ObjectPtr raw) { \
    ASSERT(Object::Handle(raw).IsNull() || Object::Handle(raw).Is##object()); \
    return static_cast<object##Ptr>(raw); \
  } \
  static object##Ptr null() { \
    return static_cast<object##Ptr>(Object::null()); \
  } \
  virtual const char* ToCString() const; \
  static const ClassId kClassId = k##object##Cid; \
 \
 private: /* NOLINT */ \
  static object& HandleImpl(Zone* zone, object##Ptr raw_ptr) { \
    object* obj = reinterpret_cast<object*>(VMHandles::AllocateHandle(zone)); \
    initializeHandle(obj, raw_ptr); \
    return *obj; \
  } \
  static object& ZoneHandleImpl(Zone* zone, object##Ptr raw_ptr) { \
    object* obj = \
        reinterpret_cast<object*>(VMHandles::AllocateZoneHandle(zone)); \
    initializeHandle(obj, raw_ptr); \
    return *obj; \
  } \
  /* Initialize the handle based on the raw_ptr in the presence of null. */ \
  static void initializeHandle(object* obj, ObjectPtr raw_ptr) { \
    if (raw_ptr != Object::null()) { \
      obj->SetRaw(raw_ptr); \
    } else { \
      obj->raw_ = Object::null(); \
      object fake_object; \
      obj->set_vtable(fake_object.vtable()); \
    } \
  } \
  /* Disallow allocation, copy constructors and override super assignment. */ \
 public: /* NOLINT */ \
  void operator delete(void* pointer) { UNREACHABLE(); } \
 \
 private: /* NOLINT */ \
  void* operator new(size_t size); \
  object(const object& value) = delete; \
  void operator=(super##Ptr value) = delete; \
  void operator=(const object& value) = delete; \
  void operator=(const super& value) = delete;
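
// Usage sketch (illustrative only; the variable names below are hypothetical,
// while Handle/ZoneHandle/Cast/RawCast are exactly what the macro above
// generates for each handle class):
//
//   Zone* zone = Thread::Current()->zone();
//   String& name = String::Handle(zone);   // scope-allocated handle
//   name = cls.Name();                     // assignment reuses the handle
//   const Array& args = Array::ZoneHandle(zone, Array::New(2));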

// Conditionally include object_service.cc functionality in the vtable to avoid
// link errors like the following:
//
// object.o:(.rodata._ZTVN4....E[_ZTVN4...E]+0x278):
// undefined reference to
// `dart::Instance::PrintSharedInstanceJSON(dart::JSONObject*, bool) const'.
//
#ifndef PRODUCT
#define OBJECT_SERVICE_SUPPORT(object) \
 protected: /* NOLINT */ \
  /* Object is printed as JSON into stream. If ref is true only a header */ \
  /* with an object id is printed. If ref is false the object is fully */ \
  /* printed. */ \
  virtual void PrintJSONImpl(JSONStream* stream, bool ref) const; \
  virtual const char* JSONType() const { return "" #object; }
#else
#define OBJECT_SERVICE_SUPPORT(object) protected: /* NOLINT */
#endif  // !PRODUCT

#define SNAPSHOT_READER_SUPPORT(object) \
  static object##Ptr ReadFrom(SnapshotReader* reader, intptr_t object_id, \
                              intptr_t tags, Snapshot::Kind, \
                              bool as_reference); \
  friend class SnapshotReader;

#define OBJECT_IMPLEMENTATION(object, super) \
 public: /* NOLINT */ \
  void operator=(object##Ptr value) { initializeHandle(this, value); } \
  void operator^=(ObjectPtr value) { \
    initializeHandle(this, value); \
    ASSERT(IsNull() || Is##object()); \
  } \
 \
 protected: /* NOLINT */ \
  object() : super() {} \
  BASE_OBJECT_IMPLEMENTATION(object, super) \
  OBJECT_SERVICE_SUPPORT(object) \
  friend class Object;

#define HEAP_OBJECT_IMPLEMENTATION(object, super) \
  OBJECT_IMPLEMENTATION(object, super); \
  const object##Layout* raw_ptr() const { \
    ASSERT(raw() != null()); \
    return raw()->ptr(); \
  } \
  SNAPSHOT_READER_SUPPORT(object) \
  friend class StackFrame; \
  friend class Thread;
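
// For orientation, a hypothetical sketch (not a class defined in this file) of
// how a VM object wrapper is typically declared with the macros above:
//
//   class Foo : public Instance {
//    public:
//     ...accessors implemented in terms of raw_ptr()...
//
//    private:
//     HEAP_OBJECT_IMPLEMENTATION(Foo, Instance);
//   };
//
// Concrete classes later in this header (e.g. Class) follow this pattern.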

// This macro is used to denote types that do not have a sub-type.
#define FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super) \
 public: /* NOLINT */ \
  void operator=(object##Ptr value) { \
    raw_ = value; \
    CHECK_HANDLE(); \
  } \
  void operator^=(ObjectPtr value) { \
    raw_ = value; \
    CHECK_HANDLE(); \
  } \
 \
 private: /* NOLINT */ \
  object() : super() {} \
  BASE_OBJECT_IMPLEMENTATION(object, super) \
  OBJECT_SERVICE_SUPPORT(object) \
  const object##Layout* raw_ptr() const { \
    ASSERT(raw() != null()); \
    return raw()->ptr(); \
  } \
  static intptr_t NextFieldOffset() { return -kWordSize; } \
  SNAPSHOT_READER_SUPPORT(rettype) \
  friend class Object; \
  friend class StackFrame; \
  friend class Thread;

#define FINAL_HEAP_OBJECT_IMPLEMENTATION(object, super) \
  FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, object, super)

#define MINT_OBJECT_IMPLEMENTATION(object, rettype, super) \
  FINAL_HEAP_OBJECT_IMPLEMENTATION_HELPER(object, rettype, super)

// In the precompiled runtime there is no access to runtime_api.cc, since host
// and target are the same. In that case the namespace dart is used to refer to
// the target namespace.
#if defined(DART_PRECOMPILED_RUNTIME)
namespace RTN = dart;
#else
namespace RTN = dart::compiler::target;
#endif  // defined(DART_PRECOMPILED_RUNTIME)

class Object {
 public:
  using ObjectLayoutType = ObjectLayout;
  using ObjectPtrType = ObjectPtr;

  static ObjectPtr RawCast(ObjectPtr obj) { return obj; }

  virtual ~Object() {}

  ObjectPtr raw() const { return raw_; }
  void operator=(ObjectPtr value) { initializeHandle(this, value); }

  uint32_t CompareAndSwapTags(uint32_t old_tags, uint32_t new_tags) const {
    raw()->ptr()->tags_.StrongCAS(old_tags, new_tags);
    return old_tags;
  }
  bool IsCanonical() const { return raw()->ptr()->IsCanonical(); }
  void SetCanonical() const { raw()->ptr()->SetCanonical(); }
  void ClearCanonical() const { raw()->ptr()->ClearCanonical(); }
  intptr_t GetClassId() const {
    return !raw()->IsHeapObject() ? static_cast<intptr_t>(kSmiCid)
                                  : raw()->ptr()->GetClassId();
  }
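  // GetClassId() above, for illustration: an immediate Smi reports kSmiCid
  // without touching a heap header, while a heap object reads its class id
  // from the tags word, e.g. (informal sketch)
  //
  //   const Object& obj = Object::Handle(String::New("x"));
  //   ASSERT(obj.GetClassId() == kOneByteStringCid);
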
  inline ClassPtr clazz() const;
  static intptr_t tags_offset() { return OFFSET_OF(ObjectLayout, tags_); }

// Class testers.
#define DEFINE_CLASS_TESTER(clazz) \
  virtual bool Is##clazz() const { return false; }
  CLASS_LIST_FOR_HANDLES(DEFINE_CLASS_TESTER);
#undef DEFINE_CLASS_TESTER

  bool IsNull() const { return raw_ == null_; }

  // Matches Object.toString on instances (except String::ToCString, bug 20583).
  virtual const char* ToCString() const {
    if (IsNull()) {
      return "null";
    } else {
      return "Object";
    }
  }

#ifndef PRODUCT
  void PrintJSON(JSONStream* stream, bool ref = true) const;
  virtual void PrintJSONImpl(JSONStream* stream, bool ref) const;
  virtual const char* JSONType() const { return IsNull() ? "null" : "Object"; }
#endif

  // Returns the name that is used to identify an object in the
  // namespace dictionary.
  // Object::DictionaryName() returns String::null(). Only subclasses
  // of Object that need to be entered in the library and library prefix
  // namespaces need to provide an implementation.
  virtual StringPtr DictionaryName() const;

  bool IsNew() const { return raw()->IsNewObject(); }
  bool IsOld() const { return raw()->IsOldObject(); }
#if defined(DEBUG)
  bool InVMIsolateHeap() const;
#else
  bool InVMIsolateHeap() const { return raw()->ptr()->InVMIsolateHeap(); }
#endif  // DEBUG

  // Print the object on stdout for debugging.
  void Print() const;

  bool IsZoneHandle() const {
    return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
  }

  bool IsReadOnlyHandle() const;

  bool IsNotTemporaryScopedHandle() const;

  static Object& Handle(Zone* zone, ObjectPtr raw_ptr) {
    Object* obj = reinterpret_cast<Object*>(VMHandles::AllocateHandle(zone));
    initializeHandle(obj, raw_ptr);
    return *obj;
  }
  static Object* ReadOnlyHandle() {
    Object* obj = reinterpret_cast<Object*>(Dart::AllocateReadOnlyHandle());
    initializeHandle(obj, Object::null());
    return obj;
  }

  static Object& Handle() { return Handle(Thread::Current()->zone(), null_); }

  static Object& Handle(Zone* zone) { return Handle(zone, null_); }

  static Object& Handle(ObjectPtr raw_ptr) {
    return Handle(Thread::Current()->zone(), raw_ptr);
  }

  static Object& ZoneHandle(Zone* zone, ObjectPtr raw_ptr) {
    Object* obj =
        reinterpret_cast<Object*>(VMHandles::AllocateZoneHandle(zone));
    initializeHandle(obj, raw_ptr);
    return *obj;
  }

  static Object& ZoneHandle(Zone* zone) { return ZoneHandle(zone, null_); }

  static Object& ZoneHandle() {
    return ZoneHandle(Thread::Current()->zone(), null_);
  }

  static Object& ZoneHandle(ObjectPtr raw_ptr) {
    return ZoneHandle(Thread::Current()->zone(), raw_ptr);
  }

  static ObjectPtr null() { return null_; }

#if defined(HASH_IN_OBJECT_HEADER)
  static uint32_t GetCachedHash(const ObjectPtr obj) {
    return obj->ptr()->hash_;
  }

  static void SetCachedHash(ObjectPtr obj, uint32_t hash) {
    obj->ptr()->hash_ = hash;
  }
#endif

  // The list below enumerates read-only handles for singleton
  // objects that are shared between the different isolates.
  //
  // - sentinel is a value that cannot be produced by Dart code. It can be used
  //   to mark special values, for example to distinguish "uninitialized" fields.
  // - transition_sentinel is a value marking that we are transitioning from
  //   sentinel, e.g., computing a field value. Used to detect circular
  //   initialization.
  // - unknown_constant and non_constant are optimizing compiler's constant
  //   propagation constants.
#define SHARED_READONLY_HANDLES_LIST(V) \
  V(Object, null_object) \
  V(Array, null_array) \
  V(String, null_string) \
  V(Instance, null_instance) \
  V(Function, null_function) \
  V(TypeArguments, null_type_arguments) \
  V(CompressedStackMaps, null_compressed_stack_maps) \
  V(TypeArguments, empty_type_arguments) \
  V(Array, empty_array) \
  V(Array, zero_array) \
  V(ContextScope, empty_context_scope) \
  V(ObjectPool, empty_object_pool) \
  V(PcDescriptors, empty_descriptors) \
  V(LocalVarDescriptors, empty_var_descriptors) \
  V(ExceptionHandlers, empty_exception_handlers) \
  V(Array, extractor_parameter_types) \
  V(Array, extractor_parameter_names) \
  V(Bytecode, implicit_getter_bytecode) \
  V(Bytecode, implicit_setter_bytecode) \
  V(Bytecode, implicit_static_getter_bytecode) \
  V(Bytecode, method_extractor_bytecode) \
  V(Bytecode, invoke_closure_bytecode) \
  V(Bytecode, invoke_field_bytecode) \
  V(Bytecode, nsm_dispatcher_bytecode) \
  V(Bytecode, dynamic_invocation_forwarder_bytecode) \
  V(Instance, sentinel) \
  V(Instance, transition_sentinel) \
  V(Instance, unknown_constant) \
  V(Instance, non_constant) \
  V(Bool, bool_true) \
  V(Bool, bool_false) \
  V(Smi, smi_illegal_cid) \
  V(Smi, smi_zero) \
  V(ApiError, typed_data_acquire_error) \
  V(LanguageError, snapshot_writer_error) \
  V(LanguageError, branch_offset_error) \
  V(LanguageError, speculative_inlining_error) \
  V(LanguageError, background_compilation_error) \
  V(LanguageError, out_of_memory_error) \
  V(Array, vm_isolate_snapshot_object_table) \
  V(Type, dynamic_type) \
  V(Type, void_type) \
  V(AbstractType, null_abstract_type)

#define DEFINE_SHARED_READONLY_HANDLE_GETTER(Type, name) \
  static const Type& name() { \
    ASSERT(name##_ != nullptr); \
    return *name##_; \
  }
  SHARED_READONLY_HANDLES_LIST(DEFINE_SHARED_READONLY_HANDLE_GETTER)
#undef DEFINE_SHARED_READONLY_HANDLE_GETTER
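
  // Illustrative usage of the generated getters (a sketch, not new API): the
  // shared read-only singletons are reached through these static accessors,
  // e.g. Object::empty_array() or Object::bool_true(), and the returned
  // handles point into the non-GC'd VM isolate heap, so they may be cached
  // across isolates.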

  static void set_vm_isolate_snapshot_object_table(const Array& table);

  static ClassPtr class_class() { return class_class_; }
  static ClassPtr dynamic_class() { return dynamic_class_; }
  static ClassPtr void_class() { return void_class_; }
  static ClassPtr type_arguments_class() { return type_arguments_class_; }
  static ClassPtr patch_class_class() { return patch_class_class_; }
  static ClassPtr function_class() { return function_class_; }
  static ClassPtr closure_data_class() { return closure_data_class_; }
  static ClassPtr signature_data_class() { return signature_data_class_; }
  static ClassPtr redirection_data_class() { return redirection_data_class_; }
  static ClassPtr ffi_trampoline_data_class() {
    return ffi_trampoline_data_class_;
  }
  static ClassPtr field_class() { return field_class_; }
  static ClassPtr script_class() { return script_class_; }
  static ClassPtr library_class() { return library_class_; }
  static ClassPtr namespace_class() { return namespace_class_; }
  static ClassPtr kernel_program_info_class() {
    return kernel_program_info_class_;
  }
  static ClassPtr code_class() { return code_class_; }
  static ClassPtr bytecode_class() { return bytecode_class_; }
  static ClassPtr instructions_class() { return instructions_class_; }
  static ClassPtr instructions_section_class() {
    return instructions_section_class_;
  }
  static ClassPtr object_pool_class() { return object_pool_class_; }
  static ClassPtr pc_descriptors_class() { return pc_descriptors_class_; }
  static ClassPtr code_source_map_class() { return code_source_map_class_; }
  static ClassPtr compressed_stackmaps_class() {
    return compressed_stackmaps_class_;
  }
  static ClassPtr var_descriptors_class() { return var_descriptors_class_; }
  static ClassPtr exception_handlers_class() {
    return exception_handlers_class_;
  }
  static ClassPtr deopt_info_class() { return deopt_info_class_; }
  static ClassPtr context_class() { return context_class_; }
  static ClassPtr context_scope_class() { return context_scope_class_; }
  static ClassPtr api_error_class() { return api_error_class_; }
  static ClassPtr language_error_class() { return language_error_class_; }
  static ClassPtr unhandled_exception_class() {
    return unhandled_exception_class_;
  }
  static ClassPtr unwind_error_class() { return unwind_error_class_; }
  static ClassPtr dyncalltypecheck_class() { return dyncalltypecheck_class_; }
  static ClassPtr singletargetcache_class() { return singletargetcache_class_; }
  static ClassPtr unlinkedcall_class() { return unlinkedcall_class_; }
  static ClassPtr monomorphicsmiablecall_class() {
    return monomorphicsmiablecall_class_;
  }
  static ClassPtr icdata_class() { return icdata_class_; }
  static ClassPtr megamorphic_cache_class() { return megamorphic_cache_class_; }
  static ClassPtr subtypetestcache_class() { return subtypetestcache_class_; }
  static ClassPtr loadingunit_class() { return loadingunit_class_; }
  static ClassPtr weak_serialization_reference_class() {
    return weak_serialization_reference_class_;
  }

  // Initialize the VM isolate.
  static void InitNullAndBool(Isolate* isolate);
  static void Init(Isolate* isolate);
  static void InitVtables();
  static void FinishInit(Isolate* isolate);
  static void FinalizeVMIsolate(Isolate* isolate);
  static void FinalizeReadOnlyObject(ObjectPtr object);

  static void Cleanup();

  // Initialize a new isolate either from a Kernel IR, from source, or from a
  // snapshot.
  static ErrorPtr Init(Isolate* isolate,
                       const uint8_t* kernel_buffer,
                       intptr_t kernel_buffer_size);

  static void MakeUnusedSpaceTraversable(const Object& obj,
                                         intptr_t original_size,
                                         intptr_t used_size);

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(ObjectLayout));
  }

  template <class FakeObject>
  static void VerifyBuiltinVtable(intptr_t cid) {
    FakeObject fake;
    if (cid >= kNumPredefinedCids) {
      cid = kInstanceCid;
    }
    ASSERT(builtin_vtables_[cid] == fake.vtable());
  }
  static void VerifyBuiltinVtables();

  static const ClassId kClassId = kObjectCid;

  // Different kinds of name visibility.
  enum NameVisibility {
    // Internal names are the true names of classes, fields,
    // etc. inside the vm. These names include privacy suffixes,
    // getter prefixes, and trailing dots on unnamed constructors.
    //
    // The names of core implementation classes (like _OneByteString)
    // are preserved as well.
    //
    // e.g.
    //   private getter             -> get:foo@6be832b
    //   private constructor        -> _MyClass@6b3832b.
    //   private named constructor  -> _MyClass@6b3832b.named
    //   core impl class name shown -> _OneByteString
    kInternalName = 0,

    // Scrubbed names drop privacy suffixes, getter prefixes, and
    // trailing dots on unnamed constructors. These names are used in
    // the vm service.
    //
    // e.g.
    //   get:foo@6be832b        -> foo
    //   _MyClass@6b3832b.      -> _MyClass
    //   _MyClass@6b3832b.named -> _MyClass.named
    //   _OneByteString         -> _OneByteString (not remapped)
    kScrubbedName,

    // User visible names are appropriate for reporting type errors
    // directly to programmers. The names have been scrubbed and
    // the names of core implementation classes are remapped to their
    // public interface names.
    //
    // e.g.
    //   get:foo@6be832b        -> foo
    //   _MyClass@6b3832b.      -> _MyClass
    //   _MyClass@6b3832b.named -> _MyClass.named
    //   _OneByteString         -> String (remapped)
    kUserVisibleName
  };

  // Sometimes simple formatting might produce the same name for two different
  // entities, for example we might inject a synthetic forwarder into the
  // class which has the same name as an already existing function, or
  // two different types can be formatted as X<T> because T has a different
  // meaning (refers to a different type parameter) in these two types.
  // Such ambiguity might be acceptable in some contexts but not in others, so
  // some formatting methods have two modes: one which tries to be more
  // user-friendly, and another which tries to avoid name conflicts by
  // emitting longer and less user-friendly names.
  enum class NameDisambiguation {
    kYes,
    kNo,
  };

 protected:
  // Used for extracting the C++ vtable during bringup.
  Object() : raw_(null_) {}

  uword raw_value() const { return static_cast<uword>(raw()); }

  inline void SetRaw(ObjectPtr value);
  void CheckHandle() const;

  cpp_vtable vtable() const { return bit_copy<cpp_vtable>(*this); }
  void set_vtable(cpp_vtable value) { *vtable_address() = value; }

  static ObjectPtr Allocate(intptr_t cls_id, intptr_t size, Heap::Space space);

  static intptr_t RoundedAllocationSize(intptr_t size) {
    return Utils::RoundUp(size, kObjectAlignment);
  }

  bool Contains(uword addr) const { return raw()->ptr()->Contains(addr); }

  // Start of field mutator guards.
  //
  // All writes to heap objects should ultimately pass through one of the
  // methods below or their counterparts in RawObject, to ensure that the
  // write barrier is correctly applied.

  template <typename type, std::memory_order order = std::memory_order_relaxed>
  type LoadPointer(type const* addr) const {
    return raw()->ptr()->LoadPointer<type, order>(addr);
  }

  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StorePointer(type const* addr, type value) const {
    raw()->ptr()->StorePointer<type, order>(addr, value);
  }
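
  // Typical use of StorePointer (an illustrative sketch of the pattern the
  // subclasses follow in object.cc; the field shown is only an example):
  //
  //   void Class::set_script(const Script& value) const {
  //     StorePointer(&raw_ptr()->script_, value.raw());
  //   }
  //
  // Routing stores through StorePointer is what keeps the generational write
  // barrier correct when an old-space object starts pointing at a new-space
  // object.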

  // Use for storing into an explicitly Smi-typed field of an object
  // (i.e., both the previous and new value are Smis).
  void StoreSmi(SmiPtr const* addr, SmiPtr value) const {
    raw()->ptr()->StoreSmi(addr, value);
  }
  void StoreSmiIgnoreRace(SmiPtr const* addr, SmiPtr value) const {
    raw()->ptr()->StoreSmiIgnoreRace(addr, value);
  }

  template <typename FieldType>
  void StoreSimd128(const FieldType* addr, simd128_value_t value) const {
    ASSERT(Contains(reinterpret_cast<uword>(addr)));
    value.writeTo(const_cast<FieldType*>(addr));
  }

  template <typename FieldType>
  FieldType LoadNonPointer(const FieldType* addr) const {
    return *const_cast<FieldType*>(addr);
  }

  template <typename FieldType, std::memory_order order>
  FieldType LoadNonPointer(const FieldType* addr) const {
    return reinterpret_cast<std::atomic<FieldType>*>(
               const_cast<FieldType*>(addr))
        ->load(order);
  }

  // Needs two template arguments to allow assigning enums to fixed-size ints.
  template <typename FieldType, typename ValueType>
  void StoreNonPointer(const FieldType* addr, ValueType value) const {
    // Can't use Contains, as it uses tags_, which is set through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= ObjectLayout::ToAddr(raw()));
    *const_cast<FieldType*>(addr) = value;
  }

  template <typename FieldType, typename ValueType, std::memory_order order>
  void StoreNonPointer(const FieldType* addr, ValueType value) const {
    // Can't use Contains, as it uses tags_, which is set through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= ObjectLayout::ToAddr(raw()));
    reinterpret_cast<std::atomic<FieldType>*>(const_cast<FieldType*>(addr))
        ->store(value, order);
  }

  // Provides non-const access to non-pointer fields within the object. Such
  // access does not need a write barrier, but it is *not* GC-safe, since the
  // object might move, hence must be fully contained within a NoSafepointScope.
  template <typename FieldType>
  FieldType* UnsafeMutableNonPointer(const FieldType* addr) const {
    // Allow pointers at the end of variable-length data, and disallow pointers
    // within the header word.
    ASSERT(Contains(reinterpret_cast<uword>(addr) - 1) &&
           Contains(reinterpret_cast<uword>(addr) - kWordSize));
    // At least check that there is a NoSafepointScope and hope it's big enough.
    ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
    return const_cast<FieldType*>(addr);
  }
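
  // Illustrative sketch of using UnsafeMutableNonPointer (the field name is
  // hypothetical, not an API defined here):
  //
  //   {
  //     NoSafepointScope no_safepoint;  // the object must not move mid-write
  //     *UnsafeMutableNonPointer(&raw_ptr()->length_) = new_length;
  //   }
  //
  // By contrast, StoreNonPointer above is a single plain assignment and only
  // asserts that the address does not precede the object's start.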

// Fail at link time if StoreNonPointer or UnsafeMutableNonPointer is
// instantiated with an object pointer type.
#define STORE_NON_POINTER_ILLEGAL_TYPE(type) \
  template <typename ValueType> \
  void StoreNonPointer(type##Ptr const* addr, ValueType value) const { \
    UnimplementedMethod(); \
  } \
  type##Ptr* UnsafeMutableNonPointer(type##Ptr const* addr) const { \
    UnimplementedMethod(); \
    return NULL; \
  }

  CLASS_LIST(STORE_NON_POINTER_ILLEGAL_TYPE);
  void UnimplementedMethod() const;
#undef STORE_NON_POINTER_ILLEGAL_TYPE

  // Allocate an object and copy the body of 'orig'.
  static ObjectPtr Clone(const Object& orig, Heap::Space space);

  // End of field mutator guards.

  ObjectPtr raw_;  // The raw object reference.

 protected:
  void AddCommonObjectProperties(JSONObject* jsobj,
                                 const char* protocol_type,
                                 bool ref) const;

 private:
  static intptr_t NextFieldOffset() {
    // Indicates this class cannot be extended by dart code.
    return -kWordSize;
  }

  static void InitializeObject(uword address, intptr_t id, intptr_t size);

  static void RegisterClass(const Class& cls,
                            const String& name,
                            const Library& lib);
  static void RegisterPrivateClass(const Class& cls,
                                   const String& name,
                                   const Library& lib);

  /* Initialize the handle based on the raw_ptr in the presence of null. */
  static void initializeHandle(Object* obj, ObjectPtr raw_ptr) {
    if (raw_ptr != Object::null()) {
      obj->SetRaw(raw_ptr);
    } else {
      obj->raw_ = Object::null();
      Object fake_object;
      obj->set_vtable(fake_object.vtable());
    }
  }

  cpp_vtable* vtable_address() const {
    uword vtable_addr = reinterpret_cast<uword>(this);
    return reinterpret_cast<cpp_vtable*>(vtable_addr);
  }

  static cpp_vtable builtin_vtables_[kNumPredefinedCids];

  // The static values below are singletons shared between the different
  // isolates. They are all allocated in the non-GC'd Dart::vm_isolate_.
  static ObjectPtr null_;
  static BoolPtr true_;
  static BoolPtr false_;

  static ClassPtr class_class_;            // Class of the Class vm object.
  static ClassPtr dynamic_class_;          // Class of the 'dynamic' type.
  static ClassPtr void_class_;             // Class of the 'void' type.
  static ClassPtr type_arguments_class_;   // Class of TypeArguments vm object.
  static ClassPtr patch_class_class_;      // Class of the PatchClass vm object.
  static ClassPtr function_class_;         // Class of the Function vm object.
  static ClassPtr closure_data_class_;     // Class of ClosureData vm obj.
  static ClassPtr signature_data_class_;   // Class of SignatureData vm obj.
  static ClassPtr redirection_data_class_;  // Class of RedirectionData vm obj.
  static ClassPtr ffi_trampoline_data_class_;  // Class of FfiTrampolineData
                                               // vm obj.
  static ClassPtr field_class_;            // Class of the Field vm object.
  static ClassPtr script_class_;           // Class of the Script vm object.
  static ClassPtr library_class_;          // Class of the Library vm object.
  static ClassPtr namespace_class_;        // Class of Namespace vm object.
  static ClassPtr kernel_program_info_class_;  // Class of KernelProgramInfo vm
                                               // object.
  static ClassPtr code_class_;             // Class of the Code vm object.
  static ClassPtr bytecode_class_;         // Class of the Bytecode vm object.
  static ClassPtr instructions_class_;     // Class of the Instructions vm object.
  static ClassPtr instructions_section_class_;  // Class of InstructionsSection.
  static ClassPtr object_pool_class_;      // Class of the ObjectPool vm object.
  static ClassPtr pc_descriptors_class_;   // Class of PcDescriptors vm object.
  static ClassPtr code_source_map_class_;  // Class of CodeSourceMap vm object.
  static ClassPtr compressed_stackmaps_class_;  // Class of CompressedStackMaps.
  static ClassPtr var_descriptors_class_;  // Class of LocalVarDescriptors.
  static ClassPtr exception_handlers_class_;  // Class of ExceptionHandlers.
  static ClassPtr deopt_info_class_;       // Class of DeoptInfo.
  static ClassPtr context_class_;          // Class of the Context vm object.
  static ClassPtr context_scope_class_;    // Class of ContextScope vm object.
  static ClassPtr dyncalltypecheck_class_;  // Class of ParameterTypeCheck.
  static ClassPtr singletargetcache_class_;  // Class of SingleTargetCache.
  static ClassPtr unlinkedcall_class_;     // Class of UnlinkedCall.
  static ClassPtr
      monomorphicsmiablecall_class_;  // Class of MonomorphicSmiableCall.
  static ClassPtr icdata_class_;           // Class of ICData.
  static ClassPtr megamorphic_cache_class_;  // Class of MegamorphicCache.
  static ClassPtr subtypetestcache_class_;  // Class of SubtypeTestCache.
  static ClassPtr loadingunit_class_;      // Class of LoadingUnit.
  static ClassPtr api_error_class_;        // Class of ApiError.
  static ClassPtr language_error_class_;   // Class of LanguageError.
  static ClassPtr unhandled_exception_class_;  // Class of UnhandledException.
  static ClassPtr unwind_error_class_;     // Class of UnwindError.
  // Class of WeakSerializationReference.
  static ClassPtr weak_serialization_reference_class_;

#define DECLARE_SHARED_READONLY_HANDLE(Type, name) static Type* name##_;
  SHARED_READONLY_HANDLES_LIST(DECLARE_SHARED_READONLY_HANDLE)
#undef DECLARE_SHARED_READONLY_HANDLE

  friend void ClassTable::Register(const Class& cls);
  friend void ObjectLayout::Validate(IsolateGroup* isolate_group) const;
  friend class Closure;
  friend class SnapshotReader;
  friend class InstanceDeserializationCluster;
  friend class OneByteString;
  friend class TwoByteString;
  friend class ExternalOneByteString;
  friend class ExternalTwoByteString;
  friend class Thread;

#define REUSABLE_FRIEND_DECLARATION(name) \
  friend class Reusable##name##HandleScope;
  REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
#undef REUSABLE_FRIEND_DECLARATION

  DISALLOW_ALLOCATION();
  DISALLOW_COPY_AND_ASSIGN(Object);
};

class PassiveObject : public Object {
 public:
  void operator=(ObjectPtr value) { raw_ = value; }
  void operator^=(ObjectPtr value) { raw_ = value; }

  static PassiveObject& Handle(Zone* zone, ObjectPtr raw_ptr) {
    PassiveObject* obj =
        reinterpret_cast<PassiveObject*>(VMHandles::AllocateHandle(zone));
    obj->raw_ = raw_ptr;
    obj->set_vtable(0);
    return *obj;
  }
  static PassiveObject& Handle(ObjectPtr raw_ptr) {
    return Handle(Thread::Current()->zone(), raw_ptr);
  }
  static PassiveObject& Handle() {
    return Handle(Thread::Current()->zone(), Object::null());
  }
  static PassiveObject& Handle(Zone* zone) {
    return Handle(zone, Object::null());
  }
  static PassiveObject& ZoneHandle(Zone* zone, ObjectPtr raw_ptr) {
    PassiveObject* obj =
        reinterpret_cast<PassiveObject*>(VMHandles::AllocateZoneHandle(zone));
    obj->raw_ = raw_ptr;
    obj->set_vtable(0);
    return *obj;
  }
  static PassiveObject& ZoneHandle(ObjectPtr raw_ptr) {
    return ZoneHandle(Thread::Current()->zone(), raw_ptr);
  }
  static PassiveObject& ZoneHandle() {
    return ZoneHandle(Thread::Current()->zone(), Object::null());
  }
  static PassiveObject& ZoneHandle(Zone* zone) {
    return ZoneHandle(zone, Object::null());
  }

 private:
  PassiveObject() : Object() {}
  DISALLOW_ALLOCATION();
  DISALLOW_COPY_AND_ASSIGN(PassiveObject);
};
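
// Note (informal orientation): a PassiveObject handle carries no vtable
// (set_vtable(0) above), so it provides storage for an arbitrary ObjectPtr
// without any virtual dispatch, e.g.
//
//   PassiveObject& obj = PassiveObject::Handle(zone, some_raw_ptr);
//
// where zone and some_raw_ptr stand for whatever the caller already holds.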

typedef ZoneGrowableHandlePtrArray<const AbstractType> Trail;
typedef ZoneGrowableHandlePtrArray<const AbstractType>* TrailPtr;

// A URIs array contains triplets of strings.
// The first string in the triplet is a type name (usually a class).
// The second string in the triplet is the URI of the type.
// The third string in the triplet is "print" if the triplet should be printed.
typedef ZoneGrowableHandlePtrArray<const String> URIs;

enum class Nullability : int8_t {
  kNullable = 0,
  kNonNullable = 1,
  kLegacy = 2,
  // Adjust kNullabilityBitSize in clustered_snapshot.cc if adding new values.
};

// Equality kind between types.
enum class TypeEquality {
  kCanonical = 0,
  kSyntactical = 1,
  kInSubtypeTest = 2,
};

// The NNBDMode reflects the opted-in status of libraries.
// Note that the weak or strong checking mode is not reflected in NNBDMode.
enum class NNBDMode {
  // Status of the library:
  kLegacyLib = 0,   // Library is legacy.
  kOptedInLib = 1,  // Library is opted-in.
};

// The NNBDCompiledMode reflects the mode in which constants of the library were
// compiled by CFE.
enum class NNBDCompiledMode {
  kDisabled = 0,
  kWeak = 1,
  kStrong = 2,
  kAgnostic = 3,
};

class Class : public Object {
 public:
  enum InvocationDispatcherEntry {
    kInvocationDispatcherName,
    kInvocationDispatcherArgsDesc,
    kInvocationDispatcherFunction,
    kInvocationDispatcherEntrySize,
  };

  intptr_t host_instance_size() const {
    ASSERT(is_finalized() || is_prefinalized());
    return (raw_ptr()->host_instance_size_in_words_ * kWordSize);
  }
  intptr_t target_instance_size() const {
    ASSERT(is_finalized() || is_prefinalized());
#if !defined(DART_PRECOMPILED_RUNTIME)
    return (raw_ptr()->target_instance_size_in_words_ *
            compiler::target::kWordSize);
#else
    return host_instance_size();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  static intptr_t host_instance_size(ClassPtr clazz) {
    return (clazz->ptr()->host_instance_size_in_words_ * kWordSize);
  }
  static intptr_t target_instance_size(ClassPtr clazz) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    return (clazz->ptr()->target_instance_size_in_words_ *
            compiler::target::kWordSize);
#else
    return host_instance_size(clazz);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  void set_instance_size(intptr_t host_value_in_bytes,
                         intptr_t target_value_in_bytes) const {
    ASSERT(kWordSize != 0);
    set_instance_size_in_words(
        host_value_in_bytes / kWordSize,
        target_value_in_bytes / compiler::target::kWordSize);
  }
  void set_instance_size_in_words(intptr_t host_value,
                                  intptr_t target_value) const {
    ASSERT(Utils::IsAligned((host_value * kWordSize), kObjectAlignment));
    StoreNonPointer(&raw_ptr()->host_instance_size_in_words_, host_value);
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT(Utils::IsAligned((target_value * compiler::target::kWordSize),
                            compiler::target::kObjectAlignment));
    StoreNonPointer(&raw_ptr()->target_instance_size_in_words_, target_value);
#else
    ASSERT(host_value == target_value);
#endif  // #!defined(DART_PRECOMPILED_RUNTIME)
  }
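
  // For orientation (illustrative arithmetic, not additional API): instance
  // sizes are stored in words, so a class whose host_instance_size_in_words_
  // is 4 occupies 4 * kWordSize bytes on the host (32 bytes on a 64-bit host,
  // 16 on a 32-bit one), while the target_* variants apply the
  // cross-compilation target's word size.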

  intptr_t host_next_field_offset() const {
    return raw_ptr()->host_next_field_offset_in_words_ * kWordSize;
  }
  intptr_t target_next_field_offset() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    return raw_ptr()->target_next_field_offset_in_words_ *
           compiler::target::kWordSize;
#else
    return host_next_field_offset();
#endif  // #!defined(DART_PRECOMPILED_RUNTIME)
  }
  void set_next_field_offset(intptr_t host_value_in_bytes,
                             intptr_t target_value_in_bytes) const {
    set_next_field_offset_in_words(
        host_value_in_bytes / kWordSize,
        target_value_in_bytes / compiler::target::kWordSize);
  }
  void set_next_field_offset_in_words(intptr_t host_value,
                                      intptr_t target_value) const {
    ASSERT((host_value == -1) ||
           (Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
            (host_value == raw_ptr()->host_instance_size_in_words_)) ||
           (!Utils::IsAligned((host_value * kWordSize), kObjectAlignment) &&
            ((host_value + 1) == raw_ptr()->host_instance_size_in_words_)));
    StoreNonPointer(&raw_ptr()->host_next_field_offset_in_words_, host_value);
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT((target_value == -1) ||
           (Utils::IsAligned((target_value * compiler::target::kWordSize),
                             compiler::target::kObjectAlignment) &&
            (target_value == raw_ptr()->target_instance_size_in_words_)) ||
           (!Utils::IsAligned((target_value * compiler::target::kWordSize),
                              compiler::target::kObjectAlignment) &&
            ((target_value + 1) == raw_ptr()->target_instance_size_in_words_)));
    StoreNonPointer(&raw_ptr()->target_next_field_offset_in_words_,
                    target_value);
#else
    ASSERT(host_value == target_value);
#endif  // #!defined(DART_PRECOMPILED_RUNTIME)
  }

  static bool is_valid_id(intptr_t value) {
    return ObjectLayout::ClassIdTag::is_valid(value);
  }
  intptr_t id() const { return raw_ptr()->id_; }
  void set_id(intptr_t value) const {
    ASSERT(value >= 0 && value < std::numeric_limits<classid_t>::max());
    StoreNonPointer(&raw_ptr()->id_, value);
  }
  static intptr_t id_offset() { return OFFSET_OF(ClassLayout, id_); }
  static intptr_t num_type_arguments_offset() {
    return OFFSET_OF(ClassLayout, num_type_arguments_);
  }

  StringPtr Name() const;
  StringPtr ScrubbedName() const;
  const char* ScrubbedNameCString() const;
  StringPtr UserVisibleName() const;
  const char* UserVisibleNameCString() const;

  const char* NameCString(NameVisibility name_visibility) const;

  // The mixin for this class if one exists. Otherwise, returns a raw pointer
  // to this class.
  ClassPtr Mixin() const;

  // The NNBD mode of the library declaring this class.
  NNBDMode nnbd_mode() const;

  bool IsInFullSnapshot() const;

  virtual StringPtr DictionaryName() const { return Name(); }

  ScriptPtr script() const { return raw_ptr()->script_; }
  void set_script(const Script& value) const;

  TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
  void set_token_pos(TokenPosition value) const;
  TokenPosition end_token_pos() const { return raw_ptr()->end_token_pos_; }
  void set_end_token_pos(TokenPosition value) const;

  int32_t SourceFingerprint() const;

  // This class represents a typedef if the signature function is not null.
  FunctionPtr signature_function() const {
    return raw_ptr()->signature_function_;
  }
  void set_signature_function(const Function& value) const;

  // Return the Type with type parameters declared by this class filled in with
  // dynamic and type parameters declared in superclasses filled in as declared
  // in superclass clauses.
  AbstractTypePtr RareType() const;

  // Return the Type whose arguments are the type parameters declared by this
  // class preceded by the type arguments declared for superclasses, etc.
  // e.g. given
  // class B<T, S>
  // class C<R> extends B<R, int>
  // C.DeclarationType() --> C [R, int, R]
  // The declaration type's nullability is either legacy or non-nullable when
  // the non-nullable experiment is enabled.
  TypePtr DeclarationType() const;

  static intptr_t declaration_type_offset() {
    return OFFSET_OF(ClassLayout, declaration_type_);
  }

  LibraryPtr library() const { return raw_ptr()->library_; }
  void set_library(const Library& value) const;

  // The type parameters (and their bounds) are specified as an array of
  // TypeParameter.
  TypeArgumentsPtr type_parameters() const {
    ASSERT(is_declaration_loaded());
    return raw_ptr()->type_parameters_;
  }
  void set_type_parameters(const TypeArguments& value) const;
  intptr_t NumTypeParameters(Thread* thread) const;
  intptr_t NumTypeParameters() const {
    return NumTypeParameters(Thread::Current());
  }

  // Return a TypeParameter if the type_name is a type parameter of this class.
  // Return null otherwise.
  TypeParameterPtr LookupTypeParameter(const String& type_name) const;

  // The type argument vector is flattened and includes the type arguments of
  // the super class.
  intptr_t NumTypeArguments() const;

  // Return true if this class declares type parameters.
  bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; }

  // If this class is parameterized, each instance has a type_arguments field.
  static const intptr_t kNoTypeArguments = -1;
  intptr_t host_type_arguments_field_offset() const {
    ASSERT(is_type_finalized() || is_prefinalized());
    if (raw_ptr()->host_type_arguments_field_offset_in_words_ ==
        kNoTypeArguments) {
      return kNoTypeArguments;
    }
    return raw_ptr()->host_type_arguments_field_offset_in_words_ * kWordSize;
  }
  intptr_t target_type_arguments_field_offset() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
    ASSERT(is_type_finalized() || is_prefinalized());
    if (raw_ptr()->target_type_arguments_field_offset_in_words_ ==
        compiler::target::Class::kNoTypeArguments) {
      return compiler::target::Class::kNoTypeArguments;
    }
    return raw_ptr()->target_type_arguments_field_offset_in_words_ *
           compiler::target::kWordSize;
#else
    return host_type_arguments_field_offset();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  void set_type_arguments_field_offset(intptr_t host_value_in_bytes,
                                       intptr_t target_value_in_bytes) const {
    intptr_t host_value, target_value;
    if (host_value_in_bytes == kNoTypeArguments ||
        target_value_in_bytes == RTN::Class::kNoTypeArguments) {
      ASSERT(host_value_in_bytes == kNoTypeArguments &&
             target_value_in_bytes == RTN::Class::kNoTypeArguments);
      host_value = kNoTypeArguments;
      target_value = RTN::Class::kNoTypeArguments;
    } else {
      ASSERT(kWordSize != 0 && compiler::target::kWordSize);
      host_value = host_value_in_bytes / kWordSize;
      target_value = target_value_in_bytes / compiler::target::kWordSize;
    }
    set_type_arguments_field_offset_in_words(host_value, target_value);
  }
  void set_type_arguments_field_offset_in_words(intptr_t host_value,
                                                intptr_t target_value) const {
    StoreNonPointer(&raw_ptr()->host_type_arguments_field_offset_in_words_,
                    host_value);
#if !defined(DART_PRECOMPILED_RUNTIME)
    StoreNonPointer(&raw_ptr()->target_type_arguments_field_offset_in_words_,
                    target_value);
#else
    ASSERT(host_value == target_value);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  static intptr_t host_type_arguments_field_offset_in_words_offset() {
    return OFFSET_OF(ClassLayout, host_type_arguments_field_offset_in_words_);
  }

  static intptr_t target_type_arguments_field_offset_in_words_offset() {
#if !defined(DART_PRECOMPILED_RUNTIME)
    return OFFSET_OF(ClassLayout, target_type_arguments_field_offset_in_words_);
#else
    return host_type_arguments_field_offset_in_words_offset();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }

  // The super type of this class, Object type if not explicitly specified.
  AbstractTypePtr super_type() const {
    ASSERT(is_declaration_loaded());
    return raw_ptr()->super_type_;
  }
  void set_super_type(const AbstractType& value) const;
  static intptr_t super_type_offset() {
    return OFFSET_OF(ClassLayout, super_type_);
  }

  // Asserts that the class of the super type has been resolved.
  // |original_classes| only has an effect when reloading. If true and we
  // are reloading, it will prefer the original classes to the replacement
  // classes.
  ClassPtr SuperClass(bool original_classes = false) const;

  // Interfaces is an array of Types.
  ArrayPtr interfaces() const {
    ASSERT(is_declaration_loaded());
    return raw_ptr()->interfaces_;
  }
  void set_interfaces(const Array& value) const;

  // Returns the list of classes directly implementing this class.
  GrowableObjectArrayPtr direct_implementors() const {
    return raw_ptr()->direct_implementors_;
  }
  void AddDirectImplementor(const Class& subclass, bool is_mixin) const;
  void ClearDirectImplementors() const;

  // Returns the list of classes having this class as direct superclass.
  GrowableObjectArrayPtr direct_subclasses() const {
    return raw_ptr()->direct_subclasses_;
  }
  void AddDirectSubclass(const Class& subclass) const;
  void ClearDirectSubclasses() const;

  // Check if this class represents the class of null.
  bool IsNullClass() const { return id() == kNullCid; }

  // Check if this class represents the 'dynamic' class.
  bool IsDynamicClass() const { return id() == kDynamicCid; }

  // Check if this class represents the 'void' class.
  bool IsVoidClass() const { return id() == kVoidCid; }

  // Check if this class represents the 'Never' class.
  bool IsNeverClass() const { return id() == kNeverCid; }

  // Check if this class represents the 'Object' class.
  bool IsObjectClass() const { return id() == kInstanceCid; }

  // Check if this class represents the 'Function' class.
  bool IsDartFunctionClass() const;

  // Check if this class represents the 'Future' class.
  bool IsFutureClass() const;

  // Check if this class represents the 'FutureOr' class.
  bool IsFutureOrClass() const { return id() == kFutureOrCid; }

  // Check if this class represents the 'Closure' class.
  bool IsClosureClass() const { return id() == kClosureCid; }
  static bool IsClosureClass(ClassPtr cls) {
    NoSafepointScope no_safepoint;
    return cls->ptr()->id_ == kClosureCid;
  }

  // Check if this class represents a typedef class.
  bool IsTypedefClass() const { return signature_function() != Object::null(); }

  static bool IsInFullSnapshot(ClassPtr cls) {
    NoSafepointScope no_safepoint;
    return LibraryLayout::InFullSnapshotBit::decode(
        cls->ptr()->library_->ptr()->flags_);
  }

  // Returns true if the type specified by cls, type_arguments, and nullability
  // is a subtype of the other type.
  static bool IsSubtypeOf(const Class& cls,
                          const TypeArguments& type_arguments,
                          Nullability nullability,
                          const AbstractType& other,
                          Heap::Space space,
                          TrailPtr trail = nullptr);

  // Check if this is the top level class.
  bool IsTopLevel() const;

  bool IsPrivate() const;

  DART_WARN_UNUSED_RESULT
  ErrorPtr VerifyEntryPoint() const;

  // Returns an array of instance and static fields defined by this class.
  ArrayPtr fields() const { return raw_ptr()->fields_; }
  void SetFields(const Array& value) const;
  void AddField(const Field& field) const;
  void AddFields(const GrowableArray<const Field*>& fields) const;

  // If this is a dart:internal.ClassID class, then inject our own const
  // fields. Returns true if synthetic fields are injected and regular
  // field declarations should be ignored.
  bool InjectCIDFields() const;

  // Returns an array of all instance fields of this class and its superclasses
  // indexed by offset in words.
  // |original_classes| only has an effect when reloading. If true and we
  // are reloading, it will prefer the original classes to the replacement
  // classes.
  ArrayPtr OffsetToFieldMap(bool original_classes = false) const;

  // Returns true if non-static fields are defined.
  bool HasInstanceFields() const;

  // TODO(koda): Unite w/ hash table.
  ArrayPtr functions() const { return raw_ptr()->functions_; }
  void SetFunctions(const Array& value) const;
  void AddFunction(const Function& function) const;
  void RemoveFunction(const Function& function) const;
  FunctionPtr FunctionFromIndex(intptr_t idx) const;
  intptr_t FindImplicitClosureFunctionIndex(const Function& needle) const;
  FunctionPtr ImplicitClosureFunctionFromIndex(intptr_t idx) const;

  FunctionPtr LookupDynamicFunction(const String& name) const;
  FunctionPtr LookupDynamicFunctionAllowAbstract(const String& name) const;
  FunctionPtr LookupDynamicFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupStaticFunction(const String& name) const;
  FunctionPtr LookupStaticFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupConstructor(const String& name) const;
  FunctionPtr LookupConstructorAllowPrivate(const String& name) const;
  FunctionPtr LookupFactory(const String& name) const;
  FunctionPtr LookupFactoryAllowPrivate(const String& name) const;
  FunctionPtr LookupFunction(const String& name) const;
  FunctionPtr LookupFunctionAllowPrivate(const String& name) const;
  FunctionPtr LookupGetterFunction(const String& name) const;
  FunctionPtr LookupSetterFunction(const String& name) const;
  FieldPtr LookupInstanceField(const String& name) const;
  FieldPtr LookupStaticField(const String& name) const;
  FieldPtr LookupField(const String& name) const;
  FieldPtr LookupFieldAllowPrivate(const String& name,
                                   bool instance_only = false) const;
  FieldPtr LookupInstanceFieldAllowPrivate(const String& name) const;
  FieldPtr LookupStaticFieldAllowPrivate(const String& name) const;

  DoublePtr LookupCanonicalDouble(Zone* zone, double value) const;
  MintPtr LookupCanonicalMint(Zone* zone, int64_t value) const;

  // The methods above are more efficient than this generic one.
  InstancePtr LookupCanonicalInstance(Zone* zone, const Instance& value) const;

  InstancePtr InsertCanonicalConstant(Zone* zone,
                                      const Instance& constant) const;
  void InsertCanonicalDouble(Zone* zone, const Double& constant) const;
  void InsertCanonicalMint(Zone* zone, const Mint& constant) const;

  void RehashConstants(Zone* zone) const;

  bool RequireLegacyErasureOfConstants(Zone* zone) const;

  static intptr_t InstanceSize() {
    return RoundedAllocationSize(sizeof(ClassLayout));
  }

  bool is_implemented() const {
    return ImplementedBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_implemented() const;

  bool is_abstract() const {
    return AbstractBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_abstract() const;

  ClassLayout::ClassLoadingState class_loading_state() const {
    return ClassLoadingBits::decode(raw_ptr()->state_bits_);
  }

  bool is_declaration_loaded() const {
    return class_loading_state() >= ClassLayout::kDeclarationLoaded;
  }
  void set_is_declaration_loaded() const;

  bool is_type_finalized() const {
    return class_loading_state() >= ClassLayout::kTypeFinalized;
  }
  void set_is_type_finalized() const;

  bool is_synthesized_class() const {
    return SynthesizedClassBit::decode(raw_ptr()->state_bits_);
  }
  void set_is_synthesized_class() const;

  bool is_enum_class() const { return EnumBit::decode(raw_ptr()->state_bits_); }
  void set_is_enum_class() const;

  bool is_finalized() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
               ClassLayout::kFinalized ||
           ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
               ClassLayout::kAllocateFinalized;
  }
  void set_is_finalized() const;

  bool is_allocate_finalized() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
           ClassLayout::kAllocateFinalized;
  }
  void set_is_allocate_finalized() const;

  bool is_prefinalized() const {
    return ClassFinalizedBits::decode(raw_ptr()->state_bits_) ==
           ClassLayout::kPreFinalized;
  }

  void set_is_prefinalized() const;

  bool is_const() const { return ConstBit::decode(raw_ptr()->state_bits_); }
  void set_is_const() const;

  // Tests if this is a mixin application class which was desugared
  // to a normal class by the kernel mixin transformation
  // (pkg/kernel/lib/transformations/mixin_full_resolution.dart).
  //
  // In such a case, its mixed-in type was pulled into the end of the
  // interfaces list.
1404 bool is_transformed_mixin_application() const {
1405 return TransformedMixinApplicationBit::decode(raw_ptr()->state_bits_);
1406 }
1407 void set_is_transformed_mixin_application() const;
1408
1409 bool is_fields_marked_nullable() const {
1410 return FieldsMarkedNullableBit::decode(raw_ptr()->state_bits_);
1411 }
1412 void set_is_fields_marked_nullable() const;
1413
1414 bool is_allocated() const {
1415 return IsAllocatedBit::decode(raw_ptr()->state_bits_);
1416 }
1417 void set_is_allocated(bool value) const;
1418
1419 bool is_loaded() const { return IsLoadedBit::decode(raw_ptr()->state_bits_); }
1420 void set_is_loaded(bool value) const;
1421
1422 uint16_t num_native_fields() const { return raw_ptr()->num_native_fields_; }
1423 void set_num_native_fields(uint16_t value) const {
1424 StoreNonPointer(&raw_ptr()->num_native_fields_, value);
1425 }
1426
1427 CodePtr allocation_stub() const { return raw_ptr()->allocation_stub_; }
1428 void set_allocation_stub(const Code& value) const;
1429
1430#if !defined(DART_PRECOMPILED_RUNTIME)
1431 intptr_t binary_declaration_offset() const {
1432 return ClassLayout::BinaryDeclarationOffset::decode(
1433 raw_ptr()->binary_declaration_);
1434 }
1435 void set_binary_declaration_offset(intptr_t value) const {
1436 ASSERT(value >= 0);
1437 StoreNonPointer(&raw_ptr()->binary_declaration_,
1438 ClassLayout::BinaryDeclarationOffset::update(
1439 value, raw_ptr()->binary_declaration_));
1440 }
1441#endif // !defined(DART_PRECOMPILED_RUNTIME)
1442
1443 intptr_t kernel_offset() const {
1444#if defined(DART_PRECOMPILED_RUNTIME)
1445 return 0;
1446#else
1447 ASSERT(!is_declared_in_bytecode());
1448 return binary_declaration_offset();
1449#endif
1450 }
1451
1452 void set_kernel_offset(intptr_t value) const {
1453#if defined(DART_PRECOMPILED_RUNTIME)
1454 UNREACHABLE();
1455#else
1456 ASSERT(!is_declared_in_bytecode());
1457 set_binary_declaration_offset(value);
1458#endif
1459 }
1460
1461 intptr_t bytecode_offset() const {
1462#if defined(DART_PRECOMPILED_RUNTIME)
1463 return 0;
1464#else
1465 ASSERT(is_declared_in_bytecode());
1466 return binary_declaration_offset();
1467#endif
1468 }
1469
1470 void set_bytecode_offset(intptr_t value) const {
1471#if defined(DART_PRECOMPILED_RUNTIME)
1472 UNREACHABLE();
1473#else
1474 ASSERT(is_declared_in_bytecode());
1475 set_binary_declaration_offset(value);
1476#endif
1477 }
1478
1479 bool is_declared_in_bytecode() const {
1480#if defined(DART_PRECOMPILED_RUNTIME)
1481 return false;
1482#else
1483 return ClassLayout::IsDeclaredInBytecode::decode(
1484 raw_ptr()->binary_declaration_);
1485#endif
1486 }
1487
1488#if !defined(DART_PRECOMPILED_RUNTIME)
1489 void set_is_declared_in_bytecode(bool value) const {
1490 StoreNonPointer(&raw_ptr()->binary_declaration_,
1491 ClassLayout::IsDeclaredInBytecode::update(
1492 value, raw_ptr()->binary_declaration_));
1493 }
1494#endif // !defined(DART_PRECOMPILED_RUNTIME)
1495
1496 void DisableAllocationStub() const;
1497
1498 ArrayPtr constants() const;
1499 void set_constants(const Array& value) const;
1500
1501 intptr_t FindInvocationDispatcherFunctionIndex(const Function& needle) const;
1502 FunctionPtr InvocationDispatcherFunctionFromIndex(intptr_t idx) const;
1503
1504 FunctionPtr GetInvocationDispatcher(const String& target_name,
1505 const Array& args_desc,
1506 FunctionLayout::Kind kind,
1507 bool create_if_absent) const;
1508
1509 void Finalize() const;
1510
1511 ObjectPtr Invoke(const String& selector,
1512 const Array& arguments,
1513 const Array& argument_names,
1514 bool respect_reflectable = true,
1515 bool check_is_entrypoint = false) const;
1516 ObjectPtr InvokeGetter(const String& selector,
1517 bool throw_nsm_if_absent,
1518 bool respect_reflectable = true,
1519 bool check_is_entrypoint = false) const;
1520 ObjectPtr InvokeSetter(const String& selector,
1521 const Instance& argument,
1522 bool respect_reflectable = true,
1523 bool check_is_entrypoint = false) const;
1524
  // Evaluate the given expression as if it appeared in a static method of this
  // class and return the resulting value, or an error object if evaluating the
  // expression fails. The method has the formal type parameters given in
  // type_definitions, and is invoked with the argument values given in
  // param_values and the type argument values given in type_param_values.
1530 ObjectPtr EvaluateCompiledExpression(
1531 const ExternalTypedData& kernel_buffer,
1532 const Array& type_definitions,
1533 const Array& param_values,
1534 const TypeArguments& type_param_values) const;
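
  // A hedged usage sketch (handle names, |zone| and the kernel buffer are
  // assumptions, not an existing call site):
  //
  //   const Object& result = Object::Handle(zone,
  //       cls.EvaluateCompiledExpression(kernel_buffer, type_definitions,
  //                                      param_values, type_param_values));
  //   if (result.IsError()) { /* report or propagate the error object */ }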
1535
1536 // Load class declaration (super type, interfaces, type parameters and
1537 // number of type arguments) if it is not loaded yet.
1538 void EnsureDeclarationLoaded() const;
1539
1540 ErrorPtr EnsureIsFinalized(Thread* thread) const;
1541 ErrorPtr EnsureIsAllocateFinalized(Thread* thread) const;
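
  // Typical check-and-propagate pattern (illustrative sketch; |thread|, |zone|
  // and |cls| are assumed to be in scope):
  //
  //   const Error& error = Error::Handle(zone, cls.EnsureIsFinalized(thread));
  //   if (!error.IsNull()) {
  //     Exceptions::PropagateError(error);
  //   }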
1542
1543 // Allocate a class used for VM internal objects.
1544 template <class FakeObject, class TargetFakeObject>
1545 static ClassPtr New(Isolate* isolate, bool register_class = true);
1546
1547 // Allocate instance classes.
1548 static ClassPtr New(const Library& lib,
1549 const String& name,
1550 const Script& script,
1551 TokenPosition token_pos,
1552 bool register_class = true);
1553 static ClassPtr NewNativeWrapper(const Library& library,
1554 const String& name,
1555 int num_fields);
1556
1557 // Allocate the raw string classes.
1558 static ClassPtr NewStringClass(intptr_t class_id, Isolate* isolate);
1559
1560 // Allocate the raw TypedData classes.
1561 static ClassPtr NewTypedDataClass(intptr_t class_id, Isolate* isolate);
1562
1563 // Allocate the raw TypedDataView/ByteDataView classes.
1564 static ClassPtr NewTypedDataViewClass(intptr_t class_id, Isolate* isolate);
1565
1566 // Allocate the raw ExternalTypedData classes.
1567 static ClassPtr NewExternalTypedDataClass(intptr_t class_id,
1568 Isolate* isolate);
1569
1570 // Allocate the raw Pointer classes.
1571 static ClassPtr NewPointerClass(intptr_t class_id, Isolate* isolate);
1572
1573 // Register code that has used CHA for optimization.
1574 // TODO(srdjan): Also register kind of CHA optimization (e.g.: leaf class,
1575 // leaf method, ...).
1576 void RegisterCHACode(const Code& code);
1577
1578 void DisableCHAOptimizedCode(const Class& subclass);
1579
1580 void DisableAllCHAOptimizedCode();
1581
1582 void DisableCHAImplementorUsers() { DisableAllCHAOptimizedCode(); }
1583
1584 // Return the list of code objects that were compiled using CHA of this class.
1585 // These code objects will be invalidated if new subclasses of this class
1586 // are finalized.
1587 ArrayPtr dependent_code() const { return raw_ptr()->dependent_code_; }
1588 void set_dependent_code(const Array& array) const;
1589
1590 bool TraceAllocation(Isolate* isolate) const;
1591 void SetTraceAllocation(bool trace_allocation) const;
1592
1593 void ReplaceEnum(IsolateReloadContext* reload_context,
1594 const Class& old_enum) const;
1595 void CopyStaticFieldValues(IsolateReloadContext* reload_context,
1596 const Class& old_cls) const;
1597 void PatchFieldsAndFunctions() const;
1598 void MigrateImplicitStaticClosures(IsolateReloadContext* context,
1599 const Class& new_cls) const;
1600 void CopyCanonicalConstants(const Class& old_cls) const;
1601 void CopyDeclarationType(const Class& old_cls) const;
1602 void CheckReload(const Class& replacement,
1603 IsolateReloadContext* context) const;
1604
1605 void AddInvocationDispatcher(const String& target_name,
1606 const Array& args_desc,
1607 const Function& dispatcher) const;
1608
1609 static int32_t host_instance_size_in_words(const ClassPtr cls) {
1610 return cls->ptr()->host_instance_size_in_words_;
1611 }
1612
1613 static int32_t target_instance_size_in_words(const ClassPtr cls) {
1614#if !defined(DART_PRECOMPILED_RUNTIME)
1615 return cls->ptr()->target_instance_size_in_words_;
1616#else
1617 return host_instance_size_in_words(cls);
1618#endif // !defined(DART_PRECOMPILED_RUNTIME)
1619 }
1620
1621 static int32_t host_next_field_offset_in_words(const ClassPtr cls) {
1622 return cls->ptr()->host_next_field_offset_in_words_;
1623 }
1624
1625 static int32_t target_next_field_offset_in_words(const ClassPtr cls) {
1626#if !defined(DART_PRECOMPILED_RUNTIME)
1627 return cls->ptr()->target_next_field_offset_in_words_;
1628#else
1629 return host_next_field_offset_in_words(cls);
1630#endif // !defined(DART_PRECOMPILED_RUNTIME)
1631 }
1632
1633 static int32_t host_type_arguments_field_offset_in_words(const ClassPtr cls) {
1634 return cls->ptr()->host_type_arguments_field_offset_in_words_;
1635 }
1636
1637 static int32_t target_type_arguments_field_offset_in_words(
1638 const ClassPtr cls) {
1639#if !defined(DART_PRECOMPILED_RUNTIME)
1640 return cls->ptr()->target_type_arguments_field_offset_in_words_;
1641#else
1642 return host_type_arguments_field_offset_in_words(cls);
1643#endif // !defined(DART_PRECOMPILED_RUNTIME)
1644 }
1645
1646 private:
1647 TypePtr declaration_type() const { return raw_ptr()->declaration_type_; }
1648
1649 // Caches the declaration type of this class.
1650 void set_declaration_type(const Type& type) const;
1651
1652 bool CanReloadFinalized(const Class& replacement,
1653 IsolateReloadContext* context) const;
1654 bool CanReloadPreFinalized(const Class& replacement,
1655 IsolateReloadContext* context) const;
1656
1657 // Tells whether instances need morphing for reload.
1658 bool RequiresInstanceMorphing(const Class& replacement) const;
1659
1660 template <class FakeInstance, class TargetFakeInstance>
1661 static ClassPtr NewCommon(intptr_t index);
1662
1663 enum MemberKind {
1664 kAny = 0,
1665 kStatic,
1666 kInstance,
1667 kInstanceAllowAbstract,
1668 kConstructor,
1669 kFactory,
1670 };
1671 enum StateBits {
1672 kConstBit = 0,
1673 kImplementedBit = 1,
1674 kClassFinalizedPos = 2,
1675 kClassFinalizedSize = 2,
1676 kClassLoadingPos = kClassFinalizedPos + kClassFinalizedSize, // = 4
1677 kClassLoadingSize = 2,
1678 kAbstractBit = kClassLoadingPos + kClassLoadingSize, // = 6
1679 kSynthesizedClassBit,
1680 kMixinAppAliasBit,
1681 kMixinTypeAppliedBit,
1682 kFieldsMarkedNullableBit,
1683 kEnumBit,
1684 kTransformedMixinApplicationBit,
1685 kIsAllocatedBit,
1686 kIsLoadedBit,
1687 kHasPragmaBit,
1688 };
1689 class ConstBit : public BitField<uint32_t, bool, kConstBit, 1> {};
1690 class ImplementedBit : public BitField<uint32_t, bool, kImplementedBit, 1> {};
1691 class ClassFinalizedBits : public BitField<uint32_t,
1692 ClassLayout::ClassFinalizedState,
1693 kClassFinalizedPos,
1694 kClassFinalizedSize> {};
1695 class ClassLoadingBits : public BitField<uint32_t,
1696 ClassLayout::ClassLoadingState,
1697 kClassLoadingPos,
1698 kClassLoadingSize> {};
1699 class AbstractBit : public BitField<uint32_t, bool, kAbstractBit, 1> {};
1700 class SynthesizedClassBit
1701 : public BitField<uint32_t, bool, kSynthesizedClassBit, 1> {};
1702 class FieldsMarkedNullableBit
1703 : public BitField<uint32_t, bool, kFieldsMarkedNullableBit, 1> {};
1704 class EnumBit : public BitField<uint32_t, bool, kEnumBit, 1> {};
1705 class TransformedMixinApplicationBit
1706 : public BitField<uint32_t, bool, kTransformedMixinApplicationBit, 1> {};
1707 class IsAllocatedBit : public BitField<uint32_t, bool, kIsAllocatedBit, 1> {};
1708 class IsLoadedBit : public BitField<uint32_t, bool, kIsLoadedBit, 1> {};
1709 class HasPragmaBit : public BitField<uint32_t, bool, kHasPragmaBit, 1> {};
1710
1711 void set_name(const String& value) const;
1712 void set_user_name(const String& value) const;
1713 const char* GenerateUserVisibleName() const;
1714 void set_state_bits(intptr_t bits) const;
1715
1716 ArrayPtr invocation_dispatcher_cache() const;
1717 void set_invocation_dispatcher_cache(const Array& cache) const;
1718 FunctionPtr CreateInvocationDispatcher(const String& target_name,
1719 const Array& args_desc,
1720 FunctionLayout::Kind kind) const;
1721
1722 // Returns the bitmap of unboxed fields
1723 UnboxedFieldBitmap CalculateFieldOffsets() const;
1724
1725 // functions_hash_table is in use iff there are at least this many functions.
1726 static const intptr_t kFunctionLookupHashTreshold = 16;
1727
1728 // Initial value for the cached number of type arguments.
1729 static const intptr_t kUnknownNumTypeArguments = -1;
1730
1731 int16_t num_type_arguments() const { return raw_ptr()->num_type_arguments_; }
1732
1733 public:
1734 void set_num_type_arguments(intptr_t value) const;
1735
1736 bool has_pragma() const {
1737 return HasPragmaBit::decode(raw_ptr()->state_bits_);
1738 }
1739 void set_has_pragma(bool has_pragma) const;
1740
1741 private:
1742 // Calculates number of type arguments of this class.
1743 // This includes type arguments of a superclass and takes overlapping
1744 // of type arguments into account.
1745 intptr_t ComputeNumTypeArguments() const;
1746
1747 // Assigns empty array to all raw class array fields.
1748 void InitEmptyFields();
1749
1750 static FunctionPtr CheckFunctionType(const Function& func, MemberKind kind);
1751 FunctionPtr LookupFunction(const String& name, MemberKind kind) const;
1752 FunctionPtr LookupFunctionAllowPrivate(const String& name,
1753 MemberKind kind) const;
1754 FieldPtr LookupField(const String& name, MemberKind kind) const;
1755
1756 FunctionPtr LookupAccessorFunction(const char* prefix,
1757 intptr_t prefix_length,
1758 const String& name) const;
1759
1760 // Allocate an instance class which has a VM implementation.
1761 template <class FakeInstance, class TargetFakeInstance>
1762 static ClassPtr New(intptr_t id,
1763 Isolate* isolate,
1764 bool register_class = true,
1765 bool is_abstract = false);
1766
1767 // Helper that calls 'Class::New<Instance>(kIllegalCid)'.
1768 static ClassPtr NewInstanceClass();
1769
1770 FINAL_HEAP_OBJECT_IMPLEMENTATION(Class, Object);
1771 friend class AbstractType;
1772 friend class Instance;
1773 friend class Object;
1774 friend class Type;
1775 friend class InterpreterHelpers;
1776 friend class Intrinsifier;
1777 friend class ProgramWalker;
1778 friend class Precompiler;
1779};
1780
1781// Classification of type genericity according to type parameter owners.
1782enum Genericity {
1783 kAny, // Consider type params of current class and functions.
1784 kCurrentClass, // Consider type params of current class only.
1785 kFunctions, // Consider type params of current and parent functions.
1786};
1787
1788class PatchClass : public Object {
1789 public:
1790 ClassPtr patched_class() const { return raw_ptr()->patched_class_; }
1791 ClassPtr origin_class() const { return raw_ptr()->origin_class_; }
1792 ScriptPtr script() const { return raw_ptr()->script_; }
1793 ExternalTypedDataPtr library_kernel_data() const {
1794 return raw_ptr()->library_kernel_data_;
1795 }
1796 void set_library_kernel_data(const ExternalTypedData& data) const;
1797
1798 intptr_t library_kernel_offset() const {
1799#if !defined(DART_PRECOMPILED_RUNTIME)
1800 return raw_ptr()->library_kernel_offset_;
1801#else
1802 return -1;
1803#endif
1804 }
1805 void set_library_kernel_offset(intptr_t offset) const {
1806 NOT_IN_PRECOMPILED(
1807 StoreNonPointer(&raw_ptr()->library_kernel_offset_, offset));
1808 }
1809
1810 static intptr_t InstanceSize() {
1811 return RoundedAllocationSize(sizeof(PatchClassLayout));
1812 }
1813 static bool IsInFullSnapshot(PatchClassPtr cls) {
1814 NoSafepointScope no_safepoint;
1815 return Class::IsInFullSnapshot(cls->ptr()->patched_class_);
1816 }
1817
1818 static PatchClassPtr New(const Class& patched_class,
1819 const Class& origin_class);
1820
1821 static PatchClassPtr New(const Class& patched_class, const Script& source);
1822
1823 private:
1824 void set_patched_class(const Class& value) const;
1825 void set_origin_class(const Class& value) const;
1826 void set_script(const Script& value) const;
1827
1828 static PatchClassPtr New();
1829
1830 FINAL_HEAP_OBJECT_IMPLEMENTATION(PatchClass, Object);
1831 friend class Class;
1832};
1833
1834class ParameterTypeCheck : public Object {
1835 public:
1836 // The FP-relative index of the parameter in a bytecode frame (after optional
1837 // parameter marshalling) whose assignability needs to be checked, or 0 if
1838 // this is a type parameter check.
1839 intptr_t index() const { return raw_ptr()->index_; }
1840 void set_index(intptr_t i) const { StoreNonPointer(&raw_ptr()->index_, i); }
1841
  // The type parameter whose bound needs to be checked, or null if this is
  // an ordinary parameter check.
1844 AbstractTypePtr param() const { return raw_ptr()->param_; }
1845 void set_param(const AbstractType& t) const;
1846
1847 // FP[index] assignable to type, OR param is subtype of bound.
1848 AbstractTypePtr type_or_bound() const { return raw_ptr()->type_or_bound_; }
1849 void set_type_or_bound(const AbstractType& t) const;
1850
1851 // The parameter or type parameter's name to use in an error message.
1852 StringPtr name() const { return raw_ptr()->name_; }
1853 void set_name(const String& n) const;
1854
1855 SubtypeTestCachePtr cache() const { return raw_ptr()->cache_; }
1856 void set_cache(const SubtypeTestCache& c) const;
1857
1858 static intptr_t InstanceSize() {
1859 return RoundedAllocationSize(sizeof(ParameterTypeCheckLayout));
1860 }
1861
1862 static ParameterTypeCheckPtr New();
1863
1864 private:
1865 FINAL_HEAP_OBJECT_IMPLEMENTATION(ParameterTypeCheck, Object);
1866 friend class Class;
1867};
1868
1869class SingleTargetCache : public Object {
1870 public:
1871 CodePtr target() const { return raw_ptr()->target_; }
1872 void set_target(const Code& target) const;
1873 static intptr_t target_offset() {
1874 return OFFSET_OF(SingleTargetCacheLayout, target_);
1875 }
1876
1877#define DEFINE_NON_POINTER_FIELD_ACCESSORS(type, name) \
1878 type name() const { return raw_ptr()->name##_; } \
1879 void set_##name(type value) const { \
1880 StoreNonPointer(&raw_ptr()->name##_, value); \
1881 } \
1882 static intptr_t name##_offset() { \
1883 return OFFSET_OF(SingleTargetCacheLayout, name##_); \
1884 }
1885
1886 DEFINE_NON_POINTER_FIELD_ACCESSORS(uword, entry_point);
1887 DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, lower_limit);
1888 DEFINE_NON_POINTER_FIELD_ACCESSORS(intptr_t, upper_limit);
1889#undef DEFINE_NON_POINTER_FIELD_ACCESSORS
1890
1891 static intptr_t InstanceSize() {
1892 return RoundedAllocationSize(sizeof(SingleTargetCacheLayout));
1893 }
1894
1895 static SingleTargetCachePtr New();
1896
1897 private:
1898 FINAL_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache, Object);
1899 friend class Class;
1900};
1901
1902class MonomorphicSmiableCall : public Object {
1903 public:
1904 CodePtr target() const { return raw_ptr()->target_; }
1905 classid_t expected_cid() const { return raw_ptr()->expected_cid_; }
1906
1907 static intptr_t InstanceSize() {
1908 return RoundedAllocationSize(sizeof(MonomorphicSmiableCallLayout));
1909 }
1910
1911 static MonomorphicSmiableCallPtr New(classid_t expected_cid,
1912 const Code& target);
1913
1914 static intptr_t expected_cid_offset() {
1915 return OFFSET_OF(MonomorphicSmiableCallLayout, expected_cid_);
1916 }
1917
1918 static intptr_t target_offset() {
1919 return OFFSET_OF(MonomorphicSmiableCallLayout, target_);
1920 }
1921
1922 static intptr_t entrypoint_offset() {
1923 return OFFSET_OF(MonomorphicSmiableCallLayout, entrypoint_);
1924 }
1925
1926 private:
1927 FINAL_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall, Object);
1928 friend class Class;
1929};
1930
1931class CallSiteData : public Object {
1932 public:
1933 StringPtr target_name() const { return raw_ptr()->target_name_; }
1934 ArrayPtr arguments_descriptor() const { return raw_ptr()->args_descriptor_; }
1935
1936 static intptr_t target_name_offset() {
1937 return OFFSET_OF(CallSiteDataLayout, target_name_);
1938 }
1939
1940 static intptr_t arguments_descriptor_offset() {
1941 return OFFSET_OF(CallSiteDataLayout, args_descriptor_);
1942 }
1943
1944 private:
1945 void set_target_name(const String& value) const;
1946 void set_arguments_descriptor(const Array& value) const;
1947
1948 HEAP_OBJECT_IMPLEMENTATION(CallSiteData, Object)
1949
1950 friend class ICData;
1951 friend class MegamorphicCache;
1952};
1953
1954class UnlinkedCall : public CallSiteData {
1955 public:
1956 bool can_patch_to_monomorphic() const {
1957 return raw_ptr()->can_patch_to_monomorphic_;
1958 }
1959
1960 static intptr_t InstanceSize() {
1961 return RoundedAllocationSize(sizeof(UnlinkedCallLayout));
1962 }
1963
1964 intptr_t Hashcode() const;
1965 bool Equals(const UnlinkedCall& other) const;
1966
1967 static UnlinkedCallPtr New();
1968
1969 private:
1970 friend class ICData; // For set_*() methods.
1971
1972 void set_can_patch_to_monomorphic(bool value) const;
1973
1974 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall, CallSiteData);
1975 friend class Class;
1976};
1977
1978// Object holding information about an IC: test classes and their
1979// corresponding targets. The owner of the ICData can be either the function
1980// or the original ICData object. In case of background compilation we
1981// copy the ICData in a child object, thus freezing it during background
1982// compilation. Code may contain only original ICData objects.
1983class ICData : public CallSiteData {
1984 public:
1985 FunctionPtr Owner() const;
1986
1987 ICDataPtr Original() const;
1988
1989 void SetOriginal(const ICData& value) const;
1990
1991 bool IsOriginal() const { return Original() == this->raw(); }
1992
1993 intptr_t NumArgsTested() const;
1994
1995 intptr_t TypeArgsLen() const;
1996
1997 intptr_t CountWithTypeArgs() const;
1998
1999 intptr_t CountWithoutTypeArgs() const;
2000
2001 intptr_t SizeWithoutTypeArgs() const;
2002
2003 intptr_t SizeWithTypeArgs() const;
2004
2005 intptr_t deopt_id() const {
2006#if defined(DART_PRECOMPILED_RUNTIME)
2007 UNREACHABLE();
2008 return -1;
2009#else
2010 return raw_ptr()->deopt_id_;
2011#endif
2012 }
2013
2014 bool IsImmutable() const;
2015
2016#if !defined(DART_PRECOMPILED_RUNTIME)
2017 AbstractTypePtr receivers_static_type() const {
2018 return raw_ptr()->receivers_static_type_;
2019 }
2020 void SetReceiversStaticType(const AbstractType& type) const;
2021 bool is_tracking_exactness() const {
2022 return TrackingExactnessBit::decode(raw_ptr()->state_bits_);
2023 }
2024 void set_tracking_exactness(bool value) const {
2025 StoreNonPointer(
2026 &raw_ptr()->state_bits_,
2027 TrackingExactnessBit::update(value, raw_ptr()->state_bits_));
2028 }
2029#else
2030 bool is_tracking_exactness() const { return false; }
2031#endif
2032
2033 void Reset(Zone* zone) const;
2034
2035// Note: only deopts with reasons before Unknown in this list are recorded in
2036// the ICData. All other reasons are used purely for informational messages
2037// printed during deoptimization itself.
2038#define DEOPT_REASONS(V) \
2039 V(BinarySmiOp) \
2040 V(BinaryInt64Op) \
2041 V(DoubleToSmi) \
2042 V(CheckSmi) \
2043 V(CheckClass) \
2044 V(Unknown) \
2045 V(PolymorphicInstanceCallTestFail) \
2046 V(UnaryInt64Op) \
2047 V(BinaryDoubleOp) \
2048 V(UnaryOp) \
2049 V(UnboxInteger) \
2050 V(Unbox) \
2051 V(CheckArrayBound) \
2052 V(AtCall) \
2053 V(GuardField) \
2054 V(TestCids) \
2055 V(NumReasons)
2056
2057 enum DeoptReasonId {
2058#define DEFINE_ENUM_LIST(name) kDeopt##name,
2059 DEOPT_REASONS(DEFINE_ENUM_LIST)
2060#undef DEFINE_ENUM_LIST
2061 };
2062
2063 static const intptr_t kLastRecordedDeoptReason = kDeoptUnknown - 1;
2064
2065 enum DeoptFlags {
2066 // Deoptimization is caused by an optimistically hoisted instruction.
2067 kHoisted = 1 << 0,
2068
2069 // Deoptimization is caused by an optimistically generalized bounds check.
2070 kGeneralized = 1 << 1
2071 };
2072
2073 bool HasDeoptReasons() const { return DeoptReasons() != 0; }
2074 uint32_t DeoptReasons() const;
2075 void SetDeoptReasons(uint32_t reasons) const;
2076
2077 bool HasDeoptReason(ICData::DeoptReasonId reason) const;
2078 void AddDeoptReason(ICData::DeoptReasonId reason) const;
2079
2080 // Call site classification that is helpful for hot-reload. Call sites with
2081 // different `RebindRule` have to be rebound differently.
2082#define FOR_EACH_REBIND_RULE(V) \
2083 V(Instance) \
2084 V(NoRebind) \
2085 V(NSMDispatch) \
2086 V(Optimized) \
2087 V(Static) \
2088 V(Super)
2089
2090 enum RebindRule {
2091#define REBIND_ENUM_DEF(name) k##name,
2092 FOR_EACH_REBIND_RULE(REBIND_ENUM_DEF)
2093#undef REBIND_ENUM_DEF
2094 kNumRebindRules,
2095 };
2096 static const char* RebindRuleToCString(RebindRule r);
2097 static bool ParseRebindRule(const char* str, RebindRule* out);
2098 RebindRule rebind_rule() const;
2099 void set_rebind_rule(uint32_t rebind_rule) const;
2100
2101 void set_is_megamorphic(bool value) const {
2102 // We don't have concurrent RW access to [state_bits_].
2103 const uint32_t updated_bits =
2104 MegamorphicBit::update(value, raw_ptr()->state_bits_);
2105
    // However, we ensure that once the state bits are updated, all other
    // previous writes to the IC are visible as well (this release store pairs
    // with the acquire load in is_megamorphic()).
2108 StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
2109 &raw_ptr()->state_bits_, updated_bits);
2110 }
2111
2112 // The length of the array. This includes all sentinel entries including
2113 // the final one.
2114 intptr_t Length() const;
2115
2116 // Takes O(result) time!
2117 intptr_t NumberOfChecks() const;
2118
2119 // Discounts any checks with usage of zero.
  // Takes O(result) time!
2121 intptr_t NumberOfUsedChecks() const;
2122
2123 // Takes O(n) time!
2124 bool NumberOfChecksIs(intptr_t n) const;
2125
2126 static intptr_t InstanceSize() {
2127 return RoundedAllocationSize(sizeof(ICDataLayout));
2128 }
2129
2130 static intptr_t state_bits_offset() {
2131 return OFFSET_OF(ICDataLayout, state_bits_);
2132 }
2133
2134 static intptr_t NumArgsTestedShift() { return kNumArgsTestedPos; }
2135
2136 static intptr_t NumArgsTestedMask() {
2137 return ((1 << kNumArgsTestedSize) - 1) << kNumArgsTestedPos;
2138 }
2139
2140 static intptr_t entries_offset() { return OFFSET_OF(ICDataLayout, entries_); }
2141
2142 static intptr_t owner_offset() { return OFFSET_OF(ICDataLayout, owner_); }
2143
2144#if !defined(DART_PRECOMPILED_RUNTIME)
2145 static intptr_t receivers_static_type_offset() {
2146 return OFFSET_OF(ICDataLayout, receivers_static_type_);
2147 }
2148#endif
2149
2150 // Replaces entry |index| with the sentinel.
2151 void WriteSentinelAt(intptr_t index) const;
2152
2153 // Clears the count for entry |index|.
2154 void ClearCountAt(intptr_t index) const;
2155
2156 // Clear all entries with the sentinel value and reset the first entry
2157 // with the dummy target entry.
2158 void ClearAndSetStaticTarget(const Function& func) const;
2159
2160 void DebugDump() const;
2161
2162 // Returns true if this is a two arg smi operation.
2163 bool AddSmiSmiCheckForFastSmiStubs() const;
2164
2165 // Used for unoptimized static calls when no class-ids are checked.
2166 void AddTarget(const Function& target) const;
2167
2168 // Adding checks.
2169
  // Adds one more class test to ICData. Length of 'class_ids' must be equal
  // to the number of arguments tested. Use only for num_args_tested > 1.
2172 void AddCheck(const GrowableArray<intptr_t>& class_ids,
2173 const Function& target,
2174 intptr_t count = 1) const;
2175
2176 StaticTypeExactnessState GetExactnessAt(intptr_t count) const;
2177
2178 // Adds sorted so that Smi is the first class-id. Use only for
2179 // num_args_tested == 1.
2180 void AddReceiverCheck(intptr_t receiver_class_id,
2181 const Function& target,
2182 intptr_t count = 1,
2183 StaticTypeExactnessState exactness =
2184 StaticTypeExactnessState::NotTracking()) const;
2185
2186 // Does entry |index| contain the sentinel value?
2187 bool IsSentinelAt(intptr_t index) const;
2188
2189 // Retrieving checks.
2190
2191 void GetCheckAt(intptr_t index,
2192 GrowableArray<intptr_t>* class_ids,
2193 Function* target) const;
2194 void GetClassIdsAt(intptr_t index, GrowableArray<intptr_t>* class_ids) const;
2195
2196 // Only for 'num_args_checked == 1'.
2197 void GetOneClassCheckAt(intptr_t index,
2198 intptr_t* class_id,
2199 Function* target) const;
2200 // Only for 'num_args_checked == 1'.
2201 intptr_t GetCidAt(intptr_t index) const;
2202
2203 intptr_t GetReceiverClassIdAt(intptr_t index) const;
2204 intptr_t GetClassIdAt(intptr_t index, intptr_t arg_nr) const;
2205
2206 FunctionPtr GetTargetAt(intptr_t index) const;
2207
2208 ObjectPtr GetTargetOrCodeAt(intptr_t index) const;
2209 void SetCodeAt(intptr_t index, const Code& value) const;
2210 void SetEntryPointAt(intptr_t index, const Smi& value) const;
2211
2212 void IncrementCountAt(intptr_t index, intptr_t value) const;
2213 void SetCountAt(intptr_t index, intptr_t value) const;
2214 intptr_t GetCountAt(intptr_t index) const;
2215 intptr_t AggregateCount() const;
2216
2217 // Returns this->raw() if num_args_tested == 1 and arg_nr == 1, otherwise
2218 // returns a new ICData object containing only unique arg_nr checks.
2219 // Returns only used entries.
2220 ICDataPtr AsUnaryClassChecksForArgNr(intptr_t arg_nr) const;
2221 ICDataPtr AsUnaryClassChecks() const { return AsUnaryClassChecksForArgNr(0); }
2222 ICDataPtr AsUnaryClassChecksForCid(intptr_t cid,
2223 const Function& target) const;
2224
2225 // Returns ICData with aggregated receiver count, sorted by highest count.
2226 // Smi not first!! (the convention for ICData used in code generation is that
2227 // Smi check is first)
2228 // Used for printing and optimizations.
2229 ICDataPtr AsUnaryClassChecksSortedByCount() const;
2230
2231 UnlinkedCallPtr AsUnlinkedCall() const;
2232
2233 bool HasReceiverClassId(intptr_t class_id) const;
2234
2235 // Note: passing non-null receiver_type enables exactness tracking for
2236 // the receiver type. Receiver type is expected to be a fully
2237 // instantiated generic (but not a FutureOr).
2238 // See StaticTypeExactnessState for more information.
2239 static ICDataPtr New(
2240 const Function& owner,
2241 const String& target_name,
2242 const Array& arguments_descriptor,
2243 intptr_t deopt_id,
2244 intptr_t num_args_tested,
2245 RebindRule rebind_rule,
2246 const AbstractType& receiver_type = Object::null_abstract_type());
2247 static ICDataPtr NewFrom(const ICData& from, intptr_t num_args_tested);
2248
2249 // Generates a new ICData with descriptor and data array copied (deep clone).
2250 static ICDataPtr Clone(const ICData& from);
2251
2252 static intptr_t TestEntryLengthFor(intptr_t num_args,
2253 bool tracking_exactness);
2254
2255 static intptr_t CountIndexFor(intptr_t num_args) { return num_args; }
2256 static intptr_t EntryPointIndexFor(intptr_t num_args) { return num_args; }
2257
2258 static intptr_t TargetIndexFor(intptr_t num_args) { return num_args + 1; }
2259 static intptr_t CodeIndexFor(intptr_t num_args) { return num_args + 1; }
2260
2261 static intptr_t ExactnessIndexFor(intptr_t num_args) { return num_args + 2; }
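
  // Putting the index helpers above together, a single check entry for
  // |num_args| tested arguments is laid out as (illustrative sketch):
  //
  //   [cid_0, ..., cid_{num_args - 1},
  //    count or entry point,   // CountIndexFor / EntryPointIndexFor
  //    target or code,         // TargetIndexFor / CodeIndexFor
  //    exactness]              // ExactnessIndexFor, only when tracking
  //                            // exactness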
2262
2263 bool IsUsedAt(intptr_t i) const;
2264
2265 void PrintToJSONArray(const JSONArray& jsarray,
2266 TokenPosition token_pos) const;
2267
2268 // Initialize the preallocated empty ICData entry arrays.
2269 static void Init();
2270
2271 // Clear the preallocated empty ICData entry arrays.
2272 static void Cleanup();
2273
2274 // We cache ICData with 0, 1, 2 arguments tested without exactness
2275 // tracking and with 1 argument tested with exactness tracking.
2276 enum {
2277 kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx = 0,
2278 kCachedICDataMaxArgsTestedWithoutExactnessTracking = 2,
2279 kCachedICDataOneArgWithExactnessTrackingIdx =
2280 kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
2281 kCachedICDataMaxArgsTestedWithoutExactnessTracking + 1,
2282 kCachedICDataArrayCount = kCachedICDataOneArgWithExactnessTrackingIdx + 1,
2283 };
2284
2285 bool is_static_call() const;
2286
2287 intptr_t FindCheck(const GrowableArray<intptr_t>& cids) const;
2288
2289 ArrayPtr entries() const {
2290 return LoadPointer<ArrayPtr, std::memory_order_acquire>(
2291 &raw_ptr()->entries_);
2292 }
2293
2294 bool receiver_cannot_be_smi() const {
2295 return ReceiverCannotBeSmiBit::decode(
2296 LoadNonPointer(&raw_ptr()->state_bits_));
2297 }
2298
2299 void set_receiver_cannot_be_smi(bool value) const {
2300 set_state_bits(ReceiverCannotBeSmiBit::encode(value) |
2301 LoadNonPointer(&raw_ptr()->state_bits_));
2302 }
2303
2304 private:
2305 friend class FlowGraphSerializer; // For is_megamorphic()
2306
2307 static ICDataPtr New();
2308
  // Grows the entries array and sets |index| (an out-parameter) to the index
  // that should be used for the new entry.
2311 ArrayPtr Grow(intptr_t* index) const;
2312
2313 void set_owner(const Function& value) const;
2314 void set_deopt_id(intptr_t value) const;
2315 void SetNumArgsTested(intptr_t value) const;
2316 void set_entries(const Array& value) const;
2317 void set_state_bits(uint32_t bits) const;
2318
2319 // This bit is set when a call site becomes megamorphic and starts using a
2320 // MegamorphicCache instead of ICData. It means that the entries in the
2321 // ICData are incomplete and the MegamorphicCache needs to also be consulted
2322 // to list the call site's observed receiver classes and targets.
2323 // In the compiler, this should only be read once by CallTargets to avoid the
2324 // compiler seeing an unstable set of feedback.
2325 bool is_megamorphic() const {
2326 // Ensure any following load instructions do not get performed before this
2327 // one.
2328 const uint32_t bits = LoadNonPointer<uint32_t, std::memory_order_acquire>(
2329 &raw_ptr()->state_bits_);
2330 return MegamorphicBit::decode(bits);
2331 }
2332
2333 bool ValidateInterceptor(const Function& target) const;
2334
2335 enum {
2336 kNumArgsTestedPos = 0,
2337 kNumArgsTestedSize = 2,
2338 kTrackingExactnessPos = kNumArgsTestedPos + kNumArgsTestedSize,
2339 kTrackingExactnessSize = 1,
2340 kDeoptReasonPos = kTrackingExactnessPos + kTrackingExactnessSize,
2341 kDeoptReasonSize = kLastRecordedDeoptReason + 1,
2342 kRebindRulePos = kDeoptReasonPos + kDeoptReasonSize,
2343 kRebindRuleSize = 3,
2344 kMegamorphicPos = kRebindRulePos + kRebindRuleSize,
2345 kMegamorphicSize = 1,
2346 kReceiverCannotBeSmiPos = kMegamorphicPos + kMegamorphicSize,
2347 kReceiverCannotBeSmiSize = 1,
2348 };
2349
2350 COMPILE_ASSERT(kReceiverCannotBeSmiPos + kReceiverCannotBeSmiSize <=
2351 sizeof(ICDataLayout::state_bits_) * kBitsPerWord);
2352 COMPILE_ASSERT(kNumRebindRules <= (1 << kRebindRuleSize));
2353
2354 class NumArgsTestedBits : public BitField<uint32_t,
2355 uint32_t,
2356 kNumArgsTestedPos,
2357 kNumArgsTestedSize> {};
2358 class TrackingExactnessBit : public BitField<uint32_t,
2359 bool,
2360 kTrackingExactnessPos,
2361 kTrackingExactnessSize> {};
2362 class DeoptReasonBits : public BitField<uint32_t,
2363 uint32_t,
2364 ICData::kDeoptReasonPos,
2365 ICData::kDeoptReasonSize> {};
2366 class RebindRuleBits : public BitField<uint32_t,
2367 uint32_t,
2368 ICData::kRebindRulePos,
2369 ICData::kRebindRuleSize> {};
2370 class MegamorphicBit
2371 : public BitField<uint32_t, bool, kMegamorphicPos, kMegamorphicSize> {};
2372
2373 class ReceiverCannotBeSmiBit : public BitField<uint32_t,
2374 bool,
2375 kReceiverCannotBeSmiPos,
2376 kReceiverCannotBeSmiSize> {};
2377
2378#if defined(DEBUG)
2379 // Used in asserts to verify that a check is not added twice.
2380 bool HasCheck(const GrowableArray<intptr_t>& cids) const;
2381#endif // DEBUG
2382
2383 intptr_t TestEntryLength() const;
2384 static ArrayPtr NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
2385 bool tracking_exactness);
2386 static ArrayPtr CachedEmptyICDataArray(intptr_t num_args_tested,
2387 bool tracking_exactness);
2388 static ICDataPtr NewDescriptor(Zone* zone,
2389 const Function& owner,
2390 const String& target_name,
2391 const Array& arguments_descriptor,
2392 intptr_t deopt_id,
2393 intptr_t num_args_tested,
2394 RebindRule rebind_rule,
2395 const AbstractType& receiver_type);
2396
2397 static void WriteSentinel(const Array& data, intptr_t test_entry_length);
2398
2399 // A cache of VM heap allocated preinitialized empty ic data entry arrays.
2400 static ArrayPtr cached_icdata_arrays_[kCachedICDataArrayCount];
2401
2402 FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, CallSiteData);
2403 friend class CallSiteResetter;
2404 friend class CallTargets;
2405 friend class Class;
2406 friend class Deserializer;
2407 friend class ICDataTestTask;
2408 friend class Interpreter;
2409 friend class Serializer;
2410 friend class SnapshotWriter;
2411};
2412
2413// Often used constants for number of free function type parameters.
2414enum {
2415 kNoneFree = 0,
2416
2417 // 'kCurrentAndEnclosingFree' is used when partially applying a signature
2418 // function to a set of type arguments. It indicates that the set of type
2419 // parameters declared by the current function and enclosing functions should
2420 // be considered free, and the current function type parameters should be
2421 // substituted as well.
2422 //
  // For instance, if the signature "<T>(T, R) => T" (where R is a type
  // parameter of an enclosing function) is instantiated with function type
  // arguments [int, String] and kCurrentAndEnclosingFree is supplied, the
  // result of the instantiation will be "(String, int) => String".
2426 kCurrentAndEnclosingFree = kMaxInt32 - 1,
2427
  // All type parameters (of the current and enclosing functions) are free.
2429 kAllFree = kMaxInt32,
2430};
2431
2432// Formatting configuration for Function::PrintName.
2433struct NameFormattingParams {
2434 Object::NameVisibility name_visibility;
2435 bool disambiguate_names;
2436
  // By default the function name includes the name of the enclosing class, if
  // any. However, in some contexts this information is redundant and the class
  // name is already known. In that case, setting |include_class_name| to false
  // excludes it from the formatted name.
2441 bool include_class_name = true;
2442
  // By default the function name includes the name of the enclosing function,
  // if any. However, in some contexts this information is redundant and the
  // name of the enclosing function is already known. In that case, setting
  // |include_parent_name| to false excludes it from the formatted name.
2448 bool include_parent_name = true;
2449
2450 NameFormattingParams(Object::NameVisibility visibility,
2451 Object::NameDisambiguation name_disambiguation =
2452 Object::NameDisambiguation::kNo)
2453 : name_visibility(visibility),
2454 disambiguate_names(name_disambiguation ==
2455 Object::NameDisambiguation::kYes) {}
2456
2457 static NameFormattingParams DisambiguatedWithoutClassName(
2458 Object::NameVisibility visibility) {
2459 NameFormattingParams params(visibility, Object::NameDisambiguation::kYes);
2460 params.include_class_name = false;
2461 return params;
2462 }
2463
2464 static NameFormattingParams DisambiguatedUnqualified(
2465 Object::NameVisibility visibility) {
2466 NameFormattingParams params(visibility, Object::NameDisambiguation::kYes);
2467 params.include_class_name = false;
2468 params.include_parent_name = false;
2469 return params;
2470 }
2471};
2472
2473class Function : public Object {
2474 public:
2475 StringPtr name() const { return raw_ptr()->name_; }
2476 StringPtr UserVisibleName() const; // Same as scrubbed name.
2477 const char* UserVisibleNameCString() const;
2478
2479 const char* NameCString(NameVisibility name_visibility) const;
2480
2481 void PrintName(const NameFormattingParams& params,
2482 BaseTextBuffer* printer) const;
2483 StringPtr QualifiedScrubbedName() const;
2484 StringPtr QualifiedUserVisibleName() const;
2485
2486 virtual StringPtr DictionaryName() const { return name(); }
2487
2488 StringPtr GetSource() const;
2489
2490 // Return the type of this function's signature. It may not be canonical yet.
2491 // For example, if this function has a signature of the form
2492 // '(T, [B, C]) => R', where 'T' and 'R' are type parameters of the
2493 // owner class of this function, then its signature type is a parameterized
2494 // function type with uninstantiated type arguments 'T' and 'R' as elements of
2495 // its type argument vector.
2496 // A function type is non-nullable by default.
2497 TypePtr SignatureType(
2498 Nullability nullability = Nullability::kNonNullable) const;
2499 TypePtr ExistingSignatureType() const;
2500
2501 // Update the signature type (with a canonical version).
2502 void SetSignatureType(const Type& value) const;
2503
2504 // Set the "C signature" function for an FFI trampoline.
2505 // Can only be used on FFI trampolines.
2506 void SetFfiCSignature(const Function& sig) const;
2507
2508 // Retrieves the "C signature" function for an FFI trampoline.
2509 // Can only be used on FFI trampolines.
2510 FunctionPtr FfiCSignature() const;
2511
2512 bool FfiCSignatureContainsHandles() const;
2513
2514 // Can only be called on FFI trampolines.
2515 // -1 for Dart -> native calls.
2516 int32_t FfiCallbackId() const;
2517
2518 // Can only be called on FFI trampolines.
2519 void SetFfiCallbackId(int32_t value) const;
2520
2521 // Can only be called on FFI trampolines.
2522 // Null for Dart -> native calls.
2523 FunctionPtr FfiCallbackTarget() const;
2524
2525 // Can only be called on FFI trampolines.
2526 void SetFfiCallbackTarget(const Function& target) const;
2527
2528 // Can only be called on FFI trampolines.
2529 // Null for Dart -> native calls.
2530 InstancePtr FfiCallbackExceptionalReturn() const;
2531
2532 // Can only be called on FFI trampolines.
2533 void SetFfiCallbackExceptionalReturn(const Instance& value) const;
2534
2535 // Return a new function with instantiated result and parameter types.
2536 FunctionPtr InstantiateSignatureFrom(
2537 const TypeArguments& instantiator_type_arguments,
2538 const TypeArguments& function_type_arguments,
2539 intptr_t num_free_fun_type_params,
2540 Heap::Space space) const;
2541
2542 // Build a string of the form '<T>(T, {B b, C c}) => R' representing the
2543 // internal signature of the given function. In this example, T is a type
2544 // parameter of this function and R is a type parameter of class C, the owner
2545 // of the function. B and C are not type parameters.
2546 StringPtr Signature() const;
2547
2548 // Build a string of the form '<T>(T, {B b, C c}) => R' representing the
2549 // user visible signature of the given function. In this example, T is a type
2550 // parameter of this function and R is a type parameter of class C, the owner
2551 // of the function. B and C are not type parameters.
2552 // Implicit parameters are hidden.
2553 StringPtr UserVisibleSignature() const;
2554
2555 void PrintSignature(NameVisibility name_visibility,
2556 BaseTextBuffer* printer) const;
2557
  // Returns true if the signature of this function is instantiated, i.e. if it
  // does not involve generic parameter types or a generic result type.
2560 // Note that function type parameters declared by this function do not make
2561 // its signature uninstantiated, only type parameters declared by parent
2562 // generic functions or class type parameters.
2563 bool HasInstantiatedSignature(Genericity genericity = kAny,
2564 intptr_t num_free_fun_type_params = kAllFree,
2565 TrailPtr trail = nullptr) const;
2566
2567 ClassPtr Owner() const;
2568 void set_owner(const Object& value) const;
2569 ClassPtr origin() const;
2570 ScriptPtr script() const;
2571 ObjectPtr RawOwner() const { return raw_ptr()->owner_; }
2572
2573 // The NNBD mode of the library declaring this function.
2574 // TODO(alexmarkov): nnbd_mode() doesn't work for mixins.
2575 // It should be either removed or fixed.
2576 NNBDMode nnbd_mode() const { return Class::Handle(origin()).nnbd_mode(); }
2577
2578 RegExpPtr regexp() const;
2579 intptr_t string_specialization_cid() const;
2580 bool is_sticky_specialization() const;
2581 void SetRegExpData(const RegExp& regexp,
2582 intptr_t string_specialization_cid,
2583 bool sticky) const;
2584
2585 StringPtr native_name() const;
2586 void set_native_name(const String& name) const;
2587
2588 AbstractTypePtr result_type() const { return raw_ptr()->result_type_; }
2589 void set_result_type(const AbstractType& value) const;
2590
2591 // The parameters, starting with NumImplicitParameters() parameters which are
2592 // only visible to the VM, but not to Dart users.
2593 // Note that type checks exclude implicit parameters.
2594 AbstractTypePtr ParameterTypeAt(intptr_t index) const;
2595 void SetParameterTypeAt(intptr_t index, const AbstractType& value) const;
2596 ArrayPtr parameter_types() const { return raw_ptr()->parameter_types_; }
2597 void set_parameter_types(const Array& value) const;
2598
2599 // Parameter names are valid for all valid parameter indices, and are not
  // limited to named optional parameters. If there are parameter flags (e.g.
2601 // required) they're stored at the end of this array, so the size of this
2602 // array isn't necessarily NumParameters(), but the first NumParameters()
2603 // elements are the names.
2604 StringPtr ParameterNameAt(intptr_t index) const;
2605 void SetParameterNameAt(intptr_t index, const String& value) const;
2606 ArrayPtr parameter_names() const { return raw_ptr()->parameter_names_; }
2607 void set_parameter_names(const Array& value) const;
2608
2609 // The required flags are stored at the end of the parameter_names. The flags
2610 // are packed into SMIs, but omitted if they're 0.
2611 bool IsRequiredAt(intptr_t index) const;
2612 void SetIsRequiredAt(intptr_t index) const;
2613
2614 // Truncate the parameter names array to remove any unused flag slots. Make
2615 // sure to only do this after calling SetIsRequiredAt as necessary.
2616 void TruncateUnusedParameterFlags() const;
2617
2618 // Returns the length of the parameter names array that is required to store
2619 // all the names plus all their flags. This may be an overestimate if some
2620 // parameters don't have flags.
2621 static intptr_t NameArrayLengthIncludingFlags(intptr_t num_parameters);
2622
2623 // The type parameters (and their bounds) are specified as an array of
2624 // TypeParameter.
2625 TypeArgumentsPtr type_parameters() const {
2626 return raw_ptr()->type_parameters_;
2627 }
2628 void set_type_parameters(const TypeArguments& value) const;
2629 intptr_t NumTypeParameters(Thread* thread) const;
2630 intptr_t NumTypeParameters() const {
2631 return NumTypeParameters(Thread::Current());
2632 }
2633
2634 // Returns true if this function has the same number of type parameters with
2635 // equal bounds as the other function. Type parameter names are ignored.
2636 bool HasSameTypeParametersAndBounds(const Function& other,
2637 TypeEquality kind) const;
2638
2639 // Return the number of type parameters declared in parent generic functions.
2640 intptr_t NumParentTypeParameters() const;
2641
2642 // Print the signature type of this function and of all of its parents.
2643 void PrintSignatureTypes() const;
2644
  // Return a TypeParameter if type_name is a type parameter of this function
  // or of one of its parent functions; return null otherwise.
  // If function_level is not NULL, it is adjusted accordingly (it is an in/out
  // parameter).
2649 TypeParameterPtr LookupTypeParameter(const String& type_name,
2650 intptr_t* function_level) const;
2651
2652 // Return true if this function declares type parameters.
2653 bool IsGeneric() const { return NumTypeParameters(Thread::Current()) > 0; }
2654
2655 // Return true if any parent function of this function is generic.
2656 bool HasGenericParent() const;
2657
2658 // Not thread-safe; must be called in the main thread.
2659 // Sets function's code and code's function.
2660 void InstallOptimizedCode(const Code& code) const;
2661 void AttachCode(const Code& value) const;
2662 void SetInstructions(const Code& value) const;
2663 void ClearCode() const;
2664 void ClearBytecode() const;
2665
2666 // Disables optimized code and switches to unoptimized code.
2667 void SwitchToUnoptimizedCode() const;
2668
2669 // Ensures that the function has code. If there is no code it compiles the
2670 // unoptimized version of the code. If the code contains errors, it calls
2671 // Exceptions::PropagateError and does not return. Normally returns the
2672 // current code, whether it is optimized or unoptimized.
2673 CodePtr EnsureHasCode() const;
2674
2675 // Disables optimized code and switches to unoptimized code (or the lazy
2676 // compilation stub).
2677 void SwitchToLazyCompiledUnoptimizedCode() const;
2678
2679 // Compiles unoptimized code (if necessary) and attaches it to the function.
2680 void EnsureHasCompiledUnoptimizedCode() const;
2681
2682 // Return the most recently compiled and installed code for this function.
2683 // It is not the only Code object that points to this function.
2684 CodePtr CurrentCode() const { return CurrentCodeOf(raw()); }
2685
2686 bool SafeToClosurize() const;
2687
2688 static CodePtr CurrentCodeOf(const FunctionPtr function) {
2689 return function->ptr()->code_;
2690 }
2691
2692 CodePtr unoptimized_code() const {
2693#if defined(DART_PRECOMPILED_RUNTIME)
2694 return static_cast<CodePtr>(Object::null());
2695#else
2696 return raw_ptr()->unoptimized_code_;
2697#endif
2698 }
2699 void set_unoptimized_code(const Code& value) const;
2700 bool HasCode() const;
2701 static bool HasCode(FunctionPtr function);
2702#if !defined(DART_PRECOMPILED_RUNTIME)
2703 static inline bool HasBytecode(FunctionPtr function);
2704#endif
2705
2706 static intptr_t code_offset() { return OFFSET_OF(FunctionLayout, code_); }
2707
2708 static intptr_t result_type_offset() {
2709 return OFFSET_OF(FunctionLayout, result_type_);
2710 }
2711
2712 static intptr_t entry_point_offset(
2713 CodeEntryKind entry_kind = CodeEntryKind::kNormal) {
2714 switch (entry_kind) {
2715 case CodeEntryKind::kNormal:
2716 return OFFSET_OF(FunctionLayout, entry_point_);
2717 case CodeEntryKind::kUnchecked:
2718 return OFFSET_OF(FunctionLayout, unchecked_entry_point_);
2719 default:
2720 UNREACHABLE();
2721 }
2722 }
2723
2724 static intptr_t unchecked_entry_point_offset() {
2725 return OFFSET_OF(FunctionLayout, unchecked_entry_point_);
2726 }
2727
2728#if !defined(DART_PRECOMPILED_RUNTIME)
2729 bool IsBytecodeAllowed(Zone* zone) const;
2730 void AttachBytecode(const Bytecode& bytecode) const;
2731 BytecodePtr bytecode() const { return raw_ptr()->bytecode_; }
2732 inline bool HasBytecode() const;
2733#else
2734 inline bool HasBytecode() const { return false; }
2735#endif
2736
2737 virtual intptr_t Hash() const;
2738
2739 // Returns true if there is at least one debugger breakpoint
2740 // set in this function.
2741 bool HasBreakpoint() const;
2742
2743 ContextScopePtr context_scope() const;
2744 void set_context_scope(const ContextScope& value) const;
2745
2746 // Enclosing function of this local function.
2747 FunctionPtr parent_function() const;
2748
2749 // Enclosing outermost function of this local function.
2750 FunctionPtr GetOutermostFunction() const;
2751
2752 void set_extracted_method_closure(const Function& function) const;
2753 FunctionPtr extracted_method_closure() const;
2754
2755 void set_saved_args_desc(const Array& array) const;
2756 ArrayPtr saved_args_desc() const;
2757
2758 void set_accessor_field(const Field& value) const;
2759 FieldPtr accessor_field() const;
2760
2761 bool IsRegularFunction() const {
2762 return kind() == FunctionLayout::kRegularFunction;
2763 }
2764
2765 bool IsMethodExtractor() const {
2766 return kind() == FunctionLayout::kMethodExtractor;
2767 }
2768
2769 bool IsNoSuchMethodDispatcher() const {
2770 return kind() == FunctionLayout::kNoSuchMethodDispatcher;
2771 }
2772
2773 bool IsInvokeFieldDispatcher() const {
2774 return kind() == FunctionLayout::kInvokeFieldDispatcher;
2775 }
2776
2777 bool IsDynamicInvocationForwarder() const {
2778 return kind() == FunctionLayout::kDynamicInvocationForwarder;
2779 }
2780
2781 bool IsImplicitGetterOrSetter() const {
2782 return kind() == FunctionLayout::kImplicitGetter ||
2783 kind() == FunctionLayout::kImplicitSetter ||
2784 kind() == FunctionLayout::kImplicitStaticGetter;
2785 }
2786
2787 // Returns true iff an implicit closure function has been created
2788 // for this function.
2789 bool HasImplicitClosureFunction() const {
2790 return implicit_closure_function() != null();
2791 }
2792
2793 // Returns the closure function implicitly created for this function. If none
2794 // exists yet, create one and remember it. Implicit closure functions are
2795 // used in VM Closure instances that represent results of tear-off operations.
2796 FunctionPtr ImplicitClosureFunction() const;
2797 void DropUncompiledImplicitClosureFunction() const;
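
  // Dart-level illustration (hedged): given
  //
  //   class C { int foo() => 0; }
  //
  // the tear-off expression `c.foo` evaluates to a Closure whose function is
  // the implicit closure function created for C.foo.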
2798
2799 // Return the closure implicitly created for this function.
2800 // If none exists yet, create one and remember it.
2801 InstancePtr ImplicitStaticClosure() const;
2802
2803 InstancePtr ImplicitInstanceClosure(const Instance& receiver) const;
2804
2805 intptr_t ComputeClosureHash() const;
2806
2807 // Redirection information for a redirecting factory.
2808 bool IsRedirectingFactory() const;
2809 TypePtr RedirectionType() const;
2810 void SetRedirectionType(const Type& type) const;
2811 StringPtr RedirectionIdentifier() const;
2812 void SetRedirectionIdentifier(const String& identifier) const;
2813 FunctionPtr RedirectionTarget() const;
2814 void SetRedirectionTarget(const Function& target) const;
2815
2816 FunctionPtr ForwardingTarget() const;
2817 void SetForwardingChecks(const Array& checks) const;
2818
2819 FunctionLayout::Kind kind() const {
2820 return KindBits::decode(raw_ptr()->kind_tag_);
2821 }
2822 static FunctionLayout::Kind kind(FunctionPtr function) {
2823 return KindBits::decode(function->ptr()->kind_tag_);
2824 }
2825
2826 FunctionLayout::AsyncModifier modifier() const {
2827 return ModifierBits::decode(raw_ptr()->kind_tag_);
2828 }
2829
2830 static const char* KindToCString(FunctionLayout::Kind kind);
2831
2832 bool IsGenerativeConstructor() const {
2833 return (kind() == FunctionLayout::kConstructor) && !is_static();
2834 }
2835 bool IsImplicitConstructor() const;
2836 bool IsFactory() const {
2837 return (kind() == FunctionLayout::kConstructor) && is_static();
2838 }
2839
2840 static bool ClosureBodiesContainNonCovariantChecks() {
2841 return FLAG_precompiled_mode || FLAG_lazy_dispatchers;
2842 }
2843
2844 // Whether this function can receive an invocation where the number and names
2845 // of arguments have not been checked.
2846 bool CanReceiveDynamicInvocation() const {
2847 return (IsClosureFunction() && ClosureBodiesContainNonCovariantChecks()) ||
2848 IsFfiTrampoline();
2849 }
2850
2851 bool HasThisParameter() const {
2852 return IsDynamicFunction(/*allow_abstract=*/true) ||
2853 IsGenerativeConstructor() || (IsFieldInitializer() && !is_static());
2854 }
2855
2856 bool IsDynamicFunction(bool allow_abstract = false) const {
2857 if (is_static() || (!allow_abstract && is_abstract())) {
2858 return false;
2859 }
2860 switch (kind()) {
2861 case FunctionLayout::kRegularFunction:
2862 case FunctionLayout::kGetterFunction:
2863 case FunctionLayout::kSetterFunction:
2864 case FunctionLayout::kImplicitGetter:
2865 case FunctionLayout::kImplicitSetter:
2866 case FunctionLayout::kMethodExtractor:
2867 case FunctionLayout::kNoSuchMethodDispatcher:
2868 case FunctionLayout::kInvokeFieldDispatcher:
2869 case FunctionLayout::kDynamicInvocationForwarder:
2870 return true;
2871 case FunctionLayout::kClosureFunction:
2872 case FunctionLayout::kImplicitClosureFunction:
2873 case FunctionLayout::kSignatureFunction:
2874 case FunctionLayout::kConstructor:
2875 case FunctionLayout::kImplicitStaticGetter:
2876 case FunctionLayout::kFieldInitializer:
2877 case FunctionLayout::kIrregexpFunction:
2878 return false;
2879 default:
2880 UNREACHABLE();
2881 return false;
2882 }
2883 }
2884 bool IsStaticFunction() const {
2885 if (!is_static()) {
2886 return false;
2887 }
2888 switch (kind()) {
2889 case FunctionLayout::kRegularFunction:
2890 case FunctionLayout::kGetterFunction:
2891 case FunctionLayout::kSetterFunction:
2892 case FunctionLayout::kImplicitGetter:
2893 case FunctionLayout::kImplicitSetter:
2894 case FunctionLayout::kImplicitStaticGetter:
2895 case FunctionLayout::kFieldInitializer:
2896 case FunctionLayout::kIrregexpFunction:
2897 return true;
2898 case FunctionLayout::kClosureFunction:
2899 case FunctionLayout::kImplicitClosureFunction:
2900 case FunctionLayout::kSignatureFunction:
2901 case FunctionLayout::kConstructor:
2902 case FunctionLayout::kMethodExtractor:
2903 case FunctionLayout::kNoSuchMethodDispatcher:
2904 case FunctionLayout::kInvokeFieldDispatcher:
2905 case FunctionLayout::kDynamicInvocationForwarder:
2906 return false;
2907 default:
2908 UNREACHABLE();
2909 return false;
2910 }
2911 }
2912 bool IsInFactoryScope() const;
2913
2914 bool NeedsArgumentTypeChecks() const {
2915 return (IsClosureFunction() && ClosureBodiesContainNonCovariantChecks()) ||
2916 !(is_static() || (kind() == FunctionLayout::kConstructor));
2917 }
2918
2919 bool NeedsMonomorphicCheckedEntry(Zone* zone) const;
2920 bool HasDynamicCallers(Zone* zone) const;
2921 bool PrologueNeedsArgumentsDescriptor() const;
2922
2923 bool MayHaveUncheckedEntryPoint() const;
2924
2925 TokenPosition token_pos() const {
2926#if defined(DART_PRECOMPILED_RUNTIME)
2927 return TokenPosition();
2928#else
2929 return raw_ptr()->token_pos_;
2930#endif
2931 }
2932 void set_token_pos(TokenPosition value) const;
2933
2934 TokenPosition end_token_pos() const {
2935#if defined(DART_PRECOMPILED_RUNTIME)
2936 return TokenPosition();
2937#else
2938 return raw_ptr()->end_token_pos_;
2939#endif
2940 }
2941 void set_end_token_pos(TokenPosition value) const {
2942#if defined(DART_PRECOMPILED_RUNTIME)
2943 UNREACHABLE();
2944#else
2945 StoreNonPointer(&raw_ptr()->end_token_pos_, value);
2946#endif
2947 }
2948
2949 intptr_t num_fixed_parameters() const {
2950 return FunctionLayout::PackedNumFixedParameters::decode(
2951 raw_ptr()->packed_fields_);
2952 }
2953 void set_num_fixed_parameters(intptr_t value) const;
2954
2955 uint32_t packed_fields() const { return raw_ptr()->packed_fields_; }
2956 void set_packed_fields(uint32_t packed_fields) const;
2957
2958 bool HasOptionalParameters() const {
2959 return FunctionLayout::PackedNumOptionalParameters::decode(
2960 raw_ptr()->packed_fields_) > 0;
2961 }
2962 bool HasOptionalNamedParameters() const {
2963 return HasOptionalParameters() &&
2964 FunctionLayout::PackedHasNamedOptionalParameters::decode(
2965 raw_ptr()->packed_fields_);
2966 }
2967 bool HasOptionalPositionalParameters() const {
2968 return HasOptionalParameters() && !HasOptionalNamedParameters();
2969 }
2970 intptr_t NumOptionalParameters() const {
2971 return FunctionLayout::PackedNumOptionalParameters::decode(
2972 raw_ptr()->packed_fields_);
2973 }
2974 void SetNumOptionalParameters(intptr_t num_optional_parameters,
2975 bool are_optional_positional) const;
2976
2977 intptr_t NumOptionalPositionalParameters() const {
2978 return HasOptionalPositionalParameters() ? NumOptionalParameters() : 0;
2979 }
2980
2981 intptr_t NumOptionalNamedParameters() const {
2982 return HasOptionalNamedParameters() ? NumOptionalParameters() : 0;
2983 }
2984
2985 intptr_t NumParameters() const;
2986
2987 intptr_t NumImplicitParameters() const;
2988
2989#if defined(DART_PRECOMPILED_RUNTIME)
2990#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name) \
2991 static intptr_t name##_offset() { \
2992 UNREACHABLE(); \
2993 return 0; \
2994 } \
2995 return_type name() const { return 0; } \
2996 \
2997 void set_##name(type value) const { UNREACHABLE(); }
2998#else
2999#define DEFINE_GETTERS_AND_SETTERS(return_type, type, name) \
3000 static intptr_t name##_offset() { \
3001 return OFFSET_OF(FunctionLayout, name##_); \
3002 } \
3003 return_type name() const { return raw_ptr()->name##_; } \
3004 \
3005 void set_##name(type value) const { \
3006 StoreNonPointer(&raw_ptr()->name##_, value); \
3007 }
3008#endif
3009
3010 JIT_FUNCTION_COUNTERS(DEFINE_GETTERS_AND_SETTERS)
3011
3012#undef DEFINE_GETTERS_AND_SETTERS
3013
3014#if !defined(DART_PRECOMPILED_RUNTIME)
3015 intptr_t binary_declaration_offset() const {
3016 return FunctionLayout::BinaryDeclarationOffset::decode(
3017 raw_ptr()->binary_declaration_);
3018 }
3019 void set_binary_declaration_offset(intptr_t value) const {
3020 ASSERT(value >= 0);
3021 StoreNonPointer(&raw_ptr()->binary_declaration_,
3022 FunctionLayout::BinaryDeclarationOffset::update(
3023 value, raw_ptr()->binary_declaration_));
3024 }
3025#endif // !defined(DART_PRECOMPILED_RUNTIME)
3026
3027 intptr_t kernel_offset() const {
3028#if defined(DART_PRECOMPILED_RUNTIME)
3029 return 0;
3030#else
3031 ASSERT(!is_declared_in_bytecode());
3032 return binary_declaration_offset();
3033#endif
3034 }
3035
3036 void set_kernel_offset(intptr_t value) const {
3037#if defined(DART_PRECOMPILED_RUNTIME)
3038 UNREACHABLE();
3039#else
3040 ASSERT(!is_declared_in_bytecode());
3041 set_binary_declaration_offset(value);
3042#endif
3043 }
3044
3045 intptr_t bytecode_offset() const {
3046#if defined(DART_PRECOMPILED_RUNTIME)
3047 return 0;
3048#else
3049 ASSERT(is_declared_in_bytecode());
3050 return binary_declaration_offset();
3051#endif
3052 }
3053
3054 void set_bytecode_offset(intptr_t value) const {
3055#if defined(DART_PRECOMPILED_RUNTIME)
3056 UNREACHABLE();
3057#else
3058 ASSERT(is_declared_in_bytecode());
3059 set_binary_declaration_offset(value);
3060#endif
3061 }
3062
3063 bool is_declared_in_bytecode() const {
3064#if defined(DART_PRECOMPILED_RUNTIME)
3065 return false;
3066#else
3067 return FunctionLayout::IsDeclaredInBytecode::decode(
3068 raw_ptr()->binary_declaration_);
3069#endif
3070 }
3071
3072#if !defined(DART_PRECOMPILED_RUNTIME)
3073 void set_is_declared_in_bytecode(bool value) const {
3074 StoreNonPointer(&raw_ptr()->binary_declaration_,
3075 FunctionLayout::IsDeclaredInBytecode::update(
3076 value, raw_ptr()->binary_declaration_));
3077 }
3078#endif // !defined(DART_PRECOMPILED_RUNTIME)
3079
3080 void InheritBinaryDeclarationFrom(const Function& src) const;
3081 void InheritBinaryDeclarationFrom(const Field& src) const;
3082
3083 static const intptr_t kMaxInstructionCount = (1 << 16) - 1;
3084
3085 void SetOptimizedInstructionCountClamped(uintptr_t value) const {
3086 if (value > kMaxInstructionCount) value = kMaxInstructionCount;
3087 set_optimized_instruction_count(value);
3088 }
3089
3090 void SetOptimizedCallSiteCountClamped(uintptr_t value) const {
3091 if (value > kMaxInstructionCount) value = kMaxInstructionCount;
3092 set_optimized_call_site_count(value);
3093 }
3094
3095 void SetKernelDataAndScript(const Script& script,
3096 const ExternalTypedData& data,
3097 intptr_t offset) const;
3098
3099 intptr_t KernelDataProgramOffset() const;
3100
3101 ExternalTypedDataPtr KernelData() const;
3102
3103 bool IsOptimizable() const;
3104 void SetIsOptimizable(bool value) const;
3105
  // Whether this function must be optimized immediately and cannot be compiled
  // with the unoptimizing compiler. Such a function must not deoptimize, since
  // we won't generate deoptimization info or register dependencies. It will be
  // compiled into optimized code immediately when it is run.
3111 bool ForceOptimize() const {
3112 return IsFfiFromAddress() || IsFfiGetAddress() || IsFfiLoad() ||
3113 IsFfiStore() || IsFfiTrampoline() || IsTypedDataViewFactory() ||
3114 IsUtf8Scan();
3115 }
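  // Illustrative note (not a definition of the compiler pipeline): callers are
  // expected to bypass the unoptimizing compiler entirely for such functions,
  // roughly:
  //
  //   if (function.ForceOptimize()) {
  //     // Compile with the optimizing compiler right away; the resulting
  //     // code must never deoptimize.
  //   }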
3116
3117 bool CanBeInlined() const;
3118
3119 MethodRecognizer::Kind recognized_kind() const {
3120 return RecognizedBits::decode(raw_ptr()->kind_tag_);
3121 }
3122 void set_recognized_kind(MethodRecognizer::Kind value) const;
3123
3124 bool IsRecognized() const {
3125 return recognized_kind() != MethodRecognizer::kUnknown;
3126 }
3127
3128 bool HasOptimizedCode() const;
3129
3130 // Whether the function is ready for compiler optimizations.
3131 bool ShouldCompilerOptimize() const;
3132
  // Returns true if the argument counts are valid for calling this function.
  // Otherwise, returns false and stores the reason in 'error_message' (if it
  // is not NULL).
3135 bool AreValidArgumentCounts(intptr_t num_type_arguments,
3136 intptr_t num_arguments,
3137 intptr_t num_named_arguments,
3138 String* error_message) const;
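  // Hypothetical call site, sketched for illustration only (the argument count
  // variables are made up):
  //
  //   String& error = String::Handle();
  //   if (!function.AreValidArgumentCounts(num_type_args, num_args,
  //                                        num_named_args, &error)) {
  //     // 'error' now describes why the counts do not match the signature.
  //   }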
3139
3140 // Returns a TypeError if the provided arguments don't match the function
3141 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3142 //
3143 // If the function has a non-null receiver in the arguments, the instantiator
3144 // type arguments are retrieved from the receiver, otherwise the null type
3145 // arguments vector is used.
3146 //
3147 // If the function is generic, the appropriate function type arguments are
3148 // retrieved either from the arguments array or the receiver (if a closure).
3149 // If no function type arguments are available in either location, the bounds
3150 // of the function type parameters are instantiated and used as the function
3151 // type arguments.
3152 //
3153 // The local function type arguments (_not_ parent function type arguments)
3154 // are also checked against the bounds of the corresponding parameters to
3155 // ensure they are appropriate subtypes if the function is generic.
3156 ObjectPtr DoArgumentTypesMatch(const Array& args,
3157 const ArgumentsDescriptor& arg_names) const;
3158
3159 // Returns a TypeError if the provided arguments don't match the function
3160 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3161 //
3162 // If the function is generic, the appropriate function type arguments are
3163 // retrieved either from the arguments array or the receiver (if a closure).
3164 // If no function type arguments are available in either location, the bounds
3165 // of the function type parameters are instantiated and used as the function
3166 // type arguments.
3167 //
3168 // The local function type arguments (_not_ parent function type arguments)
3169 // are also checked against the bounds of the corresponding parameters to
3170 // ensure they are appropriate subtypes if the function is generic.
3171 ObjectPtr DoArgumentTypesMatch(
3172 const Array& args,
3173 const ArgumentsDescriptor& arg_names,
3174 const TypeArguments& instantiator_type_args) const;
3175
3176 // Returns a TypeError if the provided arguments don't match the function
3177 // parameter types, null otherwise. Assumes AreValidArguments is called first.
3178 //
3179 // The local function type arguments (_not_ parent function type arguments)
3180 // are also checked against the bounds of the corresponding parameters to
3181 // ensure they are appropriate subtypes if the function is generic.
3182 ObjectPtr DoArgumentTypesMatch(const Array& args,
3183 const ArgumentsDescriptor& arg_names,
3184 const TypeArguments& instantiator_type_args,
3185 const TypeArguments& function_type_args) const;
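  // Hypothetical usage sketch (illustrative only): argument counts are usually
  // validated before the argument types, e.g.
  //
  //   if (function.AreValidArguments(args_desc, &error)) {
  //     const Object& result =
  //         Object::Handle(function.DoArgumentTypesMatch(args, args_desc));
  //     if (result.IsError()) {
  //       // The provided argument types did not match the parameter types.
  //     }
  //   }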
3186
  // Returns true if the type argument count, total argument count and the
  // names of optional arguments are valid for calling this function.
  // Otherwise, returns false and stores the reason in 'error_message' (if it
  // is not NULL).
3190 bool AreValidArguments(intptr_t num_type_arguments,
3191 intptr_t num_arguments,
3192 const Array& argument_names,
3193 String* error_message) const;
3194 bool AreValidArguments(const ArgumentsDescriptor& args_desc,
3195 String* error_message) const;
3196
  // Returns a fully qualified name that uniquely identifies the function under
  // gdb and during AST printing. The special ':' character, if present, is
  // replaced by '_'.
3199 const char* ToFullyQualifiedCString() const;
3200
3201 const char* ToLibNamePrefixedQualifiedCString() const;
3202
3203 const char* ToQualifiedCString() const;
3204
3205 static constexpr intptr_t maximum_unboxed_parameter_count() {
    // Subtract one for the entry that represents the return value.
3207 return FunctionLayout::UnboxedParameterBitmap::kCapacity - 1;
3208 }
3209
3210 void reset_unboxed_parameters_and_return() const {
3211#if !defined(DART_PRECOMPILED_RUNTIME)
3212 StoreNonPointer(&raw_ptr()->unboxed_parameters_info_,
3213 FunctionLayout::UnboxedParameterBitmap());
3214#endif // !defined(DART_PRECOMPILED_RUNTIME)
3215 }
3216
3217 void set_unboxed_integer_parameter_at(intptr_t index) const {
3218#if !defined(DART_PRECOMPILED_RUNTIME)
3219 ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
3220 index++; // position 0 is reserved for the return value
3221 const_cast<FunctionLayout::UnboxedParameterBitmap*>(
3222 &raw_ptr()->unboxed_parameters_info_)
3223 ->SetUnboxedInteger(index);
3224#else
3225 UNREACHABLE();
3226#endif // !defined(DART_PRECOMPILED_RUNTIME)
3227 }
3228
3229 void set_unboxed_double_parameter_at(intptr_t index) const {
3230#if !defined(DART_PRECOMPILED_RUNTIME)
3231 ASSERT(index >= 0 && index < maximum_unboxed_parameter_count());
3232 index++; // position 0 is reserved for the return value
3233 const_cast<FunctionLayout::UnboxedParameterBitmap*>(
3234 &raw_ptr()->unboxed_parameters_info_)
3235 ->SetUnboxedDouble(index);
3236
3237#else
3238 UNREACHABLE();
3239#endif // !defined(DART_PRECOMPILED_RUNTIME)
3240 }
3241
3242 void set_unboxed_integer_return() const {
3243#if !defined(DART_PRECOMPILED_RUNTIME)
3244 const_cast<FunctionLayout::UnboxedParameterBitmap*>(
3245 &raw_ptr()->unboxed_parameters_info_)
3246 ->SetUnboxedInteger(0);
3247#else
3248 UNREACHABLE();
3249#endif // !defined(DART_PRECOMPILED_RUNTIME)
3250 }
3251
3252 void set_unboxed_double_return() const {
3253#if !defined(DART_PRECOMPILED_RUNTIME)
3254 const_cast<FunctionLayout::UnboxedParameterBitmap*>(
3255 &raw_ptr()->unboxed_parameters_info_)
3256 ->SetUnboxedDouble(0);
3257
3258#else
3259 UNREACHABLE();
3260#endif // !defined(DART_PRECOMPILED_RUNTIME)
3261 }
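  // Informal layout note: slot 0 of UnboxedParameterBitmap describes the
  // return value and slot i + 1 describes parameter i, which is why the
  // setters above and the getters below shift 'index' by one.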
3262
3263 bool is_unboxed_parameter_at(intptr_t index) const {
3264#if !defined(DART_PRECOMPILED_RUNTIME)
3265 ASSERT(index >= 0);
3266 index++; // position 0 is reserved for the return value
3267 return raw_ptr()->unboxed_parameters_info_.IsUnboxed(index);
3268#else
3269 return false;
3270#endif // !defined(DART_PRECOMPILED_RUNTIME)
3271 }
3272
3273 bool is_unboxed_integer_parameter_at(intptr_t index) const {
3274#if !defined(DART_PRECOMPILED_RUNTIME)
3275 ASSERT(index >= 0);
3276 index++; // position 0 is reserved for the return value
3277 return raw_ptr()->unboxed_parameters_info_.IsUnboxedInteger(index);
3278#else
3279 return false;
3280#endif // !defined(DART_PRECOMPILED_RUNTIME)
3281 }
3282
3283 bool is_unboxed_double_parameter_at(intptr_t index) const {
3284#if !defined(DART_PRECOMPILED_RUNTIME)
3285 ASSERT(index >= 0);
3286 index++; // position 0 is reserved for the return value
3287 return raw_ptr()->unboxed_parameters_info_.IsUnboxedDouble(index);
3288#else
3289 return false;
3290#endif // !defined(DART_PRECOMPILED_RUNTIME)
3291 }
3292
3293 bool has_unboxed_return() const {
3294#if !defined(DART_PRECOMPILED_RUNTIME)
3295 return raw_ptr()->unboxed_parameters_info_.IsUnboxed(0);
3296#else
3297 return false;
3298#endif // !defined(DART_PRECOMPILED_RUNTIME)
3299 }
3300
3301 bool has_unboxed_integer_return() const {
3302#if !defined(DART_PRECOMPILED_RUNTIME)
3303 return raw_ptr()->unboxed_parameters_info_.IsUnboxedInteger(0);
3304#else
3305 return false;
3306#endif // !defined(DART_PRECOMPILED_RUNTIME)
3307 }
3308
3309 bool has_unboxed_double_return() const {
3310#if !defined(DART_PRECOMPILED_RUNTIME)
3311 return raw_ptr()->unboxed_parameters_info_.IsUnboxedDouble(0);
3312#else
3313 return false;
3314#endif // !defined(DART_PRECOMPILED_RUNTIME)
3315 }
3316
3317#if !defined(DART_PRECOMPILED_RUNTIME)
3318 bool HasUnboxedParameters() const {
3319 return raw_ptr()->unboxed_parameters_info_.HasUnboxedParameters();
3320 }
3321 bool HasUnboxedReturnValue() const {
3322 return raw_ptr()->unboxed_parameters_info_.HasUnboxedReturnValue();
3323 }
3324#endif // !defined(DART_PRECOMPILED_RUNTIME)
3325
3326 // Returns true if the type of this function is a subtype of the type of
3327 // the other function.
3328 bool IsSubtypeOf(const Function& other, Heap::Space space) const;
3329
3330 bool IsDispatcherOrImplicitAccessor() const {
3331 switch (kind()) {
3332 case FunctionLayout::kImplicitGetter:
3333 case FunctionLayout::kImplicitSetter:
3334 case FunctionLayout::kImplicitStaticGetter:
3335 case FunctionLayout::kNoSuchMethodDispatcher:
3336 case FunctionLayout::kInvokeFieldDispatcher:
3337 case FunctionLayout::kDynamicInvocationForwarder:
3338 return true;
3339 default:
3340 return false;
3341 }
3342 }
3343
3344 // Returns true if this function represents an explicit getter function.
3345 bool IsGetterFunction() const {
3346 return kind() == FunctionLayout::kGetterFunction;
3347 }
3348
3349 // Returns true if this function represents an implicit getter function.
3350 bool IsImplicitGetterFunction() const {
3351 return kind() == FunctionLayout::kImplicitGetter;
3352 }
3353
3354 // Returns true if this function represents an implicit static getter
3355 // function.
3356 bool IsImplicitStaticGetterFunction() const {
3357 return kind() == FunctionLayout::kImplicitStaticGetter;
3358 }
3359
3360 // Returns true if this function represents an explicit setter function.
3361 bool IsSetterFunction() const {
3362 return kind() == FunctionLayout::kSetterFunction;
3363 }
3364
3365 // Returns true if this function represents an implicit setter function.
3366 bool IsImplicitSetterFunction() const {
3367 return kind() == FunctionLayout::kImplicitSetter;
3368 }
3369
3370 // Returns true if this function represents an initializer for a static or
3371 // instance field. The function returns the initial value and the caller is
3372 // responsible for setting the field.
3373 bool IsFieldInitializer() const {
3374 return kind() == FunctionLayout::kFieldInitializer;
3375 }
3376
3377 // Returns true if this function represents a (possibly implicit) closure
3378 // function.
3379 bool IsClosureFunction() const {
3380 FunctionLayout::Kind k = kind();
3381 return (k == FunctionLayout::kClosureFunction) ||
3382 (k == FunctionLayout::kImplicitClosureFunction);
3383 }
3384
3385 // Returns true if this function represents a generated irregexp function.
3386 bool IsIrregexpFunction() const {
3387 return kind() == FunctionLayout::kIrregexpFunction;
3388 }
3389
3390 // Returns true if this function represents an implicit closure function.
3391 bool IsImplicitClosureFunction() const {
3392 return kind() == FunctionLayout::kImplicitClosureFunction;
3393 }
3394
  // Returns true if this function represents a non-implicit closure function.
3396 bool IsNonImplicitClosureFunction() const {
3397 return IsClosureFunction() && !IsImplicitClosureFunction();
3398 }
3399
3400 // Returns true if this function represents an implicit static closure
3401 // function.
3402 bool IsImplicitStaticClosureFunction() const {
3403 return IsImplicitClosureFunction() && is_static();
3404 }
3405 static bool IsImplicitStaticClosureFunction(FunctionPtr func);
3406
3407 // Returns true if this function represents an implicit instance closure
3408 // function.
3409 bool IsImplicitInstanceClosureFunction() const {
3410 return IsImplicitClosureFunction() && !is_static();
3411 }
3412
3413 // Returns true if this function represents a local function.
3414 bool IsLocalFunction() const { return parent_function() != Function::null(); }
3415
3416 // Returns true if this function represents a signature function without code.
3417 bool IsSignatureFunction() const {
3418 return kind() == FunctionLayout::kSignatureFunction;
3419 }
3420 static bool IsSignatureFunction(FunctionPtr function) {
3421 NoSafepointScope no_safepoint;
3422 return KindBits::decode(function->ptr()->kind_tag_) ==
3423 FunctionLayout::kSignatureFunction;
3424 }
3425
3426 // Returns true if this function represents an ffi trampoline.
3427 bool IsFfiTrampoline() const {
3428 return kind() == FunctionLayout::kFfiTrampoline;
3429 }
3430 static bool IsFfiTrampoline(FunctionPtr function) {
3431 NoSafepointScope no_safepoint;
3432 return KindBits::decode(function->ptr()->kind_tag_) ==
3433 FunctionLayout::kFfiTrampoline;
3434 }
3435
3436 bool IsFfiLoad() const {
3437 const auto kind = recognized_kind();
3438 return MethodRecognizer::kFfiLoadInt8 <= kind &&
3439 kind <= MethodRecognizer::kFfiLoadPointer;
3440 }
3441
3442 bool IsFfiStore() const {
3443 const auto kind = recognized_kind();
3444 return MethodRecognizer::kFfiStoreInt8 <= kind &&
3445 kind <= MethodRecognizer::kFfiStorePointer;
3446 }
3447
3448 bool IsFfiFromAddress() const {
3449 const auto kind = recognized_kind();
3450 return kind == MethodRecognizer::kFfiFromAddress;
3451 }
3452
3453 bool IsFfiGetAddress() const {
3454 const auto kind = recognized_kind();
3455 return kind == MethodRecognizer::kFfiGetAddress;
3456 }
3457
3458 bool IsUtf8Scan() const {
3459 const auto kind = recognized_kind();
3460 return kind == MethodRecognizer::kUtf8DecoderScan;
3461 }
3462
3463 bool IsAsyncFunction() const { return modifier() == FunctionLayout::kAsync; }
3464
3465 bool IsAsyncClosure() const {
3466 return is_generated_body() &&
3467 Function::Handle(parent_function()).IsAsyncFunction();
3468 }
3469
3470 bool IsGenerator() const {
3471 return (modifier() & FunctionLayout::kGeneratorBit) != 0;
3472 }
3473
3474 bool IsSyncGenerator() const {
3475 return modifier() == FunctionLayout::kSyncGen;
3476 }
3477
3478 bool IsSyncGenClosure() const {
3479 return is_generated_body() &&
3480 Function::Handle(parent_function()).IsSyncGenerator();
3481 }
3482
3483 bool IsGeneratorClosure() const {
3484 return is_generated_body() &&
3485 Function::Handle(parent_function()).IsGenerator();
3486 }
3487
3488 bool IsAsyncGenerator() const {
3489 return modifier() == FunctionLayout::kAsyncGen;
3490 }
3491
3492 bool IsAsyncGenClosure() const {
3493 return is_generated_body() &&
3494 Function::Handle(parent_function()).IsAsyncGenerator();
3495 }
3496
3497 bool IsAsyncOrGenerator() const {
3498 return modifier() != FunctionLayout::kNoModifier;
3499 }
3500
  // Recognize synthetic sync-yielding functions like the innermost:
3502 // user_func /* was sync* */ {
3503 // :sync_op_gen() {
3504 // :sync_op() yielding {
3505 // // ...
3506 // }
3507 // }
3508 // }
3509 bool IsSyncYielding() const {
3510 return (parent_function() != Function::null())
3511 ? Function::Handle(parent_function()).IsSyncGenClosure()
3512 : false;
3513 }
3514
3515 bool IsTypedDataViewFactory() const {
3516 if (is_native() && kind() == FunctionLayout::kConstructor) {
3517 // This is a native factory constructor.
3518 const Class& klass = Class::Handle(Owner());
3519 return IsTypedDataViewClassId(klass.id());
3520 }
3521 return false;
3522 }
3523
3524 DART_WARN_UNUSED_RESULT
3525 ErrorPtr VerifyCallEntryPoint() const;
3526
3527 DART_WARN_UNUSED_RESULT
3528 ErrorPtr VerifyClosurizedEntryPoint() const;
3529
3530 static intptr_t InstanceSize() {
3531 return RoundedAllocationSize(sizeof(FunctionLayout));
3532 }
3533
3534 static FunctionPtr New(const String& name,
3535 FunctionLayout::Kind kind,
3536 bool is_static,
3537 bool is_const,
3538 bool is_abstract,
3539 bool is_external,
3540 bool is_native,
3541 const Object& owner,
3542 TokenPosition token_pos,
3543 Heap::Space space = Heap::kOld);
3544
3545 // Allocates a new Function object representing a closure function
3546 // with given kind - kClosureFunction or kImplicitClosureFunction.
3547 static FunctionPtr NewClosureFunctionWithKind(FunctionLayout::Kind kind,
3548 const String& name,
3549 const Function& parent,
3550 TokenPosition token_pos,
3551 const Object& owner);
3552
3553 // Allocates a new Function object representing a closure function.
3554 static FunctionPtr NewClosureFunction(const String& name,
3555 const Function& parent,
3556 TokenPosition token_pos);
3557
3558 // Allocates a new Function object representing an implicit closure function.
3559 static FunctionPtr NewImplicitClosureFunction(const String& name,
3560 const Function& parent,
3561 TokenPosition token_pos);
3562
3563 // Allocates a new Function object representing a signature function.
3564 // The owner is the scope class of the function type.
3565 // The parent is the enclosing function or null if none.
3566 static FunctionPtr NewSignatureFunction(const Object& owner,
3567 const Function& parent,
3568 TokenPosition token_pos,
3569 Heap::Space space = Heap::kOld);
3570
3571 static FunctionPtr NewEvalFunction(const Class& owner,
3572 const Script& script,
3573 bool is_static);
3574
3575 FunctionPtr CreateMethodExtractor(const String& getter_name) const;
3576 FunctionPtr GetMethodExtractor(const String& getter_name) const;
3577
3578 static bool IsDynamicInvocationForwarderName(const String& name);
3579 static bool IsDynamicInvocationForwarderName(StringPtr name);
3580
3581 static StringPtr DemangleDynamicInvocationForwarderName(const String& name);
3582
3583 static StringPtr CreateDynamicInvocationForwarderName(const String& name);
3584
3585#if !defined(DART_PRECOMPILED_RUNTIME)
3586 FunctionPtr CreateDynamicInvocationForwarder(
3587 const String& mangled_name) const;
3588
3589 FunctionPtr GetDynamicInvocationForwarder(const String& mangled_name,
3590 bool allow_add = true) const;
3591#endif
3592
3593 // Slow function, use in asserts to track changes in important library
3594 // functions.
3595 int32_t SourceFingerprint() const;
3596
3597 // Return false and report an error if the fingerprint does not match.
3598 bool CheckSourceFingerprint(int32_t fp) const;
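  // Typically wrapped in an assert so that changes to recognized library
  // functions are caught in debug builds; a hypothetical example (the
  // fingerprint value is made up):
  //
  //   ASSERT(function.CheckSourceFingerprint(0x12345678));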
3599
3600 // Works with map [deopt-id] -> ICData.
3601 void SaveICDataMap(
3602 const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
3603 const Array& edge_counters_array) const;
3604 // Uses 'ic_data_array' to populate the table 'deopt_id_to_ic_data'. Clone
3605 // ic_data (array and descriptor) if 'clone_ic_data' is true.
3606 void RestoreICDataMap(ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
3607 bool clone_ic_data) const;
3608
3609 ArrayPtr ic_data_array() const;
3610 void ClearICDataArray() const;
3611 ICDataPtr FindICData(intptr_t deopt_id) const;
3612
3613 // Sets deopt reason in all ICData-s with given deopt_id.
3614 void SetDeoptReasonForAll(intptr_t deopt_id, ICData::DeoptReasonId reason);
3615
3616 void set_modifier(FunctionLayout::AsyncModifier value) const;
3617
3618// 'WasCompiled' is true if the function was compiled once in this
3619// VM instantiation. It is independent from presence of type feedback
3620// (ic_data_array) and code, which may be loaded from a snapshot.
3621// 'WasExecuted' is true if the usage counter has ever been positive.
3622// 'ProhibitsHoistingCheckClass' is true if this function deoptimized before on
3623// a hoisted check class instruction.
3624// 'ProhibitsBoundsCheckGeneralization' is true if this function deoptimized
3625// before on a generalized bounds check.
3626#define STATE_BITS_LIST(V) \
3627 V(WasCompiled) \
3628 V(WasExecutedBit) \
3629 V(ProhibitsHoistingCheckClass) \
3630 V(ProhibitsBoundsCheckGeneralization)
3631
3632 enum StateBits {
3633#define DECLARE_FLAG_POS(Name) k##Name##Pos,
3634 STATE_BITS_LIST(DECLARE_FLAG_POS)
3635#undef DECLARE_FLAG_POS
3636 };
3637#define DEFINE_FLAG_BIT(Name) \
3638 class Name##Bit : public BitField<uint8_t, bool, k##Name##Pos, 1> {};
3639 STATE_BITS_LIST(DEFINE_FLAG_BIT)
3640#undef DEFINE_FLAG_BIT
3641
3642#define DEFINE_FLAG_ACCESSORS(Name) \
3643 void Set##Name(bool value) const { \
3644 set_state_bits(Name##Bit::update(value, state_bits())); \
3645 } \
3646 bool Name() const { return Name##Bit::decode(state_bits()); }
3647 STATE_BITS_LIST(DEFINE_FLAG_ACCESSORS)
3648#undef DEFINE_FLAG_ACCESSORS
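  // For reference, each STATE_BITS_LIST entry expands (via the macros above)
  // to a pair of accessors of roughly this shape, shown here for WasCompiled:
  //
  //   void SetWasCompiled(bool value) const;
  //   bool WasCompiled() const;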
3649
3650 void SetUsageCounter(intptr_t value) const {
3651 if (usage_counter() > 0) {
3652 SetWasExecuted(true);
3653 }
3654 set_usage_counter(value);
3655 }
3656
3657 bool WasExecuted() const { return (usage_counter() > 0) || WasExecutedBit(); }
3658
3659 void SetWasExecuted(bool value) const { SetWasExecutedBit(value); }
3660
3661 // static: Considered during class-side or top-level resolution rather than
3662 // instance-side resolution.
3663 // const: Valid target of a const constructor call.
3664 // abstract: Skipped during instance-side resolution.
3665 // reflectable: Enumerated by mirrors, invocable by mirrors. False for private
3666 // functions of dart: libraries.
3667 // debuggable: Valid location of a breakpoint. Synthetic code is not
3668 // debuggable.
3669 // visible: Frame is included in stack traces. Synthetic code such as
3670 // dispatchers is not visible. Synthetic code that can trigger
3671 // exceptions such as the outer async functions that create Futures
3672 // is visible.
  // intrinsic: Has a hand-written assembly prologue.
3674 // inlinable: Candidate for inlining. False for functions with features we
3675 // don't support during inlining (e.g., optional parameters),
3676 // functions which are too big, etc.
3677 // native: Bridge to C/C++ code.
3678 // redirecting: Redirecting generative or factory constructor.
3679 // external: Just a declaration that expects to be defined in another patch
3680 // file.
3681 // generated_body: Has a generated body.
3682 // polymorphic_target: A polymorphic method.
3683 // has_pragma: Has a @pragma decoration.
3684 // no_such_method_forwarder: A stub method that just calls noSuchMethod.
3685
3686#define FOR_EACH_FUNCTION_KIND_BIT(V) \
3687 V(Static, is_static) \
3688 V(Const, is_const) \
3689 V(Abstract, is_abstract) \
3690 V(Reflectable, is_reflectable) \
3691 V(Visible, is_visible) \
3692 V(Debuggable, is_debuggable) \
3693 V(Inlinable, is_inlinable) \
3694 V(Intrinsic, is_intrinsic) \
3695 V(Native, is_native) \
3696 V(Redirecting, is_redirecting) \
3697 V(External, is_external) \
3698 V(GeneratedBody, is_generated_body) \
3699 V(PolymorphicTarget, is_polymorphic_target) \
3700 V(HasPragma, has_pragma) \
3701 V(IsSynthetic, is_synthetic) \
3702 V(IsExtensionMember, is_extension_member)
3703
3704#define DEFINE_ACCESSORS(name, accessor_name) \
3705 void set_##accessor_name(bool value) const { \
3706 set_kind_tag(name##Bit::update(value, raw_ptr()->kind_tag_)); \
3707 } \
3708 bool accessor_name() const { return name##Bit::decode(raw_ptr()->kind_tag_); }
3709 FOR_EACH_FUNCTION_KIND_BIT(DEFINE_ACCESSORS)
3710#undef DEFINE_ACCESSORS
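  // For reference, each FOR_EACH_FUNCTION_KIND_BIT entry expands (via the
  // macro above) to accessors of roughly this shape, shown here for the
  // (Static, is_static) entry:
  //
  //   void set_is_static(bool value) const;
  //   bool is_static() const;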
3711
  // optimizable: Candidate for going through the optimizing compiler. False
  //              for some functions known to execute infrequently and for
  //              functions which have been deoptimized too many times.
3715 bool is_optimizable() const {
3716 return FunctionLayout::OptimizableBit::decode(raw_ptr()->packed_fields_);
3717 }
3718 void set_is_optimizable(bool value) const {
3719 set_packed_fields(FunctionLayout::OptimizableBit::update(
3720 value, raw_ptr()->packed_fields_));
3721 }
3722
3723 // Indicates whether this function can be optimized on the background compiler
3724 // thread.
3725 bool is_background_optimizable() const {
3726 return FunctionLayout::BackgroundOptimizableBit::decode(
3727 raw_ptr()->packed_fields_);
3728 }
3729
3730 void set_is_background_optimizable(bool value) const {
3731 set_packed_fields(FunctionLayout::BackgroundOptimizableBit::update(
3732 value, raw_ptr()->packed_fields_));
3733 }
3734
3735 private:
3736 void set_ic_data_array(const Array& value) const;
3737 void SetInstructionsSafe(const Code& value) const;
3738
3739 enum KindTagBits {
3740 kKindTagPos = 0,
3741 kKindTagSize = 5,
3742 kRecognizedTagPos = kKindTagPos + kKindTagSize,
3743 kRecognizedTagSize = 9,
3744 kModifierPos = kRecognizedTagPos + kRecognizedTagSize,
3745 kModifierSize = 2,
3746 kLastModifierBitPos = kModifierPos + (kModifierSize - 1),
3747// Single bit sized fields start here.
3748#define DECLARE_BIT(name, _) k##name##Bit,
3749 FOR_EACH_FUNCTION_KIND_BIT(DECLARE_BIT)
3750#undef DECLARE_BIT
3751 kNumTagBits
3752 };
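  // Informal sketch of the resulting kind_tag_ layout (low to high bits):
  // [0..4] function kind, [5..13] recognized method, [14..15] async modifier,
  // then one bit per FOR_EACH_FUNCTION_KIND_BIT entry.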
3753
3754 COMPILE_ASSERT(MethodRecognizer::kNumRecognizedMethods <
3755 (1 << kRecognizedTagSize));
3756 COMPILE_ASSERT(kNumTagBits <=
3757 (kBitsPerByte *
3758 sizeof(static_cast<FunctionLayout*>(nullptr)->kind_tag_)));
3759
3760 class KindBits : public BitField<uint32_t,
3761 FunctionLayout::Kind,
3762 kKindTagPos,
3763 kKindTagSize> {};
3764
3765 class RecognizedBits : public BitField<uint32_t,
3766 MethodRecognizer::Kind,
3767 kRecognizedTagPos,
3768 kRecognizedTagSize> {};
3769 class ModifierBits : public BitField<uint32_t,
3770 FunctionLayout::AsyncModifier,
3771 kModifierPos,
3772 kModifierSize> {};
3773
3774#define DEFINE_BIT(name, _) \
3775 class name##Bit : public BitField<uint32_t, bool, k##name##Bit, 1> {};
3776 FOR_EACH_FUNCTION_KIND_BIT(DEFINE_BIT)
3777#undef DEFINE_BIT
3778
3779 void set_name(const String& value) const;
3780 void set_kind(FunctionLayout::Kind value) const;
3781 void set_parent_function(const Function& value) const;
3782 FunctionPtr implicit_closure_function() const;
3783 void set_implicit_closure_function(const Function& value) const;
3784 InstancePtr implicit_static_closure() const;
3785 void set_implicit_static_closure(const Instance& closure) const;
3786 ScriptPtr eval_script() const;
3787 void set_eval_script(const Script& value) const;
3788 void set_num_optional_parameters(intptr_t value) const; // Encoded value.
3789 void set_kind_tag(uint32_t value) const;
3790 void set_data(const Object& value) const;
3791 static FunctionPtr New(Heap::Space space = Heap::kOld);
3792
3793 void PrintSignatureParameters(Thread* thread,
3794 Zone* zone,
3795 NameVisibility name_visibility,
3796 BaseTextBuffer* printer) const;
3797
3798 // Returns true if the type of the formal parameter at the given position in
3799 // this function is contravariant with the type of the other formal parameter
3800 // at the given position in the other function.
3801 bool IsContravariantParameter(intptr_t parameter_position,
3802 const Function& other,
3803 intptr_t other_parameter_position,
3804 Heap::Space space) const;
3805
  // Returns the index in the parameter names array of the corresponding flag
  // for the given parameter index. Also returns (via flag_mask) the
  // corresponding mask within the flag.
3809 intptr_t GetRequiredFlagIndex(intptr_t index, intptr_t* flag_mask) const;
3810
3811 FINAL_HEAP_OBJECT_IMPLEMENTATION(Function, Object);
3812 friend class Class;
3813 friend class SnapshotWriter;
3814 friend class Parser; // For set_eval_script.
3815 // FunctionLayout::VisitFunctionPointers accesses the private constructor of
3816 // Function.
3817 friend class FunctionLayout;
3818 friend class ClassFinalizer; // To reset parent_function.
3819 friend class Type; // To adjust parent_function.
3820};
3821
3822class ClosureData : public Object {
3823 public:
3824 static intptr_t InstanceSize() {
3825 return RoundedAllocationSize(sizeof(ClosureDataLayout));
3826 }
3827
3828 private:
3829 ContextScopePtr context_scope() const { return raw_ptr()->context_scope_; }
3830 void set_context_scope(const ContextScope& value) const;
3831
3832 // Enclosing function of this local function.
3833 FunctionPtr parent_function() const { return raw_ptr()->parent_function_; }
3834 void set_parent_function(const Function& value) const;
3835
3836 // Signature type of this closure function.
3837 TypePtr signature_type() const { return raw_ptr()->signature_type_; }
3838 void set_signature_type(const Type& value) const;
3839
3840 InstancePtr implicit_static_closure() const { return raw_ptr()->closure_; }
3841 void set_implicit_static_closure(const Instance& closure) const;
3842
3843 static ClosureDataPtr New();
3844
3845 FINAL_HEAP_OBJECT_IMPLEMENTATION(ClosureData, Object);
3846 friend class Class;
3847 friend class Function;
3848 friend class HeapProfiler;
3849};
3850
3851class SignatureData : public Object {
3852 public:
3853 static intptr_t InstanceSize() {
3854 return RoundedAllocationSize(sizeof(SignatureDataLayout));
3855 }
3856
3857 private:
3858 // Enclosing function of this signature function.
3859 FunctionPtr parent_function() const { return raw_ptr()->parent_function_; }
3860 void set_parent_function(const Function& value) const;
3861
3862 // Signature type of this signature function.
3863 TypePtr signature_type() const { return raw_ptr()->signature_type_; }
3864 void set_signature_type(const Type& value) const;
3865
3866 static SignatureDataPtr New(Heap::Space space = Heap::kOld);
3867
3868 FINAL_HEAP_OBJECT_IMPLEMENTATION(SignatureData, Object);
3869 friend class Class;
3870 friend class Function;
3871 friend class HeapProfiler;
3872};
3873
3874class RedirectionData : public Object {
3875 public:
3876 static intptr_t InstanceSize() {
3877 return RoundedAllocationSize(sizeof(RedirectionDataLayout));
3878 }
3879
3880 private:
3881 // The type specifies the class and type arguments of the target constructor.
3882 TypePtr type() const { return raw_ptr()->type_; }
3883 void set_type(const Type& value) const;
3884
3885 // The optional identifier specifies a named constructor.
3886 StringPtr identifier() const { return raw_ptr()->identifier_; }
3887 void set_identifier(const String& value) const;
3888
3889 // The resolved constructor or factory target of the redirection.
3890 FunctionPtr target() const { return raw_ptr()->target_; }
3891 void set_target(const Function& value) const;
3892
3893 static RedirectionDataPtr New();
3894
3895 FINAL_HEAP_OBJECT_IMPLEMENTATION(RedirectionData, Object);
3896 friend class Class;
3897 friend class Function;
3898 friend class HeapProfiler;
3899};
3900
3901enum class EntryPointPragma {
3902 kAlways,
3903 kNever,
3904 kGetterOnly,
3905 kSetterOnly,
3906 kCallOnly
3907};
3908
3909class FfiTrampolineData : public Object {
3910 public:
3911 static intptr_t InstanceSize() {
3912 return RoundedAllocationSize(sizeof(FfiTrampolineDataLayout));
3913 }
3914
3915 private:
3916 // Signature type of this closure function.
3917 TypePtr signature_type() const { return raw_ptr()->signature_type_; }
3918 void set_signature_type(const Type& value) const;
3919
3920 FunctionPtr c_signature() const { return raw_ptr()->c_signature_; }
3921 void set_c_signature(const Function& value) const;
3922
3923 FunctionPtr callback_target() const { return raw_ptr()->callback_target_; }
3924 void set_callback_target(const Function& value) const;
3925
3926 InstancePtr callback_exceptional_return() const {
3927 return raw_ptr()->callback_exceptional_return_;
3928 }
3929 void set_callback_exceptional_return(const Instance& value) const;
3930
3931 int32_t callback_id() const { return raw_ptr()->callback_id_; }
3932 void set_callback_id(int32_t value) const;
3933
3934 static FfiTrampolineDataPtr New();
3935
3936 FINAL_HEAP_OBJECT_IMPLEMENTATION(FfiTrampolineData, Object);
3937 friend class Class;
3938 friend class Function;
3939 friend class HeapProfiler;
3940};
3941
3942class Field : public Object {
3943 public:
  // The field that this field was cloned from, or this field itself if it is
  // not a clone. The purpose of cloning is to ensure that the fields seen by
  // the background compiler are consistent.
3947 FieldPtr Original() const;
3948
3949 // Set the original field that this field was cloned from.
3950 void SetOriginal(const Field& value) const;
3951
3952 // Returns whether this field is an original or a clone.
3953 bool IsOriginal() const {
3954 if (IsNull()) {
3955 return true;
3956 }
3957 NoSafepointScope no_safepoint;
3958 return !raw_ptr()->owner_->IsField();
3959 }
3960
3961 // Returns whether fields must be cloned via [CloneFromOriginal] for the
3962 // current compilation thread.
3963 static bool ShouldCloneFields();
3964
3965 // Returns a field cloned from 'this'. 'this' is set as the
3966 // original field of result.
3967 FieldPtr CloneFromOriginal() const;
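  // Illustrative relationship (hypothetical handles; assumes 'field' is itself
  // an original):
  //
  //   const Field& clone = Field::Handle(field.CloneFromOriginal());
  //   // clone.Original() == field.raw(), and clone.IsOriginal() is false.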
3968
3969 StringPtr name() const { return raw_ptr()->name_; }
3970 StringPtr UserVisibleName() const; // Same as scrubbed name.
3971 const char* UserVisibleNameCString() const;
3972 virtual StringPtr DictionaryName() const { return name(); }
3973
3974 bool is_static() const { return StaticBit::decode(raw_ptr()->kind_bits_); }
3975 bool is_instance() const { return !is_static(); }
3976 bool is_final() const { return FinalBit::decode(raw_ptr()->kind_bits_); }
3977 bool is_const() const { return ConstBit::decode(raw_ptr()->kind_bits_); }
3978 bool is_late() const { return IsLateBit::decode(raw_ptr()->kind_bits_); }
3979 bool is_extension_member() const {
3980 return IsExtensionMemberBit::decode(raw_ptr()->kind_bits_);
3981 }
3982 bool needs_load_guard() const {
3983 return NeedsLoadGuardBit::decode(raw_ptr()->kind_bits_);
3984 }
3985 bool is_reflectable() const {
3986 return ReflectableBit::decode(raw_ptr()->kind_bits_);
3987 }
3988 void set_is_reflectable(bool value) const {
3989 ASSERT(IsOriginal());
3990 set_kind_bits(ReflectableBit::update(value, raw_ptr()->kind_bits_));
3991 }
3992 bool is_double_initialized() const {
3993 return DoubleInitializedBit::decode(raw_ptr()->kind_bits_);
3994 }
  // Called in the parser after allocating the field; the property is immutable
  // otherwise. Marks fields that are initialized with a simple double
  // constant.
3997 void set_is_double_initialized(bool value) const {
3998 ASSERT(Thread::Current()->IsMutatorThread());
3999 ASSERT(IsOriginal());
4000 set_kind_bits(DoubleInitializedBit::update(value, raw_ptr()->kind_bits_));
4001 }
4002
4003 bool initializer_changed_after_initialization() const {
4004 return InitializerChangedAfterInitializatonBit::decode(
4005 raw_ptr()->kind_bits_);
4006 }
4007 void set_initializer_changed_after_initialization(bool value) const {
4008 set_kind_bits(InitializerChangedAfterInitializatonBit::update(
4009 value, raw_ptr()->kind_bits_));
4010 }
4011
4012 bool has_pragma() const {
4013 return HasPragmaBit::decode(raw_ptr()->kind_bits_);
4014 }
4015 void set_has_pragma(bool value) const {
4016 set_kind_bits(HasPragmaBit::update(value, raw_ptr()->kind_bits_));
4017 }
4018
4019 bool is_covariant() const {
4020 return CovariantBit::decode(raw_ptr()->kind_bits_);
4021 }
4022 void set_is_covariant(bool value) const {
4023 set_kind_bits(CovariantBit::update(value, raw_ptr()->kind_bits_));
4024 }
4025
4026 bool is_generic_covariant_impl() const {
4027 return GenericCovariantImplBit::decode(raw_ptr()->kind_bits_);
4028 }
4029 void set_is_generic_covariant_impl(bool value) const {
4030 set_kind_bits(
4031 GenericCovariantImplBit::update(value, raw_ptr()->kind_bits_));
4032 }
4033
4034#if !defined(DART_PRECOMPILED_RUNTIME)
4035 intptr_t binary_declaration_offset() const {
4036 return FieldLayout::BinaryDeclarationOffset::decode(
4037 raw_ptr()->binary_declaration_);
4038 }
4039 void set_binary_declaration_offset(intptr_t value) const {
4040 ASSERT(value >= 0);
4041 StoreNonPointer(&raw_ptr()->binary_declaration_,
4042 FieldLayout::BinaryDeclarationOffset::update(
4043 value, raw_ptr()->binary_declaration_));
4044 }
4045#endif // !defined(DART_PRECOMPILED_RUNTIME)
4046
4047 intptr_t kernel_offset() const {
4048#if defined(DART_PRECOMPILED_RUNTIME)
4049 return 0;
4050#else
4051 ASSERT(!is_declared_in_bytecode());
4052 return binary_declaration_offset();
4053#endif
4054 }
4055
4056 void set_kernel_offset(intptr_t value) const {
4057#if defined(DART_PRECOMPILED_RUNTIME)
4058 UNREACHABLE();
4059#else
4060 ASSERT(!is_declared_in_bytecode());
4061 set_binary_declaration_offset(value);
4062#endif
4063 }
4064
4065 intptr_t bytecode_offset() const {
4066#if defined(DART_PRECOMPILED_RUNTIME)
4067 return 0;
4068#else
4069 ASSERT(is_declared_in_bytecode());
4070 return binary_declaration_offset();
4071#endif
4072 }
4073
4074 void set_bytecode_offset(intptr_t value) const {
4075#if defined(DART_PRECOMPILED_RUNTIME)
4076 UNREACHABLE();
4077#else
4078 ASSERT(is_declared_in_bytecode());
4079 set_binary_declaration_offset(value);
4080#endif
4081 }
4082
4083 bool is_declared_in_bytecode() const {
4084#if defined(DART_PRECOMPILED_RUNTIME)
4085 return false;
4086#else
4087 return FieldLayout::IsDeclaredInBytecode::decode(
4088 raw_ptr()->binary_declaration_);
4089#endif
4090 }
4091
4092#if !defined(DART_PRECOMPILED_RUNTIME)
4093 void set_is_declared_in_bytecode(bool value) const {
4094 StoreNonPointer(&raw_ptr()->binary_declaration_,
4095 FieldLayout::IsDeclaredInBytecode::update(
4096 value, raw_ptr()->binary_declaration_));
4097 }
4098#endif // !defined(DART_PRECOMPILED_RUNTIME)
4099
4100 void InheritBinaryDeclarationFrom(const Field& src) const;
4101
4102 ExternalTypedDataPtr KernelData() const;
4103
4104 intptr_t KernelDataProgramOffset() const;
4105
4106 // Called during class finalization.
4107 inline void SetOffset(intptr_t host_offset_in_bytes,
4108 intptr_t target_offset_in_bytes) const;
4109
4110 inline intptr_t HostOffset() const;
4111 static intptr_t host_offset_or_field_id_offset() {
4112 return OFFSET_OF(FieldLayout, host_offset_or_field_id_);
4113 }
4114
4115 inline intptr_t TargetOffset() const;
4116 static inline intptr_t TargetOffsetOf(FieldPtr field);
4117
4118 inline InstancePtr StaticValue() const;
4119 void SetStaticValue(const Instance& value,
4120 bool save_initial_value = false) const;
4121
4122 inline intptr_t field_id() const;
4123 inline void set_field_id(intptr_t field_id) const;
4124
4125#ifndef DART_PRECOMPILED_RUNTIME
4126 InstancePtr saved_initial_value() const {
4127 return raw_ptr()->saved_initial_value_;
4128 }
4129 inline void set_saved_initial_value(const Instance& value) const;
4130#endif
4131
4132 ClassPtr Owner() const;
4133 ClassPtr Origin() const; // Either mixin class, or same as owner().
4134 ScriptPtr Script() const;
4135 ObjectPtr RawOwner() const;
4136
4137 AbstractTypePtr type() const { return raw_ptr()->type_; }
4138 // Used by class finalizer, otherwise initialized in constructor.
4139 void SetFieldType(const AbstractType& value) const;
4140
4141 DART_WARN_UNUSED_RESULT
4142 ErrorPtr VerifyEntryPoint(EntryPointPragma kind) const;
4143
4144 static intptr_t InstanceSize() {
4145 return RoundedAllocationSize(sizeof(FieldLayout));
4146 }
4147
4148 static FieldPtr New(const String& name,
4149 bool is_static,
4150 bool is_final,
4151 bool is_const,
4152 bool is_reflectable,
4153 bool is_late,
4154 const Object& owner,
4155 const AbstractType& type,
4156 TokenPosition token_pos,
4157 TokenPosition end_token_pos);
4158
4159 static FieldPtr NewTopLevel(const String& name,
4160 bool is_final,
4161 bool is_const,
4162 bool is_late,
4163 const Object& owner,
4164 TokenPosition token_pos,
4165 TokenPosition end_token_pos);
4166
  // Allocates a new Field object and clones values from this field. The given
  // 'original' is recorded as the original field of the clone.
4169 FieldPtr Clone(const Field& original) const;
4170
4171 static intptr_t kind_bits_offset() {
4172 return OFFSET_OF(FieldLayout, kind_bits_);
4173 }
4174
4175 TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
4176 TokenPosition end_token_pos() const { return raw_ptr()->end_token_pos_; }
4177
4178 int32_t SourceFingerprint() const;
4179
4180 StringPtr InitializingExpression() const;
4181
4182 bool has_nontrivial_initializer() const {
4183 return HasNontrivialInitializerBit::decode(raw_ptr()->kind_bits_);
4184 }
4185 // Called by parser after allocating field.
4186 void set_has_nontrivial_initializer(bool has_nontrivial_initializer) const {
4187 ASSERT(IsOriginal());
4188 ASSERT(Thread::Current()->IsMutatorThread());
4189 set_kind_bits(HasNontrivialInitializerBit::update(
4190 has_nontrivial_initializer, raw_ptr()->kind_bits_));
4191 }
4192
4193 bool has_initializer() const {
4194 return HasInitializerBit::decode(raw_ptr()->kind_bits_);
4195 }
4196 // Called by parser after allocating field.
4197 void set_has_initializer(bool has_initializer) const {
4198 ASSERT(IsOriginal());
4199 ASSERT(Thread::Current()->IsMutatorThread());
4200 set_kind_bits(
4201 HasInitializerBit::update(has_initializer, raw_ptr()->kind_bits_));
4202 }
4203
4204 bool has_trivial_initializer() const {
4205 return has_initializer() && !has_nontrivial_initializer();
4206 }
4207
4208 bool is_non_nullable_integer() const {
4209 return IsNonNullableIntBit::decode(raw_ptr()->kind_bits_);
4210 }
4211
4212 void set_is_non_nullable_integer(bool is_non_nullable_integer) const {
4213 ASSERT(Thread::Current()->IsMutatorThread());
4214 set_kind_bits(IsNonNullableIntBit::update(is_non_nullable_integer,
4215 raw_ptr()->kind_bits_));
4216 }
4217
4218 StaticTypeExactnessState static_type_exactness_state() const {
4219 return StaticTypeExactnessState::Decode(
4220 raw_ptr()->static_type_exactness_state_);
4221 }
4222
4223 void set_static_type_exactness_state(StaticTypeExactnessState state) const {
4224 StoreNonPointer(&raw_ptr()->static_type_exactness_state_, state.Encode());
4225 }
4226
4227 static intptr_t static_type_exactness_state_offset() {
4228 return OFFSET_OF(FieldLayout, static_type_exactness_state_);
4229 }
4230
  // Returns the class id that any non-null value read from this field is
  // guaranteed to have, or kDynamicCid if no such class id is known.
  // Stores to this field must update this information, hence the name.
4234 intptr_t guarded_cid() const {
4235#if defined(DEBUG)
4236 // This assertion ensures that the cid seen by the background compiler is
4237 // consistent. So the assertion passes if the field is a clone. It also
4238 // passes if the field is static, because we don't use field guards on
4239 // static fields.
4240 Thread* thread = Thread::Current();
4241 ASSERT(!IsOriginal() || is_static() || thread->IsMutatorThread() ||
4242 thread->IsAtSafepoint());
4243#endif
4244 return raw_ptr()->guarded_cid_;
4245 }
4246
4247 void set_guarded_cid(intptr_t cid) const {
4248#if defined(DEBUG)
4249 Thread* thread = Thread::Current();
4250 ASSERT(!IsOriginal() || is_static() || thread->IsMutatorThread() ||
4251 thread->IsAtSafepoint());
4252#endif
4253 StoreNonPointer(&raw_ptr()->guarded_cid_, cid);
4254 }
4255 static intptr_t guarded_cid_offset() {
4256 return OFFSET_OF(FieldLayout, guarded_cid_);
4257 }
  // Returns the list length that any list stored in this field is guaranteed
  // to have. If the length is kUnknownFixedLength, the length has not been
  // determined. If the length is kNoFixedLength, this field has multiple list
  // lengths associated with it and the length cannot be predicted.
4262 intptr_t guarded_list_length() const;
4263 void set_guarded_list_length(intptr_t list_length) const;
4264 static intptr_t guarded_list_length_offset() {
4265 return OFFSET_OF(FieldLayout, guarded_list_length_);
4266 }
4267 intptr_t guarded_list_length_in_object_offset() const;
4268 void set_guarded_list_length_in_object_offset(intptr_t offset) const;
4269 static intptr_t guarded_list_length_in_object_offset_offset() {
4270 return OFFSET_OF(FieldLayout, guarded_list_length_in_object_offset_);
4271 }
4272
4273 bool needs_length_check() const {
4274 const bool r = guarded_list_length() >= Field::kUnknownFixedLength;
4275 ASSERT(!r || is_final());
4276 return r;
4277 }
4278
4279 bool NeedsSetter() const;
4280 bool NeedsGetter() const;
4281
4282 bool NeedsInitializationCheckOnLoad() const {
4283 return needs_load_guard() || (is_late() && !has_trivial_initializer());
4284 }
4285
4286 const char* GuardedPropertiesAsCString() const;
4287
4288 intptr_t UnboxedFieldCid() const { return guarded_cid(); }
4289
4290 bool is_unboxing_candidate() const {
4291 return UnboxingCandidateBit::decode(raw_ptr()->kind_bits_);
4292 }
  // Defaults to true; set to false once the optimizing compiler determines
  // that the field should be boxed.
4295 void set_is_unboxing_candidate(bool b) const {
4296 ASSERT(IsOriginal());
4297 set_kind_bits(UnboxingCandidateBit::update(b, raw_ptr()->kind_bits_));
4298 }
4299
4300 enum {
4301 kUnknownLengthOffset = -1,
4302 kUnknownFixedLength = -1,
4303 kNoFixedLength = -2,
4304 };
4305 void set_is_late(bool value) const {
4306 set_kind_bits(IsLateBit::update(value, raw_ptr()->kind_bits_));
4307 }
4308 void set_is_extension_member(bool value) const {
4309 set_kind_bits(IsExtensionMemberBit::update(value, raw_ptr()->kind_bits_));
4310 }
4311 void set_needs_load_guard(bool value) const {
4312 set_kind_bits(NeedsLoadGuardBit::update(value, raw_ptr()->kind_bits_));
4313 }
  // Returns false if any value read from this field is guaranteed to be
  // non-null.
  // Internally the is_nullable_ field contains either kNullCid (nullable) or
  // kIllegalCid (non-nullable) instead of a boolean. This is done to simplify
  // the guarding sequence in the generated code.
4319 bool is_nullable(bool silence_assert = false) const {
4320#if defined(DEBUG)
4321 if (!silence_assert) {
4322 // Same assert as guarded_cid(), because is_nullable() also needs to be
4323 // consistent for the background compiler.
4324 Thread* thread = Thread::Current();
4325 ASSERT(!IsOriginal() || is_static() || thread->IsMutatorThread() ||
4326 thread->IsAtSafepoint());
4327 }
4328#endif
4329 return raw_ptr()->is_nullable_ == kNullCid;
4330 }
4331 void set_is_nullable(bool val) const {
4332 ASSERT(Thread::Current()->IsMutatorThread());
4333 StoreNonPointer(&raw_ptr()->is_nullable_, val ? kNullCid : kIllegalCid);
4334 }
4335 static intptr_t is_nullable_offset() {
4336 return OFFSET_OF(FieldLayout, is_nullable_);
4337 }
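  // Informal sketch of why the cid encoding helps (an assumption based on the
  // comment above, not the exact generated sequence): a field guard can accept
  // a stored value when its cid equals guarded_cid_ or equals is_nullable_, so
  // nullability needs no separate boolean decoding step in the generated
  // check.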
4338
4339 // Record store of the given value into this field. May trigger
4340 // deoptimization of dependent optimized code.
4341 void RecordStore(const Object& value) const;
4342
4343 void InitializeGuardedListLengthInObjectOffset() const;
4344
  // Returns the list of code objects that were optimized under assumptions
  // about the guarded class id and nullability of this field.
  // These code objects must be deoptimized when the field's properties change.
  // Code objects are held weakly via an indirection through WeakProperty.
4349 ArrayPtr dependent_code() const;
4350 void set_dependent_code(const Array& array) const;
4351
4352 // Add the given code object to the list of dependent ones.
4353 void RegisterDependentCode(const Code& code) const;
4354
4355 // Deoptimize all dependent code objects.
4356 void DeoptimizeDependentCode() const;
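  // Informal lifecycle sketch, based on the declarations above: RecordStore()
  // refines guarded_cid_, guarded_list_length_ and nullability; optimized code
  // that relies on these guards registers itself via RegisterDependentCode()
  // and is discarded through DeoptimizeDependentCode() when a later store
  // invalidates an assumption.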
4357
  // Used by the background compiler to check the consistency of a field copy
  // with its original.
4360 bool IsConsistentWith(const Field& field) const;
4361
4362 bool IsUninitialized() const;
4363
4364 // Run initializer and set field value.
4365 DART_WARN_UNUSED_RESULT ErrorPtr
4366 InitializeInstance(const Instance& instance) const;
4367 DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const;
4368
4369 // Run initializer only.
4370 DART_WARN_UNUSED_RESULT ObjectPtr EvaluateInitializer() const;
4371
4372 FunctionPtr EnsureInitializerFunction() const;
4373 FunctionPtr InitializerFunction() const {
4374 // We rely on the fact that any loads from the initializer function
4375 // are dependent loads and avoid the load-acquire barrier here.
4376 return raw_ptr()->initializer_function_;
4377 }
4378 void SetInitializerFunction(const Function& initializer) const;
4379 bool HasInitializerFunction() const;
4380 static intptr_t initializer_function_offset() {
4381 return OFFSET_OF(FieldLayout, initializer_function_);
4382 }
4383
4384 // For static fields only. Constructs a closure that gets/sets the
4385 // field value.
4386 InstancePtr GetterClosure() const;
4387 InstancePtr SetterClosure() const;
4388 InstancePtr AccessorClosure(bool make_setter) const;
4389
4390 // Constructs getter and setter names for fields and vice versa.
4391 static StringPtr GetterName(const String& field_name);
4392 static StringPtr GetterSymbol(const String& field_name);
4393 // Returns String::null() if getter symbol does not exist.
4394 static StringPtr LookupGetterSymbol(const String& field_name);
4395 static StringPtr SetterName(const String& field_name);
4396 static StringPtr SetterSymbol(const String& field_name);
4397 // Returns String::null() if setter symbol does not exist.
4398 static StringPtr LookupSetterSymbol(const String& field_name);
4399 static StringPtr NameFromGetter(const String& getter_name);
4400 static StringPtr NameFromSetter(const String& setter_name);
4401 static StringPtr NameFromInit(const String& init_name);
4402 static bool IsGetterName(const String& function_name);
4403 static bool IsSetterName(const String& function_name);
4404 static bool IsInitName(const String& function_name);
4405
4406#if !defined(DART_PRECOMPILED_RUNTIME)
4407 SubtypeTestCachePtr type_test_cache() const {
4408 return raw_ptr()->type_test_cache_;
4409 }
4410 void set_type_test_cache(const SubtypeTestCache& cache) const;
4411#endif
4412
4413 // Unboxed fields require exclusive ownership of the box.
4414 // Ensure this by cloning the box if necessary.
4415 const Object* CloneForUnboxed(const Object& value) const;
4416
4417 private:
4418 static void InitializeNew(const Field& result,
4419 const String& name,
4420 bool is_static,
4421 bool is_final,
4422 bool is_const,
4423 bool is_reflectable,
4424 bool is_late,
4425 const Object& owner,
4426 TokenPosition token_pos,
4427 TokenPosition end_token_pos);
4428 friend class Interpreter; // Access to bit field.
4429 friend class StoreInstanceFieldInstr; // Generated code access to bit field.
4430
4431 enum {
4432 kConstBit = 0,
4433 kStaticBit,
4434 kFinalBit,
4435 kHasNontrivialInitializerBit,
4436 kUnboxingCandidateBit,
4437 kReflectableBit,
4438 kDoubleInitializedBit,
4439 kInitializerChangedAfterInitializatonBit,
4440 kHasPragmaBit,
4441 kCovariantBit,
4442 kGenericCovariantImplBit,
4443 kIsLateBit,
4444 kIsExtensionMemberBit,
4445 kNeedsLoadGuardBit,
4446 kHasInitializerBit,
4447 kIsNonNullableIntBit,
4448 };
4449 class ConstBit : public BitField<uint16_t, bool, kConstBit, 1> {};
4450 class StaticBit : public BitField<uint16_t, bool, kStaticBit, 1> {};
4451 class FinalBit : public BitField<uint16_t, bool, kFinalBit, 1> {};
4452 class HasNontrivialInitializerBit
4453 : public BitField<uint16_t, bool, kHasNontrivialInitializerBit, 1> {};
4454 class UnboxingCandidateBit
4455 : public BitField<uint16_t, bool, kUnboxingCandidateBit, 1> {};
4456 class ReflectableBit : public BitField<uint16_t, bool, kReflectableBit, 1> {};
4457 class DoubleInitializedBit
4458 : public BitField<uint16_t, bool, kDoubleInitializedBit, 1> {};
4459 class InitializerChangedAfterInitializatonBit
4460 : public BitField<uint16_t,
4461 bool,
4462 kInitializerChangedAfterInitializatonBit,
4463 1> {};
4464 class HasPragmaBit : public BitField<uint16_t, bool, kHasPragmaBit, 1> {};
4465 class CovariantBit : public BitField<uint16_t, bool, kCovariantBit, 1> {};
4466 class GenericCovariantImplBit
4467 : public BitField<uint16_t, bool, kGenericCovariantImplBit, 1> {};
4468 class IsLateBit : public BitField<uint16_t, bool, kIsLateBit, 1> {};
4469 class IsExtensionMemberBit
4470 : public BitField<uint16_t, bool, kIsExtensionMemberBit, 1> {};
4471 class NeedsLoadGuardBit
4472 : public BitField<uint16_t, bool, kNeedsLoadGuardBit, 1> {};
4473 class HasInitializerBit
4474 : public BitField<uint16_t, bool, kHasInitializerBit, 1> {};
4475 class IsNonNullableIntBit
4476 : public BitField<uint16_t, bool, kIsNonNullableIntBit, 1> {};
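  // Each BitField class above reads and writes a single flag within the 16-bit
  // kind_bits_ word; the accessors in the public section use the same pattern,
  // e.g. (illustrative):
  //
  //   set_kind_bits(FinalBit::update(true, raw_ptr()->kind_bits_));
  //   const bool is_final = FinalBit::decode(raw_ptr()->kind_bits_);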
4477
  // Updates the guarded cid and guarded length for this field. Returns true if
  // deoptimization of dependent code is required.
4480 bool UpdateGuardedCidAndLength(const Object& value) const;
4481
  // Updates the guarded exactness state for this field. Returns true if
  // deoptimization of dependent code is required.
  // Assumes that the guarded cid was already updated.
4485 bool UpdateGuardedExactnessState(const Object& value) const;
4486
4487 // Force this field's guard to be dynamic and deoptimize dependent code.
4488 void ForceDynamicGuardedCidAndLength() const;
4489
4490 void set_name(const String& value) const;
4491 void set_is_static(bool is_static) const {
4492 set_kind_bits(StaticBit::update(is_static, raw_ptr()->kind_bits_));
4493 }
4494 void set_is_final(bool is_final) const {
4495 set_kind_bits(FinalBit::update(is_final, raw_ptr()->kind_bits_));
4496 }
4497 void set_is_const(bool value) const {
4498 set_kind_bits(ConstBit::update(value, raw_ptr()->kind_bits_));
4499 }
4500 void set_owner(const Object& value) const {
4501 StorePointer(&raw_ptr()->owner_, value.raw());
4502 }
4503 void set_token_pos(TokenPosition token_pos) const {
4504 StoreNonPointer(&raw_ptr()->token_pos_, token_pos);
4505 }
4506 void set_end_token_pos(TokenPosition token_pos) const {
4507 StoreNonPointer(&raw_ptr()->end_token_pos_, token_pos);
4508 }
4509 void set_kind_bits(uint16_t value) const {
4510 StoreNonPointer(&raw_ptr()->kind_bits_, value);
4511 }
4512
4513 static FieldPtr New();
4514
4515 FINAL_HEAP_OBJECT_IMPLEMENTATION(Field, Object);
4516 friend class Class;
4517 friend class HeapProfiler;
4518 friend class FieldLayout;
4519 friend class FieldSerializationCluster;
4520 friend class FieldDeserializationCluster;
4521};
4522
4523class Script : public Object {
4524 public:
4525 StringPtr url() const { return raw_ptr()->url_; }
4526 void set_url(const String& value) const;
4527
4528 // The actual url which was loaded from disk, if provided by the embedder.
4529 StringPtr resolved_url() const { return raw_ptr()->resolved_url_; }
4530 bool HasSource() const;
4531 StringPtr Source() const;
4532 bool IsPartOfDartColonLibrary() const;
4533
4534 void LookupSourceAndLineStarts(Zone* zone) const;
4535 GrowableObjectArrayPtr GenerateLineNumberArray() const;
4536
4537 intptr_t line_offset() const { return raw_ptr()->line_offset_; }
4538 intptr_t col_offset() const { return raw_ptr()->col_offset_; }
4539
4540 // The load time in milliseconds since epoch.
4541 int64_t load_timestamp() const { return raw_ptr()->load_timestamp_; }
4542
4543 ArrayPtr compile_time_constants() const {
4544 return raw_ptr()->compile_time_constants_;
4545 }
4546 void set_compile_time_constants(const Array& value) const;
4547
4548 KernelProgramInfoPtr kernel_program_info() const {
4549 return raw_ptr()->kernel_program_info_;
4550 }
4551 void set_kernel_program_info(const KernelProgramInfo& info) const;
4552
4553 intptr_t kernel_script_index() const {
4554 return raw_ptr()->kernel_script_index_;
4555 }
4556 void set_kernel_script_index(const intptr_t kernel_script_index) const;
4557
4558 TypedDataPtr kernel_string_offsets() const;
4559
4560 TypedDataPtr line_starts() const;
4561
4562 void set_line_starts(const TypedData& value) const;
4563
4564 void set_debug_positions(const Array& value) const;
4565
4566 LibraryPtr FindLibrary() const;
4567 StringPtr GetLine(intptr_t line_number, Heap::Space space = Heap::kNew) const;
4568 StringPtr GetSnippet(TokenPosition from, TokenPosition to) const;
4569 StringPtr GetSnippet(intptr_t from_line,
4570 intptr_t from_column,
4571 intptr_t to_line,
4572 intptr_t to_column) const;
4573
4574 void SetLocationOffset(intptr_t line_offset, intptr_t col_offset) const;
4575
4576 bool GetTokenLocationUsingLineStarts(TokenPosition token_pos,
4577 intptr_t* line,
4578 intptr_t* column) const;
4579 void GetTokenLocation(TokenPosition token_pos,
4580 intptr_t* line,
4581 intptr_t* column,
4582 intptr_t* token_len = NULL) const;
4583
  // Returns the index of the first and last token on the given line. Returns
  // both indices < 0 if no token exists on or after the line. If a token
  // exists after, but not on, the given line, returns in *first_token_index
  // the index of the first token after the line, and a negative value in
  // *last_token_index.
4588 void TokenRangeAtLine(intptr_t line_number,
4589 TokenPosition* first_token_index,
4590 TokenPosition* last_token_index) const;
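
  // Usage sketch: mapping a TokenPosition to a line and column, then fetching
  // that line's token range (assumes `script` is a Script handle and `pos` a
  // valid TokenPosition within it; TokenPosition::kNoSource is used as an
  // obviously-invalid initial value):
  //
  //   intptr_t line = -1, column = -1;
  //   script.GetTokenLocation(pos, &line, &column);
  //
  //   TokenPosition first_token = TokenPosition::kNoSource;
  //   TokenPosition last_token = TokenPosition::kNoSource;
  //   script.TokenRangeAtLine(line, &first_token, &last_token);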
4591
4592 static intptr_t InstanceSize() {
4593 return RoundedAllocationSize(sizeof(ScriptLayout));
4594 }
4595
4596 static ScriptPtr New(const String& url, const String& source);
4597
4598 static ScriptPtr New(const String& url,
4599 const String& resolved_url,
4600 const String& source);
4601
4602#if !defined(DART_PRECOMPILED_RUNTIME)
4603 void LoadSourceFromKernel(const uint8_t* kernel_buffer,
4604 intptr_t kernel_buffer_len) const;
4605#endif // !defined(DART_PRECOMPILED_RUNTIME)
4606
4607 void SetLazyLookupSourceAndLineStarts(bool value) const;
4608 bool IsLazyLookupSourceAndLineStarts() const;
4609
4610 private:
4611 void set_resolved_url(const String& value) const;
4612 void set_source(const String& value) const;
4613 void set_flags(uint8_t value) const;
4614 void set_load_timestamp(int64_t value) const;
4615 ArrayPtr debug_positions() const;
4616
4617 static ScriptPtr New();
4618
4619 FINAL_HEAP_OBJECT_IMPLEMENTATION(Script, Object);
4620 friend class Class;
4621 friend class Precompiler;
4622};
4623
4624class DictionaryIterator : public ValueObject {
4625 public:
4626 explicit DictionaryIterator(const Library& library);
4627
4628 bool HasNext() const { return next_ix_ < size_; }
4629
4630 // Returns next non-null raw object.
4631 ObjectPtr GetNext();
4632
4633 private:
4634 void MoveToNextObject();
4635
4636 const Array& array_;
4637 const int size_; // Number of elements to iterate over.
4638 int next_ix_; // Index of next element.
4639
4640 friend class ClassDictionaryIterator;
4641 DISALLOW_COPY_AND_ASSIGN(DictionaryIterator);
4642};
4643
4644class ClassDictionaryIterator : public DictionaryIterator {
4645 public:
4646 enum IterationKind {
4647 // TODO(hausner): fix call sites that use kIteratePrivate. There is only
4648 // one top-level class per library left, not an array to iterate over.
4649 kIteratePrivate,
4650 kNoIteratePrivate
4651 };
4652
4653 ClassDictionaryIterator(const Library& library,
4654 IterationKind kind = kNoIteratePrivate);
4655
4656 bool HasNext() const {
4657 return (next_ix_ < size_) || !toplevel_class_.IsNull();
4658 }
4659
4660 // Returns a non-null raw class.
4661 ClassPtr GetNextClass();
4662
4663 private:
4664 void MoveToNextClass();
4665
4666 Class& toplevel_class_;
4667
4668 DISALLOW_COPY_AND_ASSIGN(ClassDictionaryIterator);
4669};
4670
4671class Library : public Object {
4672 public:
4673 StringPtr name() const { return raw_ptr()->name_; }
4674 void SetName(const String& name) const;
4675
4676 StringPtr url() const { return raw_ptr()->url_; }
4677 StringPtr private_key() const { return raw_ptr()->private_key_; }
4678 bool LoadNotStarted() const {
4679 return raw_ptr()->load_state_ == LibraryLayout::kAllocated;
4680 }
4681 bool LoadRequested() const {
4682 return raw_ptr()->load_state_ == LibraryLayout::kLoadRequested;
4683 }
4684 bool LoadInProgress() const {
4685 return raw_ptr()->load_state_ == LibraryLayout::kLoadInProgress;
4686 }
4687 void SetLoadRequested() const;
4688 void SetLoadInProgress() const;
4689 bool Loaded() const {
4690 return raw_ptr()->load_state_ == LibraryLayout::kLoaded;
4691 }
4692 void SetLoaded() const;
4693
4694 LoadingUnitPtr loading_unit() const { return raw_ptr()->loading_unit_; }
4695 void set_loading_unit(const LoadingUnit& value) const;
4696
4697 static intptr_t InstanceSize() {
4698 return RoundedAllocationSize(sizeof(LibraryLayout));
4699 }
4700
4701 static LibraryPtr New(const String& url);
4702
4703 ObjectPtr Invoke(const String& selector,
4704 const Array& arguments,
4705 const Array& argument_names,
4706 bool respect_reflectable = true,
4707 bool check_is_entrypoint = false) const;
4708 ObjectPtr InvokeGetter(const String& selector,
4709 bool throw_nsm_if_absent,
4710 bool respect_reflectable = true,
4711 bool check_is_entrypoint = false) const;
4712 ObjectPtr InvokeSetter(const String& selector,
4713 const Instance& argument,
4714 bool respect_reflectable = true,
4715 bool check_is_entrypoint = false) const;
4716
  // Evaluate the given expression as if it appeared in a top-level method of
4718 // this library and return the resulting value, or an error object if
4719 // evaluating the expression fails. The method has the formal (type)
4720 // parameters given in (type_)param_names, and is invoked with the (type)
4721 // argument values given in (type_)param_values.
4722 ObjectPtr EvaluateCompiledExpression(
4723 const ExternalTypedData& kernel_buffer,
4724 const Array& type_definitions,
4725 const Array& param_values,
4726 const TypeArguments& type_param_values) const;
4727
4728 // Library scope name dictionary.
4729 //
4730 // TODO(turnidge): The Lookup functions are not consistent in how
4731 // they deal with private names. Go through and make them a bit
4732 // more regular.
4733 void AddClass(const Class& cls) const;
4734 void AddObject(const Object& obj, const String& name) const;
4735 ObjectPtr LookupReExport(const String& name,
4736 ZoneGrowableArray<intptr_t>* visited = NULL) const;
4737 ObjectPtr LookupObjectAllowPrivate(const String& name) const;
4738 ObjectPtr LookupLocalOrReExportObject(const String& name) const;
4739 ObjectPtr LookupImportedObject(const String& name) const;
4740 ClassPtr LookupClass(const String& name) const;
4741 ClassPtr LookupClassAllowPrivate(const String& name) const;
4742 ClassPtr SlowLookupClassAllowMultiPartPrivate(const String& name) const;
4743 ClassPtr LookupLocalClass(const String& name) const;
4744 FieldPtr LookupFieldAllowPrivate(const String& name) const;
4745 FieldPtr LookupLocalField(const String& name) const;
4746 FunctionPtr LookupFunctionAllowPrivate(const String& name) const;
4747 FunctionPtr LookupLocalFunction(const String& name) const;
4748 LibraryPrefixPtr LookupLocalLibraryPrefix(const String& name) const;
4749
4750 // Look up a Script based on a url. If 'useResolvedUri' is not provided or is
4751 // false, 'url' should have a 'dart:' scheme for Dart core libraries,
4752 // a 'package:' scheme for packages, and 'file:' scheme otherwise.
4753 //
4754 // If 'useResolvedUri' is true, 'url' should have a 'org-dartlang-sdk:' scheme
4755 // for Dart core libraries and a 'file:' scheme otherwise.
4756 ScriptPtr LookupScript(const String& url, bool useResolvedUri = false) const;
4757 ArrayPtr LoadedScripts() const;
4758
4759 // Resolve name in the scope of this library. First check the cache
4760 // of already resolved names for this library. Then look in the
4761 // local dictionary for the unmangled name N, the getter name get:N
4762 // and setter name set:N.
4763 // If the local dictionary contains no entry for these names,
4764 // look in the scopes of all libraries that are imported
4765 // without a library prefix.
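  //
  // Usage sketch (hedged): resolving the core `List` class by name, assuming
  // `Symbols::List()` is one of the predefined VM symbols:
  //
  //   const Library& lib = Library::Handle(Library::CoreLibrary());
  //   const Object& obj = Object::Handle(lib.ResolveName(Symbols::List()));
  //   // Result is the looked-up declaration, or null if resolution failed.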
4766 ObjectPtr ResolveName(const String& name) const;
4767
4768 void AddAnonymousClass(const Class& cls) const;
4769
4770 void AddExport(const Namespace& ns) const;
4771
4772 void AddClassMetadata(const Class& cls,
4773 const Object& tl_owner,
4774 TokenPosition token_pos,
4775 intptr_t kernel_offset,
4776 intptr_t bytecode_offset) const;
4777 void AddFieldMetadata(const Field& field,
4778 TokenPosition token_pos,
4779 intptr_t kernel_offset,
4780 intptr_t bytecode_offset) const;
4781 void AddFunctionMetadata(const Function& func,
4782 TokenPosition token_pos,
4783 intptr_t kernel_offset,
4784 intptr_t bytecode_offset) const;
4785 void AddLibraryMetadata(const Object& tl_owner,
4786 TokenPosition token_pos,
4787 intptr_t kernel_offset,
4788 intptr_t bytecode_offset) const;
4789 void AddTypeParameterMetadata(const TypeParameter& param,
4790 TokenPosition token_pos) const;
4791 void CloneMetadataFrom(const Library& from_library,
4792 const Function& from_fun,
4793 const Function& to_fun) const;
4794 ObjectPtr GetMetadata(const Object& obj) const;
4795 ArrayPtr GetExtendedMetadata(const Object& obj, intptr_t count) const;
4796
  // Tries to find a @pragma annotation on [object].
  //
  // If successful, returns `true`. If an error happens during constant
  // evaluation, returns `false`.
4801 //
4802 // If [only_core] is true, then the annotations on the object will only
4803 // be inspected if it is part of a core library.
4804 //
  // WARNING: If the isolate received an [UnwindError] this function will not
  // return but instead unwinds until the enclosing setjmp() handler.
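  //
  // Usage sketch (hedged): checking for the `vm:entry-point` pragma on a
  // function. `thread`, `zone` and `function` are assumed locals, and
  // `Symbols::vm_entry_point()` is assumed here purely for illustration.
  //
  //   Object& options = Object::Handle(zone);
  //   const bool has_pragma = Library::FindPragma(
  //       thread, /*only_core=*/false, function, Symbols::vm_entry_point(),
  //       &options);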
4807 static bool FindPragma(Thread* T,
4808 bool only_core,
4809 const Object& object,
4810 const String& pragma_name,
4811 Object* options);
4812
4813 ClassPtr toplevel_class() const { return raw_ptr()->toplevel_class_; }
4814 void set_toplevel_class(const Class& value) const;
4815
4816 GrowableObjectArrayPtr used_scripts() const {
4817 return raw_ptr()->used_scripts_;
4818 }
4819
4820 // Library imports.
4821 ArrayPtr imports() const { return raw_ptr()->imports_; }
4822 ArrayPtr exports() const { return raw_ptr()->exports_; }
4823 void AddImport(const Namespace& ns) const;
4824 intptr_t num_imports() const { return raw_ptr()->num_imports_; }
4825 NamespacePtr ImportAt(intptr_t index) const;
4826 LibraryPtr ImportLibraryAt(intptr_t index) const;
4827
4828 ArrayPtr dependencies() const { return raw_ptr()->dependencies_; }
4829 void set_dependencies(const Array& deps) const;
4830
4831 void DropDependenciesAndCaches() const;
4832
4833 // Resolving native methods for script loaded in the library.
4834 Dart_NativeEntryResolver native_entry_resolver() const {
4835 return LoadNonPointer<Dart_NativeEntryResolver, std::memory_order_relaxed>(
4836 &raw_ptr()->native_entry_resolver_);
4837 }
4838 void set_native_entry_resolver(Dart_NativeEntryResolver value) const {
4839 StoreNonPointer<Dart_NativeEntryResolver, Dart_NativeEntryResolver,
4840 std::memory_order_relaxed>(
4841 &raw_ptr()->native_entry_resolver_, value);
4842 }
4843 Dart_NativeEntrySymbol native_entry_symbol_resolver() const {
4844 return LoadNonPointer<Dart_NativeEntrySymbol, std::memory_order_relaxed>(
4845 &raw_ptr()->native_entry_symbol_resolver_);
4846 }
4847 void set_native_entry_symbol_resolver(
4848 Dart_NativeEntrySymbol native_symbol_resolver) const {
4849 StoreNonPointer<Dart_NativeEntrySymbol, Dart_NativeEntrySymbol,
4850 std::memory_order_relaxed>(
4851 &raw_ptr()->native_entry_symbol_resolver_, native_symbol_resolver);
4852 }
4853
4854 bool is_in_fullsnapshot() const {
4855 return LibraryLayout::InFullSnapshotBit::decode(raw_ptr()->flags_);
4856 }
4857 void set_is_in_fullsnapshot(bool value) const {
4858 set_flags(
4859 LibraryLayout::InFullSnapshotBit::update(value, raw_ptr()->flags_));
4860 }
4861
4862 bool is_nnbd() const {
4863 return LibraryLayout::NnbdBit::decode(raw_ptr()->flags_);
4864 }
4865 void set_is_nnbd(bool value) const {
4866 set_flags(LibraryLayout::NnbdBit::update(value, raw_ptr()->flags_));
4867 }
4868
4869 NNBDMode nnbd_mode() const {
4870 return is_nnbd() ? NNBDMode::kOptedInLib : NNBDMode::kLegacyLib;
4871 }
4872
4873 NNBDCompiledMode nnbd_compiled_mode() const {
4874 return static_cast<NNBDCompiledMode>(
4875 LibraryLayout::NnbdCompiledModeBits::decode(raw_ptr()->flags_));
4876 }
4877 void set_nnbd_compiled_mode(NNBDCompiledMode value) const {
4878 set_flags(LibraryLayout::NnbdCompiledModeBits::update(
4879 static_cast<uint8_t>(value), raw_ptr()->flags_));
4880 }
4881
4882 StringPtr PrivateName(const String& name) const;
4883
4884 intptr_t index() const { return raw_ptr()->index_; }
4885 void set_index(intptr_t value) const {
    ASSERT(value == -1 ||
           (value >= 0 && value < std::numeric_limits<classid_t>::max()));
4888 StoreNonPointer(&raw_ptr()->index_, value);
4889 }
4890
4891 void Register(Thread* thread) const;
4892 static void RegisterLibraries(Thread* thread,
4893 const GrowableObjectArray& libs);
4894
4895 bool IsDebuggable() const {
4896 return LibraryLayout::DebuggableBit::decode(raw_ptr()->flags_);
4897 }
4898 void set_debuggable(bool value) const {
4899 set_flags(LibraryLayout::DebuggableBit::update(value, raw_ptr()->flags_));
4900 }
4901
4902 bool is_dart_scheme() const {
4903 return LibraryLayout::DartSchemeBit::decode(raw_ptr()->flags_);
4904 }
4905 void set_is_dart_scheme(bool value) const {
4906 set_flags(LibraryLayout::DartSchemeBit::update(value, raw_ptr()->flags_));
4907 }
4908
4909 // Includes 'dart:async', 'dart:typed_data', etc.
4910 bool IsAnyCoreLibrary() const;
4911
4912 inline intptr_t UrlHash() const;
4913
4914 ExternalTypedDataPtr kernel_data() const { return raw_ptr()->kernel_data_; }
4915 void set_kernel_data(const ExternalTypedData& data) const;
4916
4917#if !defined(DART_PRECOMPILED_RUNTIME)
4918 intptr_t binary_declaration_offset() const {
4919 return LibraryLayout::BinaryDeclarationOffset::decode(
4920 raw_ptr()->binary_declaration_);
4921 }
4922 void set_binary_declaration_offset(intptr_t value) const {
4923 ASSERT(value >= 0);
4924 StoreNonPointer(&raw_ptr()->binary_declaration_,
4925 LibraryLayout::BinaryDeclarationOffset::update(
4926 value, raw_ptr()->binary_declaration_));
4927 }
4928#endif // !defined(DART_PRECOMPILED_RUNTIME)
4929
4930 intptr_t kernel_offset() const {
4931#if defined(DART_PRECOMPILED_RUNTIME)
4932 return 0;
4933#else
4934 ASSERT(!is_declared_in_bytecode());
4935 return binary_declaration_offset();
4936#endif
4937 }
4938
4939 void set_kernel_offset(intptr_t value) const {
4940#if defined(DART_PRECOMPILED_RUNTIME)
4941 UNREACHABLE();
4942#else
4943 ASSERT(!is_declared_in_bytecode());
4944 set_binary_declaration_offset(value);
4945#endif
4946 }
4947
4948 intptr_t bytecode_offset() const {
4949#if defined(DART_PRECOMPILED_RUNTIME)
4950 return 0;
4951#else
4952 ASSERT(is_declared_in_bytecode());
4953 return binary_declaration_offset();
4954#endif
4955 }
4956
4957 void set_bytecode_offset(intptr_t value) const {
4958#if defined(DART_PRECOMPILED_RUNTIME)
4959 UNREACHABLE();
4960#else
4961 ASSERT(is_declared_in_bytecode());
4962 set_binary_declaration_offset(value);
4963#endif
4964 }
4965
4966 bool is_declared_in_bytecode() const {
4967#if defined(DART_PRECOMPILED_RUNTIME)
4968 return false;
4969#else
4970 return LibraryLayout::IsDeclaredInBytecode::decode(
4971 raw_ptr()->binary_declaration_);
4972#endif
4973 }
4974
4975#if !defined(DART_PRECOMPILED_RUNTIME)
4976 void set_is_declared_in_bytecode(bool value) const {
4977 StoreNonPointer(&raw_ptr()->binary_declaration_,
4978 LibraryLayout::IsDeclaredInBytecode::update(
4979 value, raw_ptr()->binary_declaration_));
4980 }
4981#endif // !defined(DART_PRECOMPILED_RUNTIME)
4982
4983 static LibraryPtr LookupLibrary(Thread* thread, const String& url);
4984 static LibraryPtr GetLibrary(intptr_t index);
4985
4986 static void InitCoreLibrary(Isolate* isolate);
4987 static void InitNativeWrappersLibrary(Isolate* isolate, bool is_kernel_file);
4988
4989 static LibraryPtr AsyncLibrary();
4990 static LibraryPtr ConvertLibrary();
4991 static LibraryPtr CoreLibrary();
4992 static LibraryPtr CollectionLibrary();
4993 static LibraryPtr DeveloperLibrary();
4994 static LibraryPtr FfiLibrary();
4995 static LibraryPtr InternalLibrary();
4996 static LibraryPtr IsolateLibrary();
4997 static LibraryPtr MathLibrary();
4998#if !defined(DART_PRECOMPILED_RUNTIME)
4999 static LibraryPtr MirrorsLibrary();
5000#endif
5001 static LibraryPtr NativeWrappersLibrary();
5002 static LibraryPtr ProfilerLibrary();
5003 static LibraryPtr TypedDataLibrary();
5004 static LibraryPtr VMServiceLibrary();
5005 static LibraryPtr WasmLibrary();
5006
5007 // Eagerly compile all classes and functions in the library.
5008 static ErrorPtr CompileAll(bool ignore_error = false);
5009#if !defined(DART_PRECOMPILED_RUNTIME)
5010 // Finalize all classes in all libraries.
5011 static ErrorPtr FinalizeAllClasses();
5012 // Eagerly read all bytecode.
5013 static ErrorPtr ReadAllBytecode();
5014#endif
5015
5016#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
5017 // Checks function fingerprints. Prints mismatches and aborts if
5018 // mismatch found.
5019 static void CheckFunctionFingerprints();
5020#endif // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME).
5021
5022 static bool IsPrivate(const String& name);
5023
5024 // Construct the full name of a corelib member.
5025 static const String& PrivateCoreLibName(const String& member);
5026
5027 // Returns true if [name] matches full name of corelib [member].
5028 static bool IsPrivateCoreLibName(const String& name, const String& member);
5029
  // Looks up a class in the core library, which also contains various VM
  // helper methods and classes. Allows lookup of private classes.
5032 static ClassPtr LookupCoreClass(const String& class_name);
5033
5034 // Return Function::null() if function does not exist in libs.
5035 static FunctionPtr GetFunction(const GrowableArray<Library*>& libs,
5036 const char* class_name,
5037 const char* function_name);
5038
5039 // Character used to indicate a private identifier.
5040 static const char kPrivateIdentifierStart = '_';
5041
5042 // Character used to separate private identifiers from
5043 // the library-specific key.
5044 static const char kPrivateKeySeparator = '@';
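
  // Illustrative example (the key below is made up): the private identifier
  // `_foo` in a library whose private key is "3xYt9" is stored internally as
  // "_foo@3xYt9". Both IsPrivate("_foo") and IsPrivate("_foo@3xYt9") are
  // true, while IsPrivate("foo") is false.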
5045
5046 void CheckReload(const Library& replacement,
5047 IsolateReloadContext* context) const;
5048
  // Returns a closure of the top-level function 'name' in the exported
  // namespace of this library. If a top-level function 'name' does not
  // exist, we look for a top-level getter 'name' that returns a closure.
5052 ObjectPtr GetFunctionClosure(const String& name) const;
5053
5054 // Ensures that all top-level functions and variables (fields) are loaded.
5055 void EnsureTopLevelClassIsFinalized() const;
5056
5057 private:
5058 static const int kInitialImportsCapacity = 4;
5059 static const int kImportsCapacityIncrement = 8;
5060
5061 static LibraryPtr New();
5062
5063 // These methods are only used by the Precompiler to obfuscate
5064 // the name and url.
5065 void set_name(const String& name) const;
5066 void set_url(const String& url) const;
5067
5068 void set_num_imports(intptr_t value) const;
5069 void set_flags(uint8_t flags) const;
5070 bool HasExports() const;
5071 ArrayPtr loaded_scripts() const { return raw_ptr()->loaded_scripts_; }
5072 GrowableObjectArrayPtr metadata() const { return raw_ptr()->metadata_; }
5073 void set_metadata(const GrowableObjectArray& value) const;
5074 ArrayPtr dictionary() const { return raw_ptr()->dictionary_; }
5075 void InitClassDictionary() const;
5076
5077 ArrayPtr resolved_names() const { return raw_ptr()->resolved_names_; }
5078 bool LookupResolvedNamesCache(const String& name, Object* obj) const;
5079 void AddToResolvedNamesCache(const String& name, const Object& obj) const;
5080 void InitResolvedNamesCache() const;
5081 void ClearResolvedNamesCache() const;
5082 void InvalidateResolvedName(const String& name) const;
5083 void InvalidateResolvedNamesCache() const;
5084
5085 ArrayPtr exported_names() const { return raw_ptr()->exported_names_; }
5086 bool LookupExportedNamesCache(const String& name, Object* obj) const;
5087 void AddToExportedNamesCache(const String& name, const Object& obj) const;
5088 void InitExportedNamesCache() const;
5089 void ClearExportedNamesCache() const;
5090 static void InvalidateExportedNamesCaches();
5091
5092 void InitImportList() const;
5093 void RehashDictionary(const Array& old_dict, intptr_t new_dict_size) const;
5094 static LibraryPtr NewLibraryHelper(const String& url, bool import_core_lib);
5095 ObjectPtr LookupEntry(const String& name, intptr_t* index) const;
5096 ObjectPtr LookupLocalObjectAllowPrivate(const String& name) const;
5097 ObjectPtr LookupLocalObject(const String& name) const;
5098
5099 void AllocatePrivateKey() const;
5100
5101 StringPtr MakeMetadataName(const Object& obj) const;
5102 FieldPtr GetMetadataField(const String& metaname) const;
5103 void AddMetadata(const Object& owner,
5104 const String& name,
5105 TokenPosition token_pos,
5106 intptr_t kernel_offset,
5107 intptr_t bytecode_offset) const;
5108
5109 FINAL_HEAP_OBJECT_IMPLEMENTATION(Library, Object);
5110
5111 friend class Bootstrap;
5112 friend class Class;
5113 friend class Debugger;
5114 friend class DictionaryIterator;
5115 friend class Isolate;
5116 friend class LibraryDeserializationCluster;
5117 friend class Namespace;
5118 friend class Object;
5119 friend class Precompiler;
5120};
5121
5122// A Namespace contains the names in a library dictionary, filtered by
5123// the show/hide combinators.
5124class Namespace : public Object {
5125 public:
5126 LibraryPtr library() const { return raw_ptr()->library_; }
5127 ArrayPtr show_names() const { return raw_ptr()->show_names_; }
5128 ArrayPtr hide_names() const { return raw_ptr()->hide_names_; }
5129
5130 void AddMetadata(const Object& owner,
5131 TokenPosition token_pos,
5132 intptr_t kernel_offset = 0);
5133 ObjectPtr GetMetadata() const;
5134
5135 static intptr_t InstanceSize() {
5136 return RoundedAllocationSize(sizeof(NamespaceLayout));
5137 }
5138
5139 bool HidesName(const String& name) const;
5140 ObjectPtr Lookup(const String& name,
5141 ZoneGrowableArray<intptr_t>* trail = nullptr) const;
5142
5143 static NamespacePtr New(const Library& library,
5144 const Array& show_names,
5145 const Array& hide_names);
5146
5147 private:
5148 static NamespacePtr New();
5149
5150 FieldPtr metadata_field() const { return raw_ptr()->metadata_field_; }
5151 void set_metadata_field(const Field& value) const;
5152
5153 FINAL_HEAP_OBJECT_IMPLEMENTATION(Namespace, Object);
5154 friend class Class;
5155 friend class Precompiler;
5156};
5157
5158class KernelProgramInfo : public Object {
5159 public:
5160 static KernelProgramInfoPtr New(const TypedData& string_offsets,
5161 const ExternalTypedData& string_data,
5162 const TypedData& canonical_names,
5163 const ExternalTypedData& metadata_payload,
5164 const ExternalTypedData& metadata_mappings,
5165 const ExternalTypedData& constants_table,
5166 const Array& scripts,
5167 const Array& libraries_cache,
5168 const Array& classes_cache,
5169 const Object& retained_kernel_blob,
5170 const uint32_t binary_version);
5171
5172 static intptr_t InstanceSize() {
5173 return RoundedAllocationSize(sizeof(KernelProgramInfoLayout));
5174 }
5175
5176 TypedDataPtr string_offsets() const { return raw_ptr()->string_offsets_; }
5177
5178 ExternalTypedDataPtr string_data() const { return raw_ptr()->string_data_; }
5179
5180 TypedDataPtr canonical_names() const { return raw_ptr()->canonical_names_; }
5181
5182 ExternalTypedDataPtr metadata_payloads() const {
5183 return raw_ptr()->metadata_payloads_;
5184 }
5185
5186 ExternalTypedDataPtr metadata_mappings() const {
5187 return raw_ptr()->metadata_mappings_;
5188 }
5189
5190 ExternalTypedDataPtr constants_table() const {
5191 return raw_ptr()->constants_table_;
5192 }
5193
5194 void set_constants_table(const ExternalTypedData& value) const;
5195
5196 ArrayPtr scripts() const { return raw_ptr()->scripts_; }
5197 void set_scripts(const Array& scripts) const;
5198
5199 ArrayPtr constants() const { return raw_ptr()->constants_; }
5200 void set_constants(const Array& constants) const;
5201
5202 uint32_t kernel_binary_version() const {
5203 return raw_ptr()->kernel_binary_version_;
5204 }
5205 void set_kernel_binary_version(uint32_t version) const;
5206
5207 // If we load a kernel blob with evaluated constants, then we delay setting
5208 // the native names of [Function] objects until we've read the constant table
5209 // (since native names are encoded as constants).
5210 //
5211 // This array will hold the functions which might need their native name set.
5212 GrowableObjectArrayPtr potential_natives() const {
5213 return raw_ptr()->potential_natives_;
5214 }
5215 void set_potential_natives(const GrowableObjectArray& candidates) const;
5216
5217 GrowableObjectArrayPtr potential_pragma_functions() const {
5218 return raw_ptr()->potential_pragma_functions_;
5219 }
5220 void set_potential_pragma_functions(
5221 const GrowableObjectArray& candidates) const;
5222
5223 ScriptPtr ScriptAt(intptr_t index) const;
5224
5225 ArrayPtr libraries_cache() const { return raw_ptr()->libraries_cache_; }
5226 void set_libraries_cache(const Array& cache) const;
5227 LibraryPtr LookupLibrary(Thread* thread, const Smi& name_index) const;
5228 LibraryPtr InsertLibrary(Thread* thread,
5229 const Smi& name_index,
5230 const Library& lib) const;
5231
5232 ArrayPtr classes_cache() const { return raw_ptr()->classes_cache_; }
5233 void set_classes_cache(const Array& cache) const;
5234 ClassPtr LookupClass(Thread* thread, const Smi& name_index) const;
5235 ClassPtr InsertClass(Thread* thread,
5236 const Smi& name_index,
5237 const Class& klass) const;
5238
5239 ArrayPtr bytecode_component() const { return raw_ptr()->bytecode_component_; }
5240 void set_bytecode_component(const Array& bytecode_component) const;
5241
5242 private:
5243 static KernelProgramInfoPtr New();
5244
5245 FINAL_HEAP_OBJECT_IMPLEMENTATION(KernelProgramInfo, Object);
5246 friend class Class;
5247};
5248
// ObjectPool contains constants, immediates and addresses referenced by
// generated code and deoptimization infos. Each entry has a type associated
// with it which is stored inline after all the entries.
5252class ObjectPool : public Object {
5253 public:
5254 using EntryType = compiler::ObjectPoolBuilderEntry::EntryType;
5255 using Patchability = compiler::ObjectPoolBuilderEntry::Patchability;
5256 using TypeBits = compiler::ObjectPoolBuilderEntry::TypeBits;
5257 using PatchableBit = compiler::ObjectPoolBuilderEntry::PatchableBit;
5258
5259 struct Entry {
5260 Entry() : raw_value_(), type_() {}
5261 explicit Entry(const Object* obj)
5262 : obj_(obj), type_(EntryType::kTaggedObject) {}
5263 Entry(uword value, EntryType info) : raw_value_(value), type_(info) {}
5264 union {
5265 const Object* obj_;
5266 uword raw_value_;
5267 };
5268 EntryType type_;
5269 };
5270
5271 intptr_t Length() const { return raw_ptr()->length_; }
5272 void SetLength(intptr_t value) const {
5273 StoreNonPointer(&raw_ptr()->length_, value);
5274 }
5275
5276 static intptr_t length_offset() {
5277 return OFFSET_OF(ObjectPoolLayout, length_);
5278 }
5279 static intptr_t data_offset() {
5280 return OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data);
5281 }
5282 static intptr_t element_offset(intptr_t index) {
5283 return OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data) +
5284 sizeof(ObjectPoolLayout::Entry) * index;
5285 }
5286
5287 struct ArrayTraits {
5288 static intptr_t elements_start_offset() {
5289 return ObjectPool::data_offset();
5290 }
5291
5292 static constexpr intptr_t kElementSize = sizeof(ObjectPoolLayout::Entry);
5293 };
5294
5295 EntryType TypeAt(intptr_t index) const {
    ASSERT((index >= 0) && (index < Length()));
5297 return TypeBits::decode(raw_ptr()->entry_bits()[index]);
5298 }
5299
5300 Patchability PatchableAt(intptr_t index) const {
    ASSERT((index >= 0) && (index < Length()));
5302 return PatchableBit::decode(raw_ptr()->entry_bits()[index]);
5303 }
5304
5305 void SetTypeAt(intptr_t index, EntryType type, Patchability patchable) const {
    ASSERT(index >= 0 && index < Length());
5307 const uint8_t bits =
5308 PatchableBit::encode(patchable) | TypeBits::encode(type);
5309 StoreNonPointer(&raw_ptr()->entry_bits()[index], bits);
5310 }
5311
5312 template <std::memory_order order = std::memory_order_relaxed>
5313 ObjectPtr ObjectAt(intptr_t index) const {
5314 ASSERT((TypeAt(index) == EntryType::kTaggedObject) ||
5315 (TypeAt(index) == EntryType::kNativeEntryData));
5316 return LoadPointer<ObjectPtr, order>(&(EntryAddr(index)->raw_obj_));
5317 }
5318
5319 template <std::memory_order order = std::memory_order_relaxed>
5320 void SetObjectAt(intptr_t index, const Object& obj) const {
5321 ASSERT((TypeAt(index) == EntryType::kTaggedObject) ||
5322 (TypeAt(index) == EntryType::kNativeEntryData) ||
5323 (TypeAt(index) == EntryType::kImmediate && obj.IsSmi()));
5324 StorePointer<ObjectPtr, order>(&EntryAddr(index)->raw_obj_, obj.raw());
5325 }
5326
5327 uword RawValueAt(intptr_t index) const {
5328 ASSERT(TypeAt(index) != EntryType::kTaggedObject);
5329 return EntryAddr(index)->raw_value_;
5330 }
5331 void SetRawValueAt(intptr_t index, uword raw_value) const {
5332 ASSERT(TypeAt(index) != EntryType::kTaggedObject);
5333 StoreNonPointer(&EntryAddr(index)->raw_value_, raw_value);
5334 }
5335
5336 static intptr_t InstanceSize() {
5337 ASSERT(sizeof(ObjectPoolLayout) ==
5338 OFFSET_OF_RETURNED_VALUE(ObjectPoolLayout, data));
5339 return 0;
5340 }
5341
5342 static const intptr_t kBytesPerElement =
5343 sizeof(ObjectPoolLayout::Entry) + sizeof(uint8_t);
5344 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
5345
5346 static intptr_t InstanceSize(intptr_t len) {
5347 // Ensure that variable length data is not adding to the object length.
5348 ASSERT(sizeof(ObjectPoolLayout) ==
5349 (sizeof(ObjectLayout) + (1 * kWordSize)));
5350 ASSERT(0 <= len && len <= kMaxElements);
5351 return RoundedAllocationSize(sizeof(ObjectPoolLayout) +
5352 (len * kBytesPerElement));
5353 }
5354
5355 static ObjectPoolPtr NewFromBuilder(
5356 const compiler::ObjectPoolBuilder& builder);
5357 static ObjectPoolPtr New(intptr_t len);
5358
5359 void CopyInto(compiler::ObjectPoolBuilder* builder) const;
5360
5361 // Returns the pool index from the offset relative to a tagged ObjectPoolPtr,
5362 // adjusting for the tag-bit.
5363 static intptr_t IndexFromOffset(intptr_t offset) {
5364 ASSERT(
5365 Utils::IsAligned(offset + kHeapObjectTag, compiler::target::kWordSize));
5366#if defined(DART_PRECOMPILER)
5367 return (offset + kHeapObjectTag -
5368 compiler::target::ObjectPool::element_offset(0)) /
5369 compiler::target::kWordSize;
5370#else
5371 return (offset + kHeapObjectTag - element_offset(0)) / kWordSize;
5372#endif
5373 }
5374
5375 static intptr_t OffsetFromIndex(intptr_t index) {
5376 return element_offset(index) - kHeapObjectTag;
5377 }
5378
5379 void DebugPrint() const;
5380
5381 private:
5382 ObjectPoolLayout::Entry const* EntryAddr(intptr_t index) const {
5383 ASSERT((index >= 0) && (index < Length()));
5384 return &raw_ptr()->data()[index];
5385 }
5386
5387 FINAL_HEAP_OBJECT_IMPLEMENTATION(ObjectPool, Object);
5388 friend class Class;
5389 friend class Object;
5390 friend class ObjectPoolLayout;
5391};
5392
5393class Instructions : public Object {
5394 public:
5395 enum {
5396 kSizePos = 0,
5397 kSizeSize = 31,
5398 kFlagsPos = kSizePos + kSizeSize,
    kFlagsSize = 1,  // Currently, the only flag is the single-entry flag.
5400 };
5401
5402 class SizeBits : public BitField<uint32_t, uint32_t, kSizePos, kSizeSize> {};
5403 class FlagsBits : public BitField<uint32_t, bool, kFlagsPos, kFlagsSize> {};
5404
5405 // Excludes HeaderSize().
5406 intptr_t Size() const { return SizeBits::decode(raw_ptr()->size_and_flags_); }
5407 static intptr_t Size(const InstructionsPtr instr) {
5408 return SizeBits::decode(instr->ptr()->size_and_flags_);
5409 }
5410
5411 bool HasMonomorphicEntry() const {
5412 return FlagsBits::decode(raw_ptr()->size_and_flags_);
5413 }
5414 static bool HasMonomorphicEntry(const InstructionsPtr instr) {
5415 return FlagsBits::decode(instr->ptr()->size_and_flags_);
5416 }
5417
5418 uword PayloadStart() const { return PayloadStart(raw()); }
5419 uword MonomorphicEntryPoint() const { return MonomorphicEntryPoint(raw()); }
5420 uword EntryPoint() const { return EntryPoint(raw()); }
5421 static uword PayloadStart(const InstructionsPtr instr) {
5422 return reinterpret_cast<uword>(instr->ptr()) + HeaderSize();
5423 }
5424
5425// Note: We keep the checked entrypoint offsets even (emitting NOPs if
5426// necessary) to allow them to be seen as Smis by the GC.
5427#if defined(TARGET_ARCH_IA32)
5428 static const intptr_t kMonomorphicEntryOffsetJIT = 6;
5429 static const intptr_t kPolymorphicEntryOffsetJIT = 34;
5430 static const intptr_t kMonomorphicEntryOffsetAOT = 0;
5431 static const intptr_t kPolymorphicEntryOffsetAOT = 0;
5432#elif defined(TARGET_ARCH_X64)
5433 static const intptr_t kMonomorphicEntryOffsetJIT = 8;
5434 static const intptr_t kPolymorphicEntryOffsetJIT = 40;
5435 static const intptr_t kMonomorphicEntryOffsetAOT = 8;
5436 static const intptr_t kPolymorphicEntryOffsetAOT = 22;
5437#elif defined(TARGET_ARCH_ARM)
5438 static const intptr_t kMonomorphicEntryOffsetJIT = 0;
5439 static const intptr_t kPolymorphicEntryOffsetJIT = 40;
5440 static const intptr_t kMonomorphicEntryOffsetAOT = 0;
5441 static const intptr_t kPolymorphicEntryOffsetAOT = 12;
5442#elif defined(TARGET_ARCH_ARM64)
5443 static const intptr_t kMonomorphicEntryOffsetJIT = 8;
5444 static const intptr_t kPolymorphicEntryOffsetJIT = 48;
5445 static const intptr_t kMonomorphicEntryOffsetAOT = 8;
5446 static const intptr_t kPolymorphicEntryOffsetAOT = 20;
5447#else
5448#error Missing entry offsets for current architecture
5449#endif
5450
5451 static uword MonomorphicEntryPoint(const InstructionsPtr instr) {
5452 uword entry = PayloadStart(instr);
5453 if (HasMonomorphicEntry(instr)) {
5454 entry += !FLAG_precompiled_mode ? kMonomorphicEntryOffsetJIT
5455 : kMonomorphicEntryOffsetAOT;
5456 }
5457 return entry;
5458 }
5459
5460 static uword EntryPoint(const InstructionsPtr instr) {
5461 uword entry = PayloadStart(instr);
5462 if (HasMonomorphicEntry(instr)) {
5463 entry += !FLAG_precompiled_mode ? kPolymorphicEntryOffsetJIT
5464 : kPolymorphicEntryOffsetAOT;
5465 }
5466 return entry;
5467 }
5468
5469 static const intptr_t kMaxElements =
5470 (kMaxInt32 - (sizeof(InstructionsLayout) + sizeof(ObjectLayout) +
5471 (2 * kMaxObjectAlignment)));
5472
5473 static intptr_t InstanceSize() {
5474 ASSERT(sizeof(InstructionsLayout) ==
5475 OFFSET_OF_RETURNED_VALUE(InstructionsLayout, data));
5476 return 0;
5477 }
5478
5479 static intptr_t InstanceSize(intptr_t size) {
5480 return Utils::RoundUp(HeaderSize() + size, kObjectAlignment);
5481 }
5482
5483 static intptr_t HeaderSize() {
5484 return Utils::RoundUp(sizeof(InstructionsLayout), kWordSize);
5485 }
5486
5487 static InstructionsPtr FromPayloadStart(uword payload_start) {
5488 return static_cast<InstructionsPtr>(payload_start - HeaderSize() +
5489 kHeapObjectTag);
5490 }
5491
5492 bool Equals(const Instructions& other) const {
5493 return Equals(raw(), other.raw());
5494 }
5495
5496 static bool Equals(InstructionsPtr a, InstructionsPtr b) {
5497 if (Size(a) != Size(b)) return false;
5498 NoSafepointScope no_safepoint;
5499 return memcmp(a->ptr(), b->ptr(), InstanceSize(Size(a))) == 0;
5500 }
5501
5502 uint32_t Hash() const {
5503 return HashBytes(reinterpret_cast<const uint8_t*>(PayloadStart()), Size());
5504 }
5505
5506 CodeStatistics* stats() const;
5507 void set_stats(CodeStatistics* stats) const;
5508
5509 private:
5510 void SetSize(intptr_t value) const {
5511 ASSERT(value >= 0);
5512 StoreNonPointer(&raw_ptr()->size_and_flags_,
5513 SizeBits::update(value, raw_ptr()->size_and_flags_));
5514 }
5515
5516 void SetHasMonomorphicEntry(bool value) const {
5517 StoreNonPointer(&raw_ptr()->size_and_flags_,
5518 FlagsBits::update(value, raw_ptr()->size_and_flags_));
5519 }
5520
5521 // New is a private method as RawInstruction and RawCode objects should
5522 // only be created using the Code::FinalizeCode method. This method creates
5523 // the RawInstruction and RawCode objects, sets up the pointer offsets
5524 // and links the two in a GC safe manner.
5525 static InstructionsPtr New(intptr_t size, bool has_monomorphic_entry);
5526
5527 FINAL_HEAP_OBJECT_IMPLEMENTATION(Instructions, Object);
5528 friend class Class;
5529 friend class Code;
5530 friend class AssemblyImageWriter;
5531 friend class BlobImageWriter;
5532 friend class ImageWriter;
5533};
5534
5535// Used only to provide memory accounting for the bare instruction payloads
5536// we serialize, since they are no longer part of RawInstructions objects.
5537class InstructionsSection : public Object {
5538 public:
5539 // Excludes HeaderSize().
5540 intptr_t Size() const { return raw_ptr()->payload_length_; }
5541 static intptr_t Size(const InstructionsSectionPtr instr) {
5542 return instr->ptr()->payload_length_;
5543 }
5544 static intptr_t InstanceSize() {
5545 ASSERT(sizeof(InstructionsSectionLayout) ==
5546 OFFSET_OF_RETURNED_VALUE(InstructionsSectionLayout, data));
5547 return 0;
5548 }
5549
5550 static intptr_t InstanceSize(intptr_t size) {
5551 return Utils::RoundUp(HeaderSize() + size, kObjectAlignment);
5552 }
5553
5554 static intptr_t HeaderSize() {
5555 return Utils::RoundUp(sizeof(InstructionsSectionLayout), kWordSize);
5556 }
5557
5558 private:
5559 FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection, Object);
5560 friend class Class;
5561};
5562
5563class LocalVarDescriptors : public Object {
5564 public:
5565 intptr_t Length() const;
5566
5567 StringPtr GetName(intptr_t var_index) const;
5568
5569 void SetVar(intptr_t var_index,
5570 const String& name,
5571 LocalVarDescriptorsLayout::VarInfo* info) const;
5572
5573 void GetInfo(intptr_t var_index,
5574 LocalVarDescriptorsLayout::VarInfo* info) const;
5575
5576 static const intptr_t kBytesPerElement =
5577 sizeof(LocalVarDescriptorsLayout::VarInfo);
5578 static const intptr_t kMaxElements = LocalVarDescriptorsLayout::kMaxIndex;
5579
5580 static intptr_t InstanceSize() {
5581 ASSERT(sizeof(LocalVarDescriptorsLayout) ==
5582 OFFSET_OF_RETURNED_VALUE(LocalVarDescriptorsLayout, names));
5583 return 0;
5584 }
5585 static intptr_t InstanceSize(intptr_t len) {
5586 ASSERT(0 <= len && len <= kMaxElements);
5587 return RoundedAllocationSize(
5588 sizeof(LocalVarDescriptorsLayout) +
5589 (len * kWordSize) // RawStrings for names.
5590 + (len * sizeof(LocalVarDescriptorsLayout::VarInfo)));
5591 }
5592
5593 static LocalVarDescriptorsPtr New(intptr_t num_variables);
5594
5595 static const char* KindToCString(LocalVarDescriptorsLayout::VarInfoKind kind);
5596
5597 private:
5598 FINAL_HEAP_OBJECT_IMPLEMENTATION(LocalVarDescriptors, Object);
5599 friend class Class;
5600 friend class Object;
5601};
5602
5603class PcDescriptors : public Object {
5604 public:
5605 static const intptr_t kBytesPerElement = 1;
5606 static const intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
5607
5608 static intptr_t UnroundedSize(PcDescriptorsPtr desc) {
5609 return UnroundedSize(desc->ptr()->length_);
5610 }
5611 static intptr_t UnroundedSize(intptr_t len) {
5612 return sizeof(PcDescriptorsLayout) + len;
5613 }
5614 static intptr_t InstanceSize() {
5615 ASSERT(sizeof(PcDescriptorsLayout) ==
5616 OFFSET_OF_RETURNED_VALUE(PcDescriptorsLayout, data));
5617 return 0;
5618 }
5619 static intptr_t InstanceSize(intptr_t len) {
5620 ASSERT(0 <= len && len <= kMaxElements);
5621 return RoundedAllocationSize(UnroundedSize(len));
5622 }
5623
5624 static PcDescriptorsPtr New(GrowableArray<uint8_t>* delta_encoded_data);
5625
5626 // Verify (assert) assumptions about pc descriptors in debug mode.
5627 void Verify(const Function& function) const;
5628
5629 static void PrintHeaderString();
5630
5631 void PrintToJSONObject(JSONObject* jsobj, bool ref) const;
5632
5633 // Encode integer in SLEB128 format.
5634 static void EncodeInteger(GrowableArray<uint8_t>* data, intptr_t value);
5635
5636 // Decode SLEB128 encoded integer. Update byte_index to the next integer.
5637 intptr_t DecodeInteger(intptr_t* byte_index) const;
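
  // SLEB128 worked examples (hedged; standard SLEB128 encoding is assumed):
  //    63 -> 0x3F        (single 7-bit group, sign bit clear)
  //    64 -> 0xC0 0x00   (bit 6 of 0x40 is set, so an extra byte marks the
  //                       value as positive)
  //    -2 -> 0x7E        (single group, sign bit set for a negative value)
  //  -129 -> 0xFF 0x7E   (two groups, little-endian, continuation bit set on
  //                       the first byte)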
5638
  // We would have a VisitPointers function here to traverse the
  // pc descriptors table and visit any objects in the table.
5641 // Note: never return a reference to a PcDescriptorsLayout::PcDescriptorRec
5642 // as the object can move.
5643 class Iterator : ValueObject {
5644 public:
5645 Iterator(const PcDescriptors& descriptors, intptr_t kind_mask)
5646 : descriptors_(descriptors),
5647 kind_mask_(kind_mask),
5648 byte_index_(0),
5649 cur_pc_offset_(0),
5650 cur_kind_(0),
5651 cur_deopt_id_(0),
5652 cur_token_pos_(0),
5653 cur_try_index_(0),
5654 cur_yield_index_(PcDescriptorsLayout::kInvalidYieldIndex) {}
5655
5656 bool MoveNext() {
      // Moves to the next record that matches kind_mask_.
5658 while (byte_index_ < descriptors_.Length()) {
5659 const int32_t kind_and_metadata =
5660 descriptors_.DecodeInteger(&byte_index_);
5661 cur_kind_ =
5662 PcDescriptorsLayout::KindAndMetadata::DecodeKind(kind_and_metadata);
5663 cur_try_index_ = PcDescriptorsLayout::KindAndMetadata::DecodeTryIndex(
5664 kind_and_metadata);
5665 cur_yield_index_ =
5666 PcDescriptorsLayout::KindAndMetadata::DecodeYieldIndex(
5667 kind_and_metadata);
5668
5669 cur_pc_offset_ += descriptors_.DecodeInteger(&byte_index_);
5670
5671 if (!FLAG_precompiled_mode) {
5672 cur_deopt_id_ += descriptors_.DecodeInteger(&byte_index_);
5673 cur_token_pos_ += descriptors_.DecodeInteger(&byte_index_);
5674 }
5675
5676 if ((cur_kind_ & kind_mask_) != 0) {
5677 return true; // Current is valid.
5678 }
5679 }
5680 return false;
5681 }
5682
5683 uword PcOffset() const { return cur_pc_offset_; }
5684 intptr_t DeoptId() const { return cur_deopt_id_; }
5685 TokenPosition TokenPos() const { return TokenPosition(cur_token_pos_); }
5686 intptr_t TryIndex() const { return cur_try_index_; }
5687 intptr_t YieldIndex() const { return cur_yield_index_; }
5688 PcDescriptorsLayout::Kind Kind() const {
5689 return static_cast<PcDescriptorsLayout::Kind>(cur_kind_);
5690 }
5691
5692 private:
5693 friend class PcDescriptors;
5694
    // For nested iterations, starting at the element after the current one.
5696 explicit Iterator(const Iterator& iter)
5697 : ValueObject(),
5698 descriptors_(iter.descriptors_),
5699 kind_mask_(iter.kind_mask_),
5700 byte_index_(iter.byte_index_),
5701 cur_pc_offset_(iter.cur_pc_offset_),
5702 cur_kind_(iter.cur_kind_),
5703 cur_deopt_id_(iter.cur_deopt_id_),
5704 cur_token_pos_(iter.cur_token_pos_),
5705 cur_try_index_(iter.cur_try_index_),
5706 cur_yield_index_(iter.cur_yield_index_) {}
5707
5708 const PcDescriptors& descriptors_;
5709 const intptr_t kind_mask_;
5710 intptr_t byte_index_;
5711
5712 intptr_t cur_pc_offset_;
5713 intptr_t cur_kind_;
5714 intptr_t cur_deopt_id_;
5715 intptr_t cur_token_pos_;
5716 intptr_t cur_try_index_;
5717 intptr_t cur_yield_index_;
5718 };
5719
5720 intptr_t Length() const;
5721 bool Equals(const PcDescriptors& other) const {
5722 if (Length() != other.Length()) {
5723 return false;
5724 }
5725 NoSafepointScope no_safepoint;
5726 return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(Length())) == 0;
5727 }
5728
5729 private:
5730 static const char* KindAsStr(PcDescriptorsLayout::Kind kind);
5731
5732 static PcDescriptorsPtr New(intptr_t length);
5733
5734 void SetLength(intptr_t value) const;
5735 void CopyData(GrowableArray<uint8_t>* data);
5736
5737 FINAL_HEAP_OBJECT_IMPLEMENTATION(PcDescriptors, Object);
5738 friend class Class;
5739 friend class Object;
5740};
5741
5742class CodeSourceMap : public Object {
5743 public:
5744 static const intptr_t kBytesPerElement = 1;
5745 static const intptr_t kMaxElements = kMaxInt32 / kBytesPerElement;
5746
5747 static intptr_t UnroundedSize(CodeSourceMapPtr map) {
5748 return UnroundedSize(map->ptr()->length_);
5749 }
5750 static intptr_t UnroundedSize(intptr_t len) {
5751 return sizeof(CodeSourceMapLayout) + len;
5752 }
5753 static intptr_t InstanceSize() {
5754 ASSERT(sizeof(CodeSourceMapLayout) ==
5755 OFFSET_OF_RETURNED_VALUE(CodeSourceMapLayout, data));
5756 return 0;
5757 }
5758 static intptr_t InstanceSize(intptr_t len) {
5759 ASSERT(0 <= len && len <= kMaxElements);
5760 return RoundedAllocationSize(UnroundedSize(len));
5761 }
5762
5763 static CodeSourceMapPtr New(intptr_t length);
5764
5765 intptr_t Length() const { return raw_ptr()->length_; }
5766 uint8_t* Data() const {
5767 return UnsafeMutableNonPointer(&raw_ptr()->data()[0]);
5768 }
5769
5770 bool Equals(const CodeSourceMap& other) const {
5771 if (Length() != other.Length()) {
5772 return false;
5773 }
5774 NoSafepointScope no_safepoint;
5775 return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(Length())) == 0;
5776 }
5777
5778 void PrintToJSONObject(JSONObject* jsobj, bool ref) const;
5779
5780 private:
5781 void SetLength(intptr_t value) const;
5782
5783 FINAL_HEAP_OBJECT_IMPLEMENTATION(CodeSourceMap, Object);
5784 friend class Class;
5785 friend class Object;
5786};
5787
5788class CompressedStackMaps : public Object {
5789 public:
5790 static const intptr_t kHashBits = 30;
5791
5792 uintptr_t payload_size() const { return PayloadSizeOf(raw()); }
5793 static uintptr_t PayloadSizeOf(const CompressedStackMapsPtr raw) {
5794 return CompressedStackMapsLayout::SizeField::decode(
5795 raw->ptr()->flags_and_size_);
5796 }
5797
5798 bool Equals(const CompressedStackMaps& other) const {
5799 // All of the table flags and payload size must match.
5800 if (raw_ptr()->flags_and_size_ != other.raw_ptr()->flags_and_size_) {
5801 return false;
5802 }
5803 NoSafepointScope no_safepoint;
5804 return memcmp(raw_ptr(), other.raw_ptr(), InstanceSize(payload_size())) ==
5805 0;
5806 }
5807
5808 // Methods to allow use with PointerKeyValueTrait to create sets of CSMs.
5809 bool Equals(const CompressedStackMaps* other) const { return Equals(*other); }
5810 intptr_t Hashcode() const;
5811
5812 static intptr_t UnroundedSize(CompressedStackMapsPtr maps) {
5813 return UnroundedSize(CompressedStackMaps::PayloadSizeOf(maps));
5814 }
5815 static intptr_t UnroundedSize(intptr_t length) {
5816 return sizeof(CompressedStackMapsLayout) + length;
5817 }
5818 static intptr_t InstanceSize() {
5819 ASSERT(sizeof(CompressedStackMapsLayout) ==
5820 OFFSET_OF_RETURNED_VALUE(CompressedStackMapsLayout, data));
5821 return 0;
5822 }
5823 static intptr_t InstanceSize(intptr_t length) {
5824 return RoundedAllocationSize(UnroundedSize(length));
5825 }
5826
5827 bool UsesGlobalTable() const { return !IsNull() && UsesGlobalTable(raw()); }
5828 static bool UsesGlobalTable(const CompressedStackMapsPtr raw) {
5829 return CompressedStackMapsLayout::UsesTableBit::decode(
5830 raw->ptr()->flags_and_size_);
5831 }
5832
5833 bool IsGlobalTable() const { return !IsNull() && IsGlobalTable(raw()); }
5834 static bool IsGlobalTable(const CompressedStackMapsPtr raw) {
5835 return CompressedStackMapsLayout::GlobalTableBit::decode(
5836 raw->ptr()->flags_and_size_);
5837 }
5838
5839 static CompressedStackMapsPtr NewInlined(
5840 const GrowableArray<uint8_t>& bytes) {
5841 return New(bytes, /*is_global_table=*/false, /*uses_global_table=*/false);
5842 }
5843 static CompressedStackMapsPtr NewUsingTable(
5844 const GrowableArray<uint8_t>& bytes) {
5845 return New(bytes, /*is_global_table=*/false, /*uses_global_table=*/true);
5846 }
5847
5848 static CompressedStackMapsPtr NewGlobalTable(
5849 const GrowableArray<uint8_t>& bytes) {
5850 return New(bytes, /*is_global_table=*/true, /*uses_global_table=*/false);
5851 }
5852
5853 private:
5854 static CompressedStackMapsPtr New(const GrowableArray<uint8_t>& bytes,
5855 bool is_global_table,
5856 bool uses_global_table);
5857
5858 uint8_t PayloadByte(uintptr_t offset) const {
5859 ASSERT(offset < payload_size());
5860 return raw_ptr()->data()[offset];
5861 }
5862
5863 FINAL_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps, Object);
5864 friend class Class;
5865 friend class CompressedStackMapsIterator; // For PayloadByte
5866 friend class StackMapEntry; // For PayloadByte
5867};
5868
5869class ExceptionHandlers : public Object {
5870 public:
5871 static const intptr_t kInvalidPcOffset = 0;
5872
5873 intptr_t num_entries() const;
5874
5875 void GetHandlerInfo(intptr_t try_index, ExceptionHandlerInfo* info) const;
5876
5877 uword HandlerPCOffset(intptr_t try_index) const;
5878 intptr_t OuterTryIndex(intptr_t try_index) const;
5879 bool NeedsStackTrace(intptr_t try_index) const;
5880 bool IsGenerated(intptr_t try_index) const;
5881
5882 void SetHandlerInfo(intptr_t try_index,
5883 intptr_t outer_try_index,
5884 uword handler_pc_offset,
5885 bool needs_stacktrace,
5886 bool has_catch_all,
5887 bool is_generated) const;
5888
5889 ArrayPtr GetHandledTypes(intptr_t try_index) const;
5890 void SetHandledTypes(intptr_t try_index, const Array& handled_types) const;
5891 bool HasCatchAll(intptr_t try_index) const;
5892
5893 static intptr_t InstanceSize() {
5894 ASSERT(sizeof(ExceptionHandlersLayout) ==
5895 OFFSET_OF_RETURNED_VALUE(ExceptionHandlersLayout, data));
5896 return 0;
5897 }
5898 static intptr_t InstanceSize(intptr_t len) {
5899 return RoundedAllocationSize(sizeof(ExceptionHandlersLayout) +
5900 (len * sizeof(ExceptionHandlerInfo)));
5901 }
5902
5903 static ExceptionHandlersPtr New(intptr_t num_handlers);
5904 static ExceptionHandlersPtr New(const Array& handled_types_data);
5905
  // We would have a VisitPointers function here to traverse the
  // exception handler table and visit any objects in the table.
5908
5909 private:
  // Pick a somewhat arbitrary maximum number of exception handlers per
  // function. This value is used to catch potentially malicious code.
5913 static const intptr_t kMaxHandlers = 1024 * 1024;
5914
5915 void set_handled_types_data(const Array& value) const;
5916
5917 FINAL_HEAP_OBJECT_IMPLEMENTATION(ExceptionHandlers, Object);
5918 friend class Class;
5919 friend class Object;
5920};
5921
5922// A WeakSerializationReference (WSR) denotes a type of weak reference to a
5923// target object. In particular, objects that can only be reached from roots via
5924// WSR edges during serialization of AOT snapshots should not be serialized. Of
5925// course, the target object may still be serialized if there are paths to the
5926// object from the roots that do not go through one of these objects, in which
5927// case the WSR is discarded in favor of a direct reference during serialization
5928// to avoid runtime overhead.
5929//
5930// Note: Some objects cannot be dropped during AOT serialization, and thus
5931// Wrap() may return the original object in some cases. The CanWrap()
5932// function returns false if Wrap() will return the original object.
5933// In particular, the null object will never be wrapped, so receiving
5934// Object::null() from target() means the WSR represents a dropped target.
5935//
// Unfortunately a WSR is not a proxy for the original object, so if WSRs may
// appear as field contents (currently only possible for ObjectPtr fields),
// then code that accesses that field must handle the case where a WSR has
// been introduced. Before serialization, Unwrap can be used to take an
// Object reference or RawObject pointer and remove any WSR wrapping before
// use.
5941// After deserialization, any WSRs no longer contain a pointer to the target,
5942// but instead contain only the class ID of the original target.
5943//
5944// Current uses of WSRs:
5945// * Code::owner_
5946class WeakSerializationReference : public Object {
5947 public:
5948 ObjectPtr target() const { return TargetOf(raw()); }
5949 static ObjectPtr TargetOf(const WeakSerializationReferencePtr raw) {
5950#if defined(DART_PRECOMPILED_RUNTIME)
5951 // WSRs in the precompiled runtime only contain some remaining info about
    // their old target, not a reference to the target itself.
5953 return Object::null();
5954#else
5955 // Outside the precompiled runtime, they should always have a target.
5956 ASSERT(raw->ptr()->target_ != Object::null());
5957 return raw->ptr()->target_;
5958#endif
5959 }
5960
5961 classid_t TargetClassId() const { return TargetClassIdOf(raw()); }
5962 static classid_t TargetClassIdOf(const WeakSerializationReferencePtr raw) {
5963#if defined(DART_PRECOMPILED_RUNTIME)
5964 // No new instances of WSRs are created in the precompiled runtime, so
5965 // this instance came from deserialization and thus must be the empty WSR.
5966 return raw->ptr()->cid_;
5967#else
5968 return TargetOf(raw)->GetClassId();
5969#endif
5970 }
5971
5972 static ObjectPtr Unwrap(const Object& obj) { return Unwrap(obj.raw()); }
5973 // Gets the underlying object from a WSR, or the original object if it is
5974 // not one. Notably, Unwrap(Wrap(r)) == r for all raw objects r, whether
5975 // CanWrap(r) or not. However, this will not hold if a serialization and
5976 // deserialization step is put between the two calls.
5977 static ObjectPtr Unwrap(ObjectPtr obj) {
5978 if (!obj->IsWeakSerializationReference()) return obj;
5979 return TargetOf(static_cast<WeakSerializationReferencePtr>(obj));
5980 }
5981
5982 // An Unwrap that only unwraps if there's a valid target, otherwise the
5983 // WSR is returned. Useful for cases where we want to call Object methods
5984 // like ToCString() on whatever non-null object we can get.
5985 static ObjectPtr UnwrapIfTarget(const Object& obj) {
5986 return UnwrapIfTarget(obj.raw());
5987 }
5988 static ObjectPtr UnwrapIfTarget(ObjectPtr raw) {
5989#if defined(DART_PRECOMPILED_RUNTIME)
5990 // In the precompiled runtime, WSRs never have a target so we always return
5991 // the argument.
5992 return raw;
5993#else
5994 if (!raw->IsWeakSerializationReference()) return raw;
    // Outside the precompiled runtime, WSRs always have a target.
5996 return TargetOf(WeakSerializationReference::RawCast(raw));
5997#endif
5998 }
5999
6000 static classid_t UnwrappedClassIdOf(const Object& obj) {
6001 return UnwrappedClassIdOf(obj.raw());
6002 }
6003 // Gets the class ID of the underlying object from a WSR, or the class ID of
6004 // the object if it is not one.
6005 //
  // UnwrappedClassIdOf(Wrap(r)) == UnwrappedClassIdOf(r) for all raw objects
  // r, whether CanWrap(r) or not. Unlike Unwrap, this is still true even if
6008 // there is a serialization and deserialization step between the two calls,
6009 // since that information is saved in the serialized WSR.
6010 static classid_t UnwrappedClassIdOf(ObjectPtr obj) {
6011 if (!obj->IsWeakSerializationReference()) return obj->GetClassId();
6012 return TargetClassIdOf(WeakSerializationReference::RawCast(obj));
6013 }
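
  // A minimal usage sketch (illustrative only, not part of this class): code
  // that reads a field which may hold a WSR, such as Code::owner_, can unwrap
  // it before inspecting the target. The |code| handle below is hypothetical.
  //
  //   const Object& owner =
  //       Object::Handle(WeakSerializationReference::Unwrap(code.owner()));
  //   if (owner.IsFunction()) {
  //     // ... use Function::Cast(owner) ...
  //   }
  //
  // In the precompiled runtime the unwrapped value may be null, so callers
  // that only need the class id should prefer UnwrappedClassIdOf.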
6014
6015 static intptr_t InstanceSize() {
6016 return RoundedAllocationSize(sizeof(WeakSerializationReferenceLayout));
6017 }
6018
6019#if defined(DART_PRECOMPILER)
6020 // Returns true if a new WSR would be created when calling Wrap.
6021 static bool CanWrap(const Object& object);
6022
6023 // This returns ObjectPtr, not WeakSerializationReferencePtr, because
6024 // target.raw() is returned when CanWrap(target) is false.
6025 static ObjectPtr Wrap(Zone* zone, const Object& target);
6026#endif
6027
6028 private:
6029 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakSerializationReference, Object);
6030 friend class Class;
6031};
6032
6033class Code : public Object {
6034 public:
6035 // When dual mapping, this returns the executable view.
6036 InstructionsPtr active_instructions() const {
6037#if defined(DART_PRECOMPILED_RUNTIME)
6038 UNREACHABLE();
6039 return NULL;
6040#else
6041 return raw_ptr()->active_instructions_;
6042#endif
6043 }
6044
6045 // When dual mapping, these return the executable view.
6046 InstructionsPtr instructions() const { return raw_ptr()->instructions_; }
6047 static InstructionsPtr InstructionsOf(const CodePtr code) {
6048 return code->ptr()->instructions_;
6049 }
6050
6051 static intptr_t saved_instructions_offset() {
6052 return OFFSET_OF(CodeLayout, instructions_);
6053 }
6054
6055 using EntryKind = CodeEntryKind;
6056
6057 static const char* EntryKindToCString(EntryKind kind);
6058 static bool ParseEntryKind(const char* str, EntryKind* out);
6059
6060 static intptr_t entry_point_offset(EntryKind kind = EntryKind::kNormal) {
6061 switch (kind) {
6062 case EntryKind::kNormal:
6063 return OFFSET_OF(CodeLayout, entry_point_);
6064 case EntryKind::kUnchecked:
6065 return OFFSET_OF(CodeLayout, unchecked_entry_point_);
6066 case EntryKind::kMonomorphic:
6067 return OFFSET_OF(CodeLayout, monomorphic_entry_point_);
6068 case EntryKind::kMonomorphicUnchecked:
6069 return OFFSET_OF(CodeLayout, monomorphic_unchecked_entry_point_);
6070 default:
6071 UNREACHABLE();
6072 }
6073 }
6074
6075 ObjectPoolPtr object_pool() const { return raw_ptr()->object_pool_; }
6076 static intptr_t object_pool_offset() {
6077 return OFFSET_OF(CodeLayout, object_pool_);
6078 }
6079
6080 intptr_t pointer_offsets_length() const {
6081 return PtrOffBits::decode(raw_ptr()->state_bits_);
6082 }
6083
6084 bool is_optimized() const {
6085 return OptimizedBit::decode(raw_ptr()->state_bits_);
6086 }
6087 void set_is_optimized(bool value) const;
6088 static bool IsOptimized(CodePtr code) {
6089 return Code::OptimizedBit::decode(code->ptr()->state_bits_);
6090 }
6091
6092 bool is_force_optimized() const {
6093 return ForceOptimizedBit::decode(raw_ptr()->state_bits_);
6094 }
6095 void set_is_force_optimized(bool value) const;
6096
6097 bool is_alive() const { return AliveBit::decode(raw_ptr()->state_bits_); }
6098 void set_is_alive(bool value) const;
6099
6100 bool HasMonomorphicEntry() const { return HasMonomorphicEntry(raw()); }
6101 static bool HasMonomorphicEntry(const CodePtr code) {
6102#if defined(DART_PRECOMPILED_RUNTIME)
6103 return code->ptr()->entry_point_ != code->ptr()->monomorphic_entry_point_;
6104#else
6105 return Instructions::HasMonomorphicEntry(InstructionsOf(code));
6106#endif
6107 }
6108
6109 // Returns the payload start of [instructions()].
6110 uword PayloadStart() const { return PayloadStartOf(raw()); }
6111 static uword PayloadStartOf(const CodePtr code) {
6112#if defined(DART_PRECOMPILED_RUNTIME)
6113 const uword entry_offset = HasMonomorphicEntry(code)
6114 ? Instructions::kPolymorphicEntryOffsetAOT
6115 : 0;
6116 return EntryPointOf(code) - entry_offset;
6117#else
6118 return Instructions::PayloadStart(InstructionsOf(code));
6119#endif
6120 }
6121
6122 // Returns the entry point of [instructions()].
6123 uword EntryPoint() const { return EntryPointOf(raw()); }
6124 static uword EntryPointOf(const CodePtr code) {
6125#if defined(DART_PRECOMPILED_RUNTIME)
6126 return code->ptr()->entry_point_;
6127#else
6128 return Instructions::EntryPoint(InstructionsOf(code));
6129#endif
6130 }
6131
6132 // Returns the unchecked entry point of [instructions()].
6133 uword UncheckedEntryPoint() const {
6134#if defined(DART_PRECOMPILED_RUNTIME)
6135 return raw_ptr()->unchecked_entry_point_;
6136#else
6137 return EntryPoint() + raw_ptr()->unchecked_offset_;
6138#endif
6139 }
6140 // Returns the monomorphic entry point of [instructions()].
6141 uword MonomorphicEntryPoint() const {
6142#if defined(DART_PRECOMPILED_RUNTIME)
6143 return raw_ptr()->monomorphic_entry_point_;
6144#else
6145 return Instructions::MonomorphicEntryPoint(instructions());
6146#endif
6147 }
6148 // Returns the unchecked monomorphic entry point of [instructions()].
6149 uword MonomorphicUncheckedEntryPoint() const {
6150#if defined(DART_PRECOMPILED_RUNTIME)
6151 return raw_ptr()->monomorphic_unchecked_entry_point_;
6152#else
6153 return MonomorphicEntryPoint() + raw_ptr()->unchecked_offset_;
6154#endif
6155 }
6156
6157 // Returns the size of [instructions()].
6158 intptr_t Size() const { return PayloadSizeOf(raw()); }
6159 static intptr_t PayloadSizeOf(const CodePtr code) {
6160#if defined(DART_PRECOMPILED_RUNTIME)
6161 return code->ptr()->instructions_length_;
6162#else
6163 return Instructions::Size(InstructionsOf(code));
6164#endif
6165 }
6166
6167 ObjectPoolPtr GetObjectPool() const;
6168 // Returns whether the given PC address is in [instructions()].
6169 bool ContainsInstructionAt(uword addr) const {
6170 return ContainsInstructionAt(raw(), addr);
6171 }
6172
6173 // Returns whether the given PC address is in [InstructionsOf(code)].
6174 static bool ContainsInstructionAt(const CodePtr code, uword pc) {
6175 return CodeLayout::ContainsPC(code, pc);
6176 }
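
  // A minimal usage sketch (illustrative only): relating the PC containment
  // check to the payload accessors above. |code| and |pc| are hypothetical
  // values obtained from a stack walk.
  //
  //   if (code.ContainsInstructionAt(pc)) {
  //     const uword offset = pc - code.PayloadStart();
  //     ASSERT(offset < static_cast<uword>(code.Size()));
  //   }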
6177
6178 // Returns true if there is a debugger breakpoint set in this code object.
6179 bool HasBreakpoint() const;
6180
6181 PcDescriptorsPtr pc_descriptors() const { return raw_ptr()->pc_descriptors_; }
6182 void set_pc_descriptors(const PcDescriptors& descriptors) const {
6183 ASSERT(descriptors.IsOld());
6184 StorePointer(&raw_ptr()->pc_descriptors_, descriptors.raw());
6185 }
6186
6187 CodeSourceMapPtr code_source_map() const {
6188 return raw_ptr()->code_source_map_;
6189 }
6190
6191 void set_code_source_map(const CodeSourceMap& code_source_map) const {
6192 ASSERT(code_source_map.IsOld());
6193 StorePointer(&raw_ptr()->code_source_map_, code_source_map.raw());
6194 }
6195
6196 // Array of DeoptInfo objects.
6197 ArrayPtr deopt_info_array() const {
6198#if defined(DART_PRECOMPILED_RUNTIME)
6199 UNREACHABLE();
6200 return NULL;
6201#else
6202 return raw_ptr()->deopt_info_array_;
6203#endif
6204 }
6205 void set_deopt_info_array(const Array& array) const;
6206
6207#if !defined(DART_PRECOMPILED_RUNTIME)
6208 intptr_t num_variables() const;
6209 void set_num_variables(intptr_t num_variables) const;
6210#endif
6211
6212#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
6213 TypedDataPtr catch_entry_moves_maps() const;
6214 void set_catch_entry_moves_maps(const TypedData& maps) const;
6215#endif
6216
6217 CompressedStackMapsPtr compressed_stackmaps() const {
6218 return raw_ptr()->compressed_stackmaps_;
6219 }
6220 void set_compressed_stackmaps(const CompressedStackMaps& maps) const;
6221
6222 enum CallKind {
6223 kPcRelativeCall = 1,
6224 kPcRelativeTTSCall = 2,
6225 kPcRelativeTailCall = 3,
6226 kCallViaCode = 4,
6227 };
6228
6229 enum CallEntryPoint {
6230 kDefaultEntry,
6231 kUncheckedEntry,
6232 };
6233
6234 enum SCallTableEntry {
6235 kSCallTableKindAndOffset = 0,
6236 kSCallTableCodeOrTypeTarget = 1,
6237 kSCallTableFunctionTarget = 2,
6238 kSCallTableEntryLength = 3,
6239 };
6240
6241 enum class PoolAttachment {
6242 kAttachPool,
6243 kNotAttachPool,
6244 };
6245
6246 class KindField : public BitField<intptr_t, CallKind, 0, 3> {};
6247 class EntryPointField
6248 : public BitField<intptr_t, CallEntryPoint, KindField::kNextBit, 1> {};
6249 class OffsetField
6250 : public BitField<intptr_t, intptr_t, EntryPointField::kNextBit, 26> {};
6251
6252 void set_static_calls_target_table(const Array& value) const;
6253 ArrayPtr static_calls_target_table() const {
6254#if defined(DART_PRECOMPILED_RUNTIME)
6255 UNREACHABLE();
6256 return NULL;
6257#else
6258 return raw_ptr()->static_calls_target_table_;
6259#endif
6260 }
6261
6262 TypedDataPtr GetDeoptInfoAtPc(uword pc,
6263 ICData::DeoptReasonId* deopt_reason,
6264 uint32_t* deopt_flags) const;
6265
6266 // Returns null if there is no static call at 'pc'.
6267 FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const;
6268 // Aborts if there is no static call at 'pc'.
6269 void SetStaticCallTargetCodeAt(uword pc, const Code& code) const;
6270 void SetStubCallTargetCodeAt(uword pc, const Code& code) const;
6271
6272 void Disassemble(DisassemblyFormatter* formatter = NULL) const;
6273
6274 class Comments : public ZoneAllocated {
6275 public:
6276 static Comments& New(intptr_t count);
6277
6278 intptr_t Length() const;
6279
6280 void SetPCOffsetAt(intptr_t idx, intptr_t pc_offset);
6281 void SetCommentAt(intptr_t idx, const String& comment);
6282
6283 intptr_t PCOffsetAt(intptr_t idx) const;
6284 StringPtr CommentAt(intptr_t idx) const;
6285
6286 private:
6287 explicit Comments(const Array& comments);
6288
6289 // Layout of entries describing comments.
6290 enum {
6291 kPCOffsetEntry = 0, // PC offset to a comment as a Smi.
6292 kCommentEntry, // Comment text as a String.
6293 kNumberOfEntries
6294 };
6295
6296 const Array& comments_;
6297
6298 friend class Code;
6299
6300 DISALLOW_COPY_AND_ASSIGN(Comments);
6301 };
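
  // A minimal usage sketch (illustrative only; Comments is ZoneAllocated, so
  // a zone must be active). The PC offset and comment text are hypothetical.
  //
  //   Code::Comments& comments = Code::Comments::New(1);
  //   comments.SetPCOffsetAt(0, 0x10);
  //   comments.SetCommentAt(0, String::Handle(String::New("prologue")));
  //   code.set_comments(comments);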
6302
6303 const Comments& comments() const;
6304 void set_comments(const Comments& comments) const;
6305
6306 ObjectPtr return_address_metadata() const {
6307#if defined(PRODUCT)
6308 UNREACHABLE();
6309 return NULL;
6310#else
6311 return raw_ptr()->return_address_metadata_;
6312#endif
6313 }
6314 // Sets |return_address_metadata|.
6315 void SetPrologueOffset(intptr_t offset) const;
6316 // Returns -1 if no prologue offset is available.
6317 intptr_t GetPrologueOffset() const;
6318
6319 ArrayPtr inlined_id_to_function() const;
6320 void set_inlined_id_to_function(const Array& value) const;
6321
6322 // Provides the call stack at the given pc offset, with the top-of-stack in
6323 // the last element and the root function (this) as the first element, along
6324 // with the corresponding source positions. Note the token position for each
6325 // function except the top-of-stack is the position of the call to the next
6326 // function. The stack will be empty if we lack the metadata to produce it,
6327 // which happens for stub code.
6328 // The pc offset is interpreted as an instruction address (as needed by the
6329 // disassembler or the top frame of a profiler sample).
6330 void GetInlinedFunctionsAtInstruction(
6331 intptr_t pc_offset,
6332 GrowableArray<const Function*>* functions,
6333 GrowableArray<TokenPosition>* token_positions) const;
6334 // Same as above, except the pc is interpreted as a return address (as needed
6335 // for a stack trace or the bottom frames of a profiler sample).
6336 void GetInlinedFunctionsAtReturnAddress(
6337 intptr_t pc_offset,
6338 GrowableArray<const Function*>* functions,
6339 GrowableArray<TokenPosition>* token_positions) const {
6340 GetInlinedFunctionsAtInstruction(pc_offset - 1, functions, token_positions);
6341 }
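
  // A minimal usage sketch (illustrative only): recovering the inlining chain
  // for a frame's return address. |code| and |return_pc| are hypothetical.
  //
  //   GrowableArray<const Function*> functions;
  //   GrowableArray<TokenPosition> token_positions;
  //   code.GetInlinedFunctionsAtReturnAddress(return_pc - code.PayloadStart(),
  //                                           &functions, &token_positions);
  //   // functions[0] is the root function; the last element (if any) is the
  //   // innermost inlined frame.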
6342
6343 NOT_IN_PRODUCT(void PrintJSONInlineIntervals(JSONObject* object) const);
6344 void DumpInlineIntervals() const;
6345 void DumpSourcePositions(bool relative_addresses = false) const;
6346
6347 LocalVarDescriptorsPtr var_descriptors() const {
6348#if defined(PRODUCT)
6349 UNREACHABLE();
6350 return NULL;
6351#else
6352 return raw_ptr()->var_descriptors_;
6353#endif
6354 }
6355 void set_var_descriptors(const LocalVarDescriptors& value) const {
6356#if defined(PRODUCT)
6357 UNREACHABLE();
6358#else
6359 ASSERT(value.IsOld());
6360 StorePointer(&raw_ptr()->var_descriptors_, value.raw());
6361#endif
6362 }
6363
6364 // Will compute local var descriptors if necessary.
6365 LocalVarDescriptorsPtr GetLocalVarDescriptors() const;
6366
6367 ExceptionHandlersPtr exception_handlers() const {
6368 return raw_ptr()->exception_handlers_;
6369 }
6370 void set_exception_handlers(const ExceptionHandlers& handlers) const {
6371 ASSERT(handlers.IsOld());
6372 StorePointer(&raw_ptr()->exception_handlers_, handlers.raw());
6373 }
6374
6375 // WARNING: function() returns the owner which is not guaranteed to be
6376 // a Function. It is up to the caller to guarantee it isn't a stub, class,
6377 // or something else.
6378 // TODO(turnidge): Consider dropping this function and making
6379 // everybody use owner(). Currently this function is misused - even
6380 // while generating the snapshot.
6381 FunctionPtr function() const {
6382 ASSERT(IsFunctionCode());
6383 return Function::RawCast(
6384 WeakSerializationReference::Unwrap(raw_ptr()->owner_));
6385 }
6386
6387 ObjectPtr owner() const { return raw_ptr()->owner_; }
6388 void set_owner(const Object& owner) const;
6389
6390 classid_t OwnerClassId() const { return OwnerClassIdOf(raw()); }
6391 static classid_t OwnerClassIdOf(CodePtr raw) {
6392 return WeakSerializationReference::UnwrappedClassIdOf(raw->ptr()->owner_);
6393 }
6394
6395 static intptr_t owner_offset() { return OFFSET_OF(CodeLayout, owner_); }
6396
6397 // We would have a VisitPointers function here to traverse all the
6398 // embedded objects in the instructions using pointer_offsets.
6399
6400 static const intptr_t kBytesPerElement =
6401 sizeof(reinterpret_cast<CodeLayout*>(0)->data()[0]);
6402 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
6403
6404 static intptr_t InstanceSize() {
6405 ASSERT(sizeof(CodeLayout) == OFFSET_OF_RETURNED_VALUE(CodeLayout, data));
6406 return 0;
6407 }
6408 static intptr_t InstanceSize(intptr_t len) {
6409 ASSERT(0 <= len && len <= kMaxElements);
6410 return RoundedAllocationSize(sizeof(CodeLayout) + (len * kBytesPerElement));
6411 }
6412#if !defined(DART_PRECOMPILED_RUNTIME)
  // Finalizes the generated code by generating various kinds of metadata
  // (e.g. stack maps, pc descriptors, ...) and attaching them to a newly
  // generated [Code] object.
6416 //
6417 // If Code::PoolAttachment::kAttachPool is specified for [pool_attachment]
6418 // then a new [ObjectPool] will be attached to the code object as well.
6419 // Otherwise the caller is responsible for doing this via
  // `Code::set_object_pool()`.
6421 static CodePtr FinalizeCode(FlowGraphCompiler* compiler,
6422 compiler::Assembler* assembler,
6423 PoolAttachment pool_attachment,
6424 bool optimized,
6425 CodeStatistics* stats);
6426
6427 // Notifies all active [CodeObserver]s.
6428 static void NotifyCodeObservers(const Code& code, bool optimized);
6429 static void NotifyCodeObservers(const Function& function,
6430 const Code& code,
6431 bool optimized);
6432 static void NotifyCodeObservers(const char* name,
6433 const Code& code,
6434 bool optimized);
6435
6436 // Calls [FinalizeCode] and also notifies [CodeObserver]s.
6437 static CodePtr FinalizeCodeAndNotify(const Function& function,
6438 FlowGraphCompiler* compiler,
6439 compiler::Assembler* assembler,
6440 PoolAttachment pool_attachment,
6441 bool optimized = false,
6442 CodeStatistics* stats = nullptr);
6443 static CodePtr FinalizeCodeAndNotify(const char* name,
6444 FlowGraphCompiler* compiler,
6445 compiler::Assembler* assembler,
6446 PoolAttachment pool_attachment,
6447 bool optimized = false,
6448 CodeStatistics* stats = nullptr);
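
  // A minimal usage sketch (illustrative only; normally driven by the
  // compiler pipeline). |function|, |graph_compiler| and |assembler| are
  // hypothetical, fully populated objects.
  //
  //   const Code& code = Code::Handle(Code::FinalizeCodeAndNotify(
  //       function, graph_compiler, assembler,
  //       Code::PoolAttachment::kAttachPool, /*optimized=*/false));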
6449
6450#endif
6451 static CodePtr LookupCode(uword pc);
6452 static CodePtr LookupCodeInVmIsolate(uword pc);
6453 static CodePtr FindCode(uword pc, int64_t timestamp);
6454
6455 int32_t GetPointerOffsetAt(int index) const {
6456 NoSafepointScope no_safepoint;
6457 return *PointerOffsetAddrAt(index);
6458 }
6459 TokenPosition GetTokenIndexOfPC(uword pc) const;
6460
6461 // Find pc, return 0 if not found.
6462 uword GetPcForDeoptId(intptr_t deopt_id,
6463 PcDescriptorsLayout::Kind kind) const;
6464 intptr_t GetDeoptIdForOsr(uword pc) const;
6465
6466 const char* Name() const;
6467 const char* QualifiedName(const NameFormattingParams& params) const;
6468
6469 int64_t compile_timestamp() const {
6470#if defined(PRODUCT)
6471 return 0;
6472#else
6473 return raw_ptr()->compile_timestamp_;
6474#endif
6475 }
6476
6477 bool IsStubCode() const;
6478 bool IsAllocationStubCode() const;
6479 bool IsTypeTestStubCode() const;
6480 bool IsFunctionCode() const;
6481
6482 void DisableDartCode() const;
6483
6484 void DisableStubCode() const;
6485
6486 void Enable() const {
6487 if (!IsDisabled()) return;
6488 ASSERT(Thread::Current()->IsMutatorThread());
6489 ResetActiveInstructions();
6490 }
6491
6492 bool IsDisabled() const { return IsDisabled(raw()); }
6493 static bool IsDisabled(CodePtr code) {
6494#if defined(DART_PRECOMPILED_RUNTIME)
6495 UNREACHABLE();
6496 return false;
6497#else
6498 return code->ptr()->instructions_ != code->ptr()->active_instructions_;
6499#endif
6500 }
6501
6502 void set_object_pool(ObjectPoolPtr object_pool) const {
6503 StorePointer(&raw_ptr()->object_pool_, object_pool);
6504 }
6505
6506 private:
6507 void set_state_bits(intptr_t bits) const;
6508
6509 friend class ObjectLayout; // For ObjectLayout::SizeFromClass().
6510 friend class CodeLayout;
6511 enum {
6512 kOptimizedBit = 0,
6513 kForceOptimizedBit = 1,
6514 kAliveBit = 2,
6515 kPtrOffBit = 3,
6516 kPtrOffSize = 29,
6517 };
6518
6519 class OptimizedBit : public BitField<int32_t, bool, kOptimizedBit, 1> {};
6520
6521 // Force-optimized is true if the Code was generated for a function with
6522 // Function::ForceOptimize().
6523 class ForceOptimizedBit
6524 : public BitField<int32_t, bool, kForceOptimizedBit, 1> {};
6525
6526 class AliveBit : public BitField<int32_t, bool, kAliveBit, 1> {};
6527 class PtrOffBits
6528 : public BitField<int32_t, intptr_t, kPtrOffBit, kPtrOffSize> {};
6529
6530 class SlowFindRawCodeVisitor : public FindObjectVisitor {
6531 public:
6532 explicit SlowFindRawCodeVisitor(uword pc) : pc_(pc) {}
6533 virtual ~SlowFindRawCodeVisitor() {}
6534
6535 // Check if object matches find condition.
6536 virtual bool FindObject(ObjectPtr obj) const;
6537
6538 private:
6539 const uword pc_;
6540
6541 DISALLOW_COPY_AND_ASSIGN(SlowFindRawCodeVisitor);
6542 };
6543
6544 static const intptr_t kEntrySize = sizeof(int32_t); // NOLINT
6545
6546 void set_compile_timestamp(int64_t timestamp) const {
6547#if defined(PRODUCT)
6548 UNREACHABLE();
6549#else
6550 StoreNonPointer(&raw_ptr()->compile_timestamp_, timestamp);
6551#endif
6552 }
6553
6554 // Initializes the cached entrypoint addresses in [code] as calculated
6555 // from [instructions] and [unchecked_offset].
6556 static void InitializeCachedEntryPointsFrom(CodePtr code,
6557 InstructionsPtr instructions,
6558 uint32_t unchecked_offset);
6559
6560 // Sets [active_instructions_] to [instructions] and updates the cached
6561 // entry point addresses.
6562 void SetActiveInstructions(const Instructions& instructions,
6563 uint32_t unchecked_offset) const;
6564
6565 // Resets [active_instructions_] to its original value of [instructions_] and
6566 // updates the cached entry point addresses to match.
6567 void ResetActiveInstructions() const;
6568
6569 void set_instructions(const Instructions& instructions) const {
6570 ASSERT(Thread::Current()->IsMutatorThread() || !is_alive());
6571 StorePointer(&raw_ptr()->instructions_, instructions.raw());
6572 }
6573#if !defined(DART_PRECOMPILED_RUNTIME)
6574 void set_unchecked_offset(uword offset) const {
6575 StoreNonPointer(&raw_ptr()->unchecked_offset_, offset);
6576 }
6577#endif
6578
6579 // Returns the unchecked entry point offset for [instructions_].
6580 uint32_t UncheckedEntryPointOffset() const {
6581 return UncheckedEntryPointOffsetOf(raw());
6582 }
6583 static uint32_t UncheckedEntryPointOffsetOf(CodePtr code) {
6584#if defined(DART_PRECOMPILED_RUNTIME)
6585 UNREACHABLE();
6586#else
6587 return code->ptr()->unchecked_offset_;
6588#endif
6589 }
6590
6591 void set_pointer_offsets_length(intptr_t value) {
6592 // The number of fixups is limited to 1-billion.
6593 ASSERT(Utils::IsUint(30, value));
6594 set_state_bits(PtrOffBits::update(value, raw_ptr()->state_bits_));
6595 }
6596 int32_t* PointerOffsetAddrAt(int index) const {
6597 ASSERT(index >= 0);
6598 ASSERT(index < pointer_offsets_length());
6599 // TODO(iposva): Unit test is missing for this functionality.
6600 return &UnsafeMutableNonPointer(raw_ptr()->data())[index];
6601 }
6602 void SetPointerOffsetAt(int index, int32_t offset_in_instructions) {
6603 NoSafepointScope no_safepoint;
6604 *PointerOffsetAddrAt(index) = offset_in_instructions;
6605 }
6606
6607 intptr_t BinarySearchInSCallTable(uword pc) const;
6608 static CodePtr LookupCodeInIsolate(Isolate* isolate, uword pc);
6609
6610 // New is a private method as RawInstruction and RawCode objects should
6611 // only be created using the Code::FinalizeCode method. This method creates
6612 // the RawInstruction and RawCode objects, sets up the pointer offsets
6613 // and links the two in a GC safe manner.
6614 static CodePtr New(intptr_t pointer_offsets_length);
6615
6616 FINAL_HEAP_OBJECT_IMPLEMENTATION(Code, Object);
6617 friend class Class;
6618 friend class CodeTestHelper;
6619 friend class SnapshotWriter;
6620 friend class StubCode; // for set_object_pool
6621 friend class Precompiler; // for set_object_pool
6622 friend class FunctionSerializationCluster;
6623 friend class CodeSerializationCluster;
6624 friend class CodeDeserializationCluster;
6625 friend class Deserializer; // for InitializeCachedEntryPointsFrom
6627 friend class MegamorphicCacheTable; // for set_object_pool
6628 friend class CodePatcher; // for set_instructions
6629 friend class ProgramVisitor; // for set_instructions
  // So that the FunctionLayout pointer visitor can determine whether the code
  // the function points to is optimized.
6632 friend class FunctionLayout;
6633 friend class CallSiteResetter;
6634 friend class CodeKeyValueTrait; // for UncheckedEntryPointOffset
6635};
6636
6637class Bytecode : public Object {
6638 public:
6639 uword instructions() const { return raw_ptr()->instructions_; }
6640
6641 uword PayloadStart() const { return instructions(); }
6642 intptr_t Size() const { return raw_ptr()->instructions_size_; }
6643
6644 ObjectPoolPtr object_pool() const { return raw_ptr()->object_pool_; }
6645
6646 bool ContainsInstructionAt(uword addr) const {
6647 return BytecodeLayout::ContainsPC(raw(), addr);
6648 }
6649
6650 PcDescriptorsPtr pc_descriptors() const { return raw_ptr()->pc_descriptors_; }
6651 void set_pc_descriptors(const PcDescriptors& descriptors) const {
6652 ASSERT(descriptors.IsOld());
6653 StorePointer(&raw_ptr()->pc_descriptors_, descriptors.raw());
6654 }
6655
6656 void Disassemble(DisassemblyFormatter* formatter = NULL) const;
6657
6658 ExceptionHandlersPtr exception_handlers() const {
6659 return raw_ptr()->exception_handlers_;
6660 }
6661 void set_exception_handlers(const ExceptionHandlers& handlers) const {
6662 ASSERT(handlers.IsOld());
6663 StorePointer(&raw_ptr()->exception_handlers_, handlers.raw());
6664 }
6665
6666 FunctionPtr function() const { return raw_ptr()->function_; }
6667
6668 void set_function(const Function& function) const {
6669 ASSERT(function.IsOld());
6670 StorePointer(&raw_ptr()->function_, function.raw());
6671 }
6672
6673 static intptr_t InstanceSize() {
6674 return RoundedAllocationSize(sizeof(BytecodeLayout));
6675 }
6676 static BytecodePtr New(uword instructions,
6677 intptr_t instructions_size,
6678 intptr_t instructions_offset,
6679 const ObjectPool& object_pool);
6680
6681 ExternalTypedDataPtr GetBinary(Zone* zone) const;
6682
6683 TokenPosition GetTokenIndexOfPC(uword return_address) const;
6684 intptr_t GetTryIndexAtPc(uword return_address) const;
6685
6686 // Return the pc of the first 'DebugCheck' opcode of the bytecode.
6687 // Return 0 if none is found.
6688 uword GetFirstDebugCheckOpcodePc() const;
6689
6690 // Return the pc after the first 'debug checked' opcode in the range.
6691 // Return 0 if none is found.
6692 uword GetDebugCheckedOpcodeReturnAddress(uword from_offset,
6693 uword to_offset) const;
6694
6695 intptr_t instructions_binary_offset() const {
6696 return raw_ptr()->instructions_binary_offset_;
6697 }
6698 void set_instructions_binary_offset(intptr_t value) const {
6699 StoreNonPointer(&raw_ptr()->instructions_binary_offset_, value);
6700 }
6701
6702 intptr_t source_positions_binary_offset() const {
6703 return raw_ptr()->source_positions_binary_offset_;
6704 }
6705 void set_source_positions_binary_offset(intptr_t value) const {
6706 StoreNonPointer(&raw_ptr()->source_positions_binary_offset_, value);
6707 }
6708 bool HasSourcePositions() const {
6709 return (source_positions_binary_offset() != 0);
6710 }
6711
6712 intptr_t local_variables_binary_offset() const {
6713 return raw_ptr()->local_variables_binary_offset_;
6714 }
6715 void set_local_variables_binary_offset(intptr_t value) const {
6716 StoreNonPointer(&raw_ptr()->local_variables_binary_offset_, value);
6717 }
6718 bool HasLocalVariablesInfo() const {
6719 return (local_variables_binary_offset() != 0);
6720 }
6721
6722 LocalVarDescriptorsPtr var_descriptors() const {
6723#if defined(PRODUCT)
6724 UNREACHABLE();
6725 return nullptr;
6726#else
6727 return raw_ptr()->var_descriptors_;
6728#endif
6729 }
6730 void set_var_descriptors(const LocalVarDescriptors& value) const {
6731#if defined(PRODUCT)
6732 UNREACHABLE();
6733#else
6734 ASSERT(value.IsOld());
6735 StorePointer(&raw_ptr()->var_descriptors_, value.raw());
6736#endif
6737 }
6738
6739 // Will compute local var descriptors if necessary.
6740 LocalVarDescriptorsPtr GetLocalVarDescriptors() const;
6741
6742 const char* Name() const;
6743 const char* QualifiedName() const;
6744 const char* FullyQualifiedName() const;
6745
6746 class SlowFindRawBytecodeVisitor : public FindObjectVisitor {
6747 public:
6748 explicit SlowFindRawBytecodeVisitor(uword pc) : pc_(pc) {}
6749 virtual ~SlowFindRawBytecodeVisitor() {}
6750
6751 // Check if object matches find condition.
6752 virtual bool FindObject(ObjectPtr obj) const;
6753
6754 private:
6755 const uword pc_;
6756
6757 DISALLOW_COPY_AND_ASSIGN(SlowFindRawBytecodeVisitor);
6758 };
6759
6760 static BytecodePtr FindCode(uword pc);
6761
6762 private:
6763 void set_instructions(uword instructions) const {
6764 StoreNonPointer(&raw_ptr()->instructions_, instructions);
6765 }
6766 void set_instructions_size(intptr_t size) const {
6767 StoreNonPointer(&raw_ptr()->instructions_size_, size);
6768 }
6769 void set_object_pool(const ObjectPool& object_pool) const {
6770 StorePointer(&raw_ptr()->object_pool_, object_pool.raw());
6771 }
6772
6773 friend class BytecodeDeserializationCluster;
6774 friend class ObjectLayout; // For ObjectLayout::SizeFromClass().
6775 friend class BytecodeLayout;
6776
6777 FINAL_HEAP_OBJECT_IMPLEMENTATION(Bytecode, Object);
6778 friend class Class;
6779 friend class SnapshotWriter;
6780};
6781
6782class Context : public Object {
6783 public:
6784 ContextPtr parent() const { return raw_ptr()->parent_; }
6785 void set_parent(const Context& parent) const {
6786 StorePointer(&raw_ptr()->parent_, parent.raw());
6787 }
6788 static intptr_t parent_offset() { return OFFSET_OF(ContextLayout, parent_); }
6789
6790 intptr_t num_variables() const { return raw_ptr()->num_variables_; }
6791 static intptr_t num_variables_offset() {
6792 return OFFSET_OF(ContextLayout, num_variables_);
6793 }
6794 static intptr_t NumVariables(const ContextPtr context) {
6795 return context->ptr()->num_variables_;
6796 }
6797
6798 ObjectPtr At(intptr_t context_index) const {
6799 return *ObjectAddr(context_index);
6800 }
6801 inline void SetAt(intptr_t context_index, const Object& value) const;
6802
6803 intptr_t GetLevel() const;
6804
6805 void Dump(int indent = 0) const;
6806
6807 static const intptr_t kBytesPerElement = kWordSize;
6808 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
6809
6810 static const intptr_t kAwaitJumpVarIndex = 0;
6811 static const intptr_t kAsyncCompleterIndex = 1;
6812 static const intptr_t kControllerIndex = 1;
6813 // Expected context index of chained futures in recognized async functions.
6814 // These are used to unwind async stacks.
6815 static const intptr_t kFutureTimeoutFutureIndex = 2;
6816 static const intptr_t kFutureWaitFutureIndex = 2;
6817
6818 static intptr_t variable_offset(intptr_t context_index) {
6819 return OFFSET_OF_RETURNED_VALUE(ContextLayout, data) +
6820 (kWordSize * context_index);
6821 }
6822
6823 static bool IsValidLength(intptr_t len) {
6824 return 0 <= len && len <= compiler::target::Array::kMaxElements;
6825 }
6826
6827 static intptr_t InstanceSize() {
6828 ASSERT(sizeof(ContextLayout) ==
6829 OFFSET_OF_RETURNED_VALUE(ContextLayout, data));
6830 return 0;
6831 }
6832
6833 static intptr_t InstanceSize(intptr_t len) {
6834 ASSERT(IsValidLength(len));
6835 return RoundedAllocationSize(sizeof(ContextLayout) +
6836 (len * kBytesPerElement));
6837 }
6838
6839 static ContextPtr New(intptr_t num_variables, Heap::Space space = Heap::kNew);
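
  // A minimal allocation sketch (illustrative only): a context with a single
  // captured variable. The |value| handle is hypothetical.
  //
  //   const Context& ctx = Context::Handle(Context::New(1));
  //   ctx.SetAt(0, value);
  //   ASSERT(ctx.num_variables() == 1);
  //   ASSERT(ctx.At(0) == value.raw());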
6840
6841 private:
6842 ObjectPtr const* ObjectAddr(intptr_t context_index) const {
6843 ASSERT((context_index >= 0) && (context_index < num_variables()));
6844 return &raw_ptr()->data()[context_index];
6845 }
6846
6847 void set_num_variables(intptr_t num_variables) const {
6848 StoreNonPointer(&raw_ptr()->num_variables_, num_variables);
6849 }
6850
6851 FINAL_HEAP_OBJECT_IMPLEMENTATION(Context, Object);
6852 friend class Class;
6853 friend class Object;
6854};
6855
6856// The ContextScope class makes it possible to delay the compilation of a local
6857// function until it is invoked. A ContextScope instance collects the local
6858// variables that are referenced by the local function to be compiled and that
6859// belong to the outer scopes, that is, to the local scopes of (possibly nested)
6860// functions enclosing the local function. Each captured variable is represented
6861// by its token position in the source, its name, its type, its allocation index
6862// in the context, and its context level. The function nesting level and loop
6863// nesting level are not preserved, since they are only used until the context
// level is assigned. In addition, the ContextScope has a field 'is_implicit'
6865// which is true if the ContextScope was created for an implicit closure.
6866class ContextScope : public Object {
6867 public:
6868 intptr_t num_variables() const { return raw_ptr()->num_variables_; }
6869
6870 TokenPosition TokenIndexAt(intptr_t scope_index) const;
6871 void SetTokenIndexAt(intptr_t scope_index, TokenPosition token_pos) const;
6872
6873 TokenPosition DeclarationTokenIndexAt(intptr_t scope_index) const;
6874 void SetDeclarationTokenIndexAt(intptr_t scope_index,
6875 TokenPosition declaration_token_pos) const;
6876
6877 StringPtr NameAt(intptr_t scope_index) const;
6878 void SetNameAt(intptr_t scope_index, const String& name) const;
6879
6880 void ClearFlagsAt(intptr_t scope_index) const;
6881
6882 bool IsFinalAt(intptr_t scope_index) const;
6883 void SetIsFinalAt(intptr_t scope_index, bool is_final) const;
6884
6885 bool IsLateAt(intptr_t scope_index) const;
6886 void SetIsLateAt(intptr_t scope_index, bool is_late) const;
6887
6888 intptr_t LateInitOffsetAt(intptr_t scope_index) const;
6889 void SetLateInitOffsetAt(intptr_t scope_index,
6890 intptr_t late_init_offset) const;
6891
6892 bool IsConstAt(intptr_t scope_index) const;
6893 void SetIsConstAt(intptr_t scope_index, bool is_const) const;
6894
6895 AbstractTypePtr TypeAt(intptr_t scope_index) const;
6896 void SetTypeAt(intptr_t scope_index, const AbstractType& type) const;
6897
6898 InstancePtr ConstValueAt(intptr_t scope_index) const;
6899 void SetConstValueAt(intptr_t scope_index, const Instance& value) const;
6900
6901 intptr_t ContextIndexAt(intptr_t scope_index) const;
6902 void SetContextIndexAt(intptr_t scope_index, intptr_t context_index) const;
6903
6904 intptr_t ContextLevelAt(intptr_t scope_index) const;
6905 void SetContextLevelAt(intptr_t scope_index, intptr_t context_level) const;
6906
6907 static const intptr_t kBytesPerElement =
6908 sizeof(ContextScopeLayout::VariableDesc);
6909 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
6910
6911 static intptr_t InstanceSize() {
6912 ASSERT(sizeof(ContextScopeLayout) ==
6913 OFFSET_OF_RETURNED_VALUE(ContextScopeLayout, data));
6914 return 0;
6915 }
6916
6917 static intptr_t InstanceSize(intptr_t len) {
6918 ASSERT(0 <= len && len <= kMaxElements);
6919 return RoundedAllocationSize(sizeof(ContextScopeLayout) +
6920 (len * kBytesPerElement));
6921 }
6922
6923 static ContextScopePtr New(intptr_t num_variables, bool is_implicit);
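
  // A minimal construction sketch (illustrative only): describing one
  // captured variable. |name|, |type| and |token_pos| are hypothetical.
  //
  //   const ContextScope& scope =
  //       ContextScope::Handle(ContextScope::New(1, /*is_implicit=*/false));
  //   scope.SetNameAt(0, name);
  //   scope.SetTokenIndexAt(0, token_pos);
  //   scope.SetTypeAt(0, type);
  //   scope.SetContextIndexAt(0, 0);
  //   scope.SetContextLevelAt(0, 0);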
6924
6925 private:
6926 void set_num_variables(intptr_t num_variables) const {
6927 StoreNonPointer(&raw_ptr()->num_variables_, num_variables);
6928 }
6929
6930 void set_is_implicit(bool is_implicit) const {
6931 StoreNonPointer(&raw_ptr()->is_implicit_, is_implicit);
6932 }
6933
6934 const ContextScopeLayout::VariableDesc* VariableDescAddr(
6935 intptr_t index) const {
6936 ASSERT((index >= 0) && (index < num_variables()));
6937 return raw_ptr()->VariableDescAddr(index);
6938 }
6939
6940 bool GetFlagAt(intptr_t scope_index, intptr_t mask) const;
6941 void SetFlagAt(intptr_t scope_index, intptr_t mask, bool value) const;
6942
6943 FINAL_HEAP_OBJECT_IMPLEMENTATION(ContextScope, Object);
6944 friend class Class;
6945 friend class Object;
6946};
6947
6948class MegamorphicCache : public CallSiteData {
6949 public:
6950 static const intptr_t kInitialCapacity = 16;
6951 static const intptr_t kSpreadFactor = 7;
6952 static const double kLoadFactor;
6953
6954 enum EntryType {
6955 kClassIdIndex,
6956 kTargetFunctionIndex,
6957 kEntryLength,
6958 };
6959
6960 ArrayPtr buckets() const;
6961 void set_buckets(const Array& buckets) const;
6962
6963 intptr_t mask() const;
6964 void set_mask(intptr_t mask) const;
6965
6966 intptr_t filled_entry_count() const;
6967 void set_filled_entry_count(intptr_t num) const;
6968
6969 static intptr_t buckets_offset() {
6970 return OFFSET_OF(MegamorphicCacheLayout, buckets_);
6971 }
6972 static intptr_t mask_offset() {
6973 return OFFSET_OF(MegamorphicCacheLayout, mask_);
6974 }
6975 static intptr_t arguments_descriptor_offset() {
6976 return OFFSET_OF(MegamorphicCacheLayout, args_descriptor_);
6977 }
6978
6979 static MegamorphicCachePtr New(const String& target_name,
6980 const Array& arguments_descriptor);
6981
6982 void Insert(const Smi& class_id, const Object& target) const;
6983
6984 void SwitchToBareInstructions();
6985
6986 static intptr_t InstanceSize() {
6987 return RoundedAllocationSize(sizeof(MegamorphicCacheLayout));
6988 }
6989
6990 static MegamorphicCachePtr Clone(const MegamorphicCache& from);
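
  // A minimal usage sketch (illustrative only): creating a cache for a call
  // site and recording a receiver class id -> target mapping. |name|,
  // |args_descriptor|, |cid| and |target| are hypothetical.
  //
  //   const MegamorphicCache& cache = MegamorphicCache::Handle(
  //       MegamorphicCache::New(name, args_descriptor));
  //   cache.Insert(Smi::Handle(Smi::New(cid)), target);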
6991
6992 private:
6993 friend class Class;
6994 friend class MegamorphicCacheTable;
6995 friend class ProgramVisitor;
6996
6997 static MegamorphicCachePtr New();
6998
6999 // The caller must hold Isolate::megamorphic_mutex().
7000 void EnsureCapacityLocked() const;
7001 void InsertLocked(const Smi& class_id, const Object& target) const;
7002
7003 static inline void SetEntry(const Array& array,
7004 intptr_t index,
7005 const Smi& class_id,
7006 const Object& target);
7007
7008 static inline ObjectPtr GetClassId(const Array& array, intptr_t index);
7009 static inline ObjectPtr GetTargetFunction(const Array& array, intptr_t index);
7010
7011 FINAL_HEAP_OBJECT_IMPLEMENTATION(MegamorphicCache, CallSiteData);
7012};
7013
7014class SubtypeTestCache : public Object {
7015 public:
7016 enum Entries {
7017 kTestResult = 0,
7018 kInstanceClassIdOrFunction = 1,
7019 kInstanceTypeArguments = 2,
7020 kInstantiatorTypeArguments = 3,
7021 kFunctionTypeArguments = 4,
7022 kInstanceParentFunctionTypeArguments = 5,
7023 kInstanceDelayedFunctionTypeArguments = 6,
7024 kTestEntryLength = 7,
7025 };
7026
7027 intptr_t NumberOfChecks() const;
7028 void AddCheck(const Object& instance_class_id_or_function,
7029 const TypeArguments& instance_type_arguments,
7030 const TypeArguments& instantiator_type_arguments,
7031 const TypeArguments& function_type_arguments,
7032 const TypeArguments& instance_parent_function_type_arguments,
7033 const TypeArguments& instance_delayed_type_arguments,
7034 const Bool& test_result) const;
7035 void GetCheck(intptr_t ix,
7036 Object* instance_class_id_or_function,
7037 TypeArguments* instance_type_arguments,
7038 TypeArguments* instantiator_type_arguments,
7039 TypeArguments* function_type_arguments,
7040 TypeArguments* instance_parent_function_type_arguments,
7041 TypeArguments* instance_delayed_type_arguments,
7042 Bool* test_result) const;
7043 void Reset() const;
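
  // A minimal usage sketch (illustrative only): recording one check and
  // reading it back. All handles passed in are hypothetical, non-const
  // handles of the appropriate types; type argument vectors that do not
  // apply can be null handles.
  //
  //   cache.AddCheck(cid_or_function, instance_tav, instantiator_tav,
  //                  function_tav, parent_function_tav, delayed_tav,
  //                  Bool::True());
  //   Bool& result = Bool::Handle();
  //   cache.GetCheck(0, &cid_or_function, &instance_tav, &instantiator_tav,
  //                  &function_tav, &parent_function_tav, &delayed_tav,
  //                  &result);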
7044
7045 static SubtypeTestCachePtr New();
7046
7047 static intptr_t InstanceSize() {
7048 return RoundedAllocationSize(sizeof(SubtypeTestCacheLayout));
7049 }
7050
7051 static intptr_t cache_offset() {
7052 return OFFSET_OF(SubtypeTestCacheLayout, cache_);
7053 }
7054
7055 static void Init();
7056 static void Cleanup();
7057
7058 ArrayPtr cache() const;
7059
7060 private:
7061 // A VM heap allocated preinitialized empty subtype entry array.
7062 static ArrayPtr cached_array_;
7063
7064 void set_cache(const Array& value) const;
7065
7066 intptr_t TestEntryLength() const;
7067
7068 FINAL_HEAP_OBJECT_IMPLEMENTATION(SubtypeTestCache, Object);
7069 friend class Class;
7070 friend class Serializer;
7071 friend class Deserializer;
7072};
7073
7074class LoadingUnit : public Object {
7075 public:
7076 static constexpr intptr_t kIllegalId = 0;
7077 COMPILE_ASSERT(kIllegalId == WeakTable::kNoValue);
7078 static constexpr intptr_t kRootId = 1;
7079
7080 static LoadingUnitPtr New();
7081
7082 static intptr_t InstanceSize() {
7083 return RoundedAllocationSize(sizeof(LoadingUnitLayout));
7084 }
7085
7086 LoadingUnitPtr parent() const;
7087 void set_parent(const LoadingUnit& value) const;
7088
7089 ArrayPtr base_objects() const;
7090 void set_base_objects(const Array& value) const;
7091
7092 intptr_t id() const { return raw_ptr()->id_; }
7093 void set_id(intptr_t id) const { StoreNonPointer(&raw_ptr()->id_, id); }
7094
7095 // True once the VM deserializes this unit's snapshot.
7096 bool loaded() const { return raw_ptr()->loaded_; }
7097 void set_loaded(bool value) const {
7098 StoreNonPointer(&raw_ptr()->loaded_, value);
7099 }
7100
  // True from when the VM invokes the embedder's deferred load callback until
  // the embedder calls Dart_DeferredLoadComplete[Error].
7103 bool load_outstanding() const { return raw_ptr()->load_outstanding_; }
7104 void set_load_outstanding(bool value) const {
7105 StoreNonPointer(&raw_ptr()->load_outstanding_, value);
7106 }
7107
7108 ObjectPtr IssueLoad() const;
7109 void CompleteLoad(const String& error_message, bool transient_error) const;
7110
7111 private:
7112 FINAL_HEAP_OBJECT_IMPLEMENTATION(LoadingUnit, Object);
7113 friend class Class;
7114};
7115
7116class Error : public Object {
7117 public:
7118 virtual const char* ToErrorCString() const;
7119
7120 private:
7121 HEAP_OBJECT_IMPLEMENTATION(Error, Object);
7122};
7123
7124class ApiError : public Error {
7125 public:
7126 StringPtr message() const { return raw_ptr()->message_; }
7127
7128 static intptr_t InstanceSize() {
7129 return RoundedAllocationSize(sizeof(ApiErrorLayout));
7130 }
7131
7132 static ApiErrorPtr New(const String& message, Heap::Space space = Heap::kNew);
7133
7134 virtual const char* ToErrorCString() const;
7135
7136 private:
7137 void set_message(const String& message) const;
7138
7139 static ApiErrorPtr New();
7140
7141 FINAL_HEAP_OBJECT_IMPLEMENTATION(ApiError, Error);
7142 friend class Class;
7143};
7144
7145class LanguageError : public Error {
7146 public:
7147 Report::Kind kind() const {
7148 return static_cast<Report::Kind>(raw_ptr()->kind_);
7149 }
7150
7151 // Build, cache, and return formatted message.
7152 StringPtr FormatMessage() const;
7153
7154 static intptr_t InstanceSize() {
7155 return RoundedAllocationSize(sizeof(LanguageErrorLayout));
7156 }
7157
7158 // A null script means no source and a negative token_pos means no position.
7159 static LanguageErrorPtr NewFormatted(const Error& prev_error,
7160 const Script& script,
7161 TokenPosition token_pos,
7162 bool report_after_token,
7163 Report::Kind kind,
7164 Heap::Space space,
7165 const char* format,
7166 ...) PRINTF_ATTRIBUTE(7, 8);
7167
7168 static LanguageErrorPtr NewFormattedV(const Error& prev_error,
7169 const Script& script,
7170 TokenPosition token_pos,
7171 bool report_after_token,
7172 Report::Kind kind,
7173 Heap::Space space,
7174 const char* format,
7175 va_list args);
7176
7177 static LanguageErrorPtr New(const String& formatted_message,
7178 Report::Kind kind = Report::kError,
7179 Heap::Space space = Heap::kNew);
7180
7181 virtual const char* ToErrorCString() const;
7182
7183 TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
7184
7185 private:
7186 ErrorPtr previous_error() const { return raw_ptr()->previous_error_; }
7187 void set_previous_error(const Error& value) const;
7188
7189 ScriptPtr script() const { return raw_ptr()->script_; }
7190 void set_script(const Script& value) const;
7191
7192 void set_token_pos(TokenPosition value) const;
7193
7194 bool report_after_token() const { return raw_ptr()->report_after_token_; }
7195 void set_report_after_token(bool value);
7196
7197 void set_kind(uint8_t value) const;
7198
7199 StringPtr message() const { return raw_ptr()->message_; }
7200 void set_message(const String& value) const;
7201
7202 StringPtr formatted_message() const { return raw_ptr()->formatted_message_; }
7203 void set_formatted_message(const String& value) const;
7204
7205 static LanguageErrorPtr New();
7206
7207 FINAL_HEAP_OBJECT_IMPLEMENTATION(LanguageError, Error);
7208 friend class Class;
7209};
7210
7211class UnhandledException : public Error {
7212 public:
7213 InstancePtr exception() const { return raw_ptr()->exception_; }
7214 static intptr_t exception_offset() {
7215 return OFFSET_OF(UnhandledExceptionLayout, exception_);
7216 }
7217
7218 InstancePtr stacktrace() const { return raw_ptr()->stacktrace_; }
7219 static intptr_t stacktrace_offset() {
7220 return OFFSET_OF(UnhandledExceptionLayout, stacktrace_);
7221 }
7222
7223 static intptr_t InstanceSize() {
7224 return RoundedAllocationSize(sizeof(UnhandledExceptionLayout));
7225 }
7226
7227 static UnhandledExceptionPtr New(const Instance& exception,
7228 const Instance& stacktrace,
7229 Heap::Space space = Heap::kNew);
7230
7231 virtual const char* ToErrorCString() const;
7232
7233 private:
7234 static UnhandledExceptionPtr New(Heap::Space space = Heap::kNew);
7235
7236 void set_exception(const Instance& exception) const;
7237 void set_stacktrace(const Instance& stacktrace) const;
7238
7239 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnhandledException, Error);
7240 friend class Class;
7241 friend class ObjectStore;
7242};
7243
7244class UnwindError : public Error {
7245 public:
7246 bool is_user_initiated() const { return raw_ptr()->is_user_initiated_; }
7247 void set_is_user_initiated(bool value) const;
7248
7249 StringPtr message() const { return raw_ptr()->message_; }
7250
7251 static intptr_t InstanceSize() {
7252 return RoundedAllocationSize(sizeof(UnwindErrorLayout));
7253 }
7254
7255 static UnwindErrorPtr New(const String& message,
7256 Heap::Space space = Heap::kNew);
7257
7258 virtual const char* ToErrorCString() const;
7259
7260 private:
7261 void set_message(const String& message) const;
7262
7263 FINAL_HEAP_OBJECT_IMPLEMENTATION(UnwindError, Error);
7264 friend class Class;
7265};
7266
7267// Instance is the base class for all instance objects (aka the Object class
// in Dart source code).
7269class Instance : public Object {
7270 public:
7271 // Equality and identity testing.
7272 // 1. OperatorEquals: true iff 'this == other' is true in Dart code.
7273 // 2. IsIdenticalTo: true iff 'identical(this, other)' is true in Dart code.
7274 // 3. CanonicalizeEquals: used to canonicalize compile-time constants, e.g.,
7275 // using bitwise equality of fields and list elements.
7276 // Subclasses where 1 and 3 coincide may also define a plain Equals, e.g.,
7277 // String and Integer.
7278 virtual bool OperatorEquals(const Instance& other) const;
7279 bool IsIdenticalTo(const Instance& other) const;
7280 virtual bool CanonicalizeEquals(const Instance& other) const;
7281 virtual uint32_t CanonicalizeHash() const;
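
  // A concrete example (illustrative only): for two freshly allocated,
  // non-canonical "abc" strings |a| and |b|:
  //
  //   ASSERT(a.OperatorEquals(b));      // 'a == b' is true in Dart.
  //   ASSERT(a.CanonicalizeEquals(b));  // bitwise-equal contents.
  //   ASSERT(!a.IsIdenticalTo(b));      // distinct heap objects.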
7282
7283 intptr_t SizeFromClass() const {
7284#if defined(DEBUG)
7285 const Class& cls = Class::Handle(clazz());
7286 ASSERT(cls.is_finalized() || cls.is_prefinalized());
7287#endif
7288 return (clazz()->ptr()->host_instance_size_in_words_ * kWordSize);
7289 }
7290
7291 // Returns Instance::null() if instance cannot be canonicalized.
  // Any non-canonical number or string will be canonicalized here.
7293 // An instance cannot be canonicalized if it still contains non-canonical
7294 // instances in its fields.
  // Returns the error in error_str; pass NULL if an error cannot occur.
7296 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
7297 const char** error_str) const;
7298
7299 // Returns true if all fields are OK for canonicalization.
7300 virtual bool CheckAndCanonicalizeFields(Thread* thread,
7301 const char** error_str) const;
7302
7303 InstancePtr CopyShallowToOldSpace(Thread* thread) const;
7304
7305#if defined(DEBUG)
7306 // Check if instance is canonical.
7307 virtual bool CheckIsCanonical(Thread* thread) const;
7308#endif // DEBUG
7309
7310 ObjectPtr GetField(const Field& field) const;
7311
7312 void SetField(const Field& field, const Object& value) const;
7313
7314 AbstractTypePtr GetType(Heap::Space space) const;
7315
7316 // Access the arguments of the [Type] of this [Instance].
7317 // Note: for [Type]s instead of [Instance]s with a [Type] attached, use
  // [arguments()] and [set_arguments()].
7319 virtual TypeArgumentsPtr GetTypeArguments() const;
7320 virtual void SetTypeArguments(const TypeArguments& value) const;
7321
7322 // Check if the type of this instance is a subtype of the given other type.
7323 // The type argument vectors are used to instantiate the other type if needed.
7324 bool IsInstanceOf(const AbstractType& other,
7325 const TypeArguments& other_instantiator_type_arguments,
7326 const TypeArguments& other_function_type_arguments) const;
7327
7328 // Check if this instance is assignable to the given other type.
7329 // The type argument vectors are used to instantiate the other type if needed.
7330 bool IsAssignableTo(const AbstractType& other,
7331 const TypeArguments& other_instantiator_type_arguments,
7332 const TypeArguments& other_function_type_arguments) const;
7333
7334 // Return true if the null instance can be assigned to a variable of [other]
7335 // type. Return false if null cannot be assigned or we cannot tell (if
7336 // [other] is a type parameter in NNBD strong mode).
7337 static bool NullIsAssignableTo(const AbstractType& other);
7338
7339 bool IsValidNativeIndex(int index) const {
7340 return ((index >= 0) && (index < clazz()->ptr()->num_native_fields_));
7341 }
7342
7343 intptr_t* NativeFieldsDataAddr() const;
7344 inline intptr_t GetNativeField(int index) const;
7345 inline void GetNativeFields(uint16_t num_fields,
7346 intptr_t* field_values) const;
7347 void SetNativeFields(uint16_t num_fields, const intptr_t* field_values) const;
7348
7349 uint16_t NumNativeFields() const {
7350 return clazz()->ptr()->num_native_fields_;
7351 }
7352
7353 void SetNativeField(int index, intptr_t value) const;
7354
  // If the instance is a callable object, i.e. a closure or an instance of a
  // class implementing a 'call' method, return true and, if 'function' is not
  // NULL, set it to the function that would be called.
7358 bool IsCallable(Function* function) const;
7359
7360 ObjectPtr Invoke(const String& selector,
7361 const Array& arguments,
7362 const Array& argument_names,
7363 bool respect_reflectable = true,
7364 bool check_is_entrypoint = false) const;
7365 ObjectPtr InvokeGetter(const String& selector,
7366 bool respect_reflectable = true,
7367 bool check_is_entrypoint = false) const;
7368 ObjectPtr InvokeSetter(const String& selector,
7369 const Instance& argument,
7370 bool respect_reflectable = true,
7371 bool check_is_entrypoint = false) const;
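
  // A minimal usage sketch (illustrative only): dynamically reading and
  // writing a property of |instance|. The selector "x" and the |value| handle
  // are hypothetical; failures are returned as Error objects.
  //
  //   const String& selector = String::Handle(String::New("x"));
  //   const Object& current = Object::Handle(instance.InvokeGetter(selector));
  //   const Object& result =
  //       Object::Handle(instance.InvokeSetter(selector, value));
  //   if (result.IsError()) {
  //     // ... report the error ...
  //   }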
7372
7373 // Evaluate the given expression as if it appeared in an instance method of
7374 // this instance and return the resulting value, or an error object if
7375 // evaluating the expression fails. The method has the formal (type)
7376 // parameters given in (type_)param_names, and is invoked with the (type)
7377 // argument values given in (type_)param_values.
7378 ObjectPtr EvaluateCompiledExpression(
7379 const Class& method_cls,
7380 const ExternalTypedData& kernel_buffer,
7381 const Array& type_definitions,
7382 const Array& param_values,
7383 const TypeArguments& type_param_values) const;
7384
7385 // Equivalent to invoking hashCode on this instance.
7386 virtual ObjectPtr HashCode() const;
7387
7388 // Equivalent to invoking identityHashCode with this instance.
7389 ObjectPtr IdentityHashCode() const;
7390
7391 static intptr_t InstanceSize() {
7392 return RoundedAllocationSize(sizeof(InstanceLayout));
7393 }
7394
7395 static InstancePtr New(const Class& cls, Heap::Space space = Heap::kNew);
7396
7397 // Array/list element address computations.
7398 static intptr_t DataOffsetFor(intptr_t cid);
7399 static intptr_t ElementSizeFor(intptr_t cid);
7400
7401 // Pointers may be subtyped, but their subtypes may not get extra fields.
7402 // The subtype runtime representation has exactly the same object layout,
7403 // only the class_id is different. So, it is safe to use subtype instances in
7404 // Pointer handles.
7405 virtual bool IsPointer() const;
7406
7407 static intptr_t NextFieldOffset() { return sizeof(InstanceLayout); }
7408
7409 protected:
7410#ifndef PRODUCT
7411 virtual void PrintSharedInstanceJSON(JSONObject* jsobj, bool ref) const;
7412#endif
7413
7414 private:
  // Return true if the runtimeType of this instance is a subtype of the other
  // type.
7416 bool RuntimeTypeIsSubtypeOf(
7417 const AbstractType& other,
7418 const TypeArguments& other_instantiator_type_arguments,
7419 const TypeArguments& other_function_type_arguments) const;
7420
7421 // Returns true if the type of this instance is a subtype of FutureOr<T>
7422 // specified by instantiated type 'other'.
7423 // Returns false if other type is not a FutureOr.
7424 bool RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
7425 const AbstractType& other) const;
7426
7427 // Return true if the null instance is an instance of other type.
7428 static bool NullIsInstanceOf(
7429 const AbstractType& other,
7430 const TypeArguments& other_instantiator_type_arguments,
7431 const TypeArguments& other_function_type_arguments);
7432
7433 ObjectPtr* FieldAddrAtOffset(intptr_t offset) const {
7434 ASSERT(IsValidFieldOffset(offset));
7435 return reinterpret_cast<ObjectPtr*>(raw_value() - kHeapObjectTag + offset);
7436 }
7437 ObjectPtr* FieldAddr(const Field& field) const {
7438 return FieldAddrAtOffset(field.HostOffset());
7439 }
7440 ObjectPtr* NativeFieldsAddr() const {
7441 return FieldAddrAtOffset(sizeof(ObjectLayout));
7442 }
7443 void SetFieldAtOffset(intptr_t offset, const Object& value) const {
7444 StorePointer(FieldAddrAtOffset(offset), value.raw());
7445 }
7446 bool IsValidFieldOffset(intptr_t offset) const;
7447
7448 // The following raw methods are used for morphing.
7449 // They are needed due to the extraction of the class in IsValidFieldOffset.
7450 ObjectPtr* RawFieldAddrAtOffset(intptr_t offset) const {
7451 return reinterpret_cast<ObjectPtr*>(raw_value() - kHeapObjectTag + offset);
7452 }
7453 ObjectPtr RawGetFieldAtOffset(intptr_t offset) const {
7454 return *RawFieldAddrAtOffset(offset);
7455 }
7456 void RawSetFieldAtOffset(intptr_t offset, const Object& value) const {
7457 StorePointer(RawFieldAddrAtOffset(offset), value.raw());
7458 }
7459
7460 static InstancePtr NewFromCidAndSize(SharedClassTable* shared_class_table,
7461 classid_t cid,
7462 Heap::Space heap = Heap::kNew);
7463
7464 // TODO(iposva): Determine if this gets in the way of Smi.
7465 HEAP_OBJECT_IMPLEMENTATION(Instance, Object);
7466 friend class ByteBuffer;
7467 friend class Class;
7468 friend class Closure;
7469 friend class Pointer;
7470 friend class DeferredObject;
7471 friend class RegExp;
7472 friend class SnapshotWriter;
7473 friend class StubCode;
7474 friend class TypedDataView;
7475 friend class InstanceSerializationCluster;
7476 friend class InstanceDeserializationCluster;
7477 friend class ClassDeserializationCluster; // vtable
7478 friend class InstanceMorpher;
7479 friend class Obfuscator; // RawGetFieldAtOffset, RawSetFieldAtOffset
7480};
7481
7482class LibraryPrefix : public Instance {
7483 public:
7484 StringPtr name() const { return raw_ptr()->name_; }
7485 virtual StringPtr DictionaryName() const { return name(); }
7486
7487 ArrayPtr imports() const { return raw_ptr()->imports_; }
7488 intptr_t num_imports() const { return raw_ptr()->num_imports_; }
7489 LibraryPtr importer() const { return raw_ptr()->importer_; }
7490
7491 LibraryPtr GetLibrary(int index) const;
7492 void AddImport(const Namespace& import) const;
7493
7494 bool is_deferred_load() const { return raw_ptr()->is_deferred_load_; }
7495 bool is_loaded() const { return raw_ptr()->is_loaded_; }
7496 void set_is_loaded(bool value) const {
7497 return StoreNonPointer(&raw_ptr()->is_loaded_, value);
7498 }
7499
7500 static intptr_t InstanceSize() {
7501 return RoundedAllocationSize(sizeof(LibraryPrefixLayout));
7502 }
7503
7504 static LibraryPrefixPtr New(const String& name,
7505 const Namespace& import,
7506 bool deferred_load,
7507 const Library& importer);
7508
7509 private:
7510 static const int kInitialSize = 2;
7511 static const int kIncrementSize = 2;
7512
7513 void set_name(const String& value) const;
7514 void set_imports(const Array& value) const;
7515 void set_num_imports(intptr_t value) const;
7516 void set_importer(const Library& value) const;
7517
7518 static LibraryPrefixPtr New();
7519
7520 FINAL_HEAP_OBJECT_IMPLEMENTATION(LibraryPrefix, Instance);
7521 friend class Class;
7522};
7523
7524// A TypeArguments is an array of AbstractType.
7525class TypeArguments : public Instance {
7526 public:
7527 // We use 30 bits for the hash code so hashes in a snapshot taken on a
7528 // 64-bit architecture stay in Smi range when loaded on a 32-bit
7529 // architecture.
7530 static const intptr_t kHashBits = 30;
7531
7532 // Hash value for a type argument vector consisting solely of dynamic types.
7533 static const intptr_t kAllDynamicHash = 1;
7534
7535 intptr_t Length() const;
7536 AbstractTypePtr TypeAt(intptr_t index) const;
7537 AbstractTypePtr TypeAtNullSafe(intptr_t index) const;
7538 static intptr_t type_at_offset(intptr_t index) {
7539 return OFFSET_OF_RETURNED_VALUE(TypeArgumentsLayout, types) +
7540 index * kWordSize;
7541 }
7542 void SetTypeAt(intptr_t index, const AbstractType& value) const;
7543
7544 struct ArrayTraits {
7545 static intptr_t elements_start_offset() {
7546 return TypeArguments::type_at_offset(0);
7547 }
7548
7549 static constexpr intptr_t kElementSize = kWordSize;
7550 };
7551
7552 // The nullability of a type argument vector represents the nullability of its
7553 // type elements (up to a maximum number of them, i.e. kNullabilityMaxTypes).
7554 // It is used at runtime in some cases (predetermined by the compiler) to
7555 // decide whether the instantiator type arguments (ITA) can be shared instead
7556 // of performing a more costly instantiation of the uninstantiated type
7557 // arguments (UTA).
7558 // The vector nullability is stored as a bit vector (in a Smi field), using
7559 // 2 bits per type:
7560 // - the high bit is set if the type is nullable or legacy.
7561 // - the low bit is set if the type is nullable.
  // The nullability is 0 if the vector is longer than kNullabilityMaxTypes.
7563 // The condition evaluated at runtime to decide whether UTA can share ITA is
7564 // (UTA.nullability & ITA.nullability) == UTA.nullability
7565 // Note that this allows for ITA to be longer than UTA.
7566 static const intptr_t kNullabilityBitsPerType = 2;
7567 static const intptr_t kNullabilityMaxTypes =
7568 kSmiBits / kNullabilityBitsPerType;
7569 static const intptr_t kNonNullableBits = 0;
7570 static const intptr_t kNullableBits = 3;
7571 static const intptr_t kLegacyBits = 2;
7572 intptr_t nullability() const;
7573 static intptr_t nullability_offset() {
7574 return OFFSET_OF(TypeArgumentsLayout, nullability_);
7575 }
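
  // Worked example (illustrative only, assuming the first type occupies the
  // lowest bit pair): a three-type vector whose elements are non-nullable,
  // nullable, and legacy has nullability 0b10'11'00. A UTA with nullability
  // 0b00'11'00 can then share an ITA with nullability 0b10'11'00, since
  // (0b001100 & 0b101100) == 0b001100 == UTA.nullability.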
7576
7577 // The name of this type argument vector, e.g. "<T, dynamic, List<T>, Smi>".
7578 StringPtr Name() const;
7579
7580 // The name of this type argument vector, e.g. "<T, dynamic, List<T>, int>".
7581 // Names of internal classes are mapped to their public interfaces.
7582 StringPtr UserVisibleName() const;
7583
7584 // Print the internal or public name of a subvector of this type argument
7585 // vector, e.g. "<T, dynamic, List<T>, int>".
7586 void PrintSubvectorName(
7587 intptr_t from_index,
7588 intptr_t len,
7589 NameVisibility name_visibility,
7590 BaseTextBuffer* printer,
7591 NameDisambiguation name_disambiguation = NameDisambiguation::kNo) const;
7592
7593 // Check if the subvector of length 'len' starting at 'from_index' of this
7594 // type argument vector consists solely of DynamicType.
7595 bool IsRaw(intptr_t from_index, intptr_t len) const {
7596 return IsDynamicTypes(false, from_index, len);
7597 }
7598
7599 // Check if this type argument vector would consist solely of DynamicType if
7600 // it were instantiated from both raw (null) instantiator type arguments and
7601 // raw (null) function type arguments, i.e. treat each class and function
7602 // type parameter as if it were first instantiated from a vector of dynamic
7603 // types.
7604 // Consider only a prefix of length 'len'.
7605 bool IsRawWhenInstantiatedFromRaw(intptr_t len) const {
7606 return IsDynamicTypes(true, 0, len);
7607 }
7608
7609 TypeArgumentsPtr Prepend(Zone* zone,
7610 const TypeArguments& other,
7611 intptr_t other_length,
7612 intptr_t total_length) const;
7613
7614 // Concatenate [this] and [other] vectors of type parameters.
7615 TypeArgumentsPtr ConcatenateTypeParameters(Zone* zone,
7616 const TypeArguments& other) const;
7617
7618 // Check if the vectors are equal (they may be null).
7619 bool Equals(const TypeArguments& other) const {
7620 return IsSubvectorEquivalent(other, 0, IsNull() ? 0 : Length(),
7621 TypeEquality::kCanonical);
7622 }
7623
7624 bool IsEquivalent(const TypeArguments& other,
7625 TypeEquality kind,
7626 TrailPtr trail = nullptr) const {
7627 return IsSubvectorEquivalent(other, 0, IsNull() ? 0 : Length(), kind,
7628 trail);
7629 }
7630 bool IsSubvectorEquivalent(const TypeArguments& other,
7631 intptr_t from_index,
7632 intptr_t len,
7633 TypeEquality kind,
7634 TrailPtr trail = nullptr) const;
7635
7636 // Check if the vector is instantiated (it must not be null).
7637 bool IsInstantiated(Genericity genericity = kAny,
7638 intptr_t num_free_fun_type_params = kAllFree,
7639 TrailPtr trail = nullptr) const {
7640 return IsSubvectorInstantiated(0, Length(), genericity,
7641 num_free_fun_type_params, trail);
7642 }
7643 bool IsSubvectorInstantiated(intptr_t from_index,
7644 intptr_t len,
7645 Genericity genericity = kAny,
7646 intptr_t num_free_fun_type_params = kAllFree,
7647 TrailPtr trail = nullptr) const;
7648 bool IsUninstantiatedIdentity() const;
7649
7650 // Determine whether this uninstantiated type argument vector can share its
7651 // instantiator (resp. function) type argument vector instead of being
7652 // instantiated at runtime.
7653 // If null is passed in for 'with_runtime_check', the answer is unconditional
7654 // (i.e. the answer will be false even if a runtime check may allow sharing).
7655 // Otherwise, if the function returns true, 'with_runtime_check' indicates
7656 // whether a check is still required at runtime before allowing sharing.
7657 bool CanShareInstantiatorTypeArguments(
7658 const Class& instantiator_class,
7659 bool* with_runtime_check = nullptr) const;
7660 bool CanShareFunctionTypeArguments(const Function& function,
7661 bool* with_runtime_check = nullptr) const;
7662
7663 // Return true if all types of this vector are finalized.
7664 bool IsFinalized() const;
7665
7666 // Return true if this vector contains a recursive type argument.
7667 bool IsRecursive() const;
7668
7669 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
7670 const char** error_str) const {
7671 return Canonicalize();
7672 }
7673
7674 // Canonicalize only if instantiated, otherwise returns 'this'.
7675 TypeArgumentsPtr Canonicalize(TrailPtr trail = nullptr) const;
7676
7677 // Add the class name and URI of each type argument of this vector to the uris
7678 // list and mark ambiguous triplets to be printed.
7679 void EnumerateURIs(URIs* uris) const;
7680
7681 // Return 'this' if this type argument vector is instantiated, i.e. if it does
7682 // not refer to type parameters. Otherwise, return a new type argument vector
7683 // where each reference to a type parameter is replaced with the corresponding
7684 // type from the various type argument vectors (class instantiator, function,
7685 // or parent functions via the current context).
7686 TypeArgumentsPtr InstantiateFrom(
7687 const TypeArguments& instantiator_type_arguments,
7688 const TypeArguments& function_type_arguments,
7689 intptr_t num_free_fun_type_params,
7690 Heap::Space space,
7691 TrailPtr trail = nullptr) const;
7692
7693 // Runtime instantiation with canonicalization. Not to be used during type
7694 // finalization at compile time.
7695 TypeArgumentsPtr InstantiateAndCanonicalizeFrom(
7696 const TypeArguments& instantiator_type_arguments,
7697 const TypeArguments& function_type_arguments) const;
7698
7699 // Each cached instantiation consists of a 3-tuple in the instantiations_
7700 // array stored in each canonical uninstantiated type argument vector.
7701 enum Instantiation {
7702 kInstantiatorTypeArgsIndex = 0,
7703 kFunctionTypeArgsIndex,
7704 kInstantiatedTypeArgsIndex,
7705 kSizeInWords,
7706 };
7707
7708 // The array is terminated by the value kNoInstantiator occurring in place of
7709 // the instantiator type args of the 3-tuple that would otherwise follow.
7710 // Therefore, kNoInstantiator must be distinct from any type arguments vector,
7711 // even a null one. Since arrays are initialized with 0, the instantiations_
7712 // array is properly terminated upon initialization.
7713 static const intptr_t kNoInstantiator = 0;
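
// Illustrative layout sketch (hypothetical cache contents): after caching a
// single instantiation, instantiations_ holds
//
//   [0] instantiator type args           (kInstantiatorTypeArgsIndex)
//   [1] function type args               (kFunctionTypeArgsIndex)
//   [2] resulting instantiated type args (kInstantiatedTypeArgsIndex)
//   [3] kNoInstantiator                  (terminator, no further entries)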
7714
7715 // Return true if this type argument vector has cached instantiations.
7716 bool HasInstantiations() const;
7717
7718 // Return the number of cached instantiations for this type argument vector.
7719 intptr_t NumInstantiations() const;
7720
7721 static intptr_t instantiations_offset() {
7722 return OFFSET_OF(TypeArgumentsLayout, instantiations_);
7723 }
7724
7725 static const intptr_t kBytesPerElement = kWordSize;
7726 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
7727
7728 static intptr_t InstanceSize() {
7729 ASSERT(sizeof(TypeArgumentsLayout) ==
7730 OFFSET_OF_RETURNED_VALUE(TypeArgumentsLayout, types));
7731 return 0;
7732 }
7733
7734 static intptr_t InstanceSize(intptr_t len) {
7735 // Ensure that the variable-length types() data does not add to the object size,
7736 // which consists of 4 fields: instantiations_, length_, hash_, and nullability_.
7737 ASSERT(sizeof(TypeArgumentsLayout) ==
7738 (sizeof(ObjectLayout) + (kNumFields * kWordSize)));
7739 ASSERT(0 <= len && len <= kMaxElements);
7740 return RoundedAllocationSize(sizeof(TypeArgumentsLayout) +
7741 (len * kBytesPerElement));
7742 }
7743
7744 virtual uint32_t CanonicalizeHash() const {
7745 // Hash() is not stable until finalization is done.
7746 return 0;
7747 }
7748 intptr_t Hash() const;
7749 intptr_t HashForRange(intptr_t from_index, intptr_t len) const;
7750
7751 static TypeArgumentsPtr New(intptr_t len, Heap::Space space = Heap::kOld);
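
// Illustrative usage sketch (assumes it runs on a VM thread with an active
// zone; not part of this header):
//
//   const TypeArguments& args =
//       TypeArguments::Handle(TypeArguments::New(2));
//   const Type& int_type = Type::Handle(Type::IntType());
//   const Type& string_type = Type::Handle(Type::StringType());
//   args.SetTypeAt(0, int_type);
//   args.SetTypeAt(1, string_type);
//   ASSERT(args.IsInstantiated());
//   const TypeArguments& canonical =
//       TypeArguments::Handle(args.Canonicalize());
//   ASSERT(canonical.IsCanonical());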
7752
7753 private:
7754 intptr_t ComputeNullability() const;
7755 void set_nullability(intptr_t value) const;
7756
7757 intptr_t ComputeHash() const;
7758 void SetHash(intptr_t value) const;
7759
7760 // Check if the subvector of length 'len' starting at 'from_index' of this
7761 // type argument vector consists solely of DynamicType.
7762 // If raw_instantiated is true, consider each class type parameter to be first
7763 // instantiated from a vector of dynamic types.
7764 bool IsDynamicTypes(bool raw_instantiated,
7765 intptr_t from_index,
7766 intptr_t len) const;
7767
7768 ArrayPtr instantiations() const;
7769 void set_instantiations(const Array& value) const;
7770 AbstractTypePtr const* TypeAddr(intptr_t index) const;
7771 void SetLength(intptr_t value) const;
7772 // Number of fields in the raw object is 4:
7773 // instantiations_, length_, hash_ and nullability_.
7774 static const int kNumFields = 4;
7775
7776 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeArguments, Instance);
7777 friend class AbstractType;
7778 friend class Class;
7779 friend class ClearTypeHashVisitor;
7780 friend class Object;
7781};
7782
7783// AbstractType is an abstract superclass.
7784// Subclasses of AbstractType are Type and TypeParameter.
7785class AbstractType : public Instance {
7786 public:
7787 // We use 30 bits for the hash code so hashes in a snapshot taken on a
7788 // 64-bit architecture stay in Smi range when loaded on a 32-bit
7789 // architecture.
7790 static const intptr_t kHashBits = 30;
7791
7792 virtual bool IsFinalized() const;
7793 virtual void SetIsFinalized() const;
7794 virtual bool IsBeingFinalized() const;
7795 virtual void SetIsBeingFinalized() const;
7796
7797 virtual Nullability nullability() const;
7798 // Returns true if type has '?' nullability suffix, or it is a
7799 // built-in type which is always nullable (Null, dynamic or void).
7800 bool IsNullable() const { return nullability() == Nullability::kNullable; }
7801 // Returns true if type does not have any nullability suffix.
7802 // This function also returns true for type parameters without
7803 // nullability suffix ("T") which can be instantiated with
7804 // nullable or legacy types.
7805 bool IsNonNullable() const {
7806 return nullability() == Nullability::kNonNullable;
7807 }
7808 // Returns true if type has '*' nullability suffix, i.e.
7809 // it is from a legacy (opted-out) library.
7810 bool IsLegacy() const { return nullability() == Nullability::kLegacy; }
7811 // Returns true if it is guaranteed that null cannot be
7812 // assigned to this type.
7813 bool IsStrictlyNonNullable() const;
7814
7815 virtual AbstractTypePtr SetInstantiatedNullability(
7816 const TypeParameter& type_param,
7817 Heap::Space space) const;
7818 virtual AbstractTypePtr NormalizeFutureOrType(Heap::Space space) const;
7819
7820 virtual bool HasTypeClass() const { return type_class_id() != kIllegalCid; }
7821 virtual classid_t type_class_id() const;
7822 virtual ClassPtr type_class() const;
7823 virtual TypeArgumentsPtr arguments() const;
7824 virtual void set_arguments(const TypeArguments& value) const;
7825 virtual TokenPosition token_pos() const;
7826 virtual bool IsInstantiated(Genericity genericity = kAny,
7827 intptr_t num_free_fun_type_params = kAllFree,
7828 TrailPtr trail = nullptr) const;
7829 virtual bool CanonicalizeEquals(const Instance& other) const {
7830 return Equals(other);
7831 }
7832 virtual uint32_t CanonicalizeHash() const { return Hash(); }
7833 virtual bool Equals(const Instance& other) const {
7834 return IsEquivalent(other, TypeEquality::kCanonical);
7835 }
7836 virtual bool IsEquivalent(const Instance& other,
7837 TypeEquality kind,
7838 TrailPtr trail = nullptr) const;
7839 virtual bool IsRecursive() const;
7840
7841 // Check if this type represents a function type.
7842 virtual bool IsFunctionType() const { return false; }
7843
7844 // Instantiate this type using the given type argument vectors.
7845 //
7846 // Note that some type parameters appearing in this type may not require
7847 // instantiation. Consider a class C<T> declaring a non-generic method
7848 // foo(bar<B>(T t, B b)). Although foo is not a generic method, it takes a
7849 // generic function bar<B> as argument and its function type refers to class
7850 // type parameter T and function type parameter B. When instantiating the
7851 // function type of foo for a particular value of T, function type parameter B
7852 // must remain uninstantiated, because only T is a free variable in this type.
7853 //
7854 // Return a new type, or return 'this' if it is already instantiated.
7855 virtual AbstractTypePtr InstantiateFrom(
7856 const TypeArguments& instantiator_type_arguments,
7857 const TypeArguments& function_type_arguments,
7858 intptr_t num_free_fun_type_params,
7859 Heap::Space space,
7860 TrailPtr trail = nullptr) const;
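
// Illustrative sketch (assumes a VM thread with an active zone; 'list_of_T',
// 'int_args', and 'function_args' are hypothetical handles):
//
//   // Instantiate e.g. List<T> with an instantiator vector <int>.
//   AbstractType& type = AbstractType::Handle(list_of_T.raw());
//   type = type.InstantiateFrom(int_args, function_args, kAllFree,
//                               Heap::kNew);
//   ASSERT(type.IsInstantiated());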
7861
7862 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
7863 const char** error_str) const {
7864 return Canonicalize();
7865 }
7866
7867 // Return the canonical version of this type.
7868 virtual AbstractTypePtr Canonicalize(TrailPtr trail = nullptr) const;
7869
7870#if defined(DEBUG)
7871 // Check if abstract type is canonical.
7872 virtual bool CheckIsCanonical(Thread* thread) const {
7873 UNREACHABLE();
7874 return false;
7875 }
7876#endif // DEBUG
7877
7878 // Return the object associated with the receiver in the trail or
7879 // AbstractType::null() if the receiver is not contained in the trail.
7880 AbstractTypePtr OnlyBuddyInTrail(TrailPtr trail) const;
7881
7882 // If the trail is null, allocate a trail, then add the pair <receiver, buddy>
7883 // to the trail. The receiver may only be added once, with its only buddy.
7884 void AddOnlyBuddyToTrail(TrailPtr* trail, const AbstractType& buddy) const;
7885
7886 // Return true if the receiver is contained in the trail.
7887 // Otherwise, if the trail is null, allocate a trail, then add the receiver to
7888 // the trail and return false.
7889 bool TestAndAddToTrail(TrailPtr* trail) const;
7890
7891 // Return true if the pair <receiver, buddy> is contained in the trail.
7892 // Otherwise, if the trail is null, allocate a trail, add the pair <receiver,
7893 // buddy> to the trail and return false.
7894 // The receiver may be added several times, each time with a different buddy.
7895 bool TestAndAddBuddyToTrail(TrailPtr* trail, const AbstractType& buddy) const;
7896
7897 // Add the pair <name, uri> to the list, if not already present.
7898 static void AddURI(URIs* uris, const String& name, const String& uri);
7899
7900 // Return a formatted string of the uris.
7901 static StringPtr PrintURIs(URIs* uris);
7902
7903 // Returns a C-String (possibly "") representing the nullability of this type.
7904 // Legacy and undetermined suffixes are only displayed with kInternalName.
7905 virtual const char* NullabilitySuffix(NameVisibility name_visibility) const;
7906
7907 // The name of this type, including the names of its type arguments, if any.
7908 virtual StringPtr Name() const;
7909
7910 // The name of this type, including the names of its type arguments, if any.
7911 // Names of internal classes are mapped to their public interfaces.
7912 virtual StringPtr UserVisibleName() const;
7913
7914 // Return the internal or public name of this type, including the names of its
7915 // type arguments, if any.
7916 void PrintName(
7917 NameVisibility visibility,
7918 BaseTextBuffer* printer,
7919 NameDisambiguation name_disambiguation = NameDisambiguation::kNo) const;
7920
7921 // Add the class name and URI of each occurring type to the uris
7922 // list and mark ambiguous triplets to be printed.
7923 virtual void EnumerateURIs(URIs* uris) const;
7924
7925 virtual intptr_t Hash() const;
7926
7927 // The name of this type's class, i.e. without the type argument names of this
7928 // type.
7929 StringPtr ClassName() const;
7930
7931 // Check if this type is a still uninitialized TypeRef.
7932 bool IsNullTypeRef() const;
7933
7934 // Check if this type represents the 'dynamic' type.
7935 bool IsDynamicType() const { return type_class_id() == kDynamicCid; }
7936
7937 // Check if this type represents the 'void' type.
7938 bool IsVoidType() const { return type_class_id() == kVoidCid; }
7939
7940 // Check if this type represents the 'Null' type.
7941 bool IsNullType() const;
7942
7943 // Check if this type represents the 'Never' type.
7944 bool IsNeverType() const;
7945
7946 // Check if this type represents the 'Object' type.
7947 bool IsObjectType() const { return type_class_id() == kInstanceCid; }
7948
7949 // Check if this type represents a top type for subtyping,
7950 // assignability and 'as' type tests.
7951 //
7952 // Returns true if
7953 // - any type is a subtype of this type;
7954 // - any value can be assigned to a variable of this type;
7955 // - 'as' type test always succeeds for this type.
7956 bool IsTopTypeForSubtyping() const;
7957
7958 // Check if this type represents a top type for 'is' type tests.
7959 // Returns true if 'is' type test always returns true for this type.
7960 bool IsTopTypeForInstanceOf() const;
7961
7962 // Check if this type represents the 'bool' type.
7963 bool IsBoolType() const { return type_class_id() == kBoolCid; }
7964
7965 // Check if this type represents the 'int' type.
7966 bool IsIntType() const;
7967
7968 // Check if this type represents the 'double' type.
7969 bool IsDoubleType() const;
7970
7971 // Check if this type represents the 'Float32x4' type.
7972 bool IsFloat32x4Type() const;
7973
7974 // Check if this type represents the 'Float64x2' type.
7975 bool IsFloat64x2Type() const;
7976
7977 // Check if this type represents the 'Int32x4' type.
7978 bool IsInt32x4Type() const;
7979
7980 // Check if this type represents the 'num' type.
7981 bool IsNumberType() const { return type_class_id() == kNumberCid; }
7982
7983 // Check if this type represents the '_Smi' type.
7984 bool IsSmiType() const { return type_class_id() == kSmiCid; }
7985
7986 // Check if this type represents the 'String' type.
7987 bool IsStringType() const;
7988
7989 // Check if this type represents the Dart 'Function' type.
7990 bool IsDartFunctionType() const;
7991
7992 // Check if this type represents the Dart '_Closure' type.
7993 bool IsDartClosureType() const;
7994
7995 // Check if this type represents the 'Pointer' type from "dart:ffi".
7996 bool IsFfiPointerType() const;
7997
7998 // Check if this type represents the 'FutureOr' type.
7999 bool IsFutureOrType() const { return type_class_id() == kFutureOrCid; }
8000
8001 // Returns the type argument of this (possibly nested) 'FutureOr' type.
8002 // Returns unmodified type if this type is not a 'FutureOr' type.
8003 AbstractTypePtr UnwrapFutureOr() const;
8004
8005 // Returns true if catching this type will catch all exceptions.
8006 // Exception objects are guaranteed to be non-nullable, so
8007 // non-nullable Object is also a catch-all type.
8008 bool IsCatchAllType() const { return IsDynamicType() || IsObjectType(); }
8009
8010 // Check the subtype relationship.
8011 bool IsSubtypeOf(const AbstractType& other,
8012 Heap::Space space,
8013 TrailPtr trail = nullptr) const;
8014
8015 // Returns true iff subtype is a subtype of supertype, false otherwise or if
8016 // an error occurred.
8017 static bool InstantiateAndTestSubtype(
8018 AbstractType* subtype,
8019 AbstractType* supertype,
8020 const TypeArguments& instantiator_type_args,
8021 const TypeArguments& function_type_args);
8022
8023 static intptr_t type_test_stub_entry_point_offset() {
8024 return OFFSET_OF(AbstractTypeLayout, type_test_stub_entry_point_);
8025 }
8026
8027 uword type_test_stub_entry_point() const {
8028 return raw_ptr()->type_test_stub_entry_point_;
8029 }
8030 CodePtr type_test_stub() const { return raw_ptr()->type_test_stub_; }
8031
8032 void SetTypeTestingStub(const Code& stub) const;
8033
8034 private:
8035 // Returns true if this type is a subtype of FutureOr<T> specified by 'other'.
8036 // Returns false if other type is not a FutureOr.
8037 bool IsSubtypeOfFutureOr(Zone* zone,
8038 const AbstractType& other,
8039 Heap::Space space,
8040 TrailPtr trail = nullptr) const;
8041
8042 protected:
8043 HEAP_OBJECT_IMPLEMENTATION(AbstractType, Instance);
8044 friend class Class;
8045 friend class Function;
8046 friend class TypeArguments;
8047};
8048
8049// A Type consists of a class, possibly parameterized with type
8050// arguments. Example: C<T1, T2>.
8051//
8052// Caution: 'TypePtr' denotes a 'raw' pointer to a VM object of class Type, as
8053// opposed to 'Type' denoting a 'handle' to the same object. 'RawType' does not
8054// relate to a 'raw type', as opposed to a 'cooked type' or 'rare type'.
8055class Type : public AbstractType {
8056 public:
8057 static intptr_t type_class_id_offset() {
8058 return OFFSET_OF(TypeLayout, type_class_id_);
8059 }
8060 static intptr_t arguments_offset() {
8061 return OFFSET_OF(TypeLayout, arguments_);
8062 }
8063 static intptr_t type_state_offset() {
8064 return OFFSET_OF(TypeLayout, type_state_);
8065 }
8066 static intptr_t hash_offset() { return OFFSET_OF(TypeLayout, hash_); }
8067 static intptr_t nullability_offset() {
8068 return OFFSET_OF(TypeLayout, nullability_);
8069 }
8070 virtual bool IsFinalized() const {
8071 return (raw_ptr()->type_state_ == TypeLayout::kFinalizedInstantiated) ||
8072 (raw_ptr()->type_state_ == TypeLayout::kFinalizedUninstantiated);
8073 }
8074 virtual void SetIsFinalized() const;
8075 void ResetIsFinalized() const; // Ignore current state and set again.
8076 virtual bool IsBeingFinalized() const {
8077 return raw_ptr()->type_state_ == TypeLayout::kBeingFinalized;
8078 }
8079 virtual void SetIsBeingFinalized() const;
8080 virtual bool HasTypeClass() const {
8081 ASSERT(type_class_id() != kIllegalCid);
8082 return true;
8083 }
8084 virtual Nullability nullability() const {
8085 return static_cast<Nullability>(raw_ptr()->nullability_);
8086 }
8087 TypePtr ToNullability(Nullability value, Heap::Space space) const;
8088 virtual classid_t type_class_id() const;
8089 virtual ClassPtr type_class() const;
8090 void set_type_class(const Class& value) const;
8091 virtual TypeArgumentsPtr arguments() const { return raw_ptr()->arguments_; }
8092 virtual void set_arguments(const TypeArguments& value) const;
8093 virtual TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
8094 virtual bool IsInstantiated(Genericity genericity = kAny,
8095 intptr_t num_free_fun_type_params = kAllFree,
8096 TrailPtr trail = nullptr) const;
8097 virtual bool IsEquivalent(const Instance& other,
8098 TypeEquality kind,
8099 TrailPtr trail = nullptr) const;
8100 virtual bool IsRecursive() const;
8101
8102 // Return true if this type can be used as the declaration type of cls after
8103 // canonicalization (passed-in cls must match type_class()).
8104 bool IsDeclarationTypeOf(const Class& cls) const;
8105
8106 // If signature is not null, this type represents a function type. Note that
8107 // the signature fully represents the type and type arguments can be ignored.
8108 // However, in case of a generic typedef, they document how the typedef class
8109 // was parameterized to obtain the actual signature.
8110 FunctionPtr signature() const;
8111 void set_signature(const Function& value) const;
8112 static intptr_t signature_offset() {
8113 return OFFSET_OF(TypeLayout, signature_);
8114 }
8115
8116 virtual bool IsFunctionType() const {
8117 return signature() != Function::null();
8118 }
8119 virtual AbstractTypePtr InstantiateFrom(
8120 const TypeArguments& instantiator_type_arguments,
8121 const TypeArguments& function_type_arguments,
8122 intptr_t num_free_fun_type_params,
8123 Heap::Space space,
8124 TrailPtr trail = nullptr) const;
8125 virtual AbstractTypePtr Canonicalize(TrailPtr trail = nullptr) const;
8126#if defined(DEBUG)
8127 // Check if type is canonical.
8128 virtual bool CheckIsCanonical(Thread* thread) const;
8129#endif // DEBUG
8130 virtual void EnumerateURIs(URIs* uris) const;
8131
8132 virtual intptr_t Hash() const;
8133 intptr_t ComputeHash() const;
8134
8135 static intptr_t InstanceSize() {
8136 return RoundedAllocationSize(sizeof(TypeLayout));
8137 }
8138
8139 // The type of the literal 'null'.
8140 static TypePtr NullType();
8141
8142 // The 'dynamic' type.
8143 static TypePtr DynamicType();
8144
8145 // The 'void' type.
8146 static TypePtr VoidType();
8147
8148 // The 'Never' type.
8149 static TypePtr NeverType();
8150
8151 // The 'Object' type.
8152 static TypePtr ObjectType();
8153
8154 // The 'bool' type.
8155 static TypePtr BoolType();
8156
8157 // The 'int' type.
8158 static TypePtr IntType();
8159
8160 // The 'int?' type.
8161 static TypePtr NullableIntType();
8162
8163 // The 'Smi' type.
8164 static TypePtr SmiType();
8165
8166 // The 'Mint' type.
8167 static TypePtr MintType();
8168
8169 // The 'double' type.
8170 static TypePtr Double();
8171
8172 // The 'double?' type.
8173 static TypePtr NullableDouble();
8174
8175 // The 'Float32x4' type.
8176 static TypePtr Float32x4();
8177
8178 // The 'Float64x2' type.
8179 static TypePtr Float64x2();
8180
8181 // The 'Int32x4' type.
8182 static TypePtr Int32x4();
8183
8184 // The 'num' type.
8185 static TypePtr Number();
8186
8187 // The 'String' type.
8188 static TypePtr StringType();
8189
8190 // The 'Array' type.
8191 static TypePtr ArrayType();
8192
8193 // The 'Function' type.
8194 static TypePtr DartFunctionType();
8195
8196 // The 'Type' type.
8197 static TypePtr DartTypeType();
8198
8199 // The finalized type of the given non-parameterized class.
8200 static TypePtr NewNonParameterizedType(const Class& type_class);
8201
8202 static TypePtr New(const Class& clazz,
8203 const TypeArguments& arguments,
8204 TokenPosition token_pos,
8205 Nullability nullability = Nullability::kLegacy,
8206 Heap::Space space = Heap::kOld);
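
// Illustrative sketch (assumes a VM thread with an active zone; 'cls' and
// 'type_args' are hypothetical handles, and finalization is normally driven
// by the class finalizer rather than done by hand):
//
//   Type& type = Type::Handle(
//       Type::New(cls, type_args, TokenPosition::kNoSource,
//                 Nullability::kNonNullable));
//   type.SetIsFinalized();
//   type ^= type.Canonicalize();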
8207
8208 private:
8209 void SetHash(intptr_t value) const;
8210
8211 void set_token_pos(TokenPosition token_pos) const;
8212 void set_type_state(int8_t state) const;
8213 void set_nullability(Nullability value) const {
8214 ASSERT(!IsCanonical());
8215 StoreNonPointer(&raw_ptr()->nullability_, static_cast<int8_t>(value));
8216 }
8217
8218 static TypePtr New(Heap::Space space = Heap::kOld);
8219
8220 FINAL_HEAP_OBJECT_IMPLEMENTATION(Type, AbstractType);
8221 friend class Class;
8222 friend class TypeArguments;
8223 friend class ClearTypeHashVisitor;
8224};
8225
8226// A TypeRef is used to break cycles in the representation of recursive types.
8227// Its only field is the recursive AbstractType it refers to, which can
8228// temporarily be null during finalization.
8229// Note that the cycle always involves type arguments.
8230class TypeRef : public AbstractType {
8231 public:
8232 static intptr_t type_offset() { return OFFSET_OF(TypeRefLayout, type_); }
8233
8234 virtual bool IsFinalized() const {
8235 const AbstractType& ref_type = AbstractType::Handle(type());
8236 return !ref_type.IsNull() && ref_type.IsFinalized();
8237 }
8238 virtual bool IsBeingFinalized() const {
8239 const AbstractType& ref_type = AbstractType::Handle(type());
8240 return ref_type.IsNull() || ref_type.IsBeingFinalized();
8241 }
8242 virtual Nullability nullability() const {
8243 const AbstractType& ref_type = AbstractType::Handle(type());
8244 ASSERT(!ref_type.IsNull());
8245 return ref_type.nullability();
8246 }
8247 virtual bool HasTypeClass() const {
8248 return (type() != AbstractType::null()) &&
8249 AbstractType::Handle(type()).HasTypeClass();
8250 }
8251 AbstractTypePtr type() const { return raw_ptr()->type_; }
8252 void set_type(const AbstractType& value) const;
8253 virtual classid_t type_class_id() const {
8254 return AbstractType::Handle(type()).type_class_id();
8255 }
8256 virtual ClassPtr type_class() const {
8257 return AbstractType::Handle(type()).type_class();
8258 }
8259 virtual TypeArgumentsPtr arguments() const {
8260 return AbstractType::Handle(type()).arguments();
8261 }
8262 virtual TokenPosition token_pos() const {
8263 return AbstractType::Handle(type()).token_pos();
8264 }
8265 virtual bool IsInstantiated(Genericity genericity = kAny,
8266 intptr_t num_free_fun_type_params = kAllFree,
8267 TrailPtr trail = nullptr) const;
8268 virtual bool IsEquivalent(const Instance& other,
8269 TypeEquality kind,
8270 TrailPtr trail = nullptr) const;
8271 virtual bool IsRecursive() const { return true; }
8272 virtual bool IsFunctionType() const {
8273 const AbstractType& ref_type = AbstractType::Handle(type());
8274 return !ref_type.IsNull() && ref_type.IsFunctionType();
8275 }
8276 virtual AbstractTypePtr InstantiateFrom(
8277 const TypeArguments& instantiator_type_arguments,
8278 const TypeArguments& function_type_arguments,
8279 intptr_t num_free_fun_type_params,
8280 Heap::Space space,
8281 TrailPtr trail = nullptr) const;
8282 virtual AbstractTypePtr Canonicalize(TrailPtr trail = nullptr) const;
8283#if defined(DEBUG)
8284 // Check if typeref is canonical.
8285 virtual bool CheckIsCanonical(Thread* thread) const;
8286#endif // DEBUG
8287 virtual void EnumerateURIs(URIs* uris) const;
8288
8289 virtual intptr_t Hash() const;
8290
8291 static intptr_t InstanceSize() {
8292 return RoundedAllocationSize(sizeof(TypeRefLayout));
8293 }
8294
8295 static TypeRefPtr New(const AbstractType& type);
8296
8297 private:
8298 static TypeRefPtr New();
8299
8300 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeRef, AbstractType);
8301 friend class Class;
8302};
8303
8304// A TypeParameter represents a type parameter of a parameterized class.
8305// It specifies its index (and its name for debugging purposes), as well as its
8306// upper bound.
8307// For example, the type parameter 'V' is specified as index 1 in the context of
8308// the class HashMap<K, V>. At compile time, the TypeParameter is not
8309 // instantiated yet, i.e. it is only a placeholder.
8310// Upon finalization, the TypeParameter index is changed to reflect its position
8311// as type argument (rather than type parameter) of the parameterized class.
8312// If the type parameter is declared without an extends clause, its bound is set
8313// to the ObjectType.
8314class TypeParameter : public AbstractType {
8315 public:
8316 virtual bool IsFinalized() const {
8317 return TypeParameterLayout::FinalizedBit::decode(raw_ptr()->flags_);
8318 }
8319 virtual void SetIsFinalized() const;
8320 virtual bool IsBeingFinalized() const { return false; }
8321 bool IsGenericCovariantImpl() const {
8322 return TypeParameterLayout::GenericCovariantImplBit::decode(
8323 raw_ptr()->flags_);
8324 }
8325 void SetGenericCovariantImpl(bool value) const;
8326 bool IsDeclaration() const {
8327 return TypeParameterLayout::DeclarationBit::decode(raw_ptr()->flags_);
8328 }
8329 void SetDeclaration(bool value) const;
8330 virtual Nullability nullability() const {
8331 return static_cast<Nullability>(raw_ptr()->nullability_);
8332 }
8333 TypeParameterPtr ToNullability(Nullability value, Heap::Space space) const;
8334 virtual bool HasTypeClass() const { return false; }
8335 virtual classid_t type_class_id() const { return kIllegalCid; }
8336 classid_t parameterized_class_id() const;
8337 ClassPtr parameterized_class() const;
8338 FunctionPtr parameterized_function() const {
8339 return raw_ptr()->parameterized_function_;
8340 }
8341 bool IsClassTypeParameter() const {
8342 return parameterized_class_id() != kFunctionCid;
8343 }
8344 bool IsFunctionTypeParameter() const {
8345 return parameterized_function() != Function::null();
8346 }
8347 StringPtr name() const { return raw_ptr()->name_; }
8348 intptr_t index() const { return raw_ptr()->index_; }
8349 void set_index(intptr_t value) const;
8350 AbstractTypePtr bound() const { return raw_ptr()->bound_; }
8351 void set_bound(const AbstractType& value) const;
8352 virtual TokenPosition token_pos() const { return raw_ptr()->token_pos_; }
8353 virtual bool IsInstantiated(Genericity genericity = kAny,
8354 intptr_t num_free_fun_type_params = kAllFree,
8355 TrailPtr trail = nullptr) const;
8356 virtual bool IsEquivalent(const Instance& other,
8357 TypeEquality kind,
8358 TrailPtr trail = nullptr) const;
8359 virtual bool IsRecursive() const { return false; }
8360 virtual AbstractTypePtr InstantiateFrom(
8361 const TypeArguments& instantiator_type_arguments,
8362 const TypeArguments& function_type_arguments,
8363 intptr_t num_free_fun_type_params,
8364 Heap::Space space,
8365 TrailPtr trail = nullptr) const;
8366 virtual AbstractTypePtr Canonicalize(TrailPtr trail = nullptr) const;
8367#if defined(DEBUG)
8368 // Check if type parameter is canonical.
8369 virtual bool CheckIsCanonical(Thread* thread) const;
8370#endif // DEBUG
8371 virtual void EnumerateURIs(URIs* uris) const;
8372
8373 virtual intptr_t Hash() const;
8374
8375 // Returns type corresponding to [this] type parameter from the
8376 // given [instantiator_type_arguments] and [function_type_arguments].
8377 // Unlike InstantiateFrom, nullability of type parameter is not applied to
8378 // the result.
8379 AbstractTypePtr GetFromTypeArguments(
8380 const TypeArguments& instantiator_type_arguments,
8381 const TypeArguments& function_type_arguments) const;
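
// Illustrative sketch ('type_param', 'inst_args', and 'fun_args' are
// hypothetical handles; roughly, a class type parameter with index i resolves
// to inst_args.TypeAt(i), and a function type parameter resolves against
// fun_args):
//
//   AbstractType& result = AbstractType::Handle(
//       type_param.GetFromTypeArguments(inst_args, fun_args));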
8382
8383 static intptr_t InstanceSize() {
8384 return RoundedAllocationSize(sizeof(TypeParameterLayout));
8385 }
8386
8387 // Only one of parameterized_class and parameterized_function is non-null.
8388 static TypeParameterPtr New(const Class& parameterized_class,
8389 const Function& parameterized_function,
8390 intptr_t index,
8391 const String& name,
8392 const AbstractType& bound,
8393 bool is_generic_covariant_impl,
8394 Nullability nullability,
8395 TokenPosition token_pos);
8396
8397 private:
8398 intptr_t ComputeHash() const;
8399 void SetHash(intptr_t value) const;
8400
8401 void set_parameterized_class(const Class& value) const;
8402 void set_parameterized_function(const Function& value) const;
8403 void set_name(const String& value) const;
8404 void set_token_pos(TokenPosition token_pos) const;
8405 void set_flags(uint8_t flags) const;
8406 void set_nullability(Nullability value) const;
8407
8408 static TypeParameterPtr New();
8409
8410 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypeParameter, AbstractType);
8411 friend class Class;
8412 friend class ClearTypeHashVisitor;
8413};
8414
8415class Number : public Instance {
8416 public:
8417 // TODO(iposva): Add more useful Number methods.
8418 StringPtr ToString(Heap::Space space) const;
8419
8420 // Numbers are canonicalized differently from other instances/strings.
8421 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
8422 const char** error_str) const;
8423
8424#if defined(DEBUG)
8425 // Check if number is canonical.
8426 virtual bool CheckIsCanonical(Thread* thread) const;
8427#endif // DEBUG
8428
8429 private:
8430 OBJECT_IMPLEMENTATION(Number, Instance);
8431
8432 friend class Class;
8433};
8434
8435class Integer : public Number {
8436 public:
8437 static IntegerPtr New(const String& str, Heap::Space space = Heap::kNew);
8438
8439 // Creates a new Integer from the given uint64_t value.
8440 // Silently casts value to int64_t with wrap-around if it is greater
8441 // than kMaxInt64.
8442 static IntegerPtr NewFromUint64(uint64_t value,
8443 Heap::Space space = Heap::kNew);
8444
8445 // Returns a canonical Integer object allocated in the old gen space.
8446 // Returns null if integer is out of range.
8447 static IntegerPtr NewCanonical(const String& str);
8448 static IntegerPtr NewCanonical(int64_t value);
8449
8450 static IntegerPtr New(int64_t value, Heap::Space space = Heap::kNew);
8451
8452 // Returns true iff the given uint64_t value is representable as a Dart integer.
8453 static bool IsValueInRange(uint64_t value);
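
// Illustrative sketch (assumes a VM thread with an active zone):
//
//   const Integer& small = Integer::Handle(Integer::New(42));
//   ASSERT(small.IsSmi() && small.AsInt64Value() == 42);
//   const Integer& large = Integer::Handle(Integer::New(kMaxInt64));
//   ASSERT(large.IsMint());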
8454
8455 virtual bool OperatorEquals(const Instance& other) const {
8456 return Equals(other);
8457 }
8458 virtual bool CanonicalizeEquals(const Instance& other) const {
8459 return Equals(other);
8460 }
8461 virtual uint32_t CanonicalizeHash() const { return AsTruncatedUint32Value(); }
8462 virtual bool Equals(const Instance& other) const;
8463
8464 virtual ObjectPtr HashCode() const { return raw(); }
8465
8466 virtual bool IsZero() const;
8467 virtual bool IsNegative() const;
8468
8469 virtual double AsDoubleValue() const;
8470 virtual int64_t AsInt64Value() const;
8471 virtual int64_t AsTruncatedInt64Value() const { return AsInt64Value(); }
8472 virtual uint32_t AsTruncatedUint32Value() const;
8473
8474 virtual bool FitsIntoSmi() const;
8475
8476 // Returns 0, -1 or 1.
8477 virtual int CompareWith(const Integer& other) const;
8478
8479 // Converts integer to hex string.
8480 const char* ToHexCString(Zone* zone) const;
8481
8482 // Returns the most compact representation of an integer.
8483 IntegerPtr AsValidInteger() const;
8484
8485 // Returns null to indicate that a bigint operation is required.
8486 IntegerPtr ArithmeticOp(Token::Kind operation,
8487 const Integer& other,
8488 Heap::Space space = Heap::kNew) const;
8489 IntegerPtr BitOp(Token::Kind operation,
8490 const Integer& other,
8491 Heap::Space space = Heap::kNew) const;
8492 IntegerPtr ShiftOp(Token::Kind operation,
8493 const Integer& other,
8494 Heap::Space space = Heap::kNew) const;
8495
8496 static int64_t GetInt64Value(const IntegerPtr obj) {
8497 intptr_t raw_value = static_cast<intptr_t>(obj);
8498 if ((raw_value & kSmiTagMask) == kSmiTag) {
8499 return (raw_value >> kSmiTagShift);
8500 } else {
8501 ASSERT(obj->IsMint());
8502 return static_cast<const MintPtr>(obj)->ptr()->value_;
8503 }
8504 }
8505
8506 private:
8507 OBJECT_IMPLEMENTATION(Integer, Number);
8508 friend class Class;
8509};
8510
8511class Smi : public Integer {
8512 public:
8513 static const intptr_t kBits = kSmiBits;
8514 static const intptr_t kMaxValue = kSmiMax;
8515 static const intptr_t kMinValue = kSmiMin;
8516
8517 intptr_t Value() const { return RawSmiValue(raw()); }
8518
8519 virtual bool Equals(const Instance& other) const;
8520 virtual bool IsZero() const { return Value() == 0; }
8521 virtual bool IsNegative() const { return Value() < 0; }
8522
8523 virtual double AsDoubleValue() const;
8524 virtual int64_t AsInt64Value() const;
8525 virtual uint32_t AsTruncatedUint32Value() const;
8526
8527 virtual bool FitsIntoSmi() const { return true; }
8528
8529 virtual int CompareWith(const Integer& other) const;
8530
8531 static intptr_t InstanceSize() { return 0; }
8532
8533 static SmiPtr New(intptr_t value) {
8534 SmiPtr raw_smi = static_cast<SmiPtr>(
8535 (static_cast<uintptr_t>(value) << kSmiTagShift) | kSmiTag);
8536 ASSERT(RawSmiValue(raw_smi) == value);
8537 return raw_smi;
8538 }
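
// Illustrative example (assuming the usual tagging scheme where kSmiTag == 0,
// kSmiTagMask == 1, and kSmiTagShift == 1): Smi::New(3) yields the tagged
// value 0x6, and RawSmiValue() recovers 3 with an arithmetic right shift;
// heap object pointers have the low bit set and can never be mistaken for
// a Smi.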
8539
8540 static SmiPtr FromAlignedAddress(uword address) {
8541 ASSERT((address & kSmiTagMask) == kSmiTag);
8542 return static_cast<SmiPtr>(address);
8543 }
8544
8545 static ClassPtr Class();
8546
8547 static intptr_t Value(const SmiPtr raw_smi) { return RawSmiValue(raw_smi); }
8548
8549 static intptr_t RawValue(intptr_t value) {
8550 return static_cast<intptr_t>(New(value));
8551 }
8552
8553 static bool IsValid(int64_t value) { return compiler::target::IsSmi(value); }
8554
8555 void operator=(SmiPtr value) {
8556 raw_ = value;
8557 CHECK_HANDLE();
8558 }
8559 void operator^=(ObjectPtr value) {
8560 raw_ = value;
8561 CHECK_HANDLE();
8562 }
8563
8564 private:
8565 static intptr_t NextFieldOffset() {
8566 // Indicates this class cannot be extended by dart code.
8567 return -kWordSize;
8568 }
8569
8570 Smi() : Integer() {}
8571 BASE_OBJECT_IMPLEMENTATION(Smi, Integer);
8572 OBJECT_SERVICE_SUPPORT(Smi);
8573 friend class Api; // For ValueFromRaw
8574 friend class Class;
8575 friend class Object;
8576 friend class ReusableSmiHandleScope;
8577 friend class Thread;
8578};
8579
8580class SmiTraits : AllStatic {
8581 public:
8582 static const char* Name() { return "SmiTraits"; }
8583 static bool ReportStats() { return false; }
8584
8585 static bool IsMatch(const Object& a, const Object& b) {
8586 return Smi::Cast(a).Value() == Smi::Cast(b).Value();
8587 }
8588
8589 static uword Hash(const Object& obj) { return Smi::Cast(obj).Value(); }
8590};
8591
8592class Mint : public Integer {
8593 public:
8594 static const intptr_t kBits = 63;  // The 64th bit is the sign bit.
8595 static const int64_t kMaxValue =
8596 static_cast<int64_t>(DART_2PART_UINT64_C(0x7FFFFFFF, FFFFFFFF));
8597 static const int64_t kMinValue =
8598 static_cast<int64_t>(DART_2PART_UINT64_C(0x80000000, 00000000));
8599
8600 int64_t value() const { return raw_ptr()->value_; }
8601 static intptr_t value_offset() { return OFFSET_OF(MintLayout, value_); }
8602
8603 virtual bool IsZero() const { return value() == 0; }
8604 virtual bool IsNegative() const { return value() < 0; }
8605
8606 virtual bool Equals(const Instance& other) const;
8607
8608 virtual double AsDoubleValue() const;
8609 virtual int64_t AsInt64Value() const;
8610 virtual uint32_t AsTruncatedUint32Value() const;
8611
8612 virtual bool FitsIntoSmi() const;
8613
8614 virtual int CompareWith(const Integer& other) const;
8615
8616 static intptr_t InstanceSize() {
8617 return RoundedAllocationSize(sizeof(MintLayout));
8618 }
8619
8620 protected:
8621 // Only Integer::NewXXX is allowed to call Mint::NewXXX directly.
8622 friend class Integer;
8623
8624 static MintPtr New(int64_t value, Heap::Space space = Heap::kNew);
8625
8626 static MintPtr NewCanonical(int64_t value);
8627
8628 private:
8629 void set_value(int64_t value) const;
8630
8631 MINT_OBJECT_IMPLEMENTATION(Mint, Integer, Integer);
8632 friend class Class;
8633 friend class Number;
8634};
8635
8636// Class Double represents class Double in corelib_impl, which implements
8637// abstract class double in corelib.
8638class Double : public Number {
8639 public:
8640 double value() const { return raw_ptr()->value_; }
8641
8642 bool BitwiseEqualsToDouble(double value) const;
8643 virtual bool OperatorEquals(const Instance& other) const;
8644 virtual bool CanonicalizeEquals(const Instance& other) const;
8645 virtual uint32_t CanonicalizeHash() const;
8646
8647 static DoublePtr New(double d, Heap::Space space = Heap::kNew);
8648
8649 static DoublePtr New(const String& str, Heap::Space space = Heap::kNew);
8650
8651 // Returns a canonical double object allocated in the old gen space.
8652 static DoublePtr NewCanonical(double d);
8653
8654 // Returns a canonical double object (allocated in the old gen space) or
8655 // Double::null() if str points to a string that does not convert to a
8656 // double value.
8657 static DoublePtr NewCanonical(const String& str);
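
// Illustrative sketch (assumes a VM thread with an active zone):
//
//   const Double& d = Double::Handle(Double::New(2.5));
//   ASSERT(d.BitwiseEqualsToDouble(2.5));
//   const Double& c = Double::Handle(Double::NewCanonical(2.5));
//   ASSERT(c.IsCanonical());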
8658
8659 static intptr_t InstanceSize() {
8660 return RoundedAllocationSize(sizeof(DoubleLayout));
8661 }
8662
8663 static intptr_t value_offset() { return OFFSET_OF(DoubleLayout, value_); }
8664
8665 private:
8666 void set_value(double value) const;
8667
8668 FINAL_HEAP_OBJECT_IMPLEMENTATION(Double, Number);
8669 friend class Class;
8670 friend class Number;
8671};
8672
8673// String may not be '\0' terminated.
8674class String : public Instance {
8675 public:
8676 // We use 30 bits for the hash code so hashes in a snapshot taken on a
8677 // 64-bit architecture stay in Smi range when loaded on a 32-bit
8678 // architecture.
8679 static const intptr_t kHashBits = 30;
8680
8681 static const intptr_t kOneByteChar = 1;
8682 static const intptr_t kTwoByteChar = 2;
8683
8684// All strings share the same maximum element count to keep things
8685// simple. We choose a value that will prevent integer overflow for
8686// 2 byte strings, since it is the worst case.
8687#if defined(HASH_IN_OBJECT_HEADER)
8688 static const intptr_t kSizeofRawString = sizeof(InstanceLayout) + kWordSize;
8689#else
8690 static const intptr_t kSizeofRawString =
8691 sizeof(InstanceLayout) + 2 * kWordSize;
8692#endif
8693 static const intptr_t kMaxElements = kSmiMax / kTwoByteChar;
8694
8695 class CodePointIterator : public ValueObject {
8696 public:
8697 explicit CodePointIterator(const String& str)
8698 : str_(str), ch_(0), index_(-1), end_(str.Length()) {
8699 ASSERT(!str_.IsNull());
8700 }
8701
8702 CodePointIterator(const String& str, intptr_t start, intptr_t length)
8703 : str_(str), ch_(0), index_(start - 1), end_(start + length) {
8704 ASSERT(start >= 0);
8705 ASSERT(end_ <= str.Length());
8706 }
8707
8708 int32_t Current() const {
8709 ASSERT(index_ >= 0);
8710 ASSERT(index_ < end_);
8711 return ch_;
8712 }
8713
8714 bool Next();
8715
8716 private:
8717 const String& str_;
8718 int32_t ch_;
8719 intptr_t index_;
8720 intptr_t end_;
8721 DISALLOW_IMPLICIT_CONSTRUCTORS(CodePointIterator);
8722 };
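
// Illustrative usage sketch ('str' is a hypothetical String handle):
//
//   String::CodePointIterator it(str);
//   while (it.Next()) {
//     const int32_t code_point = it.Current();
//     // ... use code_point ...
//   }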
8723
8724 intptr_t Length() const { return LengthOf(raw()); }
8725 static intptr_t LengthOf(StringPtr obj) {
8726 return Smi::Value(obj->ptr()->length_);
8727 }
8728 static intptr_t length_offset() { return OFFSET_OF(StringLayout, length_); }
8729
8730 intptr_t Hash() const {
8731 intptr_t result = GetCachedHash(raw());
8732 if (result != 0) {
8733 return result;
8734 }
8735 result = String::Hash(*this, 0, this->Length());
8736 SetCachedHash(raw(), result);
8737 return result;
8738 }
8739
8740 static intptr_t Hash(StringPtr raw);
8741
8742 bool HasHash() const {
8743 ASSERT(Smi::New(0) == nullptr);
8744 return GetCachedHash(raw()) != 0;
8745 }
8746
8747 static intptr_t hash_offset() { return OFFSET_OF(StringLayout, hash_); }
8748 static intptr_t Hash(const String& str, intptr_t begin_index, intptr_t len);
8749 static intptr_t Hash(const char* characters, intptr_t len);
8750 static intptr_t Hash(const uint16_t* characters, intptr_t len);
8751 static intptr_t Hash(const int32_t* characters, intptr_t len);
8752 static intptr_t HashRawSymbol(const StringPtr symbol) {
8753 ASSERT(symbol->ptr()->IsCanonical());
8754 intptr_t result = GetCachedHash(symbol);
8755 ASSERT(result != 0);
8756 return result;
8757 }
8758
8759 // Returns the hash of str1 + str2.
8760 static intptr_t HashConcat(const String& str1, const String& str2);
8761
8762 virtual ObjectPtr HashCode() const { return Integer::New(Hash()); }
8763
8764 uint16_t CharAt(intptr_t index) const { return CharAt(raw(), index); }
8765 static uint16_t CharAt(StringPtr str, intptr_t index);
8766
8767 intptr_t CharSize() const;
8768
8769 inline bool Equals(const String& str) const;
8770
8771 bool Equals(const String& str,
8772 intptr_t begin_index, // begin index on 'str'.
8773 intptr_t len) const; // len on 'str'.
8774
8775 // Compares to a '\0' terminated array of UTF-8 encoded characters.
8776 bool Equals(const char* cstr) const;
8777
8778 // Compares to an array of Latin-1 encoded characters.
8779 bool EqualsLatin1(const uint8_t* characters, intptr_t len) const {
8780 return Equals(characters, len);
8781 }
8782
8783 // Compares to an array of UTF-16 encoded characters.
8784 bool Equals(const uint16_t* characters, intptr_t len) const;
8785
8786 // Compares to an array of UTF-32 encoded characters.
8787 bool Equals(const int32_t* characters, intptr_t len) const;
8788
8789 // True iff this string equals str1 + str2.
8790 bool EqualsConcat(const String& str1, const String& str2) const;
8791
8792 virtual bool OperatorEquals(const Instance& other) const {
8793 return Equals(other);
8794 }
8795 virtual bool CanonicalizeEquals(const Instance& other) const {
8796 return Equals(other);
8797 }
8798 virtual uint32_t CanonicalizeHash() const { return Hash(); }
8799 virtual bool Equals(const Instance& other) const;
8800
8801 intptr_t CompareTo(const String& other) const;
8802
8803 bool StartsWith(const String& other) const {
8804 NoSafepointScope no_safepoint;
8805 return StartsWith(raw(), other.raw());
8806 }
8807 static bool StartsWith(StringPtr str, StringPtr prefix);
8808 bool EndsWith(const String& other) const;
8809
8810 // Strings are canonicalized using the symbol table.
8811 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
8812 const char** error_str) const;
8813
8814#if defined(DEBUG)
8815 // Check if string is canonical.
8816 virtual bool CheckIsCanonical(Thread* thread) const;
8817#endif // DEBUG
8818
8819 bool IsSymbol() const { return raw()->ptr()->IsCanonical(); }
8820
8821 bool IsOneByteString() const {
8822 return raw()->GetClassId() == kOneByteStringCid;
8823 }
8824
8825 bool IsTwoByteString() const {
8826 return raw()->GetClassId() == kTwoByteStringCid;
8827 }
8828
8829 bool IsExternalOneByteString() const {
8830 return raw()->GetClassId() == kExternalOneByteStringCid;
8831 }
8832
8833 bool IsExternalTwoByteString() const {
8834 return raw()->GetClassId() == kExternalTwoByteStringCid;
8835 }
8836
8837 bool IsExternal() const {
8838 return IsExternalStringClassId(raw()->GetClassId());
8839 }
8840
8841 void* GetPeer() const;
8842
8843 char* ToMallocCString() const;
8844 void ToUTF8(uint8_t* utf8_array, intptr_t array_len) const;
8845
8846 // Creates a new String object from a C string that is assumed to contain
8847 // UTF-8 encoded characters, with '\0' treated as the termination character.
8848 // TODO(7123) - Rename this to FromCString(....).
8849 static StringPtr New(const char* cstr, Heap::Space space = Heap::kNew);
8850
8851 // Creates a new String object from an array of UTF-8 encoded characters.
8852 static StringPtr FromUTF8(const uint8_t* utf8_array,
8853 intptr_t array_len,
8854 Heap::Space space = Heap::kNew);
8855
8856 // Creates a new String object from an array of Latin-1 encoded characters.
8857 static StringPtr FromLatin1(const uint8_t* latin1_array,
8858 intptr_t array_len,
8859 Heap::Space space = Heap::kNew);
8860
8861 // Creates a new String object from an array of UTF-16 encoded characters.
8862 static StringPtr FromUTF16(const uint16_t* utf16_array,
8863 intptr_t array_len,
8864 Heap::Space space = Heap::kNew);
8865
8866 // Creates a new String object from an array of UTF-32 encoded characters.
8867 static StringPtr FromUTF32(const int32_t* utf32_array,
8868 intptr_t array_len,
8869 Heap::Space space = Heap::kNew);
8870
8871 // Create a new String object from another Dart String instance.
8872 static StringPtr New(const String& str, Heap::Space space = Heap::kNew);
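
// Illustrative sketch (assumes a VM thread with an active zone):
//
//   const String& hello = String::Handle(String::New("hello "));
//   const String& world = String::Handle(String::New("world"));
//   const String& both = String::Handle(String::Concat(hello, world));
//   ASSERT(both.Equals("hello world"));
//   ASSERT(both.Length() == 11);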
8873
8874 // Creates a new External String object using the specified array of
8875 // UTF-8 encoded characters as the external reference.
8876 static StringPtr NewExternal(const uint8_t* utf8_array,
8877 intptr_t array_len,
8878 void* peer,
8879 intptr_t external_allocation_size,
8880 Dart_WeakPersistentHandleFinalizer callback,
8881 Heap::Space = Heap::kNew);
8882
8883 // Creates a new External String object using the specified array of
8884 // UTF-16 encoded characters as the external reference.
8885 static StringPtr NewExternal(const uint16_t* utf16_array,
8886 intptr_t array_len,
8887 void* peer,
8888 intptr_t external_allocation_size,
8889 Dart_WeakPersistentHandleFinalizer callback,
8890 Heap::Space = Heap::kNew);
8891
8892 static void Copy(const String& dst,
8893 intptr_t dst_offset,
8894 const uint8_t* characters,
8895 intptr_t len);
8896 static void Copy(const String& dst,
8897 intptr_t dst_offset,
8898 const uint16_t* characters,
8899 intptr_t len);
8900 static void Copy(const String& dst,
8901 intptr_t dst_offset,
8902 const String& src,
8903 intptr_t src_offset,
8904 intptr_t len);
8905
8906 static StringPtr EscapeSpecialCharacters(const String& str);
8907 // Encodes 'str' for use in an Internationalized Resource Identifier (IRI),
8908 // a generalization of URI (percent-encoding). See RFC 3987.
8909 static const char* EncodeIRI(const String& str);
8910 // Returns null if 'str' is not a valid encoding.
8911 static StringPtr DecodeIRI(const String& str);
8912 static StringPtr Concat(const String& str1,
8913 const String& str2,
8914 Heap::Space space = Heap::kNew);
8915 static StringPtr ConcatAll(const Array& strings,
8916 Heap::Space space = Heap::kNew);
8917 // Concatenate all strings in 'strings' from 'start' to 'end' (exclusive).
8918 static StringPtr ConcatAllRange(const Array& strings,
8919 intptr_t start,
8920 intptr_t end,
8921 Heap::Space space = Heap::kNew);
8922
8923 static StringPtr SubString(const String& str,
8924 intptr_t begin_index,
8925 Heap::Space space = Heap::kNew);
8926 static StringPtr SubString(const String& str,
8927 intptr_t begin_index,
8928 intptr_t length,
8929 Heap::Space space = Heap::kNew) {
8930 return SubString(Thread::Current(), str, begin_index, length, space);
8931 }
8932 static StringPtr SubString(Thread* thread,
8933 const String& str,
8934 intptr_t begin_index,
8935 intptr_t length,
8936 Heap::Space space = Heap::kNew);
8937
8938 static StringPtr Transform(int32_t (*mapping)(int32_t ch),
8939 const String& str,
8940 Heap::Space space = Heap::kNew);
8941
8942 static StringPtr ToUpperCase(const String& str,
8943 Heap::Space space = Heap::kNew);
8944 static StringPtr ToLowerCase(const String& str,
8945 Heap::Space space = Heap::kNew);
8946
8947 static StringPtr RemovePrivateKey(const String& name);
8948
8949 static const char* ScrubName(const String& name, bool is_extension = false);
8950 static StringPtr ScrubNameRetainPrivate(const String& name,
8951 bool is_extension = false);
8952
8953 static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2);
8954
8955 static StringPtr NewFormatted(const char* format, ...) PRINTF_ATTRIBUTE(1, 2);
8956 static StringPtr NewFormatted(Heap::Space space, const char* format, ...)
8957 PRINTF_ATTRIBUTE(2, 3);
8958 static StringPtr NewFormattedV(const char* format,
8959 va_list args,
8960 Heap::Space space = Heap::kNew);
8961
8962 static bool ParseDouble(const String& str,
8963 intptr_t start,
8964 intptr_t end,
8965 double* result);
8966
8967#if !defined(HASH_IN_OBJECT_HEADER)
8968 static uint32_t GetCachedHash(const StringPtr obj) {
8969 return Smi::Value(obj->ptr()->hash_);
8970 }
8971
8972 static void SetCachedHash(StringPtr obj, uintptr_t hash) {
8973 obj->ptr()->hash_ = Smi::New(hash);
8974 }
8975#endif
8976
8977 protected:
8978 // These two operate on an array of Latin-1 encoded characters.
8979 // They are protected to avoid mistaking Latin-1 for UTF-8, but used
8980 // by friendly templated code (e.g., Symbols).
8981 bool Equals(const uint8_t* characters, intptr_t len) const;
8982 static intptr_t Hash(const uint8_t* characters, intptr_t len);
8983
8984 void SetLength(intptr_t value) const {
8985 // This is only safe because we create a new Smi, which does not cause
8986 // heap allocation.
8987 StoreSmi(&raw_ptr()->length_, Smi::New(value));
8988 }
8989
8990 void SetHash(intptr_t value) const { SetCachedHash(raw(), value); }
8991
8992 template <typename HandleType, typename ElementType, typename CallbackType>
8993 static void ReadFromImpl(SnapshotReader* reader,
8994 String* str_obj,
8995 intptr_t len,
8996 intptr_t tags,
8997 CallbackType new_symbol,
8998 Snapshot::Kind kind);
8999
9000 FINAL_HEAP_OBJECT_IMPLEMENTATION(String, Instance);
9001
9002 friend class Class;
9003 friend class Symbols;
9004 friend class StringSlice; // SetHash
9005 template <typename CharType>
9006 friend class CharArray; // SetHash
9007 friend class ConcatString; // SetHash
9008 friend class OneByteString;
9009 friend class TwoByteString;
9010 friend class ExternalOneByteString;
9011 friend class ExternalTwoByteString;
9012 friend class OneByteStringLayout;
9013 friend class RODataSerializationCluster; // SetHash
9014 friend class Pass2Visitor; // Stack "handle"
9015};
9016
9017class OneByteString : public AllStatic {
9018 public:
9019 static uint16_t CharAt(const String& str, intptr_t index) {
9020 ASSERT(str.IsOneByteString());
9021 NoSafepointScope no_safepoint;
9022 return OneByteString::CharAt(static_cast<OneByteStringPtr>(str.raw()),
9023 index);
9024 }
9025
9026 static uint16_t CharAt(OneByteStringPtr str, intptr_t index) {
9027 ASSERT(index >= 0 && index < String::LengthOf(str));
9028 return str->ptr()->data()[index];
9029 }
9030
9031 static void SetCharAt(const String& str, intptr_t index, uint8_t code_unit) {
9032 NoSafepointScope no_safepoint;
9033 *CharAddr(str, index) = code_unit;
9034 }
9035 static OneByteStringPtr EscapeSpecialCharacters(const String& str);
9036 // We use the same maximum elements for all strings.
9037 static const intptr_t kBytesPerElement = 1;
9038 static const intptr_t kMaxElements = String::kMaxElements;
9039
9040 static intptr_t data_offset() {
9041 return OFFSET_OF_RETURNED_VALUE(OneByteStringLayout, data);
9042 }
9043
9044 static intptr_t UnroundedSize(OneByteStringPtr str) {
9045 return UnroundedSize(Smi::Value(str->ptr()->length_));
9046 }
9047 static intptr_t UnroundedSize(intptr_t len) {
9048 return sizeof(OneByteStringLayout) + (len * kBytesPerElement);
9049 }
9050 static intptr_t InstanceSize() {
9051 ASSERT(sizeof(OneByteStringLayout) ==
9052 OFFSET_OF_RETURNED_VALUE(OneByteStringLayout, data));
9053 return 0;
9054 }
9055 static intptr_t InstanceSize(intptr_t len) {
9056 ASSERT(sizeof(OneByteStringLayout) == String::kSizeofRawString);
9057 ASSERT(0 <= len && len <= kMaxElements);
9058#if defined(HASH_IN_OBJECT_HEADER)
9059 // We have to pad zero-length raw strings so that they can be externalized.
9060 // If we don't pad, then the external string object does not fit in the
9061 // memory allocated for the raw string.
9062 if (len == 0) return InstanceSize(1);
9063#endif
9064 return String::RoundedAllocationSize(UnroundedSize(len));
9065 }
9066
9067 static OneByteStringPtr New(intptr_t len, Heap::Space space);
9068 static OneByteStringPtr New(const char* c_string,
9069 Heap::Space space = Heap::kNew) {
9070 return New(reinterpret_cast<const uint8_t*>(c_string), strlen(c_string),
9071 space);
9072 }
9073 static OneByteStringPtr New(const uint8_t* characters,
9074 intptr_t len,
9075 Heap::Space space);
9076 static OneByteStringPtr New(const uint16_t* characters,
9077 intptr_t len,
9078 Heap::Space space);
9079 static OneByteStringPtr New(const int32_t* characters,
9080 intptr_t len,
9081 Heap::Space space);
9082 static OneByteStringPtr New(const String& str, Heap::Space space);
9083 // 'other' must be OneByteString.
9084 static OneByteStringPtr New(const String& other_one_byte_string,
9085 intptr_t other_start_index,
9086 intptr_t other_len,
9087 Heap::Space space);
9088
9089 static OneByteStringPtr New(const TypedData& other_typed_data,
9090 intptr_t other_start_index,
9091 intptr_t other_len,
9092 Heap::Space space = Heap::kNew);
9093
9094 static OneByteStringPtr New(const ExternalTypedData& other_typed_data,
9095 intptr_t other_start_index,
9096 intptr_t other_len,
9097 Heap::Space space = Heap::kNew);
9098
9099 static OneByteStringPtr Concat(const String& str1,
9100 const String& str2,
9101 Heap::Space space);
9102 static OneByteStringPtr ConcatAll(const Array& strings,
9103 intptr_t start,
9104 intptr_t end,
9105 intptr_t len,
9106 Heap::Space space);
9107
9108 static OneByteStringPtr Transform(int32_t (*mapping)(int32_t ch),
9109 const String& str,
9110 Heap::Space space);
9111
  // High-performance version of substring for one-byte strings.
  // "str" must be a OneByteString.
9114 static OneByteStringPtr SubStringUnchecked(const String& str,
9115 intptr_t begin_index,
9116 intptr_t length,
9117 Heap::Space space);
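  // Hypothetical usage sketch: both the bounds checks and the one-byte-ness
  // of the receiver are the caller's responsibility here.
  //
  //   const String& hello = String::Handle(String::New("hello"));
  //   const String& ell = String::Handle(
  //       OneByteString::SubStringUnchecked(hello, 1, 3, Heap::kNew));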
9118
9119 static const ClassId kClassId = kOneByteStringCid;
9120
9121 static OneByteStringPtr null() {
9122 return static_cast<OneByteStringPtr>(Object::null());
9123 }
9124
9125 private:
9126 static OneByteStringPtr raw(const String& str) {
9127 return static_cast<OneByteStringPtr>(str.raw());
9128 }
9129
9130 static const OneByteStringLayout* raw_ptr(const String& str) {
9131 return reinterpret_cast<const OneByteStringLayout*>(str.raw_ptr());
9132 }
9133
9134 static uint8_t* CharAddr(const String& str, intptr_t index) {
9135 ASSERT((index >= 0) && (index < str.Length()));
9136 ASSERT(str.IsOneByteString());
9137 return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[index];
9138 }
9139
9140 static uint8_t* DataStart(const String& str) {
9141 ASSERT(str.IsOneByteString());
9142 return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[0];
9143 }
9144
9145 static OneByteStringPtr ReadFrom(SnapshotReader* reader,
9146 intptr_t object_id,
9147 intptr_t tags,
9148 Snapshot::Kind kind,
9149 bool as_reference);
9150
9151 friend class Class;
9152 friend class String;
9153 friend class Symbols;
9154 friend class ExternalOneByteString;
9155 friend class SnapshotReader;
9156 friend class StringHasher;
9157 friend class Utf8;
9158};
9159
9160class TwoByteString : public AllStatic {
9161 public:
9162 static uint16_t CharAt(const String& str, intptr_t index) {
9163 ASSERT(str.IsTwoByteString());
9164 NoSafepointScope no_safepoint;
9165 return TwoByteString::CharAt(static_cast<TwoByteStringPtr>(str.raw()),
9166 index);
9167 }
9168
9169 static uint16_t CharAt(TwoByteStringPtr str, intptr_t index) {
9170 ASSERT(index >= 0 && index < String::LengthOf(str));
9171 return str->ptr()->data()[index];
9172 }
9173
9174 static void SetCharAt(const String& str, intptr_t index, uint16_t ch) {
9175 NoSafepointScope no_safepoint;
9176 *CharAddr(str, index) = ch;
9177 }
9178
9179 static TwoByteStringPtr EscapeSpecialCharacters(const String& str);
9180
  // We use the same maximum number of elements for all strings.
9182 static const intptr_t kBytesPerElement = 2;
9183 static const intptr_t kMaxElements = String::kMaxElements;
9184
9185 static intptr_t data_offset() {
9186 return OFFSET_OF_RETURNED_VALUE(TwoByteStringLayout, data);
9187 }
9188
9189 static intptr_t UnroundedSize(TwoByteStringPtr str) {
9190 return UnroundedSize(Smi::Value(str->ptr()->length_));
9191 }
9192 static intptr_t UnroundedSize(intptr_t len) {
9193 return sizeof(TwoByteStringLayout) + (len * kBytesPerElement);
9194 }
9195 static intptr_t InstanceSize() {
9196 ASSERT(sizeof(TwoByteStringLayout) ==
9197 OFFSET_OF_RETURNED_VALUE(TwoByteStringLayout, data));
9198 return 0;
9199 }
9200 static intptr_t InstanceSize(intptr_t len) {
9201 ASSERT(sizeof(TwoByteStringLayout) == String::kSizeofRawString);
9202 ASSERT(0 <= len && len <= kMaxElements);
9203 // We have to pad zero-length raw strings so that they can be externalized.
9204 // If we don't pad, then the external string object does not fit in the
9205 // memory allocated for the raw string.
9206 if (len == 0) return InstanceSize(1);
9207 return String::RoundedAllocationSize(UnroundedSize(len));
9208 }
9209
9210 static TwoByteStringPtr New(intptr_t len, Heap::Space space);
9211 static TwoByteStringPtr New(const uint16_t* characters,
9212 intptr_t len,
9213 Heap::Space space);
9214 static TwoByteStringPtr New(intptr_t utf16_len,
9215 const int32_t* characters,
9216 intptr_t len,
9217 Heap::Space space);
9218 static TwoByteStringPtr New(const String& str, Heap::Space space);
9219
9220 static TwoByteStringPtr New(const TypedData& other_typed_data,
9221 intptr_t other_start_index,
9222 intptr_t other_len,
9223 Heap::Space space = Heap::kNew);
9224
9225 static TwoByteStringPtr New(const ExternalTypedData& other_typed_data,
9226 intptr_t other_start_index,
9227 intptr_t other_len,
9228 Heap::Space space = Heap::kNew);
9229
9230 static TwoByteStringPtr Concat(const String& str1,
9231 const String& str2,
9232 Heap::Space space);
9233 static TwoByteStringPtr ConcatAll(const Array& strings,
9234 intptr_t start,
9235 intptr_t end,
9236 intptr_t len,
9237 Heap::Space space);
9238
9239 static TwoByteStringPtr Transform(int32_t (*mapping)(int32_t ch),
9240 const String& str,
9241 Heap::Space space);
9242
9243 static TwoByteStringPtr null() {
9244 return static_cast<TwoByteStringPtr>(Object::null());
9245 }
9246
9247 static const ClassId kClassId = kTwoByteStringCid;
9248
9249 private:
9250 static TwoByteStringPtr raw(const String& str) {
9251 return static_cast<TwoByteStringPtr>(str.raw());
9252 }
9253
9254 static const TwoByteStringLayout* raw_ptr(const String& str) {
9255 return reinterpret_cast<const TwoByteStringLayout*>(str.raw_ptr());
9256 }
9257
9258 static uint16_t* CharAddr(const String& str, intptr_t index) {
9259 ASSERT((index >= 0) && (index < str.Length()));
9260 ASSERT(str.IsTwoByteString());
9261 return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[index];
9262 }
9263
9264 // Use this instead of CharAddr(0). It will not assert that the index is <
9265 // length.
9266 static uint16_t* DataStart(const String& str) {
9267 ASSERT(str.IsTwoByteString());
9268 return &str.UnsafeMutableNonPointer(raw_ptr(str)->data())[0];
9269 }
9270
9271 static TwoByteStringPtr ReadFrom(SnapshotReader* reader,
9272 intptr_t object_id,
9273 intptr_t tags,
9274 Snapshot::Kind kind,
9275 bool as_reference);
9276
9277 friend class Class;
9278 friend class String;
9279 friend class SnapshotReader;
9280 friend class Symbols;
9281};
9282
9283class ExternalOneByteString : public AllStatic {
9284 public:
9285 static uint16_t CharAt(const String& str, intptr_t index) {
9286 ASSERT(str.IsExternalOneByteString());
9287 NoSafepointScope no_safepoint;
9288 return ExternalOneByteString::CharAt(
9289 static_cast<ExternalOneByteStringPtr>(str.raw()), index);
9290 }
9291
9292 static uint16_t CharAt(ExternalOneByteStringPtr str, intptr_t index) {
9293 ASSERT(index >= 0 && index < String::LengthOf(str));
9294 return str->ptr()->external_data_[index];
9295 }
9296
9297 static void* GetPeer(const String& str) { return raw_ptr(str)->peer_; }
9298
9299 static intptr_t external_data_offset() {
9300 return OFFSET_OF(ExternalOneByteStringLayout, external_data_);
9301 }
9302
  // We use the same maximum number of elements for all strings.
9304 static const intptr_t kBytesPerElement = 1;
9305 static const intptr_t kMaxElements = String::kMaxElements;
9306
9307 static intptr_t InstanceSize() {
9308 return String::RoundedAllocationSize(sizeof(ExternalOneByteStringLayout));
9309 }
9310
9311 static ExternalOneByteStringPtr New(
9312 const uint8_t* characters,
9313 intptr_t len,
9314 void* peer,
9315 intptr_t external_allocation_size,
9316 Dart_WeakPersistentHandleFinalizer callback,
9317 Heap::Space space);
9318
9319 static ExternalOneByteStringPtr null() {
9320 return static_cast<ExternalOneByteStringPtr>(Object::null());
9321 }
9322
9323 static OneByteStringPtr EscapeSpecialCharacters(const String& str);
9324 static OneByteStringPtr EncodeIRI(const String& str);
9325 static OneByteStringPtr DecodeIRI(const String& str);
9326
9327 static const ClassId kClassId = kExternalOneByteStringCid;
9328
9329 private:
9330 static ExternalOneByteStringPtr raw(const String& str) {
9331 return static_cast<ExternalOneByteStringPtr>(str.raw());
9332 }
9333
9334 static const ExternalOneByteStringLayout* raw_ptr(const String& str) {
9335 return reinterpret_cast<const ExternalOneByteStringLayout*>(str.raw_ptr());
9336 }
9337
9338 static const uint8_t* CharAddr(const String& str, intptr_t index) {
9339 ASSERT((index >= 0) && (index < str.Length()));
9340 ASSERT(str.IsExternalOneByteString());
9341 return &(raw_ptr(str)->external_data_[index]);
9342 }
9343
9344 static const uint8_t* DataStart(const String& str) {
9345 ASSERT(str.IsExternalOneByteString());
9346 return raw_ptr(str)->external_data_;
9347 }
9348
9349 static void SetExternalData(const String& str,
9350 const uint8_t* data,
9351 void* peer) {
9352 ASSERT(str.IsExternalOneByteString());
9353 ASSERT(
9354 !Isolate::Current()->heap()->Contains(reinterpret_cast<uword>(data)));
9355 str.StoreNonPointer(&raw_ptr(str)->external_data_, data);
9356 str.StoreNonPointer(&raw_ptr(str)->peer_, peer);
9357 }
9358
9359 static void Finalize(void* isolate_callback_data,
9360 Dart_WeakPersistentHandle handle,
9361 void* peer);
9362
9363 static ExternalOneByteStringPtr ReadFrom(SnapshotReader* reader,
9364 intptr_t object_id,
9365 intptr_t tags,
9366 Snapshot::Kind kind,
9367 bool as_reference);
9368
9369 static intptr_t NextFieldOffset() {
9370 // Indicates this class cannot be extended by dart code.
9371 return -kWordSize;
9372 }
9373
9374 friend class Class;
9375 friend class String;
9376 friend class SnapshotReader;
9377 friend class Symbols;
9378 friend class Utf8;
9379};
9380
9381class ExternalTwoByteString : public AllStatic {
9382 public:
9383 static uint16_t CharAt(const String& str, intptr_t index) {
9384 ASSERT(str.IsExternalTwoByteString());
9385 NoSafepointScope no_safepoint;
9386 return ExternalTwoByteString::CharAt(
9387 static_cast<ExternalTwoByteStringPtr>(str.raw()), index);
9388 }
9389
9390 static uint16_t CharAt(ExternalTwoByteStringPtr str, intptr_t index) {
9391 ASSERT(index >= 0 && index < String::LengthOf(str));
9392 return str->ptr()->external_data_[index];
9393 }
9394
9395 static void* GetPeer(const String& str) { return raw_ptr(str)->peer_; }
9396
9397 static intptr_t external_data_offset() {
9398 return OFFSET_OF(ExternalTwoByteStringLayout, external_data_);
9399 }
9400
  // We use the same maximum number of elements for all strings.
9402 static const intptr_t kBytesPerElement = 2;
9403 static const intptr_t kMaxElements = String::kMaxElements;
9404
9405 static intptr_t InstanceSize() {
9406 return String::RoundedAllocationSize(sizeof(ExternalTwoByteStringLayout));
9407 }
9408
9409 static ExternalTwoByteStringPtr New(
9410 const uint16_t* characters,
9411 intptr_t len,
9412 void* peer,
9413 intptr_t external_allocation_size,
9414 Dart_WeakPersistentHandleFinalizer callback,
9415 Heap::Space space = Heap::kNew);
9416
9417 static ExternalTwoByteStringPtr null() {
9418 return static_cast<ExternalTwoByteStringPtr>(Object::null());
9419 }
9420
9421 static const ClassId kClassId = kExternalTwoByteStringCid;
9422
9423 private:
9424 static ExternalTwoByteStringPtr raw(const String& str) {
9425 return static_cast<ExternalTwoByteStringPtr>(str.raw());
9426 }
9427
9428 static const ExternalTwoByteStringLayout* raw_ptr(const String& str) {
9429 return reinterpret_cast<const ExternalTwoByteStringLayout*>(str.raw_ptr());
9430 }
9431
9432 static const uint16_t* CharAddr(const String& str, intptr_t index) {
9433 ASSERT((index >= 0) && (index < str.Length()));
9434 ASSERT(str.IsExternalTwoByteString());
9435 return &(raw_ptr(str)->external_data_[index]);
9436 }
9437
9438 static const uint16_t* DataStart(const String& str) {
9439 ASSERT(str.IsExternalTwoByteString());
9440 return raw_ptr(str)->external_data_;
9441 }
9442
9443 static void SetExternalData(const String& str,
9444 const uint16_t* data,
9445 void* peer) {
9446 ASSERT(str.IsExternalTwoByteString());
9447 ASSERT(
9448 !Isolate::Current()->heap()->Contains(reinterpret_cast<uword>(data)));
9449 str.StoreNonPointer(&raw_ptr(str)->external_data_, data);
9450 str.StoreNonPointer(&raw_ptr(str)->peer_, peer);
9451 }
9452
9453 static void Finalize(void* isolate_callback_data,
9454 Dart_WeakPersistentHandle handle,
9455 void* peer);
9456
9457 static ExternalTwoByteStringPtr ReadFrom(SnapshotReader* reader,
9458 intptr_t object_id,
9459 intptr_t tags,
9460 Snapshot::Kind kind,
9461 bool as_reference);
9462
9463 static intptr_t NextFieldOffset() {
9464 // Indicates this class cannot be extended by dart code.
9465 return -kWordSize;
9466 }
9467
9468 friend class Class;
9469 friend class String;
9470 friend class SnapshotReader;
9471 friend class Symbols;
9472};
9473
9474// Class Bool implements Dart core class bool.
9475class Bool : public Instance {
9476 public:
9477 bool value() const { return raw_ptr()->value_; }
9478
9479 static intptr_t InstanceSize() {
9480 return RoundedAllocationSize(sizeof(BoolLayout));
9481 }
9482
9483 static const Bool& True() { return Object::bool_true(); }
9484
9485 static const Bool& False() { return Object::bool_false(); }
9486
9487 static const Bool& Get(bool value) {
9488 return value ? Bool::True() : Bool::False();
9489 }
9490
9491 virtual uint32_t CanonicalizeHash() const {
9492 return raw() == True().raw() ? 1231 : 1237;
9493 }
9494
9495 private:
9496 void set_value(bool value) const {
9497 StoreNonPointer(&raw_ptr()->value_, value);
9498 }
9499
9500 // New should only be called to initialize the two legal bool values.
9501 static BoolPtr New(bool value);
9502
9503 FINAL_HEAP_OBJECT_IMPLEMENTATION(Bool, Instance);
9504 friend class Class;
9505 friend class Object; // To initialize the true and false values.
9506};
9507
9508class Array : public Instance {
9509 public:
9510 // We use 30 bits for the hash code so hashes in a snapshot taken on a
9511 // 64-bit architecture stay in Smi range when loaded on a 32-bit
9512 // architecture.
9513 static const intptr_t kHashBits = 30;
9514
9515 // Returns `true` if we use card marking for arrays of length [array_length].
9516 static bool UseCardMarkingForAllocation(const intptr_t array_length) {
9517 return Array::InstanceSize(array_length) > Heap::kNewAllocatableSize;
9518 }
9519
9520 intptr_t Length() const { return LengthOf(raw()); }
9521 static intptr_t LengthOf(const ArrayPtr array) {
9522 return Smi::Value(array->ptr()->length_);
9523 }
9524
9525 static intptr_t length_offset() { return OFFSET_OF(ArrayLayout, length_); }
9526 static intptr_t data_offset() {
9527 return OFFSET_OF_RETURNED_VALUE(ArrayLayout, data);
9528 }
9529 static intptr_t element_offset(intptr_t index) {
9530 return OFFSET_OF_RETURNED_VALUE(ArrayLayout, data) + kWordSize * index;
9531 }
9532
9533 struct ArrayTraits {
9534 static intptr_t elements_start_offset() { return Array::data_offset(); }
9535
9536 static constexpr intptr_t kElementSize = kWordSize;
9537 };
9538
9539 static bool Equals(ArrayPtr a, ArrayPtr b) {
9540 if (a == b) return true;
9541 if (a->IsRawNull() || b->IsRawNull()) return false;
9542 if (a->ptr()->length_ != b->ptr()->length_) return false;
9543 if (a->ptr()->type_arguments_ != b->ptr()->type_arguments_) return false;
9544 const intptr_t length = LengthOf(a);
9545 return memcmp(a->ptr()->data(), b->ptr()->data(), kWordSize * length) == 0;
9546 }
9547
9548 static ObjectPtr* DataOf(ArrayPtr array) { return array->ptr()->data(); }
9549
9550 template <std::memory_order order = std::memory_order_relaxed>
9551 ObjectPtr At(intptr_t index) const {
9552 return LoadPointer<ObjectPtr, order>(ObjectAddr(index));
9553 }
9554 template <std::memory_order order = std::memory_order_relaxed>
9555 void SetAt(intptr_t index, const Object& value) const {
9556 // TODO(iposva): Add storing NoSafepointScope.
9557 StoreArrayPointer<ObjectPtr, order>(ObjectAddr(index), value.raw());
9558 }
9559
9560 // Access to the array with acquire release semantics.
9561 ObjectPtr AtAcquire(intptr_t index) const {
9562 return At<std::memory_order_acquire>(index);
9563 }
9564 void SetAtRelease(intptr_t index, const Object& value) const {
9565 SetAt<std::memory_order_release>(index, value);
9566 }
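  // A hedged sketch of the intended publication pattern (handle names are
  // illustrative): the writer releases, the reader acquires, so the reader
  // observes a fully initialized element.
  //
  //   array.SetAtRelease(0, initialized_object);  // writer
  //   obj = array.AtAcquire(0);                   // reader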
9567
9568 bool IsImmutable() const { return raw()->GetClassId() == kImmutableArrayCid; }
9569
9570 virtual TypeArgumentsPtr GetTypeArguments() const {
9571 return raw_ptr()->type_arguments_;
9572 }
9573 virtual void SetTypeArguments(const TypeArguments& value) const {
9574 // An Array is raw or takes one type argument. However, its type argument
9575 // vector may be longer than 1 due to a type optimization reusing the type
9576 // argument vector of the instantiator.
9577 ASSERT(value.IsNull() ||
9578 ((value.Length() >= 1) &&
9579 value.IsInstantiated() /*&& value.IsCanonical()*/));
9580 // TODO(asiva): Values read from a message snapshot are not properly marked
9581 // as canonical. See for example tests/isolate/mandel_isolate_test.dart.
9582 StoreArrayPointer(&raw_ptr()->type_arguments_, value.raw());
9583 }
9584
9585 virtual bool CanonicalizeEquals(const Instance& other) const;
9586 virtual uint32_t CanonicalizeHash() const;
9587
9588 static const intptr_t kBytesPerElement = kWordSize;
9589 static const intptr_t kMaxElements = kSmiMax / kBytesPerElement;
9590 static const intptr_t kMaxNewSpaceElements =
9591 (Heap::kNewAllocatableSize - sizeof(ArrayLayout)) / kBytesPerElement;
9592
9593 static intptr_t type_arguments_offset() {
9594 return OFFSET_OF(ArrayLayout, type_arguments_);
9595 }
9596
9597 static bool IsValidLength(intptr_t len) {
9598 return 0 <= len && len <= kMaxElements;
9599 }
9600
9601 static intptr_t InstanceSize() {
9602 ASSERT(sizeof(ArrayLayout) == OFFSET_OF_RETURNED_VALUE(ArrayLayout, data));
9603 return 0;
9604 }
9605
9606 static intptr_t InstanceSize(intptr_t len) {
    // Ensure that variable-length data does not add to the object length.
9608 ASSERT(sizeof(ArrayLayout) == (sizeof(InstanceLayout) + (2 * kWordSize)));
9609 ASSERT(IsValidLength(len));
9610 return RoundedAllocationSize(sizeof(ArrayLayout) +
9611 (len * kBytesPerElement));
9612 }
9613
9614 // Returns true if all elements are OK for canonicalization.
9615 virtual bool CheckAndCanonicalizeFields(Thread* thread,
9616 const char** error_str) const;
9617
9618 // Make the array immutable to Dart code by switching the class pointer
9619 // to ImmutableArray.
9620 void MakeImmutable() const;
9621
9622 static ArrayPtr New(intptr_t len, Heap::Space space = Heap::kNew);
9623 static ArrayPtr New(intptr_t len,
9624 const AbstractType& element_type,
9625 Heap::Space space = Heap::kNew);
9626
9627 // Creates and returns a new array with 'new_length'. Copies all elements from
9628 // 'source' to the new array. 'new_length' must be greater than or equal to
9629 // 'source.Length()'. 'source' can be null.
9630 static ArrayPtr Grow(const Array& source,
9631 intptr_t new_length,
9632 Heap::Space space = Heap::kNew);
9633
  // Truncates the array to a given length. 'new_length' must be less than
  // or equal to the current length. The remaining unused tail of the array
  // is overwritten with a filler (an Array or a plain Object) so that the
  // heap stays traversable during garbage collection.
9638 void Truncate(intptr_t new_length) const;
9639
9640 // Return an Array object that contains all the elements currently present
9641 // in the specified Growable Object Array. This is done by first truncating
9642 // the Growable Object Array's backing array to the currently used size and
9643 // returning the truncated backing array.
9644 // The backing array of the original Growable Object Array is
9645 // set to an empty array.
9646 // If the unique parameter is false, the function is allowed to return
9647 // a shared Array instance.
9648 static ArrayPtr MakeFixedLength(const GrowableObjectArray& growable_array,
9649 bool unique = false);
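  // Illustrative sketch ('growable' is a GrowableObjectArray handle built
  // elsewhere): the used portion becomes a plain fixed-length Array and the
  // growable array's backing store is replaced by an empty array, as
  // described above.
  //
  //   const Array& fixed = Array::Handle(Array::MakeFixedLength(growable));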
9650
9651 ArrayPtr Slice(intptr_t start, intptr_t count, bool with_type_argument) const;
9652
9653 protected:
9654 static ArrayPtr New(intptr_t class_id,
9655 intptr_t len,
9656 Heap::Space space = Heap::kNew);
9657
9658 private:
9659 ObjectPtr const* ObjectAddr(intptr_t index) const {
9660 // TODO(iposva): Determine if we should throw an exception here.
9661 ASSERT((index >= 0) && (index < Length()));
9662 return &raw_ptr()->data()[index];
9663 }
9664
9665 void SetLength(intptr_t value) const {
9666 StoreSmi(&raw_ptr()->length_, Smi::New(value));
9667 }
9668 void SetLengthIgnoreRace(intptr_t value) const {
9669 StoreSmiIgnoreRace(&raw_ptr()->length_, Smi::New(value));
9670 }
9671
9672 template <typename type, std::memory_order order = std::memory_order_relaxed>
9673 void StoreArrayPointer(type const* addr, type value) const {
9674 raw()->ptr()->StoreArrayPointer<type, order>(addr, value);
9675 }
9676
9677 // Store a range of pointers [from, from + count) into [to, to + count).
9678 // TODO(koda): Use this to fix Object::Clone's broken store buffer logic.
9679 void StoreArrayPointers(ObjectPtr const* to,
9680 ObjectPtr const* from,
9681 intptr_t count) {
9682 ASSERT(Contains(reinterpret_cast<uword>(to)));
9683 if (raw()->IsNewObject()) {
9684 memmove(const_cast<ObjectPtr*>(to), from, count * kWordSize);
9685 } else {
9686 for (intptr_t i = 0; i < count; ++i) {
9687 StoreArrayPointer(&to[i], from[i]);
9688 }
9689 }
9690 }
9691
9692 FINAL_HEAP_OBJECT_IMPLEMENTATION(Array, Instance);
9693 friend class Class;
9694 friend class ImmutableArray;
9695 friend class Interpreter;
9696 friend class Object;
9697 friend class String;
9698};
9699
9700class ImmutableArray : public AllStatic {
9701 public:
9702 static ImmutableArrayPtr New(intptr_t len, Heap::Space space = Heap::kNew);
9703
9704 static ImmutableArrayPtr ReadFrom(SnapshotReader* reader,
9705 intptr_t object_id,
9706 intptr_t tags,
9707 Snapshot::Kind kind,
9708 bool as_reference);
9709
9710 static const ClassId kClassId = kImmutableArrayCid;
9711
9712 static intptr_t InstanceSize() { return Array::InstanceSize(); }
9713
9714 static intptr_t InstanceSize(intptr_t len) {
9715 return Array::InstanceSize(len);
9716 }
9717
9718 private:
9719 static intptr_t NextFieldOffset() {
9720 // Indicates this class cannot be extended by dart code.
9721 return -kWordSize;
9722 }
9723
9724 static ImmutableArrayPtr raw(const Array& array) {
9725 return static_cast<ImmutableArrayPtr>(array.raw());
9726 }
9727
9728 friend class Class;
9729};
9730
9731class GrowableObjectArray : public Instance {
9732 public:
9733 intptr_t Capacity() const {
9734 NoSafepointScope no_safepoint;
9735 ASSERT(!IsNull());
9736 return Smi::Value(DataArray()->length_);
9737 }
9738 intptr_t Length() const {
9739 ASSERT(!IsNull());
9740 return Smi::Value(raw_ptr()->length_);
9741 }
9742 void SetLength(intptr_t value) const {
9743 // This is only safe because we create a new Smi, which does not cause
9744 // heap allocation.
9745 StoreSmi(&raw_ptr()->length_, Smi::New(value));
9746 }
9747
9748 ArrayPtr data() const { return raw_ptr()->data_; }
9749 void SetData(const Array& value) const {
9750 StorePointer(&raw_ptr()->data_, value.raw());
9751 }
9752
9753 ObjectPtr At(intptr_t index) const {
9754 NoSafepointScope no_safepoint;
9755 ASSERT(!IsNull());
9756 ASSERT(index < Length());
9757 return *ObjectAddr(index);
9758 }
9759 void SetAt(intptr_t index, const Object& value) const {
9760 ASSERT(!IsNull());
9761 ASSERT(index < Length());
9762
9763 // TODO(iposva): Add storing NoSafepointScope.
9764 data()->ptr()->StoreArrayPointer(ObjectAddr(index), value.raw());
9765 }
9766
9767 void Add(const Object& value, Heap::Space space = Heap::kNew) const;
9768
9769 void Grow(intptr_t new_capacity, Heap::Space space = Heap::kNew) const;
9770 ObjectPtr RemoveLast() const;
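  // A small sketch of the length/capacity distinction (illustrative only):
  // Add() bumps Length() and grows the backing Array on demand, while
  // Capacity() reports the backing Array's current size.
  //
  //   const GrowableObjectArray& list =
  //       GrowableObjectArray::Handle(GrowableObjectArray::New(4));
  //   list.Add(Smi::Handle(Smi::New(7)));
  //   ASSERT(list.Length() == 1);
  //   ASSERT(list.Capacity() >= 1);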
9771
9772 virtual TypeArgumentsPtr GetTypeArguments() const {
9773 return raw_ptr()->type_arguments_;
9774 }
9775 virtual void SetTypeArguments(const TypeArguments& value) const {
9776 // A GrowableObjectArray is raw or takes one type argument. However, its
9777 // type argument vector may be longer than 1 due to a type optimization
9778 // reusing the type argument vector of the instantiator.
9779 ASSERT(value.IsNull() || ((value.Length() >= 1) && value.IsInstantiated() &&
9780 value.IsCanonical()));
9781 StorePointer(&raw_ptr()->type_arguments_, value.raw());
9782 }
9783
9784 // We don't expect a growable object array to be canonicalized.
9785 virtual bool CanonicalizeEquals(const Instance& other) const {
9786 UNREACHABLE();
9787 return false;
9788 }
9789
9790 // We don't expect a growable object array to be canonicalized.
9791 virtual InstancePtr CheckAndCanonicalize(Thread* thread,
9792 const char** error_str) const {
9793 UNREACHABLE();
9794 return Instance::null();
9795 }
9796
9797 static intptr_t type_arguments_offset() {
9798 return OFFSET_OF(GrowableObjectArrayLayout, type_arguments_);
9799 }
9800
9801 static intptr_t length_offset() {
9802 return OFFSET_OF(GrowableObjectArrayLayout, length_);
9803 }
9804 static intptr_t data_offset() {
9805 return OFFSET_OF(GrowableObjectArrayLayout, data_);
9806 }
9807
9808 static intptr_t InstanceSize() {
9809 return RoundedAllocationSize(sizeof(GrowableObjectArrayLayout));
9810 }
9811
9812 static GrowableObjectArrayPtr New(Heap::Space space = Heap::kNew) {
9813 return New(kDefaultInitialCapacity, space);
9814 }
9815 static GrowableObjectArrayPtr New(intptr_t capacity,
9816 Heap::Space space = Heap::kNew);
9817 static GrowableObjectArrayPtr New(const Array& array,
9818 Heap::Space space = Heap::kNew);
9819
9820 static SmiPtr NoSafepointLength(const GrowableObjectArrayPtr array) {
9821 return array->ptr()->length_;
9822 }
9823
9824 static ArrayPtr NoSafepointData(const GrowableObjectArrayPtr array) {
9825 return array->ptr()->data_;
9826 }
9827
9828 private:
9829 ArrayLayout* DataArray() const { return data()->ptr(); }
9830 ObjectPtr* ObjectAddr(intptr_t index) const {
9831 ASSERT((index >= 0) && (index < Length()));
9832 return &(DataArray()->data()[index]);
9833 }
9834
9835 static const int kDefaultInitialCapacity = 0;
9836
9837 FINAL_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray, Instance);
9838 friend class Array;
9839 friend class Class;
9840};
9841
9842class Float32x4 : public Instance {
9843 public:
9844 static Float32x4Ptr New(float value0,
9845 float value1,
9846 float value2,
9847 float value3,
9848 Heap::Space space = Heap::kNew);
9849 static Float32x4Ptr New(simd128_value_t value,
9850 Heap::Space space = Heap::kNew);
9851
9852 float x() const;
9853 float y() const;
9854 float z() const;
9855 float w() const;
9856
9857 void set_x(float x) const;
9858 void set_y(float y) const;
9859 void set_z(float z) const;
9860 void set_w(float w) const;
9861
9862 simd128_value_t value() const;
9863 void set_value(simd128_value_t value) const;
9864
9865 static intptr_t InstanceSize() {
9866 return RoundedAllocationSize(sizeof(Float32x4Layout));
9867 }
9868
9869 static intptr_t value_offset() { return OFFSET_OF(Float32x4Layout, value_); }
9870
9871 private:
9872 FINAL_HEAP_OBJECT_IMPLEMENTATION(Float32x4, Instance);
9873 friend class Class;
9874};
9875
9876class Int32x4 : public Instance {
9877 public:
9878 static Int32x4Ptr New(int32_t value0,
9879 int32_t value1,
9880 int32_t value2,
9881 int32_t value3,
9882 Heap::Space space = Heap::kNew);
9883 static Int32x4Ptr New(simd128_value_t value, Heap::Space space = Heap::kNew);
9884
9885 int32_t x() const;
9886 int32_t y() const;
9887 int32_t z() const;
9888 int32_t w() const;
9889
9890 void set_x(int32_t x) const;
9891 void set_y(int32_t y) const;
9892 void set_z(int32_t z) const;
9893 void set_w(int32_t w) const;
9894
9895 simd128_value_t value() const;
9896 void set_value(simd128_value_t value) const;
9897
9898 static intptr_t InstanceSize() {
9899 return RoundedAllocationSize(sizeof(Int32x4Layout));
9900 }
9901
9902 static intptr_t value_offset() { return OFFSET_OF(Int32x4Layout, value_); }
9903
9904 private:
9905 FINAL_HEAP_OBJECT_IMPLEMENTATION(Int32x4, Instance);
9906 friend class Class;
9907};
9908
9909class Float64x2 : public Instance {
9910 public:
9911 static Float64x2Ptr New(double value0,
9912 double value1,
9913 Heap::Space space = Heap::kNew);
9914 static Float64x2Ptr New(simd128_value_t value,
9915 Heap::Space space = Heap::kNew);
9916
9917 double x() const;
9918 double y() const;
9919
9920 void set_x(double x) const;
9921 void set_y(double y) const;
9922
9923 simd128_value_t value() const;
9924 void set_value(simd128_value_t value) const;
9925
9926 static intptr_t InstanceSize() {
9927 return RoundedAllocationSize(sizeof(Float64x2Layout));
9928 }
9929
9930 static intptr_t value_offset() { return OFFSET_OF(Float64x2Layout, value_); }
9931
9932 private:
9933 FINAL_HEAP_OBJECT_IMPLEMENTATION(Float64x2, Instance);
9934 friend class Class;
9935};
9936
9937class PointerBase : public Instance {
9938 public:
9939 static intptr_t data_field_offset() {
9940 return OFFSET_OF(PointerBaseLayout, data_);
9941 }
9942};
9943
9944class TypedDataBase : public PointerBase {
9945 public:
9946 static intptr_t length_offset() {
9947 return OFFSET_OF(TypedDataBaseLayout, length_);
9948 }
9949
9950 SmiPtr length() const { return raw_ptr()->length_; }
9951
9952 intptr_t Length() const {
9953 ASSERT(!IsNull());
9954 return Smi::Value(raw_ptr()->length_);
9955 }
9956
9957 intptr_t LengthInBytes() const {
9958 return ElementSizeInBytes(raw()->GetClassId()) * Length();
9959 }
9960
9961 TypedDataElementType ElementType() const {
9962 return ElementType(raw()->GetClassId());
9963 }
9964
9965 intptr_t ElementSizeInBytes() const {
9966 return element_size(ElementType(raw()->GetClassId()));
9967 }
9968
9969 static intptr_t ElementSizeInBytes(classid_t cid) {
9970 return element_size(ElementType(cid));
9971 }
9972
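  // Class ids for typed data come in groups of three per element type
  // (internal, view, and external variants), so the distance from
  // kTypedDataInt8ArrayCid is divided by 3 below to recover the element type.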
9973 static TypedDataElementType ElementType(classid_t cid) {
9974 if (cid == kByteDataViewCid) {
9975 return kUint8ArrayElement;
9976 } else if (IsTypedDataClassId(cid)) {
9977 const intptr_t index =
9978 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderInternal) / 3;
9979 return static_cast<TypedDataElementType>(index);
9980 } else if (IsTypedDataViewClassId(cid)) {
9981 const intptr_t index =
9982 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderView) / 3;
9983 return static_cast<TypedDataElementType>(index);
9984 } else {
9985 ASSERT(IsExternalTypedDataClassId(cid));
9986 const intptr_t index =
9987 (cid - kTypedDataInt8ArrayCid - kTypedDataCidRemainderExternal) / 3;
9988 return static_cast<TypedDataElementType>(index);
9989 }
9990 }
9991
9992 void* DataAddr(intptr_t byte_offset) const {
9993 ASSERT((byte_offset == 0) ||
9994 ((byte_offset > 0) && (byte_offset < LengthInBytes())));
9995 return reinterpret_cast<void*>(Validate(raw_ptr()->data_) + byte_offset);
9996 }
9997
9998 protected:
9999 void SetLength(intptr_t value) const {
10000 ASSERT(value <= Smi::kMaxValue);
10001 StoreSmi(&raw_ptr()->length_, Smi::New(value));
10002 }
10003
10004 virtual uint8_t* Validate(uint8_t* data) const {
10005 return UnsafeMutableNonPointer(data);
10006 }
10007
10008 private:
10009 friend class Class;
10010
10011 static intptr_t element_size(intptr_t index) {
10012 ASSERT(0 <= index && index < kNumElementSizes);
10013 intptr_t size = element_size_table[index];
10014 ASSERT(size != 0);
10015 return size;
10016 }
10017 static const intptr_t kNumElementSizes =
10018 (kTypedDataFloat64x2ArrayCid - kTypedDataInt8ArrayCid) / 3 + 1;
10019 static const intptr_t element_size_table[kNumElementSizes];
10020
10021 HEAP_OBJECT_IMPLEMENTATION(TypedDataBase, PointerBase);
10022};
10023
10024class TypedData : public TypedDataBase {
10025 public:
10026 // We use 30 bits for the hash code so hashes in a snapshot taken on a
10027 // 64-bit architecture stay in Smi range when loaded on a 32-bit
10028 // architecture.
10029 static const intptr_t kHashBits = 30;
10030
10031 virtual bool CanonicalizeEquals(const Instance& other) const;
10032 virtual uint32_t CanonicalizeHash() const;
10033
10034#define TYPED_GETTER_SETTER(name, type) \
10035 type Get##name(intptr_t byte_offset) const { \
10036 ASSERT((byte_offset >= 0) && \
10037 (byte_offset + static_cast<intptr_t>(sizeof(type)) - 1) < \
10038 LengthInBytes()); \
10039 return LoadUnaligned(ReadOnlyDataAddr<type>(byte_offset)); \
10040 } \
10041 void Set##name(intptr_t byte_offset, type value) const { \
10042 NoSafepointScope no_safepoint; \
10043 StoreUnaligned(reinterpret_cast<type*>(DataAddr(byte_offset)), value); \
10044 }
10045
10046 TYPED_GETTER_SETTER(Int8, int8_t)
10047 TYPED_GETTER_SETTER(Uint8, uint8_t)
10048 TYPED_GETTER_SETTER(Int16, int16_t)
10049 TYPED_GETTER_SETTER(Uint16, uint16_t)
10050 TYPED_GETTER_SETTER(Int32, int32_t)
10051 TYPED_GETTER_SETTER(Uint32, uint32_t)
10052 TYPED_GETTER_SETTER(Int64, int64_t)
10053 TYPED_GETTER_SETTER(Uint64, uint64_t)
10054 TYPED_GETTER_SETTER(Float32, float)
10055 TYPED_GETTER_SETTER(Float64, double)
10056 TYPED_GETTER_SETTER(Float32x4, simd128_value_t)
10057 TYPED_GETTER_SETTER(Int32x4, simd128_value_t)
10058 TYPED_GETTER_SETTER(Float64x2, simd128_value_t)
10059
10060#undef TYPED_GETTER_SETTER
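  // The macro above expands into pairs such as GetUint8/SetUint8. A hedged
  // usage sketch (offsets are in bytes, not elements):
  //
  //   const TypedData& bytes =
  //       TypedData::Handle(TypedData::New(kTypedDataUint8ArrayCid, 16));
  //   bytes.SetUint8(0, 0x2a);
  //   ASSERT(bytes.GetUint8(0) == 0x2a);
  //   bytes.SetUint32(1, 0xdeadbeef);  // unaligned offsets are supported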
10061
10062 static intptr_t data_offset() { return TypedDataLayout::payload_offset(); }
10063
10064 static intptr_t InstanceSize() {
10065 ASSERT(sizeof(TypedDataLayout) ==
10066 OFFSET_OF_RETURNED_VALUE(TypedDataLayout, internal_data));
10067 return 0;
10068 }
10069
10070 static intptr_t InstanceSize(intptr_t lengthInBytes) {
10071 ASSERT(0 <= lengthInBytes && lengthInBytes <= kSmiMax);
10072 return RoundedAllocationSize(sizeof(TypedDataLayout) + lengthInBytes);
10073 }
10074
10075 static intptr_t MaxElements(intptr_t class_id) {
10076 ASSERT(IsTypedDataClassId(class_id));
10077 return (kSmiMax / ElementSizeInBytes(class_id));
10078 }
10079
10080 static intptr_t MaxNewSpaceElements(intptr_t class_id) {
10081 ASSERT(IsTypedDataClassId(class_id));
10082 return (Heap::kNewAllocatableSize - sizeof(TypedDataLayout)) /
10083 ElementSizeInBytes(class_id);
10084 }
10085
10086 static TypedDataPtr New(intptr_t class_id,
10087 intptr_t len,
10088 Heap::Space space = Heap::kNew);
10089
10090 template <typename DstType, typename SrcType>
10091 static void Copy(const DstType& dst,
10092 intptr_t dst_offset_in_bytes,
10093 const SrcType& src,
10094 intptr_t src_offset_in_bytes,
10095 intptr_t length_in_bytes) {
10096 ASSERT(Utils::RangeCheck(src_offset_in_bytes, length_in_bytes,
10097 src.LengthInBytes()));
10098 ASSERT(Utils::RangeCheck(dst_offset_in_bytes, length_in_bytes,
10099 dst.LengthInBytes()));
10100 {
10101 NoSafepointScope no_safepoint;
10102 if (length_in_bytes > 0) {
10103 memmove(dst.DataAddr(dst_offset_in_bytes),
10104 src.DataAddr(src_offset_in_bytes), length_in_bytes);
10105 }
10106 }
10107 }
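  // Hedged usage sketch for Copy() ('dst_bytes' and 'src_bytes' are
  // hypothetical TypedData or ExternalTypedData handles):
  //
  //   TypedData::Copy(dst_bytes, /*dst_offset_in_bytes=*/0,
  //                   src_bytes, /*src_offset_in_bytes=*/0,
  //                   src_bytes.LengthInBytes());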
10108
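  // Like Copy(), but treats each source byte as signed and clamps negative
  // values to zero while copying, as required when the destination holds
  // clamped uint8 data.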
10109 template <typename DstType, typename SrcType>
10110 static void ClampedCopy(const DstType& dst,
10111 intptr_t dst_offset_in_bytes,
10112 const SrcType& src,
10113 intptr_t src_offset_in_bytes,
10114 intptr_t length_in_bytes) {
10115 ASSERT(Utils::RangeCheck(src_offset_in_bytes, length_in_bytes,
10116 src.LengthInBytes()));
10117 ASSERT(Utils::RangeCheck(dst_offset_in_bytes, length_in_bytes,
10118 dst.LengthInBytes()));
10119 {
10120 NoSafepointScope no_safepoint;
10121 if (length_in_bytes > 0) {
10122 uint8_t* dst_data =
10123 reinterpret_cast<uint8_t*>(dst.DataAddr(dst_offset_in_bytes));
10124 int8_t* src_data =
10125 reinterpret_cast<int8_t*>(src.DataAddr(src_offset_in_bytes));
10126 for (intptr_t ix = 0; ix < length_in_bytes; ix++) {
10127 int8_t v = *src_data;
10128 if (v < 0) v = 0;
10129 *dst_data = v;
10130 src_data++;
10131 dst_data++;
10132 }
10133 }
10134 }
10135 }
10136
10137 static bool IsTypedData(const Instance& obj) {
10138 ASSERT(!obj.IsNull());
10139 intptr_t cid = obj.raw()->GetClassId();
10140 return IsTypedDataClassId(cid);
10141 }
10142
10143 protected:
10144 void RecomputeDataField() { raw()->ptr()->RecomputeDataField(); }
10145
10146 private:
10147 // Provides const access to non-pointer, non-aligned data within the object.
10148 // Such access does not need a write barrier, but it is *not* GC-safe, since
10149 // the object might move.
10150 //
10151 // Therefore this method is private and the call-sites in this class need to
10152 // ensure the returned pointer does not escape.
10153 template <typename FieldType>
10154 const FieldType* ReadOnlyDataAddr(intptr_t byte_offset) const {
10155 return reinterpret_cast<const FieldType*>((raw_ptr()->data()) +
10156 byte_offset);
10157 }
10158
10159 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedData, TypedDataBase);
10160 friend class Class;
10161 friend class CompressedStackMapsIterator;
10162 friend class ExternalTypedData;
10163 friend class TypedDataView;
10164};
10165
10166class ExternalTypedData : public TypedDataBase {
10167 public:
10168 // Alignment of data when serializing ExternalTypedData in a clustered
10169 // snapshot. Should be independent of word size.
10170 static const int kDataSerializationAlignment = 8;
10171
10172#define TYPED_GETTER_SETTER(name, type) \
10173 type Get##name(intptr_t byte_offset) const { \
10174 return LoadUnaligned(reinterpret_cast<type*>(DataAddr(byte_offset))); \
10175 } \
10176 void Set##name(intptr_t byte_offset, type value) const { \
10177 StoreUnaligned(reinterpret_cast<type*>(DataAddr(byte_offset)), value); \
10178 }
10179 TYPED_GETTER_SETTER(Int8, int8_t)
10180 TYPED_GETTER_SETTER(Uint8, uint8_t)
10181 TYPED_GETTER_SETTER(Int16, int16_t)
10182 TYPED_GETTER_SETTER(Uint16, uint16_t)
10183 TYPED_GETTER_SETTER(Int32, int32_t)
10184 TYPED_GETTER_SETTER(Uint32, uint32_t)
10185 TYPED_GETTER_SETTER(Int64, int64_t)
10186 TYPED_GETTER_SETTER(Uint64, uint64_t)
10187 TYPED_GETTER_SETTER(Float32, float)
10188 TYPED_GETTER_SETTER(Float64, double)
10189 TYPED_GETTER_SETTER(Float32x4, simd128_value_t)
10190 TYPED_GETTER_SETTER(Int32x4, simd128_value_t)
10191 TYPED_GETTER_SETTER(Float64x2, simd128_value_t)
10192
10193#undef TYPED_GETTER_SETTER
10194
10195 FinalizablePersistentHandle* AddFinalizer(
10196 void* peer,
10197 Dart_WeakPersistentHandleFinalizer callback,
10198 intptr_t external_size) const;
10199
10200 static intptr_t data_offset() {
10201 return OFFSET_OF(ExternalTypedDataLayout, data_);
10202 }
10203
10204 static intptr_t InstanceSize() {
10205 return RoundedAllocationSize(sizeof(ExternalTypedDataLayout));
10206 }
10207
10208 static intptr_t MaxElements(intptr_t class_id) {
10209 ASSERT(IsExternalTypedDataClassId(class_id));
10210 return (kSmiMax / ElementSizeInBytes(class_id));
10211 }
10212
10213 static ExternalTypedDataPtr New(
10214 intptr_t class_id,
10215 uint8_t* data,
10216 intptr_t len,
10217 Heap::Space space = Heap::kNew,
10218 bool perform_eager_msan_initialization_check = true);
10219
10220 static ExternalTypedDataPtr NewFinalizeWithFree(uint8_t* data, intptr_t len);
10221
10222 static bool IsExternalTypedData(const Instance& obj) {
10223 ASSERT(!obj.IsNull());
10224 intptr_t cid = obj.raw()->GetClassId();
10225 return IsExternalTypedDataClassId(cid);
10226 }
10227
10228 protected:
10229 virtual uint8_t* Validate(uint8_t* data) const { return data; }
10230
10231 void SetLength(intptr_t value) const {
10232 ASSERT(value <= Smi::kMaxValue);
10233 StoreSmi(&raw_ptr()->length_, Smi::New(value));
10234 }
10235
10236 void SetData(uint8_t* data) const {
10237 ASSERT(
10238 !Isolate::Current()->heap()->Contains(reinterpret_cast<uword>(data)));
10239 StoreNonPointer(&raw_ptr()->data_, data);
10240 }
10241
10242 private:
10243 FINAL_HEAP_OBJECT_IMPLEMENTATION(ExternalTypedData, TypedDataBase);
10244 friend class Class;
10245};
10246
10247class TypedDataView : public TypedDataBase {
10248 public:
10249 static TypedDataViewPtr New(intptr_t class_id,
10250 Heap::Space space = Heap::kNew);
10251 static TypedDataViewPtr New(intptr_t class_id,
10252 const TypedDataBase& typed_data,
10253 intptr_t offset_in_bytes,
10254 intptr_t length,
10255 Heap::Space space = Heap::kNew);
10256
10257 static intptr_t InstanceSize() {
10258 return RoundedAllocationSize(sizeof(TypedDataViewLayout));
10259 }
10260
10261 static InstancePtr Data(const TypedDataView& view) {
10262 return view.typed_data();
10263 }
10264
10265 static SmiPtr OffsetInBytes(const TypedDataView& view) {
10266 return view.offset_in_bytes();
10267 }
10268
10269 static bool IsExternalTypedDataView(const TypedDataView& view_obj) {
10270 const auto& data = Instance::Handle(Data(view_obj));
10271 intptr_t cid = data.raw()->GetClassId();
10272 ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
10273 return IsExternalTypedDataClassId(cid);
10274 }
10275
10276 static intptr_t data_offset() {
10277 return OFFSET_OF(TypedDataViewLayout, typed_data_);
10278 }
10279
10280 static intptr_t offset_in_bytes_offset() {
10281 return OFFSET_OF(TypedDataViewLayout, offset_in_bytes_);
10282 }
10283
10284 InstancePtr typed_data() const { return raw_ptr()->typed_data_; }
10285
10286 void InitializeWith(const TypedDataBase& typed_data,
10287 intptr_t offset_in_bytes,
10288 intptr_t length) {
10289 const classid_t cid = typed_data.GetClassId();
10290 ASSERT(IsTypedDataClassId(cid) || IsExternalTypedDataClassId(cid));
10291 StorePointer(&raw_ptr()->typed_data_, typed_data.raw());
10292 StoreSmi(&raw_ptr()->length_, Smi::New(length));
10293 StoreSmi(&raw_ptr()->offset_in_bytes_, Smi::New(offset_in_bytes));
10294
10295 // Update the inner pointer.
10296 RecomputeDataField();
10297 }
10298
10299 SmiPtr offset_in_bytes() const { return raw_ptr()->offset_in_bytes_; }
10300
10301 protected:
10302 virtual uint8_t* Validate(uint8_t* data) const { return data; }
10303
10304 private:
10305 void RecomputeDataField() { raw()->ptr()->RecomputeDataField(); }
10306
10307 void Clear() {
10308 StoreSmi(&raw_ptr()->length_, Smi::New(0));
10309 StoreSmi(&raw_ptr()->offset_in_bytes_, Smi::New(0));
10310 StoreNonPointer(&raw_ptr()->data_, nullptr);
10311 StorePointer(&raw_ptr()->typed_data_,
10312 TypedDataBase::RawCast(Object::null()));
10313 }
10314
10315 FINAL_HEAP_OBJECT_IMPLEMENTATION(TypedDataView, TypedDataBase);
10316 friend class Class;
10317 friend class Object;
10318 friend class TypedDataViewDeserializationCluster;
10319};
10320
10321class ByteBuffer : public AllStatic {
10322 public:
10323 static InstancePtr Data(const Instance& view_obj) {
10324 ASSERT(!view_obj.IsNull());
10325 return *reinterpret_cast<InstancePtr const*>(view_obj.raw_ptr() +
10326 kDataOffset);
10327 }
10328
10329 static intptr_t NumberOfFields() { return kDataOffset; }
10330
10331 static intptr_t data_offset() { return kWordSize * kDataOffset; }
10332
10333 private:
10334 enum {
10335 kDataOffset = 1,
10336 };
10337};
10338
10339class Pointer : public Instance {
10340 public:
10341 static PointerPtr New(const AbstractType& type_arg,
10342 uword native_address,
10343 Heap::Space space = Heap::kNew);
10344
10345 static intptr_t InstanceSize() {
10346 return RoundedAllocationSize(sizeof(PointerLayout));
10347 }
10348
10349 static bool IsPointer(const Instance& obj);
10350
10351 size_t NativeAddress() const {
10352 return reinterpret_cast<size_t>(raw_ptr()->data_);
10353 }
10354
10355 void SetNativeAddress(size_t address) const {
10356 uint8_t* value = reinterpret_cast<uint8_t*>(address);
10357 StoreNonPointer(&raw_ptr()->data_, value);
10358 }
10359
10360 static intptr_t type_arguments_offset() {
10361 return OFFSET_OF(PointerLayout, type_arguments_);
10362 }
10363
10364 static intptr_t NextFieldOffset() { return sizeof(PointerLayout); }
10365
10366 static const intptr_t kNativeTypeArgPos = 0;
10367
10368 // Fetches the NativeType type argument.
10369 AbstractTypePtr type_argument() const {
10370 TypeArguments& type_args = TypeArguments::Handle(GetTypeArguments());
10371 return type_args.TypeAtNullSafe(Pointer::kNativeTypeArgPos);
10372 }
10373
10374 private:
10375 HEAP_OBJECT_IMPLEMENTATION(Pointer, Instance);
10376
10377 friend class Class;
10378};
10379
10380class DynamicLibrary : public Instance {
10381 public:
10382 static DynamicLibraryPtr New(void* handle, Heap::Space space = Heap::kNew);
10383
10384 static intptr_t InstanceSize() {
10385 return RoundedAllocationSize(sizeof(DynamicLibraryLayout));
10386 }
10387
10388 static bool IsDynamicLibrary(const Instance& obj) {
10389 ASSERT(!obj.IsNull());
10390 intptr_t cid = obj.raw()->GetClassId();
10391 return IsFfiDynamicLibraryClassId(cid);
10392 }
10393
10394 void* GetHandle() const {
10395 ASSERT(!IsNull());
10396 return raw_ptr()->handle_;
10397 }
10398
10399 void SetHandle(void* value) const {
10400 StoreNonPointer(&raw_ptr()->handle_, value);
10401 }
10402
10403 private:
10404 FINAL_HEAP_OBJECT_IMPLEMENTATION(DynamicLibrary, Instance);
10405
10406 friend class Class;
10407};
10408
10409// Corresponds to
10410// - "new Map()",
10411// - non-const map literals, and
10412// - the default constructor of LinkedHashMap in dart:collection.
10413class LinkedHashMap : public Instance {
10414 public:
10415 static intptr_t InstanceSize() {
10416 return RoundedAllocationSize(sizeof(LinkedHashMapLayout));
10417 }
10418
10419 // Allocates a map with some default capacity, just like "new Map()".
10420 static LinkedHashMapPtr NewDefault(Heap::Space space = Heap::kNew);
10421 static LinkedHashMapPtr New(const Array& data,
10422 const TypedData& index,
10423 intptr_t hash_mask,
10424 intptr_t used_data,
10425 intptr_t deleted_keys,
10426 Heap::Space space = Heap::kNew);
10427
10428 virtual TypeArgumentsPtr GetTypeArguments() const {
10429 return raw_ptr()->type_arguments_;
10430 }
10431 virtual void SetTypeArguments(const TypeArguments& value) const {
10432 ASSERT(value.IsNull() ||
10433 ((value.Length() >= 2) &&
10434 value.IsInstantiated() /*&& value.IsCanonical()*/));
10435 // TODO(asiva): Values read from a message snapshot are not properly marked
10436 // as canonical. See for example tests/isolate/message3_test.dart.
10437 StorePointer(&raw_ptr()->type_arguments_, value.raw());
10438 }
10439 static intptr_t type_arguments_offset() {
10440 return OFFSET_OF(LinkedHashMapLayout, type_arguments_);
10441 }
10442
10443 TypedDataPtr index() const { return raw_ptr()->index_; }
10444 void SetIndex(const TypedData& value) const {
10445 ASSERT(!value.IsNull());
10446 StorePointer(&raw_ptr()->index_, value.raw());
10447 }
10448 static intptr_t index_offset() {
10449 return OFFSET_OF(LinkedHashMapLayout, index_);
10450 }
10451
10452 ArrayPtr data() const { return raw_ptr()->data_; }
10453 void SetData(const Array& value) const {
10454 StorePointer(&raw_ptr()->data_, value.raw());
10455 }
10456 static intptr_t data_offset() {
10457 return OFFSET_OF(LinkedHashMapLayout, data_);
10458 }
10459
10460 SmiPtr hash_mask() const { return raw_ptr()->hash_mask_; }
10461 void SetHashMask(intptr_t value) const {
10462 StoreSmi(&raw_ptr()->hash_mask_, Smi::New(value));
10463 }
10464 static intptr_t hash_mask_offset() {
10465 return OFFSET_OF(LinkedHashMapLayout, hash_mask_);
10466 }
10467
10468 SmiPtr used_data() const { return raw_ptr()->used_data_; }
10469 void SetUsedData(intptr_t value) const {
10470 StoreSmi(&raw_ptr()->used_data_, Smi::New(value));
10471 }
10472 static intptr_t used_data_offset() {
10473 return OFFSET_OF(LinkedHashMapLayout, used_data_);
10474 }
10475
10476 SmiPtr deleted_keys() const { return raw_ptr()->deleted_keys_; }
10477 void SetDeletedKeys(intptr_t value) const {
10478 StoreSmi(&raw_ptr()->deleted_keys_, Smi::New(value));
10479 }
10480 static intptr_t deleted_keys_offset() {
10481 return OFFSET_OF(LinkedHashMapLayout, deleted_keys_);
10482 }
10483
10484 intptr_t Length() const {
10485 // The map may be uninitialized.
10486 if (raw_ptr()->used_data_ == Object::null()) return 0;
10487 if (raw_ptr()->deleted_keys_ == Object::null()) return 0;
10488
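    // Each entry occupies two slots (key and value) in the data array, so
    // halve used_data_ before subtracting the number of deleted keys.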
10489 intptr_t used = Smi::Value(raw_ptr()->used_data_);
10490 intptr_t deleted = Smi::Value(raw_ptr()->deleted_keys_);
10491 return (used >> 1) - deleted;
10492 }
10493
10494 // This iterator differs somewhat from its Dart counterpart (_CompactIterator
10495 // in runtime/lib/compact_hash.dart):
10496 // - There are no checks for concurrent modifications.
  // - Accessing a key or value before the first call to MoveNext, or after
  //   MoveNext has returned false, will result in crashes.
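  //
  // A minimal usage sketch (handle names are illustrative):
  //
  //   LinkedHashMap::Iterator it(map);
  //   Object& key = Object::Handle();
  //   Object& value = Object::Handle();
  //   while (it.MoveNext()) {
  //     key = it.CurrentKey();
  //     value = it.CurrentValue();
  //   }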
10499 class Iterator : ValueObject {
10500 public:
10501 explicit Iterator(const LinkedHashMap& map)
10502 : data_(Array::Handle(map.data())),
10503 scratch_(Object::Handle()),
10504 offset_(-2),
10505 length_(Smi::Value(map.used_data())) {}
10506
10507 bool MoveNext() {
10508 while (true) {
10509 offset_ += 2;
10510 if (offset_ >= length_) {
10511 return false;
10512 }
10513 scratch_ = data_.At(offset_);
10514 if (scratch_.raw() != data_.raw()) {
10515 // Slot is not deleted (self-reference indicates deletion).
10516 return true;
10517 }
10518 }
10519 }
10520
10521 ObjectPtr CurrentKey() const { return data_.At(offset_); }
10522
10523 ObjectPtr CurrentValue() const { return data_.At(offset_ + 1); }
10524
10525 private:
10526 const Array& data_;
10527 Object& scratch_;
10528 intptr_t offset_;
10529 const intptr_t length_;
10530 };
10531
10532 private:
10533 FINAL_HEAP_OBJECT_IMPLEMENTATION(LinkedHashMap, Instance);
10534
10535 // Keep this in sync with Dart implementation (lib/compact_hash.dart).
10536 static const intptr_t kInitialIndexBits = 3;
10537 static const intptr_t kInitialIndexSize = 1 << (kInitialIndexBits + 1);
10538
10539 // Allocate a map, but leave all fields set to null.
  // Used during deserialization (since a map might contain itself as a key
  // or a value).
10541 static LinkedHashMapPtr NewUninitialized(Heap::Space space = Heap::kNew);
10542
10543 friend class Class;
10544 friend class LinkedHashMapDeserializationCluster;
10545};
10546
10547class Closure : public Instance {
10548 public:
10549 TypeArgumentsPtr instantiator_type_arguments() const {
10550 return raw_ptr()->instantiator_type_arguments_;
10551 }
10552 static intptr_t instantiator_type_arguments_offset() {
10553 return OFFSET_OF(ClosureLayout, instantiator_type_arguments_);
10554 }
10555
10556 TypeArgumentsPtr function_type_arguments() const {
10557 return raw_ptr()->function_type_arguments_;
10558 }
10559 static intptr_t function_type_arguments_offset() {
10560 return OFFSET_OF(ClosureLayout, function_type_arguments_);
10561 }
10562
10563 TypeArgumentsPtr delayed_type_arguments() const {
10564 return raw_ptr()->delayed_type_arguments_;
10565 }
10566 static intptr_t delayed_type_arguments_offset() {
10567 return OFFSET_OF(ClosureLayout, delayed_type_arguments_);
10568 }
10569
10570 FunctionPtr function() const { return raw_ptr()->function_; }
10571 static intptr_t function_offset() {
10572 return OFFSET_OF(ClosureLayout, function_);
10573 }
10574
10575 ContextPtr context() const { return raw_ptr()->context_; }
10576 static intptr_t context_offset() {
10577 return OFFSET_OF(ClosureLayout, context_);
10578 }
10579
10580 bool IsGeneric(Thread* thread) const { return NumTypeParameters(thread) > 0; }
10581 intptr_t NumTypeParameters(Thread* thread) const;
  // No need for NumParentTypeParameters, as a closure is always closed over
  // its parent's type parameters (i.e., function_type_arguments() above).
10584
10585 SmiPtr hash() const { return raw_ptr()->hash_; }
10586 static intptr_t hash_offset() { return OFFSET_OF(ClosureLayout, hash_); }
10587
10588 static intptr_t InstanceSize() {
10589 return RoundedAllocationSize(sizeof(ClosureLayout));
10590 }
10591
10592 // Returns true if all elements are OK for canonicalization.
10593 virtual bool CheckAndCanonicalizeFields(Thread* thread,
10594 const char** error_str) const {
10595 // None of the fields of a closure are instances.
10596 return true;
10597 }
10598 virtual uint32_t CanonicalizeHash() const {
10599 return Function::Handle(function()).Hash();
10600 }
10601 int64_t ComputeHash() const;
10602
10603 static ClosurePtr New(const TypeArguments& instantiator_type_arguments,
10604 const TypeArguments& function_type_arguments,
10605 const Function& function,
10606 const Context& context,
10607 Heap::Space space = Heap::kNew);
10608
10609 static ClosurePtr New(const TypeArguments& instantiator_type_arguments,
10610 const TypeArguments& function_type_arguments,
10611 const TypeArguments& delayed_type_arguments,
10612 const Function& function,
10613 const Context& context,
10614 Heap::Space space = Heap::kNew);
10615
10616 FunctionPtr GetInstantiatedSignature(Zone* zone) const;
10617
10618 private:
10619 static ClosurePtr New();
10620
10621 FINAL_HEAP_OBJECT_IMPLEMENTATION(Closure, Instance);
10622 friend class Class;
10623};
10624
10625class Capability : public Instance {
10626 public:
10627 uint64_t Id() const { return raw_ptr()->id_; }
10628
10629 static intptr_t InstanceSize() {
10630 return RoundedAllocationSize(sizeof(CapabilityLayout));
10631 }
10632 static CapabilityPtr New(uint64_t id, Heap::Space space = Heap::kNew);
10633
10634 private:
10635 FINAL_HEAP_OBJECT_IMPLEMENTATION(Capability, Instance);
10636 friend class Class;
10637};
10638
10639class ReceivePort : public Instance {
10640 public:
10641 SendPortPtr send_port() const { return raw_ptr()->send_port_; }
10642 Dart_Port Id() const { return send_port()->ptr()->id_; }
10643
10644 InstancePtr handler() const { return raw_ptr()->handler_; }
10645 void set_handler(const Instance& value) const;
10646
10647 static intptr_t InstanceSize() {
10648 return RoundedAllocationSize(sizeof(ReceivePortLayout));
10649 }
10650 static ReceivePortPtr New(Dart_Port id,
10651 bool is_control_port,
10652 Heap::Space space = Heap::kNew);
10653
10654 private:
10655 FINAL_HEAP_OBJECT_IMPLEMENTATION(ReceivePort, Instance);
10656 friend class Class;
10657};
10658
10659class SendPort : public Instance {
10660 public:
10661 Dart_Port Id() const { return raw_ptr()->id_; }
10662
10663 Dart_Port origin_id() const { return raw_ptr()->origin_id_; }
10664 void set_origin_id(Dart_Port id) const {
10665 ASSERT(origin_id() == 0);
10666 StoreNonPointer(&(raw_ptr()->origin_id_), id);
10667 }
10668
10669 static intptr_t InstanceSize() {
10670 return RoundedAllocationSize(sizeof(SendPortLayout));
10671 }
10672 static SendPortPtr New(Dart_Port id, Heap::Space space = Heap::kNew);
10673 static SendPortPtr New(Dart_Port id,
10674 Dart_Port origin_id,
10675 Heap::Space space = Heap::kNew);
10676
10677 private:
10678 FINAL_HEAP_OBJECT_IMPLEMENTATION(SendPort, Instance);
10679 friend class Class;
10680};
10681
// This is allocated when a new instance of TransferableTypedData is created
// in [TransferableTypedData::New].
10684class TransferableTypedDataPeer {
10685 public:
10686 // [data] backing store should be malloc'ed, not new'ed.
10687 TransferableTypedDataPeer(uint8_t* data, intptr_t length)
10688 : data_(data), length_(length), handle_(nullptr) {}
10689
10690 ~TransferableTypedDataPeer() { free(data_); }
10691
10692 uint8_t* data() const { return data_; }
10693 intptr_t length() const { return length_; }
10694 FinalizablePersistentHandle* handle() const { return handle_; }
10695 void set_handle(FinalizablePersistentHandle* handle) { handle_ = handle; }
10696
10697 void ClearData() {
10698 data_ = nullptr;
10699 length_ = 0;
10700 handle_ = nullptr;
10701 }
10702
10703 private:
10704 uint8_t* data_;
10705 intptr_t length_;
10706 FinalizablePersistentHandle* handle_;
10707
10708 DISALLOW_COPY_AND_ASSIGN(TransferableTypedDataPeer);
10709};
10710
10711class TransferableTypedData : public Instance {
10712 public:
10713 static TransferableTypedDataPtr New(uint8_t* data,
10714 intptr_t len,
10715 Heap::Space space = Heap::kNew);
10716
10717 static intptr_t InstanceSize() {
10718 return RoundedAllocationSize(sizeof(TransferableTypedDataLayout));
10719 }
10720
10721 private:
10722 FINAL_HEAP_OBJECT_IMPLEMENTATION(TransferableTypedData, Instance);
10723 friend class Class;
10724};
10725
10726// Internal stacktrace object used in exceptions for printing stack traces.
10727class StackTrace : public Instance {
10728 public:
10729 static const int kPreallocatedStackdepth = 90;
10730
10731 intptr_t Length() const;
10732
10733 StackTracePtr async_link() const { return raw_ptr()->async_link_; }
10734 void set_async_link(const StackTrace& async_link) const;
10735 void set_expand_inlined(bool value) const;
10736
10737 ArrayPtr code_array() const { return raw_ptr()->code_array_; }
10738 ObjectPtr CodeAtFrame(intptr_t frame_index) const;
10739 void SetCodeAtFrame(intptr_t frame_index, const Object& code) const;
10740
10741 ArrayPtr pc_offset_array() const { return raw_ptr()->pc_offset_array_; }
10742 SmiPtr PcOffsetAtFrame(intptr_t frame_index) const;
10743 void SetPcOffsetAtFrame(intptr_t frame_index, const Smi& pc_offset) const;
10744
10745 bool skip_sync_start_in_parent_stack() const;
10746 void set_skip_sync_start_in_parent_stack(bool value) const;
10747
  // The number of frames that should be cut off the top of an async stack
  // trace if it's appended to a synchronous stack trace along a sync-async
  // call.
10750 //
10751 // Without cropping, the border would look like:
10752 //
10753 // <async function>
10754 // ---------------------------
10755 // <asynchronous gap marker>
10756 // <async function>
10757 //
10758 // Since it's not actually an async call, we crop off the last two
10759 // frames when concatenating the sync and async stacktraces.
10760 static constexpr intptr_t kSyncAsyncCroppedFrames = 2;
10761
10762 static intptr_t InstanceSize() {
10763 return RoundedAllocationSize(sizeof(StackTraceLayout));
10764 }
10765 static StackTracePtr New(const Array& code_array,
10766 const Array& pc_offset_array,
10767 Heap::Space space = Heap::kNew);
10768
10769 static StackTracePtr New(const Array& code_array,
10770 const Array& pc_offset_array,
10771 const StackTrace& async_link,
10772 bool skip_sync_start_in_parent_stack,
10773 Heap::Space space = Heap::kNew);
10774
10775 private:
10776 void set_code_array(const Array& code_array) const;
10777 void set_pc_offset_array(const Array& pc_offset_array) const;
10778 bool expand_inlined() const;
10779
10780 FINAL_HEAP_OBJECT_IMPLEMENTATION(StackTrace, Instance);
10781 friend class Class;
10782 friend class Debugger;
10783};
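
// A sketch of reading a trace back out: frame [i] pairs the code object at
// code_array()[i] with the pc offset at pc_offset_array()[i]. Given a
// StackTrace handle [trace]:
//
//   auto& code = Object::Handle();
//   auto& offset = Smi::Handle();
//   for (intptr_t i = 0; i < trace.Length(); i++) {
//     code = trace.CodeAtFrame(i);
//     offset = trace.PcOffsetAtFrame(i);
//   }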
10784
10785class RegExpFlags {
10786 public:
10787 // Flags are passed to a regex object as follows:
10788 // 'i': ignore case, 'g': do global matches, 'm': pattern is multi line,
10789 // 'u': pattern is full Unicode, not just BMP, 's': '.' in pattern matches
10790 // all characters including line terminators.
10791 enum Flags {
10792 kNone = 0,
10793 kGlobal = 1,
10794 kIgnoreCase = 2,
10795 kMultiLine = 4,
10796 kUnicode = 8,
10797 kDotAll = 16,
10798 };
10799
10800 static const int kDefaultFlags = 0;
10801
10802 RegExpFlags() : value_(kDefaultFlags) {}
10803 explicit RegExpFlags(int value) : value_(value) {}
10804
10805 inline bool IsGlobal() const { return (value_ & kGlobal) != 0; }
10806 inline bool IgnoreCase() const { return (value_ & kIgnoreCase) != 0; }
10807 inline bool IsMultiLine() const { return (value_ & kMultiLine) != 0; }
10808 inline bool IsUnicode() const { return (value_ & kUnicode) != 0; }
10809 inline bool IsDotAll() const { return (value_ & kDotAll) != 0; }
10810
10811 inline bool NeedsUnicodeCaseEquivalents() {
10812 // Both unicode and ignore_case flags are set. We need to use ICU to find
10813 // the closure over case equivalents.
10814 return IsUnicode() && IgnoreCase();
10815 }
10816
10817 void SetGlobal() { value_ |= kGlobal; }
10818 void SetIgnoreCase() { value_ |= kIgnoreCase; }
10819 void SetMultiLine() { value_ |= kMultiLine; }
10820 void SetUnicode() { value_ |= kUnicode; }
10821 void SetDotAll() { value_ |= kDotAll; }
10822
10823 const char* ToCString() const;
10824
10825 int value() const { return value_; }
10826
10827 bool operator==(const RegExpFlags& other) { return value_ == other.value_; }
10828 bool operator!=(const RegExpFlags& other) { return value_ != other.value_; }
10829
10830 private:
10831 int value_;
10832};
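
// A sketch of how the flag characters documented above map onto RegExpFlags
// (illustrative only; the actual flag parsing lives elsewhere in the VM):
//
//   RegExpFlags flags;      // starts as kDefaultFlags, i.e. kNone
//   flags.SetGlobal();      // 'g'
//   flags.SetIgnoreCase();  // 'i'
//   ASSERT(flags.IsGlobal() && flags.IgnoreCase());
//   ASSERT(flags.value() ==
//          (RegExpFlags::kGlobal | RegExpFlags::kIgnoreCase));  // == 3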
10833
10834// Internal JavaScript regular expression object.
10835class RegExp : public Instance {
10836 public:
10837 // Meaning of RegExType:
  // kUninitialized: the type of the regexp has not been initialized yet.
  // kSimple: A simple pattern to match against, using the string indexOf
  //     operation.
10840 // kComplex: A complex pattern to match.
10841 enum RegExType {
10842 kUninitialized = 0,
10843 kSimple = 1,
10844 kComplex = 2,
10845 };
10846
10847 enum {
10848 kTypePos = 0,
10849 kTypeSize = 2,
10850 kFlagsPos = 2,
10851 kFlagsSize = 5,
10852 };
10853
10854 class TypeBits : public BitField<int8_t, RegExType, kTypePos, kTypeSize> {};
10855 class FlagsBits : public BitField<int8_t, intptr_t, kFlagsPos, kFlagsSize> {};
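
  // A sketch of the packing: a complex, global, ignore-case regexp stores
  //
  //   type_flags_ == FlagsBits::update(
  //       RegExpFlags::kGlobal | RegExpFlags::kIgnoreCase,
  //       TypeBits::update(kComplex, 0))
  //
  // i.e. bits 0-1 hold the RegExType and bits 2-6 hold the RegExpFlags mask,
  // so TypeBits::decode(type_flags_) == kComplex and
  // FlagsBits::decode(type_flags_) == 3.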
10856
10857 bool is_initialized() const { return (type() != kUninitialized); }
10858 bool is_simple() const { return (type() == kSimple); }
10859 bool is_complex() const { return (type() == kComplex); }
10860
10861 intptr_t num_registers(bool is_one_byte) const {
10862 return is_one_byte ? raw_ptr()->num_one_byte_registers_
10863 : raw_ptr()->num_two_byte_registers_;
10864 }
10865
10866 StringPtr pattern() const { return raw_ptr()->pattern_; }
10867 SmiPtr num_bracket_expressions() const {
10868 return raw_ptr()->num_bracket_expressions_;
10869 }
10870 ArrayPtr capture_name_map() const { return raw_ptr()->capture_name_map_; }
10871
10872 TypedDataPtr bytecode(bool is_one_byte, bool sticky) const {
10873 if (sticky) {
10874 return is_one_byte ? raw_ptr()->one_byte_sticky_.bytecode_
10875 : raw_ptr()->two_byte_sticky_.bytecode_;
10876 } else {
10877 return is_one_byte ? raw_ptr()->one_byte_.bytecode_
10878 : raw_ptr()->two_byte_.bytecode_;
10879 }
10880 }
10881
10882 static intptr_t function_offset(intptr_t cid, bool sticky) {
10883 if (sticky) {
10884 switch (cid) {
10885 case kOneByteStringCid:
10886 return OFFSET_OF(RegExpLayout, one_byte_sticky_.function_);
10887 case kTwoByteStringCid:
10888 return OFFSET_OF(RegExpLayout, two_byte_sticky_.function_);
10889 case kExternalOneByteStringCid:
10890 return OFFSET_OF(RegExpLayout, external_one_byte_sticky_function_);
10891 case kExternalTwoByteStringCid:
10892 return OFFSET_OF(RegExpLayout, external_two_byte_sticky_function_);
10893 }
10894 } else {
10895 switch (cid) {
10896 case kOneByteStringCid:
10897 return OFFSET_OF(RegExpLayout, one_byte_.function_);
10898 case kTwoByteStringCid:
10899 return OFFSET_OF(RegExpLayout, two_byte_.function_);
10900 case kExternalOneByteStringCid:
10901 return OFFSET_OF(RegExpLayout, external_one_byte_function_);
10902 case kExternalTwoByteStringCid:
10903 return OFFSET_OF(RegExpLayout, external_two_byte_function_);
10904 }
10905 }
10906
10907 UNREACHABLE();
10908 return -1;
10909 }
10910
10911 FunctionPtr* FunctionAddr(intptr_t cid, bool sticky) const {
10912 return reinterpret_cast<FunctionPtr*>(
10913 FieldAddrAtOffset(function_offset(cid, sticky)));
10914 }
10915
10916 FunctionPtr function(intptr_t cid, bool sticky) const {
10917 return *FunctionAddr(cid, sticky);
10918 }
10919
10920 void set_pattern(const String& pattern) const;
10921 void set_function(intptr_t cid, bool sticky, const Function& value) const;
10922 void set_bytecode(bool is_one_byte,
10923 bool sticky,
10924 const TypedData& bytecode) const;
10925
10926 void set_num_bracket_expressions(intptr_t value) const;
10927 void set_capture_name_map(const Array& array) const;
10928 void set_is_global() const {
10929 RegExpFlags f = flags();
10930 f.SetGlobal();
10931 set_flags(f);
10932 }
10933 void set_is_ignore_case() const {
10934 RegExpFlags f = flags();
10935 f.SetIgnoreCase();
10936 set_flags(f);
10937 }
10938 void set_is_multi_line() const {
10939 RegExpFlags f = flags();
10940 f.SetMultiLine();
10941 set_flags(f);
10942 }
10943 void set_is_unicode() const {
10944 RegExpFlags f = flags();
10945 f.SetUnicode();
10946 set_flags(f);
10947 }
10948 void set_is_dot_all() const {
10949 RegExpFlags f = flags();
10950 f.SetDotAll();
10951 set_flags(f);
10952 }
10953 void set_is_simple() const { set_type(kSimple); }
10954 void set_is_complex() const { set_type(kComplex); }
10955 void set_num_registers(bool is_one_byte, intptr_t value) const {
10956 if (is_one_byte) {
10957 StoreNonPointer(&raw_ptr()->num_one_byte_registers_, value);
10958 } else {
10959 StoreNonPointer(&raw_ptr()->num_two_byte_registers_, value);
10960 }
10961 }
10962
10963 RegExpFlags flags() const {
10964 return RegExpFlags(FlagsBits::decode(raw_ptr()->type_flags_));
10965 }
10966 void set_flags(RegExpFlags flags) const {
10967 StoreNonPointer(&raw_ptr()->type_flags_,
10968 FlagsBits::update(flags.value(), raw_ptr()->type_flags_));
10969 }
10970 const char* Flags() const;
10971
10972 virtual bool CanonicalizeEquals(const Instance& other) const;
10973
10974 static intptr_t InstanceSize() {
10975 return RoundedAllocationSize(sizeof(RegExpLayout));
10976 }
10977
10978 static RegExpPtr New(Heap::Space space = Heap::kNew);
10979
10980 private:
10981 void set_type(RegExType type) const {
10982 StoreNonPointer(&raw_ptr()->type_flags_,
10983 TypeBits::update(type, raw_ptr()->type_flags_));
10984 }
10985
10986 RegExType type() const { return TypeBits::decode(raw_ptr()->type_flags_); }
10987
10988 FINAL_HEAP_OBJECT_IMPLEMENTATION(RegExp, Instance);
10989 friend class Class;
10990};
10991
10992class WeakProperty : public Instance {
10993 public:
10994 ObjectPtr key() const { return raw_ptr()->key_; }
10995
10996 void set_key(const Object& key) const {
10997 StorePointer(&raw_ptr()->key_, key.raw());
10998 }
10999
11000 ObjectPtr value() const { return raw_ptr()->value_; }
11001
11002 void set_value(const Object& value) const {
11003 StorePointer(&raw_ptr()->value_, value.raw());
11004 }
11005
11006 static WeakPropertyPtr New(Heap::Space space = Heap::kNew);
11007
11008 static intptr_t InstanceSize() {
11009 return RoundedAllocationSize(sizeof(WeakPropertyLayout));
11010 }
11011
11012 static void Clear(WeakPropertyPtr raw_weak) {
11013 ASSERT(raw_weak->ptr()->next_ == 0);
11014 // This action is performed by the GC. No barrier.
11015 raw_weak->ptr()->key_ = Object::null();
11016 raw_weak->ptr()->value_ = Object::null();
11017 }
11018
11019 private:
11020 FINAL_HEAP_OBJECT_IMPLEMENTATION(WeakProperty, Instance);
11021 friend class Class;
11022};
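
// A usage sketch: the GC treats a WeakProperty as an ephemeron, so the value
// is only kept alive as long as the key is otherwise reachable. [some_key]
// and [some_value] below are placeholder handles.
//
//   const WeakProperty& prop = WeakProperty::Handle(WeakProperty::New());
//   prop.set_key(some_key);
//   prop.set_value(some_value);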
11023
11024class MirrorReference : public Instance {
11025 public:
11026 ObjectPtr referent() const { return raw_ptr()->referent_; }
11027
11028 void set_referent(const Object& referent) const {
11029 StorePointer(&raw_ptr()->referent_, referent.raw());
11030 }
11031
11032 AbstractTypePtr GetAbstractTypeReferent() const;
11033
11034 ClassPtr GetClassReferent() const;
11035
11036 FieldPtr GetFieldReferent() const;
11037
11038 FunctionPtr GetFunctionReferent() const;
11039
11040 LibraryPtr GetLibraryReferent() const;
11041
11042 TypeParameterPtr GetTypeParameterReferent() const;
11043
11044 static MirrorReferencePtr New(const Object& referent,
11045 Heap::Space space = Heap::kNew);
11046
11047 static intptr_t InstanceSize() {
11048 return RoundedAllocationSize(sizeof(MirrorReferenceLayout));
11049 }
11050
11051 private:
11052 FINAL_HEAP_OBJECT_IMPLEMENTATION(MirrorReference, Instance);
11053 friend class Class;
11054};
11055
11056class UserTag : public Instance {
11057 public:
11058 uword tag() const { return raw_ptr()->tag(); }
11059 void set_tag(uword t) const {
11060 ASSERT(t >= UserTags::kUserTagIdOffset);
11061 ASSERT(t < UserTags::kUserTagIdOffset + UserTags::kMaxUserTags);
11062 StoreNonPointer(&raw_ptr()->tag_, t);
11063 }
11064 static intptr_t tag_offset() { return OFFSET_OF(UserTagLayout, tag_); }
11065
11066 StringPtr label() const { return raw_ptr()->label_; }
11067
11068 void MakeActive() const;
11069
11070 static intptr_t InstanceSize() {
11071 return RoundedAllocationSize(sizeof(UserTagLayout));
11072 }
11073
11074 static UserTagPtr New(const String& label, Heap::Space space = Heap::kOld);
11075 static UserTagPtr DefaultTag();
11076
11077 static bool TagTableIsFull(Thread* thread);
11078 static UserTagPtr FindTagById(uword tag_id);
11079
11080 private:
11081 static UserTagPtr FindTagInIsolate(Thread* thread, const String& label);
11082 static void AddTagToIsolate(Thread* thread, const UserTag& tag);
11083
11084 void set_label(const String& tag_label) const {
11085 StorePointer(&raw_ptr()->label_, tag_label.raw());
11086 }
11087
11088 FINAL_HEAP_OBJECT_IMPLEMENTATION(UserTag, Instance);
11089 friend class Class;
11090};
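
// A sketch of creating and activating a tag from VM-internal code (the label
// text is arbitrary):
//
//   const String& label = String::Handle(String::New("MyPhase"));
//   const UserTag& tag = UserTag::Handle(UserTag::New(label));
//   tag.MakeActive();  // subsequent profiler samples are attributed to it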
11091
11092// Represents abstract FutureOr class in dart:async.
11093class FutureOr : public Instance {
11094 public:
11095 static intptr_t InstanceSize() {
11096 return RoundedAllocationSize(sizeof(FutureOrLayout));
11097 }
11098
11099 virtual TypeArgumentsPtr GetTypeArguments() const {
11100 return raw_ptr()->type_arguments_;
11101 }
11102 static intptr_t type_arguments_offset() {
11103 return OFFSET_OF(FutureOrLayout, type_arguments_);
11104 }
11105
11106 private:
11107 FINAL_HEAP_OBJECT_IMPLEMENTATION(FutureOr, Instance);
11108
11109 friend class Class;
11110};
11111
11112// Breaking cycles and loops.
11113ClassPtr Object::clazz() const {
11114 uword raw_value = static_cast<uword>(raw_);
11115 if ((raw_value & kSmiTagMask) == kSmiTag) {
11116 return Smi::Class();
11117 }
11118 ASSERT(!IsolateGroup::Current()->compaction_in_progress());
11119 return Isolate::Current()->class_table()->At(raw()->GetClassId());
11120}
11121
11122DART_FORCE_INLINE
11123void Object::SetRaw(ObjectPtr value) {
11124 NoSafepointScope no_safepoint_scope;
11125 raw_ = value;
11126 intptr_t cid = value->GetClassIdMayBeSmi();
11127 // Free-list elements cannot be wrapped in a handle.
11128 ASSERT(cid != kFreeListElement);
11129 ASSERT(cid != kForwardingCorpse);
11130 if (cid >= kNumPredefinedCids) {
11131 cid = kInstanceCid;
11132 }
11133 set_vtable(builtin_vtables_[cid]);
11134#if defined(DEBUG)
11135 if (FLAG_verify_handles && raw_->IsHeapObject()) {
11136 Isolate* isolate = Isolate::Current();
11137 Heap* isolate_heap = isolate->heap();
11138 // TODO(rmacnak): Remove after rewriting StackFrame::VisitObjectPointers
11139 // to not use handles.
11140 if (!isolate_heap->new_space()->scavenging()) {
11141 Heap* vm_isolate_heap = Dart::vm_isolate()->heap();
11142 uword addr = ObjectLayout::ToAddr(raw_);
11143 if (!isolate_heap->Contains(addr) && !vm_isolate_heap->Contains(addr)) {
11144 ASSERT(FLAG_write_protect_code);
11145 addr = ObjectLayout::ToAddr(OldPage::ToWritable(raw_));
11146 ASSERT(isolate_heap->Contains(addr) || vm_isolate_heap->Contains(addr));
11147 }
11148 }
11149 }
11150#endif
11151}
11152
11153#if !defined(DART_PRECOMPILED_RUNTIME)
11154bool Function::HasBytecode() const {
11155 return raw_ptr()->bytecode_ != Bytecode::null();
11156}
11157
11158bool Function::HasBytecode(FunctionPtr function) {
11159 return function->ptr()->bytecode_ != Bytecode::null();
11160}
11161#endif // !defined(DART_PRECOMPILED_RUNTIME)
11162
11163intptr_t Field::HostOffset() const {
11164 ASSERT(is_instance()); // Valid only for dart instance fields.
11165 return (Smi::Value(raw_ptr()->host_offset_or_field_id_) * kWordSize);
11166}
11167
11168intptr_t Field::TargetOffset() const {
11169 ASSERT(is_instance()); // Valid only for dart instance fields.
11170#if !defined(DART_PRECOMPILED_RUNTIME)
11171 return (raw_ptr()->target_offset_ * compiler::target::kWordSize);
11172#else
11173 return HostOffset();
11174#endif // !defined(DART_PRECOMPILED_RUNTIME)
11175}
11176
11177inline intptr_t Field::TargetOffsetOf(const FieldPtr field) {
11178#if !defined(DART_PRECOMPILED_RUNTIME)
11179 return field->ptr()->target_offset_;
11180#else
11181 return Smi::Value(field->ptr()->host_offset_or_field_id_);
11182#endif // !defined(DART_PRECOMPILED_RUNTIME)
11183}
11184
11185void Field::SetOffset(intptr_t host_offset_in_bytes,
11186 intptr_t target_offset_in_bytes) const {
11187 ASSERT(is_instance()); // Valid only for dart instance fields.
11188 ASSERT(kWordSize != 0);
11189 StoreSmi(&raw_ptr()->host_offset_or_field_id_,
11190 Smi::New(host_offset_in_bytes / kWordSize));
11191#if !defined(DART_PRECOMPILED_RUNTIME)
11192 ASSERT(compiler::target::kWordSize != 0);
11193 StoreNonPointer(&raw_ptr()->target_offset_,
11194 target_offset_in_bytes / compiler::target::kWordSize);
11195#else
11196 ASSERT(host_offset_in_bytes == target_offset_in_bytes);
11197#endif // !defined(DART_PRECOMPILED_RUNTIME)
11198}
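
// Worked example (a sketch): when a 64-bit host cross-compiles for a 32-bit
// target, a field at host offset 16 and target offset 8 is stored via
// SetOffset(16, 8) as host_offset_or_field_id_ == Smi::New(16 / 8) and
// target_offset_ == 8 / 4, so HostOffset() reads back 16 and TargetOffset()
// reads back 8.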
11199
11200InstancePtr Field::StaticValue() const {
11201 ASSERT(is_static()); // Valid only for static dart fields.
11202 return Isolate::Current()->field_table()->At(
11203 Smi::Value(raw_ptr()->host_offset_or_field_id_));
11204}
11205
11206inline intptr_t Field::field_id() const {
11207 return Smi::Value(raw_ptr()->host_offset_or_field_id_);
11208}
11209
11210void Field::set_field_id(intptr_t field_id) const {
11211 ASSERT(is_static());
11212 ASSERT(Thread::Current()->IsMutatorThread());
11213 StoreSmi(&raw_ptr()->host_offset_or_field_id_, Smi::New(field_id));
11214}
11215
11216#ifndef DART_PRECOMPILED_RUNTIME
11217void Field::set_saved_initial_value(const Instance& value) const {
11218 StorePointer(&raw_ptr()->saved_initial_value_, value.raw());
11219}
11220#endif
11221
11222void Context::SetAt(intptr_t index, const Object& value) const {
11223 StorePointer(ObjectAddr(index), value.raw());
11224}
11225
11226intptr_t Instance::GetNativeField(int index) const {
11227 ASSERT(IsValidNativeIndex(index));
11228 NoSafepointScope no_safepoint;
11229 TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
11230 if (native_fields == TypedData::null()) {
11231 return 0;
11232 }
11233 return reinterpret_cast<intptr_t*>(native_fields->ptr()->data())[index];
11234}
11235
11236void Instance::GetNativeFields(uint16_t num_fields,
11237 intptr_t* field_values) const {
11238 NoSafepointScope no_safepoint;
11239 ASSERT(num_fields == NumNativeFields());
11240 ASSERT(field_values != NULL);
11241 TypedDataPtr native_fields = static_cast<TypedDataPtr>(*NativeFieldsAddr());
  if (native_fields == TypedData::null()) {
    for (intptr_t i = 0; i < num_fields; i++) {
      field_values[i] = 0;
    }
    return;
  }
11247 intptr_t* fields = reinterpret_cast<intptr_t*>(native_fields->ptr()->data());
11248 for (intptr_t i = 0; i < num_fields; i++) {
11249 field_values[i] = fields[i];
11250 }
11251}
11252
11253bool String::Equals(const String& str) const {
11254 if (raw() == str.raw()) {
11255 return true; // Both handles point to the same raw instance.
11256 }
11257 if (str.IsNull()) {
11258 return false;
11259 }
11260 if (IsCanonical() && str.IsCanonical()) {
11261 return false; // Two symbols that aren't identical aren't equal.
11262 }
11263 if (HasHash() && str.HasHash() && (Hash() != str.Hash())) {
11264 return false; // Both sides have hash codes and they do not match.
11265 }
11266 return Equals(str, 0, str.Length());
11267}
11268
11269intptr_t Library::UrlHash() const {
11270 intptr_t result = String::GetCachedHash(url());
11271 ASSERT(result != 0);
11272 return result;
11273}
11274
11275void MegamorphicCache::SetEntry(const Array& array,
11276 intptr_t index,
11277 const Smi& class_id,
11278 const Object& target) {
11279 ASSERT(target.IsNull() || target.IsFunction() || target.IsSmi());
11280 array.SetAt((index * kEntryLength) + kClassIdIndex, class_id);
11281#if defined(DART_PRECOMPILED_RUNTIME)
11282 if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
11283 if (target.IsFunction()) {
11284 const auto& function = Function::Cast(target);
11285 const auto& entry_point = Smi::Handle(
11286 Smi::FromAlignedAddress(Code::EntryPointOf(function.CurrentCode())));
11287 array.SetAt((index * kEntryLength) + kTargetFunctionIndex, entry_point);
11288 return;
11289 }
11290 }
11291#endif // defined(DART_PRECOMPILED_RUNTIME)
11292 array.SetAt((index * kEntryLength) + kTargetFunctionIndex, target);
11293}
11294
11295ObjectPtr MegamorphicCache::GetClassId(const Array& array, intptr_t index) {
11296 return array.At((index * kEntryLength) + kClassIdIndex);
11297}
11298
11299ObjectPtr MegamorphicCache::GetTargetFunction(const Array& array,
11300 intptr_t index) {
11301 return array.At((index * kEntryLength) + kTargetFunctionIndex);
11302}
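
// For example (a sketch, assuming the usual layout of kEntryLength == 2 with
// kClassIdIndex == 0 and kTargetFunctionIndex == 1), entry [i] occupies
// backing-array slots [2 * i] and [2 * i + 1] and reads back as
//
//   class_id = MegamorphicCache::GetClassId(array, i);
//   target = MegamorphicCache::GetTargetFunction(array, i);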
11303
11304inline intptr_t Type::Hash() const {
11305 intptr_t result = Smi::Value(raw_ptr()->hash_);
11306 if (result != 0) {
11307 return result;
11308 }
11309 return ComputeHash();
11310}
11311
11312inline void Type::SetHash(intptr_t value) const {
11313 // This is only safe because we create a new Smi, which does not cause
11314 // heap allocation.
11315 StoreSmi(&raw_ptr()->hash_, Smi::New(value));
11316}
11317
11318inline intptr_t TypeParameter::Hash() const {
11319 ASSERT(IsFinalized());
11320 intptr_t result = Smi::Value(raw_ptr()->hash_);
11321 if (result != 0) {
11322 return result;
11323 }
11324 return ComputeHash();
11325}
11326
11327inline void TypeParameter::SetHash(intptr_t value) const {
11328 // This is only safe because we create a new Smi, which does not cause
11329 // heap allocation.
11330 StoreSmi(&raw_ptr()->hash_, Smi::New(value));
11331}
11332
11333inline intptr_t TypeArguments::Hash() const {
11334 if (IsNull()) return kAllDynamicHash;
11335 intptr_t result = Smi::Value(raw_ptr()->hash_);
11336 if (result != 0) {
11337 return result;
11338 }
11339 return ComputeHash();
11340}
11341
11342inline void TypeArguments::SetHash(intptr_t value) const {
11343 // This is only safe because we create a new Smi, which does not cause
11344 // heap allocation.
11345 StoreSmi(&raw_ptr()->hash_, Smi::New(value));
11346}
11347
11348inline uint16_t String::CharAt(StringPtr str, intptr_t index) {
11349 switch (str->GetClassId()) {
11350 case kOneByteStringCid:
11351 return OneByteString::CharAt(static_cast<OneByteStringPtr>(str), index);
11352 case kTwoByteStringCid:
11353 return TwoByteString::CharAt(static_cast<TwoByteStringPtr>(str), index);
11354 case kExternalOneByteStringCid:
11355 return ExternalOneByteString::CharAt(
11356 static_cast<ExternalOneByteStringPtr>(str), index);
11357 case kExternalTwoByteStringCid:
11358 return ExternalTwoByteString::CharAt(
11359 static_cast<ExternalTwoByteStringPtr>(str), index);
11360 }
11361 UNREACHABLE();
11362 return 0;
11363}
11364
11365// A view on an [Array] as a list of tuples, optionally starting at an offset.
11366//
11367// Example: We store a list of (kind, function, code) tuples into the
11368// [Code::static_calls_target_table] array of type [Array].
11369//
11370// This helper class can then be used via
11371//
//   using CallTableView = ArrayOfTuplesView<
11373// Code::Kind, std::tuple<Smi, Function, Code>>;
11374//
//   auto& array = Array::Handle(code.static_calls_target_table());
11376// CallTableView static_calls(array);
11377//
//   // Using a range-based for loop.
11379// auto& function = Function::Handle();
11380// for (auto& call : static_calls) {
11381// function = call.Get<Code::kSCallTableFunctionTarget>();
11382// call.Set<Code::kSCallTableFunctionTarget>(function);
11383// }
11384//
//   // Using a manual loop.
11386// auto& function = Function::Handle();
11387// for (intptr_t i = 0; i < static_calls.Length(); ++i) {
11388// auto call = static_calls[i];
11389// function = call.Get<Code::kSCallTableFunctionTarget>();
11390// call.Set<Code::kSCallTableFunctionTarget>(function);
11391// }
11392//
11393//
11394// Template parameters:
11395//
11396// * [EnumType] must be a normal enum which enumerates the entries of the
11397// tuple
11398//
11399// * [kStartOffset] is the offset at which the first tuple in the array
11400// starts (can be 0).
11401//
11402// * [TupleT] must be a std::tuple<...> where "..." are the heap object handle
11403// classes (e.g. 'Code', 'Smi', 'Object')
11404template <typename EnumType, typename TupleT, int kStartOffset = 0>
11405class ArrayOfTuplesView {
11406 public:
11407 static constexpr intptr_t EntrySize = std::tuple_size<TupleT>::value;
11408
11409 class Iterator;
11410
11411 class TupleView {
11412 public:
11413 TupleView(const Array& array, intptr_t index)
11414 : array_(array), index_(index) {}
11415
11416 template <EnumType kElement,
11417 std::memory_order order = std::memory_order_relaxed>
11418 typename std::tuple_element<kElement, TupleT>::type::ObjectPtrType Get()
11419 const {
11420 using object_type = typename std::tuple_element<kElement, TupleT>::type;
11421 return object_type::RawCast(array_.At<order>(index_ + kElement));
11422 }
11423
11424 template <EnumType kElement,
11425 std::memory_order order = std::memory_order_relaxed>
11426 void Set(const typename std::tuple_element<kElement, TupleT>::type& value)
11427 const {
11428 array_.SetAt<order>(index_ + kElement, value);
11429 }
11430
11431 intptr_t index() const { return (index_ - kStartOffset) / EntrySize; }
11432
11433 private:
11434 const Array& array_;
11435 intptr_t index_;
11436
11437 friend class Iterator;
11438 };
11439
11440 class Iterator {
11441 public:
11442 Iterator(const Array& array, intptr_t index) : entry_(array, index) {}
11443
11444 bool operator==(const Iterator& other) {
11445 return entry_.index_ == other.entry_.index_;
11446 }
11447 bool operator!=(const Iterator& other) {
11448 return entry_.index_ != other.entry_.index_;
11449 }
11450
11451 const TupleView& operator*() const { return entry_; }
11452
11453 Iterator& operator++() {
11454 entry_.index_ += EntrySize;
11455 return *this;
11456 }
11457
11458 private:
11459 TupleView entry_;
11460 };
11461
11462 explicit ArrayOfTuplesView(const Array& array) : array_(array), index_(-1) {
11463 ASSERT(!array.IsNull());
11464 ASSERT(array.Length() >= kStartOffset);
    ASSERT((array.Length() - kStartOffset) % EntrySize == 0);
11466 }
11467
11468 intptr_t Length() const {
11469 return (array_.Length() - kStartOffset) / EntrySize;
11470 }
11471
11472 TupleView At(intptr_t i) const {
11473 return TupleView(array_, kStartOffset + i * EntrySize);
11474 }
11475
11476 TupleView operator[](intptr_t i) const { return At(i); }
11477
11478 Iterator begin() const { return Iterator(array_, kStartOffset); }
11479
11480 Iterator end() const {
11481 return Iterator(array_, kStartOffset + Length() * EntrySize);
11482 }
11483
11484 private:
11485 const Array& array_;
11486 intptr_t index_;
11487};
11488
11489using InvocationDispatcherTable =
11490 ArrayOfTuplesView<Class::InvocationDispatcherEntry,
11491 std::tuple<String, Array, Function>>;
11492
11493using StaticCallsTable =
11494 ArrayOfTuplesView<Code::SCallTableEntry, std::tuple<Smi, Object, Function>>;
11495
11496using StaticCallsTableEntry = StaticCallsTable::TupleView;
11497
11498using SubtypeTestCacheTable = ArrayOfTuplesView<SubtypeTestCache::Entries,
11499 std::tuple<Object,
11500 Object,
11501 TypeArguments,
11502 TypeArguments,
11503 TypeArguments,
11504 TypeArguments,
11505 TypeArguments>>;
11506
11507using MegamorphicCacheEntries =
11508 ArrayOfTuplesView<MegamorphicCache::EntryType, std::tuple<Smi, Object>>;
11509
11510void DumpTypeTable(Isolate* isolate);
11511void DumpTypeParameterTable(Isolate* isolate);
11512void DumpTypeArgumentsTable(Isolate* isolate);
11513
11514EntryPointPragma FindEntryPointPragma(Isolate* I,
11515 const Array& metadata,
11516 Field* reusable_field_handle,
11517 Object* reusable_object_handle);
11518
11519DART_WARN_UNUSED_RESULT
11520ErrorPtr EntryPointFieldInvocationError(const String& getter_name);
11521
11522} // namespace dart
11523
11524#endif // RUNTIME_VM_OBJECT_H_
11525