1 | // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/raw_object.h" |
6 | |
7 | #include "vm/class_table.h" |
8 | #include "vm/dart.h" |
9 | #include "vm/heap/become.h" |
10 | #include "vm/heap/freelist.h" |
11 | #include "vm/isolate.h" |
12 | #include "vm/isolate_reload.h" |
13 | #include "vm/object.h" |
14 | #include "vm/runtime_entry.h" |
15 | #include "vm/visitor.h" |
16 | |
17 | namespace dart { |
18 | |
19 | bool ObjectLayout::InVMIsolateHeap() const { |
20 | // All "vm-isolate" objects are pre-marked and in old space |
21 | // (see [Object::FinalizeVMIsolate]). |
22 | if (!IsOldObject() || !IsMarked()) return false; |
23 | |
24 | auto heap = Dart::vm_isolate()->heap(); |
25 | ASSERT(heap->UsedInWords(Heap::kNew) == 0); |
26 | return heap->old_space()->ContainsUnsafe(ToAddr(this)); |
27 | } |
28 | |
29 | void ObjectPtr::Validate(IsolateGroup* isolate_group) const { |
30 | // All Smi values are valid. |
31 | if (!IsHeapObject()) { |
32 | return; |
33 | } |
34 | // Slightly more readable than a segfault. |
35 | if (tagged_pointer_ == kHeapObjectTag) { |
36 | FATAL("RAW_NULL encountered" ); |
37 | } |
38 | ptr()->Validate(isolate_group); |
39 | } |
40 | |
41 | void ObjectLayout::Validate(IsolateGroup* isolate_group) const { |
42 | if (static_cast<uword>(Object::void_class_) == kHeapObjectTag) { |
    // Validation relies on properly initialized class objects. Skip if the
    // VM is still being initialized.
45 | return; |
46 | } |
47 | // Validate that the tags_ field is sensible. |
48 | uint32_t tags = tags_; |
  if (IsNewObject()) {
    if (!NewBit::decode(tags)) {
      FATAL1("New object missing kNewBit: %x\n", tags);
    }
    if (OldBit::decode(tags)) {
      FATAL1("New object has kOldBit: %x\n", tags);
    }
    if (OldAndNotMarkedBit::decode(tags)) {
      FATAL1("New object has kOldAndNotMarkedBit: %x\n", tags);
    }
    if (OldAndNotRememberedBit::decode(tags)) {
      FATAL1("New object has kOldAndNotRememberedBit: %x\n", tags);
    }
  } else {
    if (NewBit::decode(tags)) {
      FATAL1("Old object has kNewBit: %x\n", tags);
    }
    if (!OldBit::decode(tags)) {
      FATAL1("Old object missing kOldBit: %x\n", tags);
    }
  }
70 | const intptr_t class_id = ClassIdTag::decode(tags); |
71 | if (!isolate_group->shared_class_table()->IsValidIndex(class_id)) { |
72 | FATAL1("Invalid class id encountered %" Pd "\n" , class_id); |
73 | } |
  if (class_id == kNullCid &&
      !isolate_group->shared_class_table()->HasValidClassAt(class_id)) {
76 | // Null class not yet initialized; skip. |
77 | return; |
78 | } |
79 | intptr_t size_from_tags = SizeTag::decode(tags); |
80 | intptr_t size_from_class = HeapSizeFromClass(tags); |
81 | if ((size_from_tags != 0) && (size_from_tags != size_from_class)) { |
82 | FATAL3( |
83 | "Inconsistent size encountered " |
84 | "cid: %" Pd ", size_from_tags: %" Pd ", size_from_class: %" Pd "\n" , |
85 | class_id, size_from_tags, size_from_class); |
86 | } |
87 | } |
88 | |
// Can't look at the class object because this function can be called during
// compaction when the class objects are moving. Can use the class
// id in the header and the sizes in the Class Table.
// Cannot dereference ptr()->tags_. May dereference other parts of the object.
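//
// As an illustration only (a hedged sketch; FromAddr and HeapSize exist in
// this codebase, but the walk below is hypothetical): a heap walker can size
// each object from its header tags without touching class objects:
//
//   uword cur = first_object_addr;  // hypothetical page cursor
//   while (cur < end_addr) {
//     ObjectPtr obj = ObjectLayout::FromAddr(cur);
//     cur += obj->ptr()->HeapSize();  // reads tags_, falls back to the table
//   }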
93 | intptr_t ObjectLayout::HeapSizeFromClass(uint32_t tags) const { |
94 | intptr_t class_id = ClassIdTag::decode(tags); |
95 | intptr_t instance_size = 0; |
96 | switch (class_id) { |
97 | case kCodeCid: { |
98 | const CodePtr raw_code = static_cast<const CodePtr>(this); |
99 | intptr_t pointer_offsets_length = |
100 | Code::PtrOffBits::decode(raw_code->ptr()->state_bits_); |
101 | instance_size = Code::InstanceSize(pointer_offsets_length); |
102 | break; |
103 | } |
104 | case kInstructionsCid: { |
105 | const InstructionsPtr raw_instructions = |
106 | static_cast<const InstructionsPtr>(this); |
107 | intptr_t instructions_size = Instructions::Size(raw_instructions); |
108 | instance_size = Instructions::InstanceSize(instructions_size); |
109 | break; |
110 | } |
111 | case kInstructionsSectionCid: { |
112 | const InstructionsSectionPtr raw_section = |
113 | static_cast<const InstructionsSectionPtr>(this); |
114 | intptr_t section_size = InstructionsSection::Size(raw_section); |
115 | instance_size = InstructionsSection::InstanceSize(section_size); |
116 | break; |
117 | } |
118 | case kContextCid: { |
119 | const ContextPtr raw_context = static_cast<const ContextPtr>(this); |
120 | intptr_t num_variables = raw_context->ptr()->num_variables_; |
121 | instance_size = Context::InstanceSize(num_variables); |
122 | break; |
123 | } |
124 | case kContextScopeCid: { |
125 | const ContextScopePtr raw_context_scope = |
126 | static_cast<const ContextScopePtr>(this); |
127 | intptr_t num_variables = raw_context_scope->ptr()->num_variables_; |
128 | instance_size = ContextScope::InstanceSize(num_variables); |
129 | break; |
130 | } |
131 | case kOneByteStringCid: { |
132 | const OneByteStringPtr raw_string = |
133 | static_cast<const OneByteStringPtr>(this); |
134 | intptr_t string_length = Smi::Value(raw_string->ptr()->length_); |
135 | instance_size = OneByteString::InstanceSize(string_length); |
136 | break; |
137 | } |
138 | case kTwoByteStringCid: { |
139 | const TwoByteStringPtr raw_string = |
140 | static_cast<const TwoByteStringPtr>(this); |
141 | intptr_t string_length = Smi::Value(raw_string->ptr()->length_); |
142 | instance_size = TwoByteString::InstanceSize(string_length); |
143 | break; |
144 | } |
145 | case kArrayCid: |
146 | case kImmutableArrayCid: { |
147 | const ArrayPtr raw_array = static_cast<const ArrayPtr>(this); |
148 | intptr_t array_length = Smi::Value(raw_array->ptr()->length_); |
149 | instance_size = Array::InstanceSize(array_length); |
150 | break; |
151 | } |
152 | case kObjectPoolCid: { |
153 | const ObjectPoolPtr raw_object_pool = |
154 | static_cast<const ObjectPoolPtr>(this); |
155 | intptr_t len = raw_object_pool->ptr()->length_; |
156 | instance_size = ObjectPool::InstanceSize(len); |
157 | break; |
158 | } |
159 | #define SIZE_FROM_CLASS(clazz) case kTypedData##clazz##Cid: |
160 | CLASS_LIST_TYPED_DATA(SIZE_FROM_CLASS) { |
161 | const TypedDataPtr raw_obj = static_cast<const TypedDataPtr>(this); |
162 | intptr_t array_len = Smi::Value(raw_obj->ptr()->length_); |
      intptr_t length_in_bytes =
          array_len * TypedData::ElementSizeInBytes(class_id);
      instance_size = TypedData::InstanceSize(length_in_bytes);
166 | break; |
167 | } |
168 | #undef SIZE_FROM_CLASS |
169 | case kFfiPointerCid: |
170 | instance_size = Pointer::InstanceSize(); |
171 | break; |
172 | case kTypeArgumentsCid: { |
173 | const TypeArgumentsPtr raw_array = |
174 | static_cast<const TypeArgumentsPtr>(this); |
175 | intptr_t array_length = Smi::Value(raw_array->ptr()->length_); |
176 | instance_size = TypeArguments::InstanceSize(array_length); |
177 | break; |
178 | } |
179 | case kPcDescriptorsCid: { |
180 | const PcDescriptorsPtr raw_descriptors = |
181 | static_cast<const PcDescriptorsPtr>(this); |
182 | intptr_t length = raw_descriptors->ptr()->length_; |
183 | instance_size = PcDescriptors::InstanceSize(length); |
184 | break; |
185 | } |
186 | case kCodeSourceMapCid: { |
187 | const CodeSourceMapPtr raw_code_source_map = |
188 | static_cast<const CodeSourceMapPtr>(this); |
189 | intptr_t length = raw_code_source_map->ptr()->length_; |
190 | instance_size = CodeSourceMap::InstanceSize(length); |
191 | break; |
192 | } |
193 | case kCompressedStackMapsCid: { |
194 | const CompressedStackMapsPtr maps = |
195 | static_cast<const CompressedStackMapsPtr>(this); |
196 | intptr_t length = CompressedStackMaps::PayloadSizeOf(maps); |
197 | instance_size = CompressedStackMaps::InstanceSize(length); |
198 | break; |
199 | } |
200 | case kLocalVarDescriptorsCid: { |
201 | const LocalVarDescriptorsPtr raw_descriptors = |
202 | static_cast<const LocalVarDescriptorsPtr>(this); |
203 | intptr_t num_descriptors = raw_descriptors->ptr()->num_entries_; |
204 | instance_size = LocalVarDescriptors::InstanceSize(num_descriptors); |
205 | break; |
206 | } |
207 | case kExceptionHandlersCid: { |
208 | const ExceptionHandlersPtr raw_handlers = |
209 | static_cast<const ExceptionHandlersPtr>(this); |
210 | intptr_t num_handlers = raw_handlers->ptr()->num_entries_; |
211 | instance_size = ExceptionHandlers::InstanceSize(num_handlers); |
212 | break; |
213 | } |
214 | case kFreeListElement: { |
215 | uword addr = ObjectLayout::ToAddr(this); |
216 | FreeListElement* element = reinterpret_cast<FreeListElement*>(addr); |
217 | instance_size = element->HeapSize(); |
218 | break; |
219 | } |
220 | case kForwardingCorpse: { |
221 | uword addr = ObjectLayout::ToAddr(this); |
222 | ForwardingCorpse* element = reinterpret_cast<ForwardingCorpse*>(addr); |
223 | instance_size = element->HeapSize(); |
224 | break; |
225 | } |
226 | case kWeakSerializationReferenceCid: { |
227 | instance_size = WeakSerializationReference::InstanceSize(); |
228 | break; |
229 | } |
230 | default: { |
231 | // Get the (constant) instance size out of the class object. |
232 | // TODO(koda): Add Size(ClassTable*) interface to allow caching in loops. |
233 | auto isolate_group = IsolateGroup::Current(); |
234 | #if defined(DEBUG) |
235 | #if !defined(DART_PRECOMPILED_RUNTIME) |
236 | auto reload_context = isolate_group->reload_context(); |
237 | const bool use_saved_class_table = |
238 | reload_context != nullptr ? reload_context->UseSavedSizeTableForGC() |
239 | : false; |
240 | #else |
241 | const bool use_saved_class_table = false; |
242 | #endif |
243 | |
244 | auto class_table = isolate_group->shared_class_table(); |
245 | ASSERT(use_saved_class_table || class_table->SizeAt(class_id) > 0); |
246 | if (!class_table->IsValidIndex(class_id) || |
247 | (!class_table->HasValidClassAt(class_id) && !use_saved_class_table)) { |
248 | FATAL3("Invalid cid: %" Pd ", obj: %p, tags: %x. Corrupt heap?" , |
249 | class_id, this, static_cast<uint32_t>(tags)); |
250 | } |
251 | #endif // DEBUG |
252 | instance_size = isolate_group->GetClassSizeForHeapWalkAt(class_id); |
253 | } |
254 | } |
255 | ASSERT(instance_size != 0); |
256 | #if defined(DEBUG) |
257 | intptr_t tags_size = SizeTag::decode(tags); |
258 | if ((class_id == kArrayCid) && (instance_size > tags_size && tags_size > 0)) { |
259 | // TODO(22501): Array::MakeFixedLength could be in the process of shrinking |
260 | // the array (see comment therein), having already updated the tags but not |
261 | // yet set the new length. Wait a millisecond and try again. |
262 | int retries_remaining = 1000; // ... but not forever. |
263 | do { |
264 | OS::Sleep(1); |
265 | const ArrayPtr raw_array = static_cast<const ArrayPtr>(this); |
266 | intptr_t array_length = Smi::Value(raw_array->ptr()->length_); |
267 | instance_size = Array::InstanceSize(array_length); |
268 | } while ((instance_size > tags_size) && (--retries_remaining > 0)); |
269 | } |
270 | if ((instance_size != tags_size) && (tags_size != 0)) { |
271 | FATAL3("Size mismatch: %" Pd " from class vs %" Pd " from tags %x\n" , |
272 | instance_size, tags_size, tags); |
273 | } |
274 | #endif // DEBUG |
275 | return instance_size; |
276 | } |
277 | |
278 | intptr_t ObjectLayout::VisitPointersPredefined(ObjectPointerVisitor* visitor, |
279 | intptr_t class_id) { |
280 | ASSERT(class_id < kNumPredefinedCids); |
281 | |
282 | intptr_t size = 0; |
283 | |
284 | switch (class_id) { |
285 | #define RAW_VISITPOINTERS(clazz) \ |
286 | case k##clazz##Cid: { \ |
287 | clazz##Ptr raw_obj = static_cast<clazz##Ptr>(this); \ |
288 | size = clazz##Layout::Visit##clazz##Pointers(raw_obj, visitor); \ |
289 | break; \ |
290 | } |
291 | CLASS_LIST_NO_OBJECT(RAW_VISITPOINTERS) |
292 | #undef RAW_VISITPOINTERS |
293 | #define RAW_VISITPOINTERS(clazz) case kTypedData##clazz##Cid: |
294 | CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) { |
295 | TypedDataPtr raw_obj = static_cast<TypedDataPtr>(this); |
296 | size = TypedDataLayout::VisitTypedDataPointers(raw_obj, visitor); |
297 | break; |
298 | } |
299 | #undef RAW_VISITPOINTERS |
300 | #define RAW_VISITPOINTERS(clazz) case kExternalTypedData##clazz##Cid: |
301 | CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) { |
302 | auto raw_obj = static_cast<ExternalTypedDataPtr>(this); |
303 | size = ExternalTypedDataLayout::VisitExternalTypedDataPointers(raw_obj, |
304 | visitor); |
305 | break; |
306 | } |
307 | #undef RAW_VISITPOINTERS |
308 | case kByteDataViewCid: |
309 | #define RAW_VISITPOINTERS(clazz) case kTypedData##clazz##ViewCid: |
310 | CLASS_LIST_TYPED_DATA(RAW_VISITPOINTERS) { |
311 | auto raw_obj = static_cast<TypedDataViewPtr>(this); |
312 | size = |
313 | TypedDataViewLayout::VisitTypedDataViewPointers(raw_obj, visitor); |
314 | break; |
315 | } |
316 | #undef RAW_VISITPOINTERS |
317 | case kByteBufferCid: { |
318 | InstancePtr raw_obj = static_cast<InstancePtr>(this); |
319 | size = InstanceLayout::VisitInstancePointers(raw_obj, visitor); |
320 | break; |
321 | } |
322 | case kFfiPointerCid: { |
323 | PointerPtr raw_obj = static_cast<PointerPtr>(this); |
324 | size = PointerLayout::VisitPointerPointers(raw_obj, visitor); |
325 | break; |
326 | } |
327 | case kFfiDynamicLibraryCid: { |
328 | DynamicLibraryPtr raw_obj = static_cast<DynamicLibraryPtr>(this); |
329 | size = |
330 | DynamicLibraryLayout::VisitDynamicLibraryPointers(raw_obj, visitor); |
331 | break; |
332 | } |
333 | #define RAW_VISITPOINTERS(clazz) case kFfi##clazz##Cid: |
334 | CLASS_LIST_FFI_TYPE_MARKER(RAW_VISITPOINTERS) { |
      // NativeType objects do not have any fields or type arguments.
336 | size = HeapSize(); |
337 | break; |
338 | } |
339 | #undef RAW_VISITPOINTERS |
340 | #define RAW_VISITPOINTERS(clazz) case k##clazz##Cid: |
341 | CLASS_LIST_WASM(RAW_VISITPOINTERS) { |
342 | // These wasm types do not have any fields or type arguments. |
343 | size = HeapSize(); |
344 | break; |
345 | } |
346 | #undef RAW_VISITPOINTERS |
347 | case kFreeListElement: { |
348 | uword addr = ObjectLayout::ToAddr(this); |
349 | FreeListElement* element = reinterpret_cast<FreeListElement*>(addr); |
350 | size = element->HeapSize(); |
351 | break; |
352 | } |
353 | case kForwardingCorpse: { |
354 | uword addr = ObjectLayout::ToAddr(this); |
355 | ForwardingCorpse* forwarder = reinterpret_cast<ForwardingCorpse*>(addr); |
356 | size = forwarder->HeapSize(); |
357 | break; |
358 | } |
359 | case kNullCid: |
360 | case kNeverCid: |
361 | size = HeapSize(); |
362 | break; |
363 | default: |
364 | FATAL3("Invalid cid: %" Pd ", obj: %p, tags: %x. Corrupt heap?" , class_id, |
365 | this, static_cast<uint32_t>(tags_)); |
366 | break; |
367 | } |
368 | |
369 | #if defined(DEBUG) |
370 | ASSERT(size != 0); |
371 | const intptr_t expected_size = HeapSize(); |
372 | |
  // In general we expect that visitors return exactly the same size that
  // HeapSize would compute. However, in the case of Arrays we might have a
  // discrepancy when concurrently visiting an array that is being shrunk
  // with Array::MakeFixedLength: the visitor might have visited the full
  // array while here we are observing a smaller HeapSize().
378 | ASSERT(size == expected_size || |
379 | (class_id == kArrayCid && size > expected_size)); |
380 | return size; // Prefer larger size. |
381 | #else |
382 | return size; |
383 | #endif |
384 | } |
385 | |
386 | void ObjectLayout::VisitPointersPrecise(Isolate* isolate, |
387 | ObjectPointerVisitor* visitor) { |
388 | intptr_t class_id = GetClassId(); |
389 | if (class_id < kNumPredefinedCids) { |
390 | VisitPointersPredefined(visitor, class_id); |
391 | return; |
392 | } |
393 | |
394 | // N.B.: Not using the heap size! |
395 | uword next_field_offset = isolate->GetClassForHeapWalkAt(class_id) |
396 | ->ptr() |
397 | ->host_next_field_offset_in_words_ |
398 | << kWordSizeLog2; |
399 | ASSERT(next_field_offset > 0); |
400 | uword obj_addr = ObjectLayout::ToAddr(this); |
401 | uword from = obj_addr + sizeof(ObjectLayout); |
402 | uword to = obj_addr + next_field_offset - kWordSize; |
403 | const auto first = reinterpret_cast<ObjectPtr*>(from); |
404 | const auto last = reinterpret_cast<ObjectPtr*>(to); |
405 | |
406 | #if defined(SUPPORT_UNBOXED_INSTANCE_FIELDS) |
407 | const auto unboxed_fields_bitmap = |
408 | visitor->shared_class_table()->GetUnboxedFieldsMapAt(class_id); |
409 | |
410 | if (!unboxed_fields_bitmap.IsEmpty()) { |
411 | intptr_t bit = sizeof(ObjectLayout) / kWordSize; |
412 | for (ObjectPtr* current = first; current <= last; current++) { |
413 | if (!unboxed_fields_bitmap.Get(bit++)) { |
414 | visitor->VisitPointer(current); |
415 | } |
416 | } |
417 | } else { |
418 | visitor->VisitPointers(first, last); |
419 | } |
420 | #else |
421 | visitor->VisitPointers(first, last); |
422 | #endif // defined(SUPPORT_UNBOXED_INSTANCE_FIELDS) |
423 | } |
424 | |
425 | bool ObjectLayout::FindObject(FindObjectVisitor* visitor) { |
  ASSERT(visitor != nullptr);
427 | return visitor->FindObject(static_cast<ObjectPtr>(this)); |
428 | } |
429 | |
430 | // Most objects are visited with this function. It calls the from() and to() |
431 | // methods on the raw object to get the first and last cells that need |
432 | // visiting. |
433 | #define REGULAR_VISITOR(Type) \ |
434 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
435 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
436 | /* Make sure that we got here with the tagged pointer as this. */ \ |
437 | ASSERT(raw_obj->IsHeapObject()); \ |
438 | ASSERT_UNCOMPRESSED(Type); \ |
439 | visitor->VisitPointers(raw_obj->ptr()->from(), raw_obj->ptr()->to()); \ |
440 | return Type::InstanceSize(); \ |
441 | } |
442 | |
// Like [REGULAR_VISITOR], it calls the from() and to() methods on the raw
// object to get the first and last cells that need visiting.
//
// Unlike [REGULAR_VISITOR], this visitor calls the specialized
// VisitTypedDataViewPointers.
448 | #define TYPED_DATA_VIEW_VISITOR(Type) \ |
449 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
450 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
451 | /* Make sure that we got here with the tagged pointer as this. */ \ |
452 | ASSERT(raw_obj->IsHeapObject()); \ |
453 | ASSERT_UNCOMPRESSED(Type); \ |
454 | visitor->VisitTypedDataViewPointers(raw_obj, raw_obj->ptr()->from(), \ |
455 | raw_obj->ptr()->to()); \ |
456 | return Type::InstanceSize(); \ |
457 | } |
458 | |
// For variable-length objects. get_length is a code snippet that gets the
// length of the object, which is passed to InstanceSize and the to() method.
461 | #define VARIABLE_VISITOR(Type, get_length) \ |
462 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
463 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
464 | /* Make sure that we got here with the tagged pointer as this. */ \ |
465 | ASSERT(raw_obj->IsHeapObject()); \ |
466 | intptr_t length = get_length; \ |
467 | visitor->VisitPointers(raw_obj->ptr()->from(), \ |
468 | raw_obj->ptr()->to(length)); \ |
469 | return Type::InstanceSize(length); \ |
470 | } |
471 | |
472 | // For now there are no compressed pointers: |
473 | #define COMPRESSED_VISITOR(Type) REGULAR_VISITOR(Type) |
474 | #define VARIABLE_COMPRESSED_VISITOR(Type, get_length) \ |
475 | VARIABLE_VISITOR(Type, get_length) |
476 | |
477 | // For fixed-length objects that don't have any pointers that need visiting. |
478 | #define NULL_VISITOR(Type) \ |
479 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
480 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
481 | /* Make sure that we got here with the tagged pointer as this. */ \ |
482 | ASSERT(raw_obj->IsHeapObject()); \ |
483 | ASSERT_NOTHING_TO_VISIT(Type); \ |
484 | return Type::InstanceSize(); \ |
485 | } |
486 | |
487 | // For objects that don't have any pointers that need visiting, but have a |
488 | // variable length. |
489 | #define VARIABLE_NULL_VISITOR(Type, get_length) \ |
490 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
491 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
492 | /* Make sure that we got here with the tagged pointer as this. */ \ |
493 | ASSERT(raw_obj->IsHeapObject()); \ |
494 | ASSERT_NOTHING_TO_VISIT(Type); \ |
495 | intptr_t length = get_length; \ |
496 | return Type::InstanceSize(length); \ |
497 | } |
498 | |
499 | // For objects that are never instantiated on the heap. |
500 | #define UNREACHABLE_VISITOR(Type) \ |
501 | intptr_t Type##Layout::Visit##Type##Pointers( \ |
502 | Type##Ptr raw_obj, ObjectPointerVisitor* visitor) { \ |
503 | UNREACHABLE(); \ |
504 | return 0; \ |
505 | } |
506 | |
507 | REGULAR_VISITOR(Class) |
508 | REGULAR_VISITOR(Bytecode) |
509 | REGULAR_VISITOR(Type) |
510 | REGULAR_VISITOR(TypeRef) |
511 | REGULAR_VISITOR(TypeParameter) |
512 | REGULAR_VISITOR(PatchClass) |
513 | REGULAR_VISITOR(Function) |
514 | COMPRESSED_VISITOR(Closure) |
515 | REGULAR_VISITOR(ClosureData) |
516 | REGULAR_VISITOR(SignatureData) |
517 | REGULAR_VISITOR(RedirectionData) |
518 | REGULAR_VISITOR(FfiTrampolineData) |
519 | REGULAR_VISITOR(Field) |
520 | REGULAR_VISITOR(Script) |
521 | REGULAR_VISITOR(Library) |
522 | REGULAR_VISITOR(LibraryPrefix) |
523 | REGULAR_VISITOR(Namespace) |
524 | REGULAR_VISITOR(ParameterTypeCheck) |
525 | REGULAR_VISITOR(SingleTargetCache) |
526 | REGULAR_VISITOR(UnlinkedCall) |
527 | REGULAR_VISITOR(MonomorphicSmiableCall) |
528 | REGULAR_VISITOR(ICData) |
529 | REGULAR_VISITOR(MegamorphicCache) |
530 | REGULAR_VISITOR(ApiError) |
531 | REGULAR_VISITOR(LanguageError) |
532 | REGULAR_VISITOR(UnhandledException) |
533 | REGULAR_VISITOR(UnwindError) |
534 | REGULAR_VISITOR(ExternalOneByteString) |
535 | REGULAR_VISITOR(ExternalTwoByteString) |
536 | COMPRESSED_VISITOR(GrowableObjectArray) |
537 | COMPRESSED_VISITOR(LinkedHashMap) |
538 | COMPRESSED_VISITOR(ExternalTypedData) |
539 | TYPED_DATA_VIEW_VISITOR(TypedDataView) |
540 | REGULAR_VISITOR(ReceivePort) |
541 | REGULAR_VISITOR(StackTrace) |
542 | REGULAR_VISITOR(RegExp) |
543 | REGULAR_VISITOR(WeakProperty) |
544 | REGULAR_VISITOR(MirrorReference) |
545 | REGULAR_VISITOR(UserTag) |
546 | REGULAR_VISITOR(SubtypeTestCache) |
547 | REGULAR_VISITOR(LoadingUnit) |
548 | REGULAR_VISITOR(KernelProgramInfo) |
549 | VARIABLE_VISITOR(TypeArguments, Smi::Value(raw_obj->ptr()->length_)) |
550 | VARIABLE_VISITOR(LocalVarDescriptors, raw_obj->ptr()->num_entries_) |
551 | VARIABLE_VISITOR(ExceptionHandlers, raw_obj->ptr()->num_entries_) |
552 | VARIABLE_VISITOR(Context, raw_obj->ptr()->num_variables_) |
553 | VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->ptr()->length_)) |
554 | VARIABLE_COMPRESSED_VISITOR( |
555 | TypedData, |
556 | TypedData::ElementSizeInBytes(raw_obj->GetClassId()) * |
557 | Smi::Value(raw_obj->ptr()->length_)) |
558 | VARIABLE_VISITOR(ContextScope, raw_obj->ptr()->num_variables_) |
559 | NULL_VISITOR(Mint) |
560 | NULL_VISITOR(Double) |
561 | NULL_VISITOR(Float32x4) |
562 | NULL_VISITOR(Int32x4) |
563 | NULL_VISITOR(Float64x2) |
564 | NULL_VISITOR(Bool) |
565 | NULL_VISITOR(Capability) |
566 | NULL_VISITOR(SendPort) |
567 | NULL_VISITOR(TransferableTypedData) |
568 | REGULAR_VISITOR(Pointer) |
569 | NULL_VISITOR(DynamicLibrary) |
570 | VARIABLE_NULL_VISITOR(Instructions, Instructions::Size(raw_obj)) |
571 | VARIABLE_NULL_VISITOR(InstructionsSection, InstructionsSection::Size(raw_obj)) |
572 | VARIABLE_NULL_VISITOR(PcDescriptors, raw_obj->ptr()->length_) |
573 | VARIABLE_NULL_VISITOR(CodeSourceMap, raw_obj->ptr()->length_) |
574 | VARIABLE_NULL_VISITOR(CompressedStackMaps, |
575 | CompressedStackMaps::PayloadSizeOf(raw_obj)) |
576 | VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->ptr()->length_)) |
577 | VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->ptr()->length_)) |
578 | // Abstract types don't have their visitor called. |
579 | UNREACHABLE_VISITOR(AbstractType) |
580 | UNREACHABLE_VISITOR(CallSiteData) |
581 | UNREACHABLE_VISITOR(TypedDataBase) |
582 | UNREACHABLE_VISITOR(Error) |
583 | UNREACHABLE_VISITOR(Number) |
584 | UNREACHABLE_VISITOR(Integer) |
585 | UNREACHABLE_VISITOR(String) |
586 | UNREACHABLE_VISITOR(FutureOr) |
587 | // Smi has no heap representation. |
588 | UNREACHABLE_VISITOR(Smi) |
589 | #if defined(DART_PRECOMPILED_RUNTIME) |
590 | NULL_VISITOR(WeakSerializationReference) |
591 | #else |
592 | REGULAR_VISITOR(WeakSerializationReference) |
593 | #endif |
594 | |
595 | bool CodeLayout::ContainsPC(const ObjectPtr raw_obj, uword pc) { |
596 | if (!raw_obj->IsCode()) return false; |
597 | auto const raw_code = static_cast<const CodePtr>(raw_obj); |
598 | const uword start = Code::PayloadStartOf(raw_code); |
599 | const uword size = Code::PayloadSizeOf(raw_code); |
600 | return (pc - start) <= size; // pc may point just past last instruction. |
601 | } |
602 | |
603 | intptr_t CodeLayout::VisitCodePointers(CodePtr raw_obj, |
604 | ObjectPointerVisitor* visitor) { |
605 | visitor->VisitPointers(raw_obj->ptr()->from(), raw_obj->ptr()->to()); |
606 | |
607 | CodeLayout* obj = raw_obj->ptr(); |
608 | intptr_t length = Code::PtrOffBits::decode(obj->state_bits_); |
609 | #if defined(TARGET_ARCH_IA32) |
610 | // On IA32 only we embed pointers to objects directly in the generated |
611 | // instructions. The variable portion of a Code object describes where to |
612 | // find those pointers for tracing. |
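  // Illustrative sketch (offsets are made up): if data() held {12, 48}, two
  // tagged object pointers would be embedded at entry_point + 12 and
  // entry_point + 48, and the loop below would visit each of them.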
613 | if (Code::AliveBit::decode(obj->state_bits_)) { |
614 | uword entry_point = Code::PayloadStartOf(raw_obj); |
615 | for (intptr_t i = 0; i < length; i++) { |
616 | int32_t offset = obj->data()[i]; |
617 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(entry_point + offset)); |
618 | } |
619 | } |
620 | return Code::InstanceSize(length); |
621 | #else |
622 | // On all other architectures, objects are referenced indirectly through |
623 | // either an ObjectPool or Thread. |
624 | ASSERT(length == 0); |
625 | return Code::InstanceSize(0); |
626 | #endif |
627 | } |
628 | |
629 | bool BytecodeLayout::ContainsPC(ObjectPtr raw_obj, uword pc) { |
630 | if (raw_obj->IsBytecode()) { |
631 | BytecodePtr raw_bytecode = static_cast<BytecodePtr>(raw_obj); |
632 | uword start = raw_bytecode->ptr()->instructions_; |
633 | uword size = raw_bytecode->ptr()->instructions_size_; |
634 | return (pc - start) <= size; // pc may point past last instruction. |
635 | } |
636 | return false; |
637 | } |
638 | |
639 | intptr_t ObjectPoolLayout::VisitObjectPoolPointers( |
640 | ObjectPoolPtr raw_obj, |
641 | ObjectPointerVisitor* visitor) { |
642 | const intptr_t length = raw_obj->ptr()->length_; |
643 | ObjectPoolLayout::Entry* entries = raw_obj->ptr()->data(); |
644 | uint8_t* entry_bits = raw_obj->ptr()->entry_bits(); |
645 | for (intptr_t i = 0; i < length; ++i) { |
646 | ObjectPool::EntryType entry_type = |
647 | ObjectPool::TypeBits::decode(entry_bits[i]); |
648 | if ((entry_type == ObjectPool::EntryType::kTaggedObject) || |
649 | (entry_type == ObjectPool::EntryType::kNativeEntryData)) { |
650 | visitor->VisitPointer(&entries[i].raw_obj_); |
651 | } |
652 | } |
653 | return ObjectPool::InstanceSize(length); |
654 | } |
655 | |
656 | bool InstructionsLayout::ContainsPC(const InstructionsPtr raw_instr, uword pc) { |
657 | const uword start = Instructions::PayloadStart(raw_instr); |
658 | const uword size = Instructions::Size(raw_instr); |
659 | // We use <= instead of < here because the saved-pc can be outside the |
660 | // instruction stream if the last instruction is a call we don't expect to |
661 | // return (e.g. because it throws an exception). |
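  // Since pc, start, and size are all unsigned (uword), the one comparison
  // also rejects pc < start: the subtraction wraps around to a huge value.
  // E.g. (values made up) start = 0x1000, size = 0x40, pc = 0x0ff0 gives
  // pc - start = 0xfff...ff0 > size, so ContainsPC returns false.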
662 | return (pc - start) <= size; |
663 | } |
664 | |
665 | intptr_t InstanceLayout::VisitInstancePointers(InstancePtr raw_obj, |
666 | ObjectPointerVisitor* visitor) { |
667 | // Make sure that we got here with the tagged pointer as this. |
668 | ASSERT(raw_obj->IsHeapObject()); |
669 | uint32_t tags = raw_obj->ptr()->tags_; |
670 | intptr_t instance_size = SizeTag::decode(tags); |
671 | if (instance_size == 0) { |
672 | instance_size = visitor->isolate_group()->GetClassSizeForHeapWalkAt( |
673 | raw_obj->GetClassId()); |
674 | } |
675 | |
676 | // Calculate the first and last raw object pointer fields. |
677 | uword obj_addr = ObjectLayout::ToAddr(raw_obj); |
678 | uword from = obj_addr + sizeof(ObjectLayout); |
679 | uword to = obj_addr + instance_size - kWordSize; |
680 | visitor->VisitPointers(reinterpret_cast<ObjectPtr*>(from), |
681 | reinterpret_cast<ObjectPtr*>(to)); |
682 | return instance_size; |
683 | } |
684 | |
685 | intptr_t ImmutableArrayLayout::VisitImmutableArrayPointers( |
686 | ImmutableArrayPtr raw_obj, |
687 | ObjectPointerVisitor* visitor) { |
688 | return ArrayLayout::VisitArrayPointers(raw_obj, visitor); |
689 | } |
690 | |
691 | void ObjectLayout::RememberCard(ObjectPtr const* slot) { |
692 | OldPage::Of(static_cast<ObjectPtr>(this))->RememberCard(slot); |
693 | } |
694 | |
695 | DEFINE_LEAF_RUNTIME_ENTRY(void, |
696 | RememberCard, |
697 | 2, |
698 | uword /*ObjectPtr*/ object_in, |
699 | ObjectPtr* slot) { |
700 | ObjectPtr object = static_cast<ObjectPtr>(object_in); |
701 | ASSERT(object->IsOldObject()); |
702 | ASSERT(object->ptr()->IsCardRemembered()); |
703 | OldPage::Of(object)->RememberCard(slot); |
704 | } |
705 | END_LEAF_RUNTIME_ENTRY |
706 | |
707 | const char* PcDescriptorsLayout::KindToCString(Kind k) { |
708 | switch (k) { |
709 | #define ENUM_CASE(name, init) \ |
710 | case Kind::k##name: \ |
711 | return #name; |
712 | FOR_EACH_RAW_PC_DESCRIPTOR(ENUM_CASE) |
713 | #undef ENUM_CASE |
714 | default: |
715 | return nullptr; |
716 | } |
717 | } |
718 | |
719 | bool PcDescriptorsLayout::ParseKind(const char* cstr, Kind* out) { |
720 | ASSERT(cstr != nullptr && out != nullptr); |
721 | #define ENUM_CASE(name, init) \ |
722 | if (strcmp(#name, cstr) == 0) { \ |
723 | *out = Kind::k##name; \ |
724 | return true; \ |
725 | } |
726 | FOR_EACH_RAW_PC_DESCRIPTOR(ENUM_CASE) |
727 | #undef ENUM_CASE |
728 | return false; |
729 | } |
731 | |
732 | } // namespace dart |
733 | |