// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/object_graph.h"

#include "vm/dart.h"
#include "vm/dart_api_state.h"
#include "vm/growable_array.h"
#include "vm/isolate.h"
#include "vm/native_symbol.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/profiler.h"
#include "vm/raw_object.h"
#include "vm/raw_object_fields.h"
#include "vm/reusable_handles.h"
#include "vm/visitor.h"

namespace dart {

#if !defined(PRODUCT)

static bool IsUserClass(intptr_t cid) {
  if (cid == kContextCid) return true;
  if (cid == kTypeArgumentsCid) return false;
  return cid >= kInstanceCid;
}

// The state of a pre-order, depth-first traversal of an object graph.
// When a node is visited, *all* its children are pushed to the stack at once.
// We insert a sentinel between the node and its children on the stack, to
// remember that the node has been visited. The node is kept on the stack while
// its children are processed, to give the visitor a complete chain of parents.
//
// TODO(koda): Potential optimizations:
// - Use tag bits for compact Node and sentinel representations.
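//
// Example: after visiting node A whose children are B and C, the stack is
// (bottom to top): [A, <sentinel>, B, C]. The children are processed in turn;
// once both have been popped, the sentinel surfaces and is popped together
// with A, completing A's visit.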
class ObjectGraph::Stack : public ObjectPointerVisitor {
 public:
  explicit Stack(IsolateGroup* isolate_group)
      : ObjectPointerVisitor(isolate_group),
        include_vm_objects_(true),
        data_(kInitialCapacity) {
    object_ids_ = new WeakTable();
  }
  ~Stack() {
    delete object_ids_;
    object_ids_ = nullptr;
  }

  // Marks and pushes. Used to initialize this stack with roots.
  // Visited objects are marked in a freshly allocated WeakTable, so the
  // object id tables used by serializers are left undisturbed while handling
  // a service request (ObjectGraph's only use).
  virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
    for (ObjectPtr* current = first; current <= last; ++current) {
      if ((*current)->IsHeapObject() && !(*current)->ptr()->InVMIsolateHeap() &&
          object_ids_->GetValueExclusive(*current) == 0) {  // not visited yet
        if (!include_vm_objects_ && !IsUserClass((*current)->GetClassId())) {
          continue;
        }
        object_ids_->SetValueExclusive(*current, 1);
        Node node;
        node.ptr = current;
        node.obj = *current;
        node.gc_root_type = gc_root_type();
        data_.Add(node);
      }
    }
  }

  // Traverses the object graph from the current state.
  void TraverseGraph(ObjectGraph::Visitor* visitor) {
    while (!data_.is_empty()) {
      Node node = data_.Last();
      if (node.ptr == kSentinel) {
        data_.RemoveLast();
        // The node below the sentinel has already been visited.
        data_.RemoveLast();
        continue;
      }
      ObjectPtr obj = node.obj;
      ASSERT(obj->IsHeapObject());
      Node sentinel;
      sentinel.ptr = kSentinel;
      data_.Add(sentinel);
      StackIterator it(this, data_.length() - 2);
      visitor->gc_root_type = node.gc_root_type;
      Visitor::Direction direction = visitor->VisitObject(&it);
      if (direction == ObjectGraph::Visitor::kAbort) {
        break;
      }
      if (direction == ObjectGraph::Visitor::kProceed) {
        set_gc_root_type(node.gc_root_type);
        obj->ptr()->VisitPointers(this);
        clear_gc_root_type();
      }
    }
  }

  virtual bool visit_weak_persistent_handles() const {
    return visit_weak_persistent_handles_;
  }

  void set_visit_weak_persistent_handles(bool value) {
    visit_weak_persistent_handles_ = value;
  }

  bool include_vm_objects_;

 private:
  struct Node {
    ObjectPtr* ptr;  // kSentinel for the sentinel node.
    ObjectPtr obj;
    const char* gc_root_type;
  };

  bool visit_weak_persistent_handles_ = false;
  static ObjectPtr* const kSentinel;
  static const intptr_t kInitialCapacity = 1024;
  static const intptr_t kNoParent = -1;

  intptr_t Parent(intptr_t index) const {
    // The parent is just below the next sentinel.
    for (intptr_t i = index; i >= 1; --i) {
      if (data_[i].ptr == kSentinel) {
        return i - 1;
      }
    }
    return kNoParent;
  }

  // During the iteration of the heap we are already at a safepoint, so there
  // is no need to let the GC know about [object_ids_] (i.e., the GC cannot
  // run while we use [object_ids_]).
  WeakTable* object_ids_ = nullptr;
  GrowableArray<Node> data_;
  friend class StackIterator;
  DISALLOW_COPY_AND_ASSIGN(Stack);
};

ObjectPtr* const ObjectGraph::Stack::kSentinel = nullptr;

ObjectPtr ObjectGraph::StackIterator::Get() const {
  return stack_->data_[index_].obj;
}

bool ObjectGraph::StackIterator::MoveToParent() {
  intptr_t parent = stack_->Parent(index_);
  if (parent == Stack::kNoParent) {
    return false;
  } else {
    index_ = parent;
    return true;
  }
}

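// Returns the offset, in words, of the slot within the parent that holds the
// current object, or -1 if the slot lies outside the parent's heap object.
// For example, on a 64-bit target (kWordSize == 8), a slot 24 bytes past the
// parent's start yields 3.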
intptr_t ObjectGraph::StackIterator::OffsetFromParentInWords() const {
  intptr_t parent_index = stack_->Parent(index_);
  if (parent_index == Stack::kNoParent) {
    return -1;
  }
  Stack::Node parent = stack_->data_[parent_index];
  uword parent_start = ObjectLayout::ToAddr(parent.obj);
  Stack::Node child = stack_->data_[index_];
  ASSERT(child.obj == *child.ptr);
  uword child_ptr_addr = reinterpret_cast<uword>(child.ptr);
  intptr_t offset = child_ptr_addr - parent_start;
  if (offset > 0 && offset < parent.obj->ptr()->HeapSize()) {
    ASSERT(Utils::IsAligned(offset, kWordSize));
    return offset >> kWordSizeLog2;
  } else {
    // Some internal VM objects visit pointers not contained within the parent.
    // For instance, CodeLayout::VisitCodePointers visits pointers in
    // instructions.
    ASSERT(!parent.obj->IsDartInstance());
    return -1;
  }
}

static void IterateUserFields(ObjectPointerVisitor* visitor) {
  visitor->set_gc_root_type("user global");
  Thread* thread = Thread::Current();
  // Scope to prevent handles created here from appearing as stack references.
  HANDLESCOPE(thread);
  Zone* zone = thread->zone();
  const GrowableObjectArray& libraries = GrowableObjectArray::Handle(
      zone, thread->isolate()->object_store()->libraries());
  Library& library = Library::Handle(zone);
  Object& entry = Object::Handle(zone);
  Class& cls = Class::Handle(zone);
  Array& fields = Array::Handle(zone);
  Field& field = Field::Handle(zone);
  for (intptr_t i = 0; i < libraries.Length(); i++) {
    library ^= libraries.At(i);
    DictionaryIterator entries(library);
    while (entries.HasNext()) {
      entry = entries.GetNext();
      if (entry.IsClass()) {
        cls ^= entry.raw();
        fields = cls.fields();
        for (intptr_t j = 0; j < fields.Length(); j++) {
          field ^= fields.At(j);
          ObjectPtr ptr = field.raw();
          visitor->VisitPointer(&ptr);
        }
      } else if (entry.IsField()) {
        field ^= entry.raw();
        ObjectPtr ptr = field.raw();
        visitor->VisitPointer(&ptr);
      }
    }
  }
  visitor->clear_gc_root_type();
}

ObjectGraph::ObjectGraph(Thread* thread) : ThreadStackResource(thread) {
  // The VM isolate has all its objects pre-marked, so iterating over it
  // would be a no-op.
  ASSERT(thread->isolate() != Dart::vm_isolate());
}

ObjectGraph::~ObjectGraph() {}

void ObjectGraph::IterateObjects(ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  isolate_group()->VisitObjectPointers(&stack,
                                       ValidationPolicy::kDontValidateFrames);
  stack.TraverseGraph(visitor);
}

void ObjectGraph::IterateUserObjects(ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  IterateUserFields(&stack);
  stack.include_vm_objects_ = false;
  stack.TraverseGraph(visitor);
}

void ObjectGraph::IterateObjectsFrom(const Object& root,
                                     ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  ObjectPtr root_raw = root.raw();
  stack.VisitPointer(&root_raw);
  stack.TraverseGraph(visitor);
}

class InstanceAccumulator : public ObjectVisitor {
 public:
  InstanceAccumulator(ObjectGraph::Stack* stack, intptr_t class_id)
      : stack_(stack), class_id_(class_id) {}

  void VisitObject(ObjectPtr obj) {
    if (obj->GetClassId() == class_id_) {
      ObjectPtr rawobj = obj;
      stack_->VisitPointer(&rawobj);
    }
  }

 private:
  ObjectGraph::Stack* stack_;
  const intptr_t class_id_;

  DISALLOW_COPY_AND_ASSIGN(InstanceAccumulator);
};

void ObjectGraph::IterateObjectsFrom(intptr_t class_id,
                                     HeapIterationScope* iteration,
                                     ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());

  InstanceAccumulator accumulator(&stack, class_id);
  iteration->IterateObjectsNoImagePages(&accumulator);

  stack.TraverseGraph(visitor);
}

class SizeVisitor : public ObjectGraph::Visitor {
 public:
  SizeVisitor() : size_(0) {}
  intptr_t size() const { return size_; }
  virtual bool ShouldSkip(ObjectPtr obj) const { return false; }
  virtual Direction VisitObject(ObjectGraph::StackIterator* it) {
    ObjectPtr obj = it->Get();
    if (ShouldSkip(obj)) {
      return kBacktrack;
    }
    size_ += obj->ptr()->HeapSize();
    return kProceed;
  }

 private:
  intptr_t size_;
};

class SizeExcludingObjectVisitor : public SizeVisitor {
 public:
  explicit SizeExcludingObjectVisitor(const Object& skip) : skip_(skip) {}
  virtual bool ShouldSkip(ObjectPtr obj) const { return obj == skip_.raw(); }

 private:
  const Object& skip_;
};

class SizeExcludingClassVisitor : public SizeVisitor {
 public:
  explicit SizeExcludingClassVisitor(intptr_t skip) : skip_(skip) {}
  virtual bool ShouldSkip(ObjectPtr obj) const {
    return obj->GetClassId() == skip_;
  }

 private:
  const intptr_t skip_;
};

intptr_t ObjectGraph::SizeRetainedByInstance(const Object& obj) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjects(&total);
  intptr_t size_total = total.size();
  SizeExcludingObjectVisitor excluding_obj(obj);
  IterateObjects(&excluding_obj);
  intptr_t size_excluding_obj = excluding_obj.size();
  return size_total - size_excluding_obj;
}

intptr_t ObjectGraph::SizeReachableByInstance(const Object& obj) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjectsFrom(obj, &total);
  return total.size();
}

intptr_t ObjectGraph::SizeRetainedByClass(intptr_t class_id) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjects(&total);
  intptr_t size_total = total.size();
  SizeExcludingClassVisitor excluding_class(class_id);
  IterateObjects(&excluding_class);
  intptr_t size_excluding_class = excluding_class.size();
  return size_total - size_excluding_class;
}

intptr_t ObjectGraph::SizeReachableByClass(intptr_t class_id) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjectsFrom(class_id, &iteration_scope, &total);
  return total.size();
}

class RetainingPathVisitor : public ObjectGraph::Visitor {
 public:
  // We cannot use a GrowableObjectArray, since we must not trigger GC.
  RetainingPathVisitor(ObjectPtr obj, const Array& path)
      : thread_(Thread::Current()), obj_(obj), path_(path), length_(0) {}

  intptr_t length() const { return length_; }
  virtual bool visit_weak_persistent_handles() const { return true; }

  bool ShouldSkip(ObjectPtr obj) {
    // A retaining path through ICData is never the only retaining path,
    // and it is less informative than its alternatives.
    intptr_t cid = obj->GetClassId();
    switch (cid) {
      case kICDataCid:
        return true;
      default:
        return false;
    }
  }

  bool ShouldStop(ObjectPtr obj) {
    // A static field is considered a root from a language point of view.
    if (obj->IsField()) {
      const Field& field = Field::Handle(static_cast<FieldPtr>(obj));
      return field.is_static();
    }
    return false;
  }

  void StartList() { was_last_array_ = false; }

  intptr_t HideNDescendant(ObjectPtr obj) {
    // A GrowableObjectArray retains its elements through an internal storage
    // Array. Keeping both of them in the list is redundant.
    if (was_last_array_ && obj->IsGrowableObjectArray()) {
      was_last_array_ = false;
      return 1;
    }
    // Likewise, a LinkedHashMap retains its contents through an internal
    // storage Array. Keeping both of them in the list is redundant.
    if (was_last_array_ && obj->IsLinkedHashMap()) {
      was_last_array_ = false;
      return 1;
    }
    was_last_array_ = obj->IsArray();
    return 0;
  }

  virtual Direction VisitObject(ObjectGraph::StackIterator* it) {
    if (it->Get() != obj_) {
      if (ShouldSkip(it->Get())) {
        return kBacktrack;
      } else {
        return kProceed;
      }
    } else {
      HANDLESCOPE(thread_);
      Object& current = Object::Handle();
      Smi& offset_from_parent = Smi::Handle();
      StartList();
      do {
        // We collapse the backing store of some internal objects.
        length_ -= HideNDescendant(it->Get());
        intptr_t obj_index = length_ * 2;
        intptr_t offset_index = obj_index + 1;
        if (!path_.IsNull() && offset_index < path_.Length()) {
          current = it->Get();
          path_.SetAt(obj_index, current);
          offset_from_parent = Smi::New(it->OffsetFromParentInWords());
          path_.SetAt(offset_index, offset_from_parent);
        }
        ++length_;
      } while (!ShouldStop(it->Get()) && it->MoveToParent());
      return kAbort;
    }
  }

 private:
  Thread* thread_;
  ObjectPtr obj_;
  const Array& path_;
  intptr_t length_;
  bool was_last_array_;
};

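// The 'path' array is filled with (object, offset) pairs: path[2 * i] holds
// the i-th object on the path from the target towards a root, and
// path[2 * i + 1] holds a Smi giving the word offset, within the next object
// on the path, of the slot that points to it (-1 if unknown).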
ObjectGraph::RetainingPathResult ObjectGraph::RetainingPath(Object* obj,
                                                            const Array& path) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  // To break the trivial path, the handle 'obj' is temporarily cleared during
  // the search, but restored before returning.
  ObjectPtr raw = obj->raw();
  *obj = Object::null();
  RetainingPathVisitor visitor(raw, path);
  IterateUserObjects(&visitor);
  if (visitor.length() == 0) {
    IterateObjects(&visitor);
  }
  *obj = raw;
  return {visitor.length(), visitor.gc_root_type};
}

class InboundReferencesVisitor : public ObjectVisitor,
                                 public ObjectPointerVisitor {
 public:
  // We cannot use a GrowableObjectArray, since we must not trigger GC.
  InboundReferencesVisitor(Isolate* isolate,
                           ObjectPtr target,
                           const Array& references,
                           Object* scratch)
      : ObjectPointerVisitor(isolate->group()),
        source_(nullptr),
        target_(target),
        references_(references),
        scratch_(scratch),
        length_(0) {
    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
  }

  intptr_t length() const { return length_; }

  virtual void VisitObject(ObjectPtr raw_obj) {
    source_ = raw_obj;
    raw_obj->ptr()->VisitPointers(this);
  }

  virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
    for (ObjectPtr* current_ptr = first; current_ptr <= last; current_ptr++) {
      ObjectPtr current_obj = *current_ptr;
      if (current_obj == target_) {
        intptr_t obj_index = length_ * 2;
        intptr_t offset_index = obj_index + 1;
        if (!references_.IsNull() && offset_index < references_.Length()) {
          *scratch_ = source_;
          references_.SetAt(obj_index, *scratch_);

          *scratch_ = Smi::New(0);
          uword source_start = ObjectLayout::ToAddr(source_);
          uword current_ptr_addr = reinterpret_cast<uword>(current_ptr);
          intptr_t offset = current_ptr_addr - source_start;
          if (offset > 0 && offset < source_->ptr()->HeapSize()) {
            ASSERT(Utils::IsAligned(offset, kWordSize));
            *scratch_ = Smi::New(offset >> kWordSizeLog2);
          } else {
            // Some internal VM objects visit pointers not contained within the
            // parent. For instance, CodeLayout::VisitCodePointers visits
            // pointers in instructions.
            ASSERT(!source_->IsDartInstance());
            *scratch_ = Smi::New(-1);
          }
          references_.SetAt(offset_index, *scratch_);
        }
        ++length_;
      }
    }
  }

 private:
  ObjectPtr source_;
  ObjectPtr target_;
  const Array& references_;
  Object* scratch_;
  intptr_t length_;
};

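// Like RetainingPath's 'path', the 'references' array is filled with
// (object, offset) pairs: references[2 * i] holds an object that points at
// 'obj', and references[2 * i + 1] holds a Smi giving the word offset of the
// referencing slot within that object (-1 if the slot lies outside it).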
intptr_t ObjectGraph::InboundReferences(Object* obj, const Array& references) {
  Object& scratch = Object::Handle();
  HeapIterationScope iteration(Thread::Current());
  NoSafepointScope no_safepoint;
  InboundReferencesVisitor visitor(isolate(), obj->raw(), references, &scratch);
  iteration.IterateObjects(&visitor);
  return visitor.length();
}

// Each OldPage is divided into blocks of size kBlockSize. Each object belongs
// to the block containing its header word.
// When generating a heap snapshot, we assign objects sequential ids in heap
// iteration order. A bitvector is computed that marks the start of each
// object in its block, so the id of any object in the block can be found by
// adding the number of bits set before the object to the block's first id.
// Compare the ForwardingBlock used for heap compaction.
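//
// For example, if a block's first object has id 17 and the bitvector has bits
// set at the starts of three earlier objects, the object at the fourth set
// bit gets id 17 + 3 = 20 from Lookup().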
class CountingBlock {
 public:
  void Clear() {
    base_count_ = 0;
    count_bitvector_ = 0;
  }

  intptr_t Lookup(uword addr) const {
    uword block_offset = addr & ~kBlockMask;
    intptr_t bitvector_shift = block_offset >> kObjectAlignmentLog2;
    ASSERT(bitvector_shift < kBitsPerWord);
    uword preceding_bitmask = (static_cast<uword>(1) << bitvector_shift) - 1;
    return base_count_ +
           Utils::CountOneBitsWord(count_bitvector_ & preceding_bitmask);
  }

  void Record(uword old_addr, intptr_t id) {
    if (base_count_ == 0) {
      ASSERT(count_bitvector_ == 0);
      base_count_ = id;  // First object in the block.
    }

    uword block_offset = old_addr & ~kBlockMask;
    intptr_t bitvector_shift = block_offset >> kObjectAlignmentLog2;
    ASSERT(bitvector_shift < kBitsPerWord);
    count_bitvector_ |= static_cast<uword>(1) << bitvector_shift;
  }

 private:
  intptr_t base_count_;
  uword count_bitvector_;
  COMPILE_ASSERT(kBitVectorWordsPerBlock == 1);

  DISALLOW_COPY_AND_ASSIGN(CountingBlock);
};

class CountingPage {
 public:
  void Clear() {
    for (intptr_t i = 0; i < kBlocksPerPage; i++) {
      blocks_[i].Clear();
    }
  }

  intptr_t Lookup(uword addr) { return BlockFor(addr)->Lookup(addr); }
  void Record(uword addr, intptr_t id) {
    return BlockFor(addr)->Record(addr, id);
  }

  CountingBlock* BlockFor(uword addr) {
    intptr_t page_offset = addr & ~kOldPageMask;
    intptr_t block_number = page_offset / kBlockSize;
    ASSERT(block_number >= 0);
    ASSERT(block_number < kBlocksPerPage);
    return &blocks_[block_number];
  }

 private:
  CountingBlock blocks_[kBlocksPerPage];

  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(CountingPage);
};

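// The snapshot is streamed out in malloc'ed chunks. EnsureAvailable()
// reserves kMetadataReservation bytes at the start of each new chunk, and
// Flush() sends the chunk over the heapsnapshot service stream together with
// a JSON "HeapSnapshot" event describing it.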
void HeapSnapshotWriter::EnsureAvailable(intptr_t needed) {
  intptr_t available = capacity_ - size_;
  if (available >= needed) {
    return;
  }

  if (buffer_ != nullptr) {
    Flush();
  }
  ASSERT(buffer_ == nullptr);

  intptr_t chunk_size = kPreferredChunkSize;
  if (chunk_size < needed + kMetadataReservation) {
    chunk_size = needed + kMetadataReservation;
  }
  buffer_ = reinterpret_cast<uint8_t*>(malloc(chunk_size));
  size_ = kMetadataReservation;
  capacity_ = chunk_size;
}

void HeapSnapshotWriter::Flush(bool last) {
  if (size_ == 0 && !last) {
    return;
  }

  JSONStream js;
  {
    JSONObject jsobj(&js);
    jsobj.AddProperty("jsonrpc", "2.0");
    jsobj.AddProperty("method", "streamNotify");
    {
      JSONObject params(&jsobj, "params");
      params.AddProperty("streamId", Service::heapsnapshot_stream.id());
      {
        JSONObject event(&params, "event");
        event.AddProperty("type", "Event");
        event.AddProperty("kind", "HeapSnapshot");
        event.AddProperty("isolate", thread()->isolate());
        event.AddPropertyTimeMillis("timestamp", OS::GetCurrentTimeMillis());
        event.AddProperty("last", last);
      }
    }
  }

  Service::SendEventWithData(Service::heapsnapshot_stream.id(), "HeapSnapshot",
                             kMetadataReservation, js.buffer()->buffer(),
                             js.buffer()->length(), buffer_, size_);
  buffer_ = nullptr;
  size_ = 0;
  capacity_ = 0;
}

void HeapSnapshotWriter::SetupCountingPages() {
  for (intptr_t i = 0; i < kMaxImagePages; i++) {
    image_page_ranges_[i].base = 0;
    image_page_ranges_[i].size = 0;
  }
  intptr_t next_offset = 0;
  OldPage* image_page = Dart::vm_isolate()->heap()->old_space()->image_pages_;
  while (image_page != nullptr) {
    RELEASE_ASSERT(next_offset < kMaxImagePages);
    image_page_ranges_[next_offset].base = image_page->object_start();
    image_page_ranges_[next_offset].size =
        image_page->object_end() - image_page->object_start();
    image_page = image_page->next();
    next_offset++;
  }
  image_page = isolate()->heap()->old_space()->image_pages_;
  while (image_page != nullptr) {
    RELEASE_ASSERT(next_offset < kMaxImagePages);
    image_page_ranges_[next_offset].base = image_page->object_start();
    image_page_ranges_[next_offset].size =
        image_page->object_end() - image_page->object_start();
    image_page = image_page->next();
    next_offset++;
  }

  OldPage* page = isolate()->heap()->old_space()->pages_;
  while (page != nullptr) {
    // Reuse each page's forwarding-page allocation as a CountingPage.
    page->AllocateForwardingPage();
    CountingPage* counting_page =
        reinterpret_cast<CountingPage*>(page->forwarding_page());
    ASSERT(counting_page != nullptr);
    counting_page->Clear();
    page = page->next();
  }
}

688
689bool HeapSnapshotWriter::OnImagePage(ObjectPtr obj) const {
690 const uword addr = ObjectLayout::ToAddr(obj);
691 for (intptr_t i = 0; i < kMaxImagePages; i++) {
692 if ((addr - image_page_ranges_[i].base) < image_page_ranges_[i].size) {
693 return true;
694 }
695 }
696 return false;
697}
698
699CountingPage* HeapSnapshotWriter::FindCountingPage(ObjectPtr obj) const {
700 if (obj->IsOldObject() && !OnImagePage(obj)) {
701 // On a regular or large page.
702 OldPage* page = OldPage::Of(obj);
703 return reinterpret_cast<CountingPage*>(page->forwarding_page());
704 }
705
706 // On an image page or in new space.
707 return nullptr;
708}
709
710void HeapSnapshotWriter::AssignObjectId(ObjectPtr obj) {
711 ASSERT(obj->IsHeapObject());
712
713 CountingPage* counting_page = FindCountingPage(obj);
714 if (counting_page != nullptr) {
715 // Likely: object on an ordinary page.
716 counting_page->Record(ObjectLayout::ToAddr(obj), ++object_count_);
717 } else {
718 // Unlikely: new space object, or object on a large or image page.
719 thread()->heap()->SetObjectId(obj, ++object_count_);
720 }
721}
722
723intptr_t HeapSnapshotWriter::GetObjectId(ObjectPtr obj) const {
724 if (!obj->IsHeapObject()) {
725 return 0;
726 }
727
728 if (FLAG_write_protect_code && obj->IsInstructions() && !OnImagePage(obj)) {
729 // A non-writable alias mapping may exist for instruction pages.
730 obj = OldPage::ToWritable(obj);
731 }
732
733 CountingPage* counting_page = FindCountingPage(obj);
734 intptr_t id;
735 if (counting_page != nullptr) {
736 // Likely: object on an ordinary page.
737 id = counting_page->Lookup(ObjectLayout::ToAddr(obj));
738 } else {
739 // Unlikely: new space object, or object on a large or image page.
740 id = thread()->heap()->GetObjectId(obj);
741 }
742 ASSERT(id != 0);
743 return id;
744}
745
746void HeapSnapshotWriter::ClearObjectIds() {
747 thread()->heap()->ResetObjectIdTable();
748}
749
750void HeapSnapshotWriter::CountReferences(intptr_t count) {
751 reference_count_ += count;
752}
753
754void HeapSnapshotWriter::CountExternalProperty() {
755 external_property_count_ += 1;
756}
757
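// The snapshot is written in two passes over the same roots, heap objects,
// and weak persistent handles. Pass 1 assigns object ids and counts
// references and external properties; pass 2 repeats the walk and emits the
// data, using the counts to write each list's length ahead of its elements.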
class Pass1Visitor : public ObjectVisitor,
                     public ObjectPointerVisitor,
                     public HandleVisitor {
 public:
  explicit Pass1Visitor(HeapSnapshotWriter* writer)
      : ObjectVisitor(),
        ObjectPointerVisitor(IsolateGroup::Current()),
        HandleVisitor(Thread::Current()),
        writer_(writer) {}

  void VisitObject(ObjectPtr obj) {
    if (obj->IsPseudoObject()) return;

    writer_->AssignObjectId(obj);
    obj->ptr()->VisitPointers(this);
  }

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
    intptr_t count = to - from + 1;
    ASSERT(count >= 0);
    writer_->CountReferences(count);
  }

  void VisitHandle(uword addr) {
    FinalizablePersistentHandle* weak_persistent_handle =
        reinterpret_cast<FinalizablePersistentHandle*>(addr);
    if (!weak_persistent_handle->raw()->IsHeapObject()) {
      return;  // Free handle.
    }

    writer_->CountExternalProperty();
  }

 private:
  HeapSnapshotWriter* const writer_;

  DISALLOW_COPY_AND_ASSIGN(Pass1Visitor);
};

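// Tag describing the single piece of non-reference data written with each
// object record; the tag determines how the payload that follows is
// interpreted (a value, truncated string contents, a length, or a name).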
enum NonReferenceDataTags {
  kNoData = 0,
  kNullData,
  kBoolData,
  kIntData,
  kDoubleData,
  kLatin1Data,
  kUTF16Data,
  kLengthData,
  kNameData,
};

static const intptr_t kMaxStringElements = 128;

class Pass2Visitor : public ObjectVisitor,
                     public ObjectPointerVisitor,
                     public HandleVisitor {
 public:
  explicit Pass2Visitor(HeapSnapshotWriter* writer)
      : ObjectVisitor(),
        ObjectPointerVisitor(IsolateGroup::Current()),
        HandleVisitor(Thread::Current()),
        isolate_(thread()->isolate()),
        writer_(writer) {}

  void VisitObject(ObjectPtr obj) {
    if (obj->IsPseudoObject()) return;

    intptr_t cid = obj->GetClassId();
    writer_->WriteUnsigned(cid);
    writer_->WriteUnsigned(discount_sizes_ ? 0 : obj->ptr()->HeapSize());

    if (cid == kNullCid) {
      writer_->WriteUnsigned(kNullData);
    } else if (cid == kBoolCid) {
      writer_->WriteUnsigned(kBoolData);
      writer_->WriteUnsigned(
          static_cast<uintptr_t>(static_cast<BoolPtr>(obj)->ptr()->value_));
    } else if (cid == kSmiCid) {
      UNREACHABLE();
    } else if (cid == kMintCid) {
      writer_->WriteUnsigned(kIntData);
      writer_->WriteSigned(static_cast<MintPtr>(obj)->ptr()->value_);
    } else if (cid == kDoubleCid) {
      writer_->WriteUnsigned(kDoubleData);
      writer_->WriteBytes(&(static_cast<DoublePtr>(obj)->ptr()->value_),
                          sizeof(double));
    } else if (cid == kOneByteStringCid) {
      OneByteStringPtr str = static_cast<OneByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->ptr()->length_);
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kLatin1Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->ptr()->data()[0], trunc_len);
    } else if (cid == kExternalOneByteStringCid) {
      ExternalOneByteStringPtr str = static_cast<ExternalOneByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->ptr()->length_);
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kLatin1Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->ptr()->external_data_[0], trunc_len);
    } else if (cid == kTwoByteStringCid) {
      TwoByteStringPtr str = static_cast<TwoByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->ptr()->length_);
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kUTF16Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->ptr()->data()[0], trunc_len * 2);
    } else if (cid == kExternalTwoByteStringCid) {
      ExternalTwoByteStringPtr str = static_cast<ExternalTwoByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->ptr()->length_);
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kUTF16Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->ptr()->external_data_[0], trunc_len * 2);
    } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<ArrayPtr>(obj)->ptr()->length_));
    } else if (cid == kGrowableObjectArrayCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<GrowableObjectArrayPtr>(obj)->ptr()->length_));
    } else if (cid == kLinkedHashMapCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<LinkedHashMapPtr>(obj)->ptr()->used_data_));
    } else if (cid == kObjectPoolCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(static_cast<ObjectPoolPtr>(obj)->ptr()->length_);
    } else if (IsTypedDataClassId(cid)) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<TypedDataPtr>(obj)->ptr()->length_));
    } else if (IsExternalTypedDataClassId(cid)) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<ExternalTypedDataPtr>(obj)->ptr()->length_));
    } else if (cid == kFunctionCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<FunctionPtr>(obj)->ptr()->name_);
    } else if (cid == kCodeCid) {
      ObjectPtr owner = static_cast<CodePtr>(obj)->ptr()->owner_;
      if (owner->IsFunction()) {
        writer_->WriteUnsigned(kNameData);
        ScrubAndWriteUtf8(static_cast<FunctionPtr>(owner)->ptr()->name_);
      } else if (owner->IsClass()) {
        writer_->WriteUnsigned(kNameData);
        ScrubAndWriteUtf8(static_cast<ClassPtr>(owner)->ptr()->name_);
      } else {
        writer_->WriteUnsigned(kNoData);
      }
    } else if (cid == kFieldCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<FieldPtr>(obj)->ptr()->name_);
    } else if (cid == kClassCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<ClassPtr>(obj)->ptr()->name_);
    } else if (cid == kLibraryCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<LibraryPtr>(obj)->ptr()->url_);
    } else if (cid == kScriptCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<ScriptPtr>(obj)->ptr()->url_);
    } else {
      writer_->WriteUnsigned(kNoData);
    }

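    // Visit the object's pointers twice: first to count the outgoing
    // references, then, once the count has been written, to write the id of
    // each referenced object.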
    DoCount();
    obj->ptr()->VisitPointersPrecise(isolate_, this);
    DoWrite();
    obj->ptr()->VisitPointersPrecise(isolate_, this);
  }

  void ScrubAndWriteUtf8(StringPtr str) {
    if (str == String::null()) {
      writer_->WriteUtf8("null");
    } else {
      String handle;
      handle = str;
      char* value = handle.ToMallocCString();
      writer_->ScrubAndWriteUtf8(value);
      free(value);
    }
  }

  void set_discount_sizes(bool value) { discount_sizes_ = value; }

  void DoCount() {
    writing_ = false;
    counted_ = 0;
    written_ = 0;
  }
  void DoWrite() {
    writing_ = true;
    writer_->WriteUnsigned(counted_);
  }

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) {
    if (writing_) {
      for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
        ObjectPtr target = *ptr;
        written_++;
        total_++;
        writer_->WriteUnsigned(writer_->GetObjectId(target));
      }
    } else {
      intptr_t count = to - from + 1;
      ASSERT(count >= 0);
      counted_ += count;
    }
  }

  void VisitHandle(uword addr) {
    FinalizablePersistentHandle* weak_persistent_handle =
        reinterpret_cast<FinalizablePersistentHandle*>(addr);
    if (!weak_persistent_handle->raw()->IsHeapObject()) {
      return;  // Free handle.
    }

    writer_->WriteUnsigned(writer_->GetObjectId(weak_persistent_handle->raw()));
    writer_->WriteUnsigned(weak_persistent_handle->external_size());
    // Attempt to include a native symbol name.
    auto const name = NativeSymbolResolver::LookupSymbolName(
        weak_persistent_handle->callback_address(), nullptr);
    writer_->WriteUtf8((name == nullptr) ? "Unknown native function" : name);
    if (name != nullptr) {
      NativeSymbolResolver::FreeSymbolName(name);
    }
  }

 private:
  // TODO(dartbug.com/36097): Once the shared class table contains more
  // information than just the size (i.e. includes an immutable class
  // descriptor), we can remove this dependency on the current isolate.
  Isolate* isolate_;
  HeapSnapshotWriter* const writer_;
  bool writing_ = false;
  intptr_t counted_ = 0;
  intptr_t written_ = 0;
  intptr_t total_ = 0;
  bool discount_sizes_ = false;

  DISALLOW_COPY_AND_ASSIGN(Pass2Visitor);
};

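// Stream layout, as written below: an 8-byte magic value, flags, the isolate
// name and memory totals, the class table with per-class field descriptions,
// then the root and heap objects with their references, and finally the
// external (weak persistent handle) properties.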
void HeapSnapshotWriter::Write() {
  HeapIterationScope iteration(thread());

  WriteBytes("dartheap", 8);  // Magic value.
  WriteUnsigned(0);           // Flags.
  WriteUtf8(isolate()->name());
  Heap* H = thread()->heap();

  {
    intptr_t used = H->TotalUsedInWords() << kWordSizeLog2;
    intptr_t capacity = H->TotalCapacityInWords() << kWordSizeLog2;
    intptr_t external = H->TotalExternalInWords() << kWordSizeLog2;
    intptr_t image = H->old_space()->ImageInWords() << kWordSizeLog2;
    WriteUnsigned(used + image);
    WriteUnsigned(capacity + image);
    WriteUnsigned(external);
  }

  {
    HANDLESCOPE(thread());
    ClassTable* class_table = isolate()->class_table();
    class_count_ = class_table->NumCids() - 1;

    Class& cls = Class::Handle();
    Library& lib = Library::Handle();
    String& str = String::Handle();
    Array& fields = Array::Handle();
    Field& field = Field::Handle();

    WriteUnsigned(class_count_);
    for (intptr_t cid = 1; cid <= class_count_; cid++) {
      if (!class_table->HasValidClassAt(cid)) {
        WriteUnsigned(0);  // Flags
        WriteUtf8("");     // Name
        WriteUtf8("");     // Library name
        WriteUtf8("");     // Library uri
        WriteUtf8("");     // Reserved
        WriteUnsigned(0);  // Field count
      } else {
        cls = class_table->At(cid);
        WriteUnsigned(0);  // Flags
        str = cls.Name();
        ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
        lib = cls.library();
        if (lib.IsNull()) {
          WriteUtf8("");
          WriteUtf8("");
        } else {
          str = lib.name();
          ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
          str = lib.url();
          ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
        }
        WriteUtf8("");  // Reserved

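        // A class's field count covers both the native fields listed for
        // this cid in OffsetsTable and, for finalized classes, the Dart
        // instance fields declared on the class and its superclasses.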
        intptr_t field_count = 0;
        intptr_t min_offset = kIntptrMax;
        for (intptr_t j = 0; OffsetsTable::offsets_table[j].class_id != -1;
             j++) {
          if (OffsetsTable::offsets_table[j].class_id == cid) {
            field_count++;
            intptr_t offset = OffsetsTable::offsets_table[j].offset;
            min_offset = Utils::Minimum(min_offset, offset);
          }
        }
        if (cls.is_finalized()) {
          do {
            fields = cls.fields();
            if (!fields.IsNull()) {
              for (intptr_t i = 0; i < fields.Length(); i++) {
                field ^= fields.At(i);
                if (field.is_instance()) {
                  field_count++;
                }
              }
            }
            cls = cls.SuperClass();
          } while (!cls.IsNull());
          cls = class_table->At(cid);
        }

        WriteUnsigned(field_count);
        for (intptr_t j = 0; OffsetsTable::offsets_table[j].class_id != -1;
             j++) {
          if (OffsetsTable::offsets_table[j].class_id == cid) {
            intptr_t flags = 1;  // Strong.
            WriteUnsigned(flags);
            intptr_t offset = OffsetsTable::offsets_table[j].offset;
            intptr_t index = (offset - min_offset) / kWordSize;
            ASSERT(index >= 0);
            WriteUnsigned(index);
            WriteUtf8(OffsetsTable::offsets_table[j].field_name);
            WriteUtf8("");  // Reserved
          }
        }
        if (cls.is_finalized()) {
          do {
            fields = cls.fields();
            if (!fields.IsNull()) {
              for (intptr_t i = 0; i < fields.Length(); i++) {
                field ^= fields.At(i);
                if (field.is_instance()) {
                  intptr_t flags = 1;  // Strong.
                  WriteUnsigned(flags);
                  intptr_t index = field.HostOffset() / kWordSize - 1;
                  ASSERT(index >= 0);
                  WriteUnsigned(index);
                  str = field.name();
                  ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
                  WriteUtf8("");  // Reserved
                }
              }
            }
            cls = cls.SuperClass();
          } while (!cls.IsNull());
          cls = class_table->At(cid);
        }
      }
    }
  }

  SetupCountingPages();

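  // Pass 1: assign object ids and count references and external properties.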
  {
    Pass1Visitor visitor(this);

    // Root "object".
    ++object_count_;
    isolate()->VisitObjectPointers(&visitor,
                                   ValidationPolicy::kDontValidateFrames);

    // Heap objects.
    iteration.IterateVMIsolateObjects(&visitor);
    iteration.IterateObjects(&visitor);

    // External properties.
    isolate()->group()->VisitWeakPersistentHandles(&visitor);
  }

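  // Pass 2: write the object records, using the ids and counts from pass 1.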
  {
    Pass2Visitor visitor(this);

    WriteUnsigned(reference_count_);
    WriteUnsigned(object_count_);

    // Root "object".
    WriteUnsigned(0);  // cid
    WriteUnsigned(0);  // shallowSize
    WriteUnsigned(kNoData);
    visitor.DoCount();
    isolate()->VisitObjectPointers(&visitor,
                                   ValidationPolicy::kDontValidateFrames);
    visitor.DoWrite();
    isolate()->VisitObjectPointers(&visitor,
                                   ValidationPolicy::kDontValidateFrames);

    // Heap objects.
    visitor.set_discount_sizes(true);
    iteration.IterateVMIsolateObjects(&visitor);
    visitor.set_discount_sizes(false);
    iteration.IterateObjects(&visitor);

    // External properties.
    WriteUnsigned(external_property_count_);
    isolate()->group()->VisitWeakPersistentHandles(&visitor);
  }

  ClearObjectIds();
  Flush(true);
}

CountObjectsVisitor::CountObjectsVisitor(Thread* thread, intptr_t class_count)
    : ObjectVisitor(),
      HandleVisitor(thread),
      new_count_(new intptr_t[class_count]),
      new_size_(new intptr_t[class_count]),
      new_external_size_(new intptr_t[class_count]),
      old_count_(new intptr_t[class_count]),
      old_size_(new intptr_t[class_count]),
      old_external_size_(new intptr_t[class_count]) {
  memset(new_count_.get(), 0, class_count * sizeof(intptr_t));
  memset(new_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(new_external_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_count_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_external_size_.get(), 0, class_count * sizeof(intptr_t));
}

void CountObjectsVisitor::VisitObject(ObjectPtr obj) {
  intptr_t cid = obj->GetClassId();
  intptr_t size = obj->ptr()->HeapSize();
  if (obj->IsNewObject()) {
    new_count_[cid] += 1;
    new_size_[cid] += size;
  } else {
    old_count_[cid] += 1;
    old_size_[cid] += size;
  }
}

void CountObjectsVisitor::VisitHandle(uword addr) {
  FinalizablePersistentHandle* handle =
      reinterpret_cast<FinalizablePersistentHandle*>(addr);
  ObjectPtr obj = handle->raw();
  if (!obj->IsHeapObject()) {
    return;
  }
  intptr_t cid = obj->GetClassId();
  intptr_t size = handle->external_size();
  if (obj->IsNewObject()) {
    new_external_size_[cid] += size;
  } else {
    old_external_size_[cid] += size;
  }
}

#endif  // !defined(PRODUCT)

}  // namespace dart