// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_PROFILER_H_
#define RUNTIME_VM_PROFILER_H_

#include "platform/atomic.h"

#include "vm/allocation.h"
#include "vm/bitfield.h"
#include "vm/code_observers.h"
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/malloc_hooks.h"
#include "vm/native_symbol.h"
#include "vm/object.h"
#include "vm/tags.h"
#include "vm/thread_interrupter.h"

// Profiler sampling and stack walking support.
// NOTE: For service-related code, see profiler_service.h.

namespace dart {

// Forward declarations.
class ProcessedSample;
class ProcessedSampleBuffer;

class Sample;
class AllocationSampleBuffer;
class SampleBuffer;
class ProfileTrieNode;

#define PROFILER_COUNTERS(V)                                                   \
  V(bail_out_unknown_task)                                                     \
  V(bail_out_jump_to_exception_handler)                                        \
  V(bail_out_check_isolate)                                                    \
  V(single_frame_sample_deoptimizing)                                          \
  V(single_frame_sample_register_check)                                        \
  V(single_frame_sample_get_and_validate_stack_bounds)                         \
  V(stack_walker_native)                                                       \
  V(stack_walker_dart_exit)                                                    \
  V(stack_walker_dart)                                                         \
  V(stack_walker_none)                                                         \
  V(incomplete_sample_fp_bounds)                                               \
  V(incomplete_sample_fp_step)                                                 \
  V(incomplete_sample_bad_pc)                                                  \
  V(failure_native_allocation_sample)

struct ProfilerCounters {
#define DECLARE_PROFILER_COUNTER(name) RelaxedAtomic<int64_t> name;
  PROFILER_COUNTERS(DECLARE_PROFILER_COUNTER)
#undef DECLARE_PROFILER_COUNTER
};
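
// For reference, the X-macro above stamps out one relaxed-atomic counter per
// PROFILER_COUNTERS entry; the first two fields of the expansion look like
// this (remaining entries follow the same pattern):
//
//   struct ProfilerCounters {
//     RelaxedAtomic<int64_t> bail_out_unknown_task;
//     RelaxedAtomic<int64_t> bail_out_jump_to_exception_handler;
//     // ... one field per remaining PROFILER_COUNTERS entry ...
//   };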

class Profiler : public AllStatic {
 public:
  static void Init();
  static void InitAllocationSampleBuffer();
  static void Cleanup();

  static void SetSampleDepth(intptr_t depth);
  static void SetSamplePeriod(intptr_t period);
  // Restarts sampling with the current profile period. This is called after
  // the profile period is changed via the service protocol.
  static void UpdateSamplePeriod();
  // Starts or shuts down the profiler after --profiler is changed via the
  // service protocol.
  static void UpdateRunningState();

  static SampleBuffer* sample_buffer() { return sample_buffer_; }
  static AllocationSampleBuffer* allocation_sample_buffer() {
    return allocation_sample_buffer_;
  }

  static void DumpStackTrace(void* context);
  static void DumpStackTrace(bool for_crash = true);

  static void SampleAllocation(Thread* thread, intptr_t cid);
  static Sample* SampleNativeAllocation(intptr_t skip_count,
                                        uword address,
                                        uintptr_t allocation_size);

  // SampleThread is called from inside the signal handler, so it is critical
  // that the implementation of SampleThread does not do any of the
  // following:
  // * Accessing TLS -- Because on Windows and Fuchsia the callback will be
  //   running in a different thread.
  // * Allocating memory -- Because this takes locks which may already be
  //   held, resulting in a deadlock.
  // * Taking a lock -- See above.
  static void SampleThread(Thread* thread,
                           const InterruptedThreadState& state);

  static ProfilerCounters counters() {
    // Copies the counter values.
    return counters_;
  }
  inline static intptr_t Size();

 private:
  static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash);

  // Calculates the sample buffer capacity. Returns
  // SampleBuffer::kDefaultBufferCapacity if --sample-buffer-duration is not
  // provided. Otherwise, the capacity is based on the sample rate, maximum
  // sample stack depth, and the number of seconds of samples the sample buffer
  // should be able to accommodate.
  static intptr_t CalculateSampleBufferCapacity();

  // Does not walk the thread's stack.
  static void SampleThreadSingleFrame(Thread* thread, uintptr_t pc);
  static RelaxedAtomic<bool> initialized_;

  static SampleBuffer* sample_buffer_;
  static AllocationSampleBuffer* allocation_sample_buffer_;

  static ProfilerCounters counters_;

  friend class Thread;
};
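
// Typical lifecycle, as a hypothetical sketch (the period is assumed to be
// in microseconds, mirroring the --profile_period flag's default of 1000;
// the actual call sites live in the VM startup and shutdown paths):
//
//   Profiler::Init();
//   Profiler::SetSampleDepth(64);     // max frames captured per sample
//   Profiler::SetSamplePeriod(1000);  // sample every 1000us (1000Hz)
//   ...
//   Profiler::Cleanup();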

class SampleVisitor : public ValueObject {
 public:
  explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {}
  virtual ~SampleVisitor() {}

  virtual void VisitSample(Sample* sample) = 0;

  intptr_t visited() const { return visited_; }

  void IncrementVisited() { visited_++; }

  Dart_Port port() const { return port_; }

 private:
  Dart_Port port_;
  intptr_t visited_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(SampleVisitor);
};
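
// Example of a concrete visitor, as a hypothetical sketch (the class name
// and field are illustrative only). |SampleBuffer::VisitSamples| (below)
// calls |VisitSample| once per valid head sample owned by |port|:
//
//   class LatestSampleVisitor : public SampleVisitor {
//    public:
//     explicit LatestSampleVisitor(Dart_Port port)
//         : SampleVisitor(port), latest_(0) {}
//     virtual void VisitSample(Sample* sample) {
//       if (sample->timestamp() > latest_) latest_ = sample->timestamp();
//     }
//     int64_t latest() const { return latest_; }
//
//    private:
//     int64_t latest_;
//   };
//
//   LatestSampleVisitor visitor(port);
//   Profiler::sample_buffer()->VisitSamples(&visitor);
//   // visitor.visited() now holds the number of samples examined.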

class SampleFilter : public ValueObject {
 public:
  SampleFilter(Dart_Port port,
               intptr_t thread_task_mask,
               int64_t time_origin_micros,
               int64_t time_extent_micros)
      : port_(port),
        thread_task_mask_(thread_task_mask),
        time_origin_micros_(time_origin_micros),
        time_extent_micros_(time_extent_micros) {
    ASSERT(thread_task_mask != 0);
    ASSERT(time_origin_micros_ >= -1);
    ASSERT(time_extent_micros_ >= -1);
  }
  virtual ~SampleFilter() {}

  // Override this function.
  // Return |true| if |sample| passes the filter.
  virtual bool FilterSample(Sample* sample) { return true; }

  Dart_Port port() const { return port_; }

  // Returns |true| if |sample| passes the time filter.
  bool TimeFilterSample(Sample* sample);

  // Returns |true| if |sample| passes the thread task filter.
  bool TaskFilterSample(Sample* sample);

  static const intptr_t kNoTaskFilter = -1;

 private:
  Dart_Port port_;
  intptr_t thread_task_mask_;
  int64_t time_origin_micros_;
  int64_t time_extent_micros_;
};
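
// Example of a custom filter, as a hypothetical sketch (the class name is
// illustrative; |Thread::kMutatorTask| is assumed to be the mutator bit in
// the task mask). Time and task filtering are handled by the base class via
// |TimeFilterSample| and |TaskFilterSample|; |FilterSample| layers
// sample-specific criteria on top:
//
//   class NonIgnoredMutatorFilter : public SampleFilter {
//    public:
//     NonIgnoredMutatorFilter(Dart_Port port,
//                             int64_t time_origin_micros,
//                             int64_t time_extent_micros)
//         : SampleFilter(port,
//                        Thread::kMutatorTask,
//                        time_origin_micros,
//                        time_extent_micros) {}
//     virtual bool FilterSample(Sample* sample) {
//       return !sample->ignore_sample();
//     }
//   };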

class ClearProfileVisitor : public SampleVisitor {
 public:
  explicit ClearProfileVisitor(Isolate* isolate);

  virtual void VisitSample(Sample* sample);
};

// Each Sample holds a stack trace from an isolate.
class Sample {
 public:
  void Init(Dart_Port port, int64_t timestamp, ThreadId tid) {
    Clear();
    timestamp_ = timestamp;
    tid_ = tid;
    port_ = port;
  }

  Dart_Port port() const { return port_; }

  // Thread sample was taken on.
  ThreadId tid() const { return tid_; }

  void Clear() {
    port_ = ILLEGAL_PORT;
    pc_marker_ = 0;
    for (intptr_t i = 0; i < kStackBufferSizeInWords; i++) {
      stack_buffer_[i] = 0;
    }
    vm_tag_ = VMTag::kInvalidTagId;
    user_tag_ = UserTags::kDefaultUserTag;
    lr_ = 0;
    metadata_ = 0;
    state_ = 0;
    native_allocation_address_ = 0;
    native_allocation_size_bytes_ = 0;
    continuation_index_ = -1;
    next_free_ = NULL;
    uword* pcs = GetPCArray();
    for (intptr_t i = 0; i < pcs_length_; i++) {
      pcs[i] = 0;
    }
    set_head_sample(true);
  }

  // Timestamp sample was taken at.
  int64_t timestamp() const { return timestamp_; }

  // Topmost pc.
  uword pc() const { return At(0); }

  // Get stack trace entry.
  uword At(intptr_t i) const {
    ASSERT(i >= 0);
    ASSERT(i < pcs_length_);
    uword* pcs = GetPCArray();
    return pcs[i];
  }

  // Set stack trace entry.
  void SetAt(intptr_t i, uword pc) {
    ASSERT(i >= 0);
    ASSERT(i < pcs_length_);
    uword* pcs = GetPCArray();
    pcs[i] = pc;
  }

  void DumpStackTrace() {
    for (intptr_t i = 0; i < pcs_length_; ++i) {
      uintptr_t start = 0;
      uword pc = At(i);
      char* native_symbol_name =
          NativeSymbolResolver::LookupSymbolName(pc, &start);
      if (native_symbol_name == NULL) {
        OS::PrintErr(" [0x%" Pp "] Unknown symbol\n", pc);
      } else {
        OS::PrintErr(" [0x%" Pp "] %s\n", pc, native_symbol_name);
        NativeSymbolResolver::FreeSymbolName(native_symbol_name);
      }
    }
  }

  uword vm_tag() const { return vm_tag_; }
  void set_vm_tag(uword tag) {
    ASSERT(tag != VMTag::kInvalidTagId);
    vm_tag_ = tag;
  }

  uword user_tag() const { return user_tag_; }
  void set_user_tag(uword tag) { user_tag_ = tag; }

  uword pc_marker() const { return pc_marker_; }

  void set_pc_marker(uword pc_marker) { pc_marker_ = pc_marker; }

  uword lr() const { return lr_; }

  void set_lr(uword link_register) { lr_ = link_register; }

  bool leaf_frame_is_dart() const { return LeafFrameIsDart::decode(state_); }

  void set_leaf_frame_is_dart(bool leaf_frame_is_dart) {
    state_ = LeafFrameIsDart::update(leaf_frame_is_dart, state_);
  }

  bool ignore_sample() const { return IgnoreBit::decode(state_); }

  void set_ignore_sample(bool ignore_sample) {
    state_ = IgnoreBit::update(ignore_sample, state_);
  }

  bool exit_frame_sample() const { return ExitFrameBit::decode(state_); }

  void set_exit_frame_sample(bool exit_frame_sample) {
    state_ = ExitFrameBit::update(exit_frame_sample, state_);
  }

  bool missing_frame_inserted() const {
    return MissingFrameInsertedBit::decode(state_);
  }

  void set_missing_frame_inserted(bool missing_frame_inserted) {
    state_ = MissingFrameInsertedBit::update(missing_frame_inserted, state_);
  }

  bool truncated_trace() const { return TruncatedTraceBit::decode(state_); }

  void set_truncated_trace(bool truncated_trace) {
    state_ = TruncatedTraceBit::update(truncated_trace, state_);
  }

  bool is_allocation_sample() const {
    return ClassAllocationSampleBit::decode(state_);
  }

  void set_is_allocation_sample(bool allocation_sample) {
    state_ = ClassAllocationSampleBit::update(allocation_sample, state_);
  }

  uword native_allocation_address() const {
    return native_allocation_address_;
  }

  void set_native_allocation_address(uword address) {
    native_allocation_address_ = address;
  }

  uintptr_t native_allocation_size_bytes() const {
    return native_allocation_size_bytes_;
  }

  void set_native_allocation_size_bytes(uintptr_t size) {
    native_allocation_size_bytes_ = size;
  }

  Sample* next_free() const { return next_free_; }
  void set_next_free(Sample* next_free) { next_free_ = next_free; }

  Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); }

  void set_thread_task(Thread::TaskKind task) {
    state_ = ThreadTaskBit::update(task, state_);
  }

  bool is_continuation_sample() const {
    return ContinuationSampleBit::decode(state_);
  }

  void SetContinuationIndex(intptr_t index) {
    ASSERT(!is_continuation_sample());
    ASSERT(continuation_index_ == -1);
    state_ = ContinuationSampleBit::update(true, state_);
    continuation_index_ = index;
    ASSERT(is_continuation_sample());
  }

  intptr_t continuation_index() const {
    ASSERT(is_continuation_sample());
    return continuation_index_;
  }

  intptr_t allocation_cid() const {
    ASSERT(is_allocation_sample());
    return metadata_;
  }

  void set_head_sample(bool head_sample) {
    state_ = HeadSampleBit::update(head_sample, state_);
  }

  bool head_sample() const { return HeadSampleBit::decode(state_); }

  void set_metadata(intptr_t metadata) { metadata_ = metadata; }

  void SetAllocationCid(intptr_t cid) {
    set_is_allocation_sample(true);
    set_metadata(cid);
  }

  static void Init();

  static intptr_t instance_size() { return instance_size_; }

  uword* GetPCArray() const;

  static const int kStackBufferSizeInWords = 2;
  uword* GetStackBuffer() { return &stack_buffer_[0]; }

 private:
  static intptr_t instance_size_;
  static intptr_t pcs_length_;
  enum StateBits {
    kHeadSampleBit = 0,
    kLeafFrameIsDartBit = 1,
    kIgnoreBit = 2,
    kExitFrameBit = 3,
    kMissingFrameInsertedBit = 4,
    kTruncatedTraceBit = 5,
    kClassAllocationSampleBit = 6,
    kContinuationSampleBit = 7,
    kThreadTaskBit = 8,  // 6 bits.
    kNextFreeBit = 14,
  };
  class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {};
  class LeafFrameIsDart : public BitField<uword, bool, kLeafFrameIsDartBit, 1> {
  };
  class IgnoreBit : public BitField<uword, bool, kIgnoreBit, 1> {};
  class ExitFrameBit : public BitField<uword, bool, kExitFrameBit, 1> {};
  class MissingFrameInsertedBit
      : public BitField<uword, bool, kMissingFrameInsertedBit, 1> {};
  class TruncatedTraceBit
      : public BitField<uword, bool, kTruncatedTraceBit, 1> {};
  class ClassAllocationSampleBit
      : public BitField<uword, bool, kClassAllocationSampleBit, 1> {};
  class ContinuationSampleBit
      : public BitField<uword, bool, kContinuationSampleBit, 1> {};
  class ThreadTaskBit
      : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 6> {};

  int64_t timestamp_;
  ThreadId tid_;
  Dart_Port port_;
  uword pc_marker_;
  uword stack_buffer_[kStackBufferSizeInWords];
  uword vm_tag_;
  uword user_tag_;
  uword metadata_;
  uword lr_;
  uword state_;
  uword native_allocation_address_;
  uintptr_t native_allocation_size_bytes_;
  intptr_t continuation_index_;
  Sample* next_free_;

  /* There are a variable number of words that follow, the words hold the
   * sampled pc values. Access via GetPCArray() */
  DISALLOW_COPY_AND_ASSIGN(Sample);
};
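
// Because of the trailing pc array, sizeof(Sample) underestimates the true
// footprint: buffers must step by |instance_size()| instead. A sketch of the
// assumed layout arithmetic (mirroring what Sample::Init() and
// SampleBuffer::At() are expected to compute):
//
//   instance_size = sizeof(Sample) + pcs_length * sizeof(uword);
//   Sample* SampleAt(uint8_t* buffer_base, intptr_t index) {
//     return reinterpret_cast<Sample*>(buffer_base +
//                                      index * Sample::instance_size());
//   }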

class NativeAllocationSampleFilter : public SampleFilter {
 public:
  NativeAllocationSampleFilter(int64_t time_origin_micros,
                               int64_t time_extent_micros)
      : SampleFilter(ILLEGAL_PORT,
                     SampleFilter::kNoTaskFilter,
                     time_origin_micros,
                     time_extent_micros) {}

  bool FilterSample(Sample* sample) {
    // If the sample is an allocation sample, we need to check that the
    // memory at the address hasn't been freed, and that the address hasn't
    // been freed and then reissued to a different allocation. Comparing the
    // sample against the one MallocHooks recorded for this address covers
    // both cases.
    void* alloc_address =
        reinterpret_cast<void*>(sample->native_allocation_address());
    ASSERT(alloc_address != NULL);
    Sample* recorded_sample = MallocHooks::GetSample(alloc_address);
    return (sample == recorded_sample);
  }
};
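
// Example usage, as a hypothetical sketch (passing -1 for both time
// parameters is assumed to disable time filtering, consistent with the
// asserts in |SampleFilter|): collect the still-live native allocation
// samples over all time:
//
//   NativeAllocationSampleFilter filter(-1, -1);
//   ProcessedSampleBuffer* live =
//       Profiler::allocation_sample_buffer()->BuildProcessedSampleBuffer(
//           &filter);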

class AbstractCode {
 public:
  explicit AbstractCode(ObjectPtr code) : code_(Object::Handle(code)) {
    ASSERT(code_.IsNull() || code_.IsCode() || code_.IsBytecode());
  }

  ObjectPtr raw() const { return code_.raw(); }
  const Object* handle() const { return &code_; }

  uword PayloadStart() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).PayloadStart();
    } else {
      return Bytecode::Cast(code_).PayloadStart();
    }
  }

  uword Size() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).Size();
    } else {
      return Bytecode::Cast(code_).Size();
    }
  }

  int64_t compile_timestamp() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).compile_timestamp();
    } else {
      return 0;
    }
  }

  const char* Name() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).Name();
    } else if (code_.IsBytecode()) {
      return Bytecode::Cast(code_).Name();
    } else {
      return "";
    }
  }

  const char* QualifiedName() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).QualifiedName(
          NameFormattingParams(Object::kUserVisibleName));
    } else if (code_.IsBytecode()) {
      return Bytecode::Cast(code_).QualifiedName();
    } else {
      return "";
    }
  }

  bool IsStubCode() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).IsStubCode();
    } else if (code_.IsBytecode()) {
      return (Bytecode::Cast(code_).function() == Function::null());
    } else {
      return false;
    }
  }

  bool IsAllocationStubCode() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).IsAllocationStubCode();
    } else {
      return false;
    }
  }

  bool IsTypeTestStubCode() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).IsTypeTestStubCode();
    } else {
      return false;
    }
  }

  ObjectPtr owner() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).owner();
    } else if (code_.IsBytecode()) {
      return Bytecode::Cast(code_).function();
    } else {
      return Object::null();
    }
  }

  bool IsNull() const { return code_.IsNull(); }
  bool IsCode() const { return code_.IsCode(); }
  bool IsBytecode() const { return code_.IsBytecode(); }

  bool is_optimized() const {
    if (code_.IsCode()) {
      return Code::Cast(code_).is_optimized();
    } else {
      return false;
    }
  }

 private:
  const Object& code_;
};

// A Code object descriptor.
class CodeDescriptor : public ZoneAllocated {
 public:
  explicit CodeDescriptor(const AbstractCode code);

  uword Start() const;

  uword Size() const;

  int64_t CompileTimestamp() const;

  const AbstractCode code() const { return code_; }

  const char* Name() const { return code_.Name(); }

  bool Contains(uword pc) const {
    uword end = Start() + Size();
    return (pc >= Start()) && (pc < end);
  }

  static int Compare(CodeDescriptor* const* a, CodeDescriptor* const* b) {
    ASSERT(a != NULL);
    ASSERT(b != NULL);

    uword a_start = (*a)->Start();
    uword b_start = (*b)->Start();

    if (a_start < b_start) {
      return -1;
    } else if (a_start > b_start) {
      return 1;
    } else {
      return 0;
    }
  }

 private:
  const AbstractCode code_;

  DISALLOW_COPY_AND_ASSIGN(CodeDescriptor);
};
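
// |Compare| has the qsort-style comparator shape that the VM's growable
// arrays are assumed to accept; a hypothetical sketch of sorting descriptors
// by entry address:
//
//   ZoneGrowableArray<CodeDescriptor*>* descriptors = ...;
//   descriptors->Sort(CodeDescriptor::Compare);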

// Fast lookup of Dart code objects.
class CodeLookupTable : public ZoneAllocated {
 public:
  explicit CodeLookupTable(Thread* thread);

  intptr_t length() const { return code_objects_.length(); }

  const CodeDescriptor* At(intptr_t index) const {
    return code_objects_.At(index);
  }

  const CodeDescriptor* FindCode(uword pc) const;

 private:
  void Build(Thread* thread);

  void Add(const Object& code);

  // Code objects sorted by entry.
  ZoneGrowableArray<CodeDescriptor*> code_objects_;

  friend class CodeLookupTableBuilder;

  DISALLOW_COPY_AND_ASSIGN(CodeLookupTable);
};
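
// Example, as a hypothetical sketch: symbolizing a raw pc from a sample.
// |FindCode| is assumed to return NULL when no code object covers |pc|:
//
//   CodeLookupTable table(Thread::Current());
//   const CodeDescriptor* descriptor = table.FindCode(pc);
//   const char* name =
//       (descriptor != NULL) ? descriptor->Name() : "<unknown>";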

// Ring buffer of Samples that is (usually) shared by many isolates.
class SampleBuffer {
 public:
  // Up to 1 minute @ 1000Hz, less if samples are deep.
  static const intptr_t kDefaultBufferCapacity = 60000;

  explicit SampleBuffer(intptr_t capacity = kDefaultBufferCapacity);
  virtual ~SampleBuffer();

  intptr_t capacity() const { return capacity_; }

  Sample* At(intptr_t idx) const;
  intptr_t ReserveSampleSlot();
  virtual Sample* ReserveSample();
  virtual Sample* ReserveSampleAndLink(Sample* previous);

  void VisitSamples(SampleVisitor* visitor) {
    ASSERT(visitor != NULL);
    const intptr_t length = capacity();
    for (intptr_t i = 0; i < length; i++) {
      Sample* sample = At(i);
      if (!sample->head_sample()) {
        // An inner sample in a chain of samples.
        continue;
      }
      if (sample->ignore_sample()) {
        // Bad sample.
        continue;
      }
      if (sample->port() != visitor->port()) {
        // Another isolate.
        continue;
      }
      if (sample->timestamp() == 0) {
        // Empty.
        continue;
      }
      if (sample->At(0) == 0) {
        // No frames.
        continue;
      }
      visitor->IncrementVisited();
      visitor->VisitSample(sample);
    }
  }

  ProcessedSampleBuffer* BuildProcessedSampleBuffer(SampleFilter* filter);

  intptr_t Size() { return memory_->size(); }

 protected:
  ProcessedSample* BuildProcessedSample(Sample* sample,
                                        const CodeLookupTable& clt);
  Sample* Next(Sample* sample);

  VirtualMemory* memory_;
  Sample* samples_;
  intptr_t capacity_;
  RelaxedAtomic<uintptr_t> cursor_;

 private:
  DISALLOW_COPY_AND_ASSIGN(SampleBuffer);
};
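
// A minimal sketch of the ring-buffer reservation (assumed behavior, not
// necessarily the actual implementation): the cursor advances atomically and
// wraps modulo the capacity, so the oldest samples are overwritten once the
// buffer fills:
//
//   intptr_t SampleBuffer::ReserveSampleSlot() {
//     return static_cast<intptr_t>(cursor_.fetch_add(1u) % capacity_);
//   }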

class AllocationSampleBuffer : public SampleBuffer {
 public:
  explicit AllocationSampleBuffer(intptr_t capacity = kDefaultBufferCapacity);
  virtual ~AllocationSampleBuffer();

  intptr_t ReserveSampleSlotLocked();
  virtual Sample* ReserveSample();
  virtual Sample* ReserveSampleAndLink(Sample* previous);
  void FreeAllocationSample(Sample* sample);

 private:
  Mutex mutex_;
  Sample* free_sample_list_;

  DISALLOW_COPY_AND_ASSIGN(AllocationSampleBuffer);
};

intptr_t Profiler::Size() {
  intptr_t size = 0;
  if (sample_buffer_ != nullptr) {
    size += sample_buffer_->Size();
  }
  if (allocation_sample_buffer_ != nullptr) {
    size += allocation_sample_buffer_->Size();
  }
  return size;
}

// A |ProcessedSample| is a combination of one or more |Sample|s that have
// been merged into a logical sample. The raw data may have been processed to
// improve the quality of the stack trace.
class ProcessedSample : public ZoneAllocated {
 public:
  ProcessedSample();

  // Add |pc| to stack trace.
  void Add(uword pc) { pcs_.Add(pc); }

  // Insert |pc| at |index|.
  void InsertAt(intptr_t index, uword pc) { pcs_.InsertAt(index, pc); }

  // Number of pcs in stack trace.
  intptr_t length() const { return pcs_.length(); }

  // Get |pc| at |index|.
  uword At(intptr_t index) const {
    ASSERT(index >= 0);
    ASSERT(index < length());
    return pcs_[index];
  }

  // Timestamp sample was taken at.
  int64_t timestamp() const { return timestamp_; }
  void set_timestamp(int64_t timestamp) { timestamp_ = timestamp; }

  ThreadId tid() const { return tid_; }
  void set_tid(ThreadId tid) { tid_ = tid; }

  // The VM tag.
  uword vm_tag() const { return vm_tag_; }
  void set_vm_tag(uword tag) { vm_tag_ = tag; }

  // The user tag.
  uword user_tag() const { return user_tag_; }
  void set_user_tag(uword tag) { user_tag_ = tag; }

  // The class id if this is an allocation profile sample. -1 otherwise.
  intptr_t allocation_cid() const { return allocation_cid_; }
  void set_allocation_cid(intptr_t cid) { allocation_cid_ = cid; }

  bool IsAllocationSample() const { return allocation_cid_ > 0; }

  bool is_native_allocation_sample() const {
    return native_allocation_size_bytes_ != 0;
  }

  uintptr_t native_allocation_size_bytes() const {
    return native_allocation_size_bytes_;
  }
  void set_native_allocation_size_bytes(uintptr_t allocation_size) {
    native_allocation_size_bytes_ = allocation_size;
  }

  // Was the stack trace truncated?
  bool truncated() const { return truncated_; }
  void set_truncated(bool truncated) { truncated_ = truncated; }

  // Was the first frame in the stack trace executing?
  bool first_frame_executing() const { return first_frame_executing_; }
  void set_first_frame_executing(bool first_frame_executing) {
    first_frame_executing_ = first_frame_executing;
  }

  ProfileTrieNode* timeline_code_trie() const { return timeline_code_trie_; }
  void set_timeline_code_trie(ProfileTrieNode* trie) {
    ASSERT(timeline_code_trie_ == NULL);
    timeline_code_trie_ = trie;
  }

  ProfileTrieNode* timeline_function_trie() const {
    return timeline_function_trie_;
  }
  void set_timeline_function_trie(ProfileTrieNode* trie) {
    ASSERT(timeline_function_trie_ == NULL);
    timeline_function_trie_ = trie;
  }

 private:
  void FixupCaller(const CodeLookupTable& clt,
                   uword pc_marker,
                   uword* stack_buffer);

  void CheckForMissingDartFrame(const CodeLookupTable& clt,
                                const CodeDescriptor* code,
                                uword pc_marker,
                                uword* stack_buffer);

  ZoneGrowableArray<uword> pcs_;
  int64_t timestamp_;
  ThreadId tid_;
  uword vm_tag_;
  uword user_tag_;
  intptr_t allocation_cid_;
  bool truncated_;
  bool first_frame_executing_;
  uword native_allocation_address_;
  uintptr_t native_allocation_size_bytes_;
  ProfileTrieNode* timeline_code_trie_;
  ProfileTrieNode* timeline_function_trie_;

  friend class SampleBuffer;
  DISALLOW_COPY_AND_ASSIGN(ProcessedSample);
};

// A collection of |ProcessedSample|s.
class ProcessedSampleBuffer : public ZoneAllocated {
 public:
  ProcessedSampleBuffer();

  void Add(ProcessedSample* sample) { samples_.Add(sample); }

  intptr_t length() const { return samples_.length(); }

  ProcessedSample* At(intptr_t index) { return samples_.At(index); }

  const CodeLookupTable& code_lookup_table() const {
    return *code_lookup_table_;
  }

 private:
  ZoneGrowableArray<ProcessedSample*> samples_;
  CodeLookupTable* code_lookup_table_;

  DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer);
};
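
// The end-to-end processing pipeline, as a hypothetical sketch: a filter
// selects raw samples, |BuildProcessedSampleBuffer| merges chained samples
// and fixes up frames, and the result is iterated leaf-first (|Thread::
// kMutatorTask| and the -1 "no time filter" values are assumptions):
//
//   SampleFilter filter(port, Thread::kMutatorTask, -1, -1);
//   ProcessedSampleBuffer* processed =
//       Profiler::sample_buffer()->BuildProcessedSampleBuffer(&filter);
//   for (intptr_t i = 0; i < processed->length(); i++) {
//     ProcessedSample* sample = processed->At(i);
//     uword leaf_pc = sample->At(0);  // index 0 is the topmost frame
//     ...
//   }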

}  // namespace dart

#endif  // RUNTIME_VM_PROFILER_H_