1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "platform/assert.h"
6
7#include "vm/dart_api_impl.h"
8#include "vm/dart_api_state.h"
9#include "vm/globals.h"
10#include "vm/profiler.h"
11#include "vm/profiler_service.h"
12#include "vm/source_report.h"
13#include "vm/symbols.h"
14#include "vm/unit_test.h"
15
16namespace dart {
17
18#ifndef PRODUCT
19
20DECLARE_FLAG(bool, profile_vm);
21DECLARE_FLAG(bool, profile_vm_allocation);
22DECLARE_FLAG(int, max_profile_depth);
23DECLARE_FLAG(int, optimization_counter_threshold);
24
25// Some tests are written assuming native stack trace profiling is disabled.
26class DisableNativeProfileScope : public ValueObject {
27 public:
28 DisableNativeProfileScope()
29 : FLAG_profile_vm_(FLAG_profile_vm),
30 FLAG_profile_vm_allocation_(FLAG_profile_vm_allocation) {
31 FLAG_profile_vm = false;
32 FLAG_profile_vm_allocation = false;
33 }
34
35 ~DisableNativeProfileScope() {
36 FLAG_profile_vm = FLAG_profile_vm_;
37 FLAG_profile_vm_allocation = FLAG_profile_vm_allocation_;
38 }
39
40 private:
41 const bool FLAG_profile_vm_;
42 const bool FLAG_profile_vm_allocation_;
43};
44
45class DisableBackgroundCompilationScope : public ValueObject {
46 public:
47 DisableBackgroundCompilationScope()
48 : FLAG_background_compilation_(FLAG_background_compilation) {
49 FLAG_background_compilation = false;
50 }
51
52 ~DisableBackgroundCompilationScope() {
53 FLAG_background_compilation = FLAG_background_compilation_;
54 }
55
56 private:
57 const bool FLAG_background_compilation_;
58};
59
60// Temporarily adjust the maximum profile depth.
61class MaxProfileDepthScope : public ValueObject {
62 public:
63 explicit MaxProfileDepthScope(intptr_t new_max_depth)
64 : FLAG_max_profile_depth_(FLAG_max_profile_depth) {
65 Profiler::SetSampleDepth(new_max_depth);
66 }
67
68 ~MaxProfileDepthScope() { Profiler::SetSampleDepth(FLAG_max_profile_depth_); }
69
70 private:
71 const intptr_t FLAG_max_profile_depth_;
72};
73
74class ProfileSampleBufferTestHelper {
75 public:
76 static intptr_t IterateCount(const Dart_Port port,
77 const SampleBuffer& sample_buffer) {
78 intptr_t c = 0;
79 for (intptr_t i = 0; i < sample_buffer.capacity(); i++) {
80 Sample* sample = sample_buffer.At(i);
81 if (sample->port() != port) {
82 continue;
83 }
84 c++;
85 }
86 return c;
87 }
88
89 static intptr_t IterateSumPC(const Dart_Port port,
90 const SampleBuffer& sample_buffer) {
91 intptr_t c = 0;
92 for (intptr_t i = 0; i < sample_buffer.capacity(); i++) {
93 Sample* sample = sample_buffer.At(i);
94 if (sample->port() != port) {
95 continue;
96 }
97 c += sample->At(0);
98 }
99 return c;
100 }
101};
102
103TEST_CASE(Profiler_SampleBufferWrapTest) {
104 SampleBuffer* sample_buffer = new SampleBuffer(3);
105 Dart_Port i = 123;
106 EXPECT_EQ(0, ProfileSampleBufferTestHelper::IterateSumPC(i, *sample_buffer));
107 Sample* s;
108 s = sample_buffer->ReserveSample();
109 s->Init(i, 0, 0);
110 s->SetAt(0, 2);
111 EXPECT_EQ(2, ProfileSampleBufferTestHelper::IterateSumPC(i, *sample_buffer));
112 s = sample_buffer->ReserveSample();
113 s->Init(i, 0, 0);
114 s->SetAt(0, 4);
115 EXPECT_EQ(6, ProfileSampleBufferTestHelper::IterateSumPC(i, *sample_buffer));
116 s = sample_buffer->ReserveSample();
117 s->Init(i, 0, 0);
118 s->SetAt(0, 6);
119 EXPECT_EQ(12, ProfileSampleBufferTestHelper::IterateSumPC(i, *sample_buffer));
120 s = sample_buffer->ReserveSample();
121 s->Init(i, 0, 0);
122 s->SetAt(0, 8);
123 EXPECT_EQ(18, ProfileSampleBufferTestHelper::IterateSumPC(i, *sample_buffer));
124 delete sample_buffer;
125}
126
127TEST_CASE(Profiler_SampleBufferIterateTest) {
128 SampleBuffer* sample_buffer = new SampleBuffer(3);
129 Dart_Port i = 123;
130 EXPECT_EQ(0, ProfileSampleBufferTestHelper::IterateCount(i, *sample_buffer));
131 Sample* s;
132 s = sample_buffer->ReserveSample();
133 s->Init(i, 0, 0);
134 EXPECT_EQ(1, ProfileSampleBufferTestHelper::IterateCount(i, *sample_buffer));
135 s = sample_buffer->ReserveSample();
136 s->Init(i, 0, 0);
137 EXPECT_EQ(2, ProfileSampleBufferTestHelper::IterateCount(i, *sample_buffer));
138 s = sample_buffer->ReserveSample();
139 s->Init(i, 0, 0);
140 EXPECT_EQ(3, ProfileSampleBufferTestHelper::IterateCount(i, *sample_buffer));
141 s = sample_buffer->ReserveSample();
142 s->Init(i, 0, 0);
143 EXPECT_EQ(3, ProfileSampleBufferTestHelper::IterateCount(i, *sample_buffer));
144 delete sample_buffer;
145}
146
147TEST_CASE(Profiler_AllocationSampleTest) {
148 Isolate* isolate = Isolate::Current();
149 SampleBuffer* sample_buffer = new SampleBuffer(3);
150 Sample* sample = sample_buffer->ReserveSample();
151 sample->Init(isolate->main_port(), 0, 0);
152 sample->set_metadata(99);
153 sample->set_is_allocation_sample(true);
154 EXPECT_EQ(99, sample->allocation_cid());
155 delete sample_buffer;
156}
157
158static LibraryPtr LoadTestScript(const char* script) {
159 Dart_Handle api_lib;
160 {
161 TransitionVMToNative transition(Thread::Current());
162 api_lib = TestCase::LoadTestScript(script, NULL);
163 EXPECT_VALID(api_lib);
164 }
165 Library& lib = Library::Handle();
166 lib ^= Api::UnwrapHandle(api_lib);
167 return lib.raw();
168}
169
170static ClassPtr GetClass(const Library& lib, const char* name) {
171 Thread* thread = Thread::Current();
172 const Class& cls = Class::Handle(
173 lib.LookupClassAllowPrivate(String::Handle(Symbols::New(thread, name))));
174 EXPECT(!cls.IsNull()); // No ambiguity error expected.
175 return cls.raw();
176}
177
178static FunctionPtr GetFunction(const Library& lib, const char* name) {
179 Thread* thread = Thread::Current();
180 const Function& func = Function::Handle(lib.LookupFunctionAllowPrivate(
181 String::Handle(Symbols::New(thread, name))));
182 EXPECT(!func.IsNull()); // No ambiguity error expected.
183 return func.raw();
184}
185
186static void Invoke(const Library& lib,
187 const char* name,
188 intptr_t argc = 0,
189 Dart_Handle* argv = NULL) {
190 Thread* thread = Thread::Current();
191 Dart_Handle api_lib = Api::NewHandle(thread, lib.raw());
192 TransitionVMToNative transition(thread);
193 Dart_Handle result = Dart_Invoke(api_lib, NewString(name), argc, argv);
194 EXPECT_VALID(result);
195}
196
197class AllocationFilter : public SampleFilter {
198 public:
199 AllocationFilter(Dart_Port port,
200 intptr_t cid,
201 int64_t time_origin_micros = -1,
202 int64_t time_extent_micros = -1)
203 : SampleFilter(port,
204 Thread::kMutatorTask,
205 time_origin_micros,
206 time_extent_micros),
207 cid_(cid),
208 enable_vm_ticks_(false) {}
209
210 bool FilterSample(Sample* sample) {
211 if (!enable_vm_ticks_ && (sample->vm_tag() == VMTag::kVMTagId)) {
212 // We don't want to see embedder ticks in the test.
213 return false;
214 }
215 return sample->is_allocation_sample() && (sample->allocation_cid() == cid_);
216 }
217
218 void set_enable_vm_ticks(bool enable) { enable_vm_ticks_ = enable; }
219
220 private:
221 intptr_t cid_;
222 bool enable_vm_ticks_;
223};
224
225static void EnableProfiler() {
226 if (!FLAG_profiler) {
227 FLAG_profiler = true;
228 Profiler::Init();
229 }
230}
231
// Walks the frames of the first sample in a Profile, from the leaf frame
// (index 0) towards the root, so tests can assert on the exact shape of a
// collected stack trace. Depending on |as_func|, frames are reported either
// as ProfileCode objects or as ProfileFunctions, with inlined functions
// expanded into individual frames.
class ProfileStackWalker {
 public:
  // Positions the walker at frame 0 of profile->SampleAt(0).
  explicit ProfileStackWalker(Profile* profile, bool as_func = false)
      : profile_(profile),
        as_functions_(as_func),
        index_(0),
        sample_(profile->SampleAt(0)) {
    ClearInliningData();
  }

  // Advances one frame towards the root of the stack. Returns false when
  // the sample has no more frames.
  bool Down() {
    if (as_functions_) {
      return UpdateFunctionIndex();
    } else {
      ++index_;
      return (index_ < sample_->length());
    }
  }

  // Name of the current frame: the ProfileFunction name in function mode,
  // otherwise the ProfileCode name.
  const char* CurrentName() {
    if (as_functions_) {
      ProfileFunction* func = GetFunction();
      EXPECT(func != NULL);
      return func->Name();
    } else {
      ProfileCode* code = GetCode();
      EXPECT(code != NULL);
      return code->name();
    }
  }

  // Source snippet for the current function's single token position.
  // Returns NULL in code mode, or when the function has no script, does not
  // have exactly one source position, or the position is not in a script.
  const char* CurrentToken() {
    if (!as_functions_) {
      return NULL;
    }
    ProfileFunction* func = GetFunction();
    const Function& function = *(func->function());
    if (function.IsNull()) {
      // No function.
      return NULL;
    }
    Zone* zone = Thread::Current()->zone();
    const Script& script = Script::Handle(zone, function.script());
    if (script.IsNull()) {
      // No script.
      return NULL;
    }
    ProfileFunctionSourcePosition pfsp(TokenPosition::kNoSource);
    if (!func->GetSinglePosition(&pfsp)) {
      // Not exactly one source position.
      return NULL;
    }
    TokenPosition token_pos = pfsp.token_pos();
    if (!token_pos.IsSourcePosition()) {
      // Not a location in a script.
      return NULL;
    }
    if (token_pos.IsSynthetic()) {
      // Map synthetic positions back to the real source position.
      token_pos = token_pos.FromSynthetic();
    }

    // Resolve the token position to line/column and cut the token's text
    // out of the script source.
    intptr_t line = 0, column = 0, token_len = 0;
    script.GetTokenLocation(token_pos, &line, &column, &token_len);
    const auto& str = String::Handle(
        zone, script.GetSnippet(line, column, line, column + token_len));
    return str.IsNull() ? NULL : str.ToCString();
  }

  // Inclusive tick count attributed to the current frame.
  intptr_t CurrentInclusiveTicks() {
    if (as_functions_) {
      ProfileFunction* func = GetFunction();
      EXPECT(func != NULL);
      return func->inclusive_ticks();
    } else {
      ProfileCode* code = GetCode();
      ASSERT(code != NULL);
      return code->inclusive_ticks();
    }
  }

  // Exclusive tick count attributed to the current frame.
  intptr_t CurrentExclusiveTicks() {
    if (as_functions_) {
      ProfileFunction* func = GetFunction();
      EXPECT(func != NULL);
      return func->exclusive_ticks();
    } else {
      ProfileCode* code = GetCode();
      ASSERT(code != NULL);
      return code->exclusive_ticks();
    }
  }

  // Human-readable name of the sample's VM tag.
  const char* VMTagName() { return VMTag::TagName(sample_->vm_tag()); }

 private:
  // Resolves the current frame's pc (at the sample's timestamp) to a
  // ProfileCode.
  ProfileCode* GetCode() {
    uword pc = sample_->At(index_);
    int64_t timestamp = sample_->timestamp();
    return profile_->GetCodeFromPC(pc, timestamp);
  }

  // Sentinel meaning "not currently iterating inlined functions".
  static const intptr_t kInvalidInlinedIndex = -1;

  // Function-mode advance: first steps through any remaining inlined
  // functions (from callee towards caller, i.e. decreasing index), then
  // moves on to the next physical frame.
  bool UpdateFunctionIndex() {
    if (inlined_index_ != kInvalidInlinedIndex) {
      if (inlined_index_ - 1 >= 0) {
        --inlined_index_;
        return true;
      }
      ClearInliningData();
    }
    ++index_;
    return (index_ < sample_->length());
  }

  // Resets the inlining cursor and cached arrays.
  void ClearInliningData() {
    inlined_index_ = kInvalidInlinedIndex;
    inlined_functions_ = NULL;
    inlined_token_positions_ = NULL;
  }

  // Returns the ProfileFunction for the current position, expanding the
  // frame's inlined functions (if any) on first access.
  ProfileFunction* GetFunction() {
    // Check to see if we're currently processing inlined functions. If so,
    // return the next inlined function.
    ProfileFunction* function = GetInlinedFunction();
    if (function != NULL) {
      return function;
    }

    const uword pc = sample_->At(index_);
    ProfileCode* profile_code =
        profile_->GetCodeFromPC(pc, sample_->timestamp());
    ASSERT(profile_code != NULL);
    function = profile_code->function();
    ASSERT(function != NULL);

    // Populate the inlined-function arrays for this pc, if applicable.
    TokenPosition token_position = TokenPosition::kNoSource;
    Code& code = Code::ZoneHandle();
    if (profile_code->code().IsCode()) {
      code ^= profile_code->code().raw();
      inlined_functions_cache_.Get(pc, code, sample_, index_,
                                   &inlined_functions_,
                                   &inlined_token_positions_, &token_position);
    } else if (profile_code->code().IsBytecode()) {
      // No inlining in bytecode.
      const Bytecode& bc = Bytecode::CheckedHandle(Thread::Current()->zone(),
                                                   profile_code->code().raw());
      token_position = bc.GetTokenIndexOfPC(pc);
    }

    if (code.IsNull() || (inlined_functions_ == NULL) ||
        (inlined_functions_->length() <= 1)) {
      ClearInliningData();
      // No inlined functions.
      return function;
    }

    // Start iterating the inlined functions at the innermost one (the last
    // entry in the array).
    ASSERT(code.is_optimized());
    inlined_index_ = inlined_functions_->length() - 1;
    function = GetInlinedFunction();
    ASSERT(function != NULL);
    return function;
  }

  // Returns the inlined function at the current cursor, or NULL when not
  // iterating inlined functions.
  ProfileFunction* GetInlinedFunction() {
    if ((inlined_index_ != kInvalidInlinedIndex) &&
        (inlined_index_ < inlined_functions_->length())) {
      return profile_->FindFunction(*(*inlined_functions_)[inlined_index_]);
    }
    return NULL;
  }

  Profile* profile_;         // Not owned.
  bool as_functions_;        // Report functions instead of code objects.
  intptr_t index_;           // Current physical frame index in |sample_|.
  ProcessedSample* sample_;  // The (first) sample being walked.
  ProfileCodeInlinedFunctionsCache inlined_functions_cache_;
  GrowableArray<const Function*>* inlined_functions_;
  GrowableArray<TokenPosition>* inlined_token_positions_;
  intptr_t inlined_index_;   // Cursor into |inlined_functions_|.
};
413
// Verifies that a single traced allocation of A produces exactly one
// allocation sample with the expected stack (B.boo <- main), and that a
// time-range filter outside the allocation window matches nothing.
ISOLATE_UNIT_TEST_CASE(Profiler_TrivialRecordAllocation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  return B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  // Record the time window around the traced allocation so the filter below
  // can be restricted to it.
  const int64_t before_allocations_micros = Dart_TimelineGetMicros();
  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());
  class_a.SetTraceAllocation(true);

  Invoke(root_library, "main");

  const int64_t after_allocations_micros = Dart_TimelineGetMicros();
  const int64_t allocation_extent_micros =
      after_allocations_micros - before_allocations_micros;
  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    // Filter for the class in the time range.
    AllocationFilter filter(isolate->main_port(), class_a.id(),
                            before_allocations_micros,
                            allocation_extent_micros);
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have 1 allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    // Move down from the root.
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      // Interpreted frames appear as [Bytecode] entries.
      EXPECT_STREQ("[Bytecode] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] main", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] main", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  // Query with a time filter where no allocations occurred.
  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id(),
                            Dart_TimelineGetMicros(), 16000);
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples because none occurred within
    // the specified time range.
    EXPECT_EQ(0, profile.sample_count());
  }
}
495
496#if defined(DART_USE_TCMALLOC) && defined(HOST_OS_LINUX) && defined(DEBUG) && \
497 defined(HOST_ARCH_X64)
498
499DART_NOINLINE static void NativeAllocationSampleHelper(char** result) {
500 ASSERT(result != NULL);
501 *result = static_cast<char*>(malloc(sizeof(char) * 1024));
502}
503
504ISOLATE_UNIT_TEST_CASE(Profiler_NativeAllocation) {
505 bool enable_malloc_hooks_saved = FLAG_profiler_native_memory;
506 FLAG_profiler_native_memory = true;
507
508 EnableProfiler();
509
510 MallocHooks::Init();
511 MallocHooks::ResetStats();
512 bool stack_trace_collection_enabled =
513 MallocHooks::stack_trace_collection_enabled();
514 MallocHooks::set_stack_trace_collection_enabled(true);
515
516 char* result = NULL;
517 const int64_t before_allocations_micros = Dart_TimelineGetMicros();
518 NativeAllocationSampleHelper(&result);
519
520 // Disable stack allocation stack trace collection to avoid muddying up
521 // results.
522 MallocHooks::set_stack_trace_collection_enabled(false);
523 const int64_t after_allocations_micros = Dart_TimelineGetMicros();
524 const int64_t allocation_extent_micros =
525 after_allocations_micros - before_allocations_micros;
526
527 // Walk the trie and do a sanity check of the allocation values associated
528 // with each node.
529 {
530 Thread* thread = Thread::Current();
531 Isolate* isolate = thread->isolate();
532 StackZone zone(thread);
533 HANDLESCOPE(thread);
534 Profile profile(isolate);
535
536 // Filter for the class in the time range.
537 NativeAllocationSampleFilter filter(before_allocations_micros,
538 allocation_extent_micros);
539 profile.Build(thread, &filter, Profiler::allocation_sample_buffer());
540 // We should have 1 allocation sample.
541 EXPECT_EQ(1, profile.sample_count());
542 ProfileStackWalker walker(&profile);
543
544 // Move down from the root.
545 EXPECT_SUBSTRING("[Native]", walker.CurrentName());
546 EXPECT_EQ(1024ul, profile.SampleAt(0)->native_allocation_size_bytes());
547 EXPECT(walker.Down());
548 EXPECT_STREQ("dart::Dart_TestProfiler_NativeAllocation()",
549 walker.CurrentName());
550 EXPECT(walker.Down());
551 EXPECT_STREQ("dart::TestCase::Run()", walker.CurrentName());
552 EXPECT(walker.Down());
553 EXPECT_STREQ("dart::TestCaseBase::RunTest()", walker.CurrentName());
554 EXPECT(walker.Down());
555 EXPECT_STREQ("dart::TestCaseBase::RunAll()", walker.CurrentName());
556 EXPECT(walker.Down());
557 EXPECT_SUBSTRING("[Native]", walker.CurrentName());
558 EXPECT(walker.Down());
559 EXPECT_STREQ("main", walker.CurrentName());
560 EXPECT(!walker.Down());
561 }
562
563 MallocHooks::set_stack_trace_collection_enabled(true);
564 free(result);
565 MallocHooks::set_stack_trace_collection_enabled(false);
566
567 // Check to see that the native allocation sample associated with the memory
568 // freed above is marked as free and is no longer reported.
569 {
570 Thread* thread = Thread::Current();
571 Isolate* isolate = thread->isolate();
572 StackZone zone(thread);
573 HANDLESCOPE(thread);
574 Profile profile(isolate);
575
576 // Filter for the class in the time range.
577 NativeAllocationSampleFilter filter(before_allocations_micros,
578 allocation_extent_micros);
579 profile.Build(thread, &filter, Profiler::sample_buffer());
580 // We should have 0 allocation samples since we freed the memory.
581 EXPECT_EQ(0, profile.sample_count());
582 }
583
584 // Query with a time filter where no allocations occurred.
585 {
586 Thread* thread = Thread::Current();
587 Isolate* isolate = thread->isolate();
588 StackZone zone(thread);
589 HANDLESCOPE(thread);
590 Profile profile(isolate);
591 NativeAllocationSampleFilter filter(Dart_TimelineGetMicros(), 16000);
592 profile.Build(thread, &filter, Profiler::sample_buffer());
593 // We should have no allocation samples because none occured within
594 // the specified time range.
595 EXPECT_EQ(0, profile.sample_count());
596 }
597
598 MallocHooks::set_stack_trace_collection_enabled(
599 stack_trace_collection_enabled);
600 FLAG_profiler_native_memory = enable_malloc_hooks_saved;
601}
602#endif // defined(DART_USE_TCMALLOC) && defined(HOST_OS_LINUX) && \
603 // defined(DEBUG) && defined(HOST_ARCH_X64)
604
// Verifies that allocation samples are only recorded while allocation
// tracing is enabled for the class: none before enabling, one while
// enabled, and still one after disabling and allocating again.
ISOLATE_UNIT_TEST_CASE(Profiler_ToggleRecordAllocation) {
  EnableProfiler();

  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  return B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  // Allocate with tracing still off.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    // Walk the recorded allocation stack: allocation site, then B.boo,
    // then main.
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("[Bytecode] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] main", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] main", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  // Turn off allocation tracing for A.
  class_a.SetTraceAllocation(false);

  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }
}
697
// Verifies the inclusive/exclusive tick counts attributed to each code
// object after three traced allocations: the allocation site gets 3
// exclusive ticks, callers get 3 inclusive / 0 exclusive ticks.
ISOLATE_UNIT_TEST_CASE(Profiler_CodeTicks) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  return B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  // Allocate once with tracing off: no samples expected.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate three times.
  Invoke(root_library, "main");
  Invoke(root_library, "main");
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have three allocation samples.
    EXPECT_EQ(3, profile.sample_count());
    ProfileStackWalker walker(&profile);

    // Move down from the root.
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("[Bytecode] B.boo", walker.CurrentName());
      EXPECT_EQ(3, walker.CurrentInclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] main", walker.CurrentName());
      EXPECT_EQ(3, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(!walker.Down());
    } else {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      // All three ticks land exclusively on the allocation stub.
      EXPECT_EQ(3, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] B.boo", walker.CurrentName());
      EXPECT_EQ(3, walker.CurrentInclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] main", walker.CurrentName());
      EXPECT_EQ(3, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(!walker.Down());
    }
  }
}
// Same scenario as Profiler_CodeTicks, but walks the profile at function
// granularity (ProfileStackWalker with as_func = true) and checks the
// per-function tick counts.
ISOLATE_UNIT_TEST_CASE(Profiler_FunctionTicks) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  return B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  // Allocate once with tracing off: no samples expected.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate three times.
  Invoke(root_library, "main");
  Invoke(root_library, "main");
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have three allocation samples.
    EXPECT_EQ(3, profile.sample_count());
    // Walk at function granularity.
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());

    if (!FLAG_enable_interpreter) {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      // All three ticks land exclusively on the allocation stub.
      EXPECT_EQ(3, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
    }
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(3, walker.CurrentInclusiveTicks());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(3, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT(!walker.Down());
  }
}
860
// Verifies allocation tracing for a class allocated by intrinsified code
// (Double): one sample while tracing is on, none before, and no new sample
// after tracing is turned off again.
ISOLATE_UNIT_TEST_CASE(Profiler_IntrinsicAllocation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript = "double foo(double a, double b) => a + b;";
  const Library& root_library = Library::Handle(LoadTestScript(kScript));
  // |thread| is provided by the ISOLATE_UNIT_TEST_CASE macro.
  Isolate* isolate = thread->isolate();

  const Class& double_class =
      Class::Handle(isolate->object_store()->double_class());
  EXPECT(!double_class.IsNull());

  Dart_Handle args[2];
  {
    // Creating API handles requires native mode.
    TransitionVMToNative transition(thread);
    args[0] = Dart_NewDouble(1.0);
    args[1] = Dart_NewDouble(2.0);
  }

  // Call with tracing off: no samples expected.
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), double_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  double_class.SetTraceAllocation(true);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), double_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
      EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
      // Compiled code allocates via the Double_add native/intrinsic.
      EXPECT_STREQ("Double_add", walker.VMTagName());
      EXPECT_STREQ("[Unoptimized] double._add", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] double.+", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  double_class.SetTraceAllocation(false);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), double_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }
}
933
// Verifies allocation tracing for _List (fixed-length array) allocations,
// and that allocating an empty growable list does not record an array
// sample (empty growable lists share a backing store).
ISOLATE_UNIT_TEST_CASE(Profiler_ArrayAllocation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "List foo() => List.filled(4, null);\n"
      "List bar() => List.filled(0, null, growable: true);\n";
  const Library& root_library = Library::Handle(LoadTestScript(kScript));
  // |thread| is provided by the ISOLATE_UNIT_TEST_CASE macro.
  Isolate* isolate = thread->isolate();

  const Class& array_class =
      Class::Handle(isolate->object_store()->array_class());
  EXPECT(!array_class.IsNull());

  // Call with tracing off: no samples expected.
  Invoke(root_library, "foo");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), array_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  array_class.SetTraceAllocation(true);
  Invoke(root_library, "foo");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), array_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    EXPECT_STREQ("DRT_AllocateArray", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("[Bytecode] new _List", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
      EXPECT_STREQ("[Stub] AllocateArray", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] new _List", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  array_class.SetTraceAllocation(false);
  Invoke(root_library, "foo");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), array_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }

  // Clear the samples.
  ProfilerService::ClearSamples();

  // Compile bar (many List objects allocated).
  Invoke(root_library, "bar");

  // Enable again.
  array_class.SetTraceAllocation(true);

  // Run bar.
  Invoke(root_library, "bar");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), array_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples, since empty
    // growable lists use a shared backing.
    EXPECT_EQ(0, profile.sample_count());
  }
}
1025
1026ISOLATE_UNIT_TEST_CASE(Profiler_ContextAllocation) {
1027 EnableProfiler();
1028 DisableNativeProfileScope dnps;
1029 DisableBackgroundCompilationScope dbcs;
1030 const char* kScript =
1031 "var msg1 = 'a';\n"
1032 "foo() {\n"
1033 " var msg = msg1 + msg1;\n"
1034 " return (x) { return '$msg + $msg'; }(msg);\n"
1035 "}\n";
1036 const Library& root_library = Library::Handle(LoadTestScript(kScript));
1037 Isolate* isolate = thread->isolate();
1038
1039 const Class& context_class = Class::Handle(Object::context_class());
1040 EXPECT(!context_class.IsNull());
1041
1042 Invoke(root_library, "foo");
1043
1044 {
1045 StackZone zone(thread);
1046 HANDLESCOPE(thread);
1047 Profile profile(isolate);
1048 AllocationFilter filter(isolate->main_port(), context_class.id());
1049 profile.Build(thread, &filter, Profiler::sample_buffer());
1050 // We should have no allocation samples.
1051 EXPECT_EQ(0, profile.sample_count());
1052 }
1053
1054 context_class.SetTraceAllocation(true);
1055 Invoke(root_library, "foo");
1056
1057 {
1058 StackZone zone(thread);
1059 HANDLESCOPE(thread);
1060 Profile profile(isolate);
1061 AllocationFilter filter(isolate->main_port(), context_class.id());
1062 profile.Build(thread, &filter, Profiler::sample_buffer());
1063 // We should have one allocation sample.
1064 EXPECT_EQ(1, profile.sample_count());
1065 ProfileStackWalker walker(&profile);
1066
1067 EXPECT_STREQ("DRT_AllocateContext", walker.VMTagName());
1068 if (FLAG_enable_interpreter) {
1069 EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
1070 EXPECT(!walker.Down());
1071 } else {
1072 EXPECT_STREQ("[Stub] AllocateContext", walker.CurrentName());
1073 EXPECT(walker.Down());
1074 EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
1075 EXPECT(!walker.Down());
1076 }
1077 }
1078
1079 context_class.SetTraceAllocation(false);
1080 Invoke(root_library, "foo");
1081
1082 {
1083 StackZone zone(thread);
1084 HANDLESCOPE(thread);
1085 Profile profile(isolate);
1086 AllocationFilter filter(isolate->main_port(), context_class.id());
1087 profile.Build(thread, &filter, Profiler::sample_buffer());
1088 // We should still only have one allocation sample.
1089 EXPECT_EQ(1, profile.sample_count());
1090 }
1091}
1092
// Verifies that _Closure allocations are sampled only while allocation
// tracing is enabled for the closure class, and that the sampled stack
// attributes the allocation to the function being compiled/invoked.
ISOLATE_UNIT_TEST_CASE(Profiler_ClosureAllocation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "var msg1 = 'a';\n"
      "\n"
      "foo() {\n"
      "  var msg = msg1 + msg1;\n"
      "  var msg2 = msg + msg;\n"
      "  return (x, y, z, w) { return '$x + $y + $z'; }(msg, msg2, msg, msg);\n"
      "}\n"
      "bar() {\n"
      "  var msg = msg1 + msg1;\n"
      "  var msg2 = msg + msg;\n"
      "  return (x, y) { return '$x + $y'; }(msg, msg2);\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));
  Isolate* isolate = thread->isolate();

  const Class& closure_class =
      Class::Handle(Isolate::Current()->object_store()->closure_class());
  EXPECT(!closure_class.IsNull());
  // Tracing is enabled up-front, unlike the sibling tests, because the
  // allocation of interest happens during compilation of "foo".
  closure_class.SetTraceAllocation(true);

  // Invoke "foo" which during compilation, triggers a closure allocation.
  Invoke(root_library, "foo");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), closure_class.id());
    // The allocation is tagged with a VM tag (DRT_AllocateObject), so VM
    // ticks must be enabled for the sample to pass the filter.
    filter.set_enable_vm_ticks(true);
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    EXPECT_SUBSTRING("DRT_AllocateObject", walker.VMTagName());
    if (!FLAG_enable_interpreter) {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate _Closure", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT(walker.Down());
    }
    EXPECT_SUBSTRING("foo", walker.CurrentName());
    EXPECT(!walker.Down());
  }

  // Disable allocation tracing for Closure.
  closure_class.SetTraceAllocation(false);

  // Invoke "bar" which during compilation, triggers a closure allocation.
  Invoke(root_library, "bar");

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), closure_class.id());
    filter.set_enable_vm_ticks(true);
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }
}
1163
1164ISOLATE_UNIT_TEST_CASE(Profiler_TypedArrayAllocation) {
1165 EnableProfiler();
1166 DisableNativeProfileScope dnps;
1167 DisableBackgroundCompilationScope dbcs;
1168 const char* kScript =
1169 "import 'dart:typed_data';\n"
1170 "List foo() => new Float32List(4);\n";
1171 const Library& root_library = Library::Handle(LoadTestScript(kScript));
1172 Isolate* isolate = thread->isolate();
1173
1174 const Library& typed_data_library =
1175 Library::Handle(isolate->object_store()->typed_data_library());
1176
1177 const Class& float32_list_class =
1178 Class::Handle(GetClass(typed_data_library, "_Float32List"));
1179 EXPECT(!float32_list_class.IsNull());
1180
1181 Invoke(root_library, "foo");
1182
1183 {
1184 StackZone zone(thread);
1185 HANDLESCOPE(thread);
1186 Profile profile(isolate);
1187 AllocationFilter filter(isolate->main_port(), float32_list_class.id());
1188 profile.Build(thread, &filter, Profiler::sample_buffer());
1189 // We should have no allocation samples.
1190 EXPECT_EQ(0, profile.sample_count());
1191 }
1192
1193 float32_list_class.SetTraceAllocation(true);
1194 Invoke(root_library, "foo");
1195
1196 {
1197 StackZone zone(thread);
1198 HANDLESCOPE(thread);
1199 Profile profile(isolate);
1200 AllocationFilter filter(isolate->main_port(), float32_list_class.id());
1201 profile.Build(thread, &filter, Profiler::sample_buffer());
1202 // We should have one allocation sample.
1203 EXPECT_EQ(1, profile.sample_count());
1204 ProfileStackWalker walker(&profile);
1205
1206 EXPECT_STREQ("TypedData_Float32Array_new", walker.VMTagName());
1207 if (FLAG_enable_interpreter) {
1208 EXPECT_STREQ("[Bytecode] new Float32List", walker.CurrentName());
1209 EXPECT(walker.Down());
1210 EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
1211 EXPECT(!walker.Down());
1212 } else {
1213 EXPECT_STREQ("[Unoptimized] new Float32List", walker.CurrentName());
1214 EXPECT(walker.Down());
1215 EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
1216 EXPECT(!walker.Down());
1217 }
1218 }
1219
1220 float32_list_class.SetTraceAllocation(false);
1221 Invoke(root_library, "foo");
1222
1223 {
1224 StackZone zone(thread);
1225 HANDLESCOPE(thread);
1226 Profile profile(isolate);
1227 AllocationFilter filter(isolate->main_port(), float32_list_class.id());
1228 profile.Build(thread, &filter, Profiler::sample_buffer());
1229 // We should still only have one allocation sample.
1230 EXPECT_EQ(1, profile.sample_count());
1231 }
1232
1233 float32_list_class.SetTraceAllocation(true);
1234 Invoke(root_library, "foo");
1235
1236 {
1237 StackZone zone(thread);
1238 HANDLESCOPE(thread);
1239 Profile profile(isolate);
1240 AllocationFilter filter(isolate->main_port(), float32_list_class.id());
1241 profile.Build(thread, &filter, Profiler::sample_buffer());
1242 // We should now have two allocation samples.
1243 EXPECT_EQ(2, profile.sample_count());
1244 }
1245}
1246
// Verifies that _OneByteString allocations (here produced by String '+')
// are sampled only while allocation tracing is enabled for the class, and
// that re-enabling tracing adds to the previously collected samples.
ISOLATE_UNIT_TEST_CASE(Profiler_StringAllocation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript = "String foo(String a, String b) => a + b;";
  const Library& root_library = Library::Handle(LoadTestScript(kScript));
  Isolate* isolate = thread->isolate();

  const Class& one_byte_string_class =
      Class::Handle(isolate->object_store()->one_byte_string_class());
  EXPECT(!one_byte_string_class.IsNull());

  Dart_Handle args[2];
  {
    // Dart_Handle values must be created while in native (not VM) state.
    TransitionVMToNative transition(thread);
    args[0] = NewString("a");
    args[1] = NewString("b");
  }

  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  one_byte_string_class.SetTraceAllocation(true);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    // The concatenation allocates inside the String_concat native.
    EXPECT_STREQ("String_concat", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("[Bytecode] _StringBase.+", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
      EXPECT_STREQ("[Unoptimized] _StringBase.+", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  one_byte_string_class.SetTraceAllocation(false);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }

  one_byte_string_class.SetTraceAllocation(true);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should now have two allocation samples.
    EXPECT_EQ(2, profile.sample_count());
  }
}
1331
// Like Profiler_StringAllocation, but drives allocation through string
// interpolation, checking the full _interpolate -> _concatAll ->
// String._allocate stack in the sampled frames.
ISOLATE_UNIT_TEST_CASE(Profiler_StringInterpolation) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript = "String foo(String a, String b) => '$a | $b';";
  const Library& root_library = Library::Handle(LoadTestScript(kScript));
  Isolate* isolate = thread->isolate();

  const Class& one_byte_string_class =
      Class::Handle(isolate->object_store()->one_byte_string_class());
  EXPECT(!one_byte_string_class.IsNull());

  Dart_Handle args[2];
  {
    // Dart_Handle values must be created while in native (not VM) state.
    TransitionVMToNative transition(thread);
    args[0] = NewString("a");
    args[1] = NewString("b");
  }

  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  one_byte_string_class.SetTraceAllocation(true);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    EXPECT_STREQ("Internal_allocateOneByteString", walker.VMTagName());
    // NOTE(review): the two branches below differ only in the caller frame
    // ("[Bytecode] foo" vs "[Unoptimized] foo") because the core-library
    // frames are expected as [Unoptimized] either way; the first check in
    // each branch also repeats the VMTagName assertion made just above.
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("Internal_allocateOneByteString", walker.VMTagName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] String._allocate", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] String._concatAll", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] _StringBase._interpolate",
                   walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
      EXPECT_STREQ("Internal_allocateOneByteString", walker.VMTagName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] String._allocate", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] String._concatAll", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] _StringBase._interpolate",
                   walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] foo", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }

  one_byte_string_class.SetTraceAllocation(false);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should still only have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
  }

  one_byte_string_class.SetTraceAllocation(true);
  Invoke(root_library, "foo", 2, &args[0]);

  {
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), one_byte_string_class.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should now have two allocation samples.
    EXPECT_EQ(2, profile.sample_count());
  }
}
1430
// Verifies that allocation samples taken inside inlined code are reported
// correctly in both views: the plain code-frame view (inlined callees
// collapsed into B.boo) and the inline-expanded view (full virtual chain
// mainA -> B.boo -> B.foo -> B.choo).
ISOLATE_UNIT_TEST_CASE(Profiler_FunctionInline) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  // High optimization threshold so B.boo is optimized (and inlines foo/choo)
  // only after the 50,000-iteration warm-up loop.
  SetFlagScope<int> sfs(&FLAG_optimization_counter_threshold, 30000);
  SetFlagScope<int> sfs2(&FLAG_compilation_counter_threshold, 0);

  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static choo(bool alloc) {\n"
      "    if (alloc) return new A();\n"
      "    return alloc && alloc && !alloc;\n"
      "  }\n"
      "  static foo(bool alloc) {\n"
      "    choo(alloc);\n"
      "  }\n"
      "  static boo(bool alloc) {\n"
      "    for (var i = 0; i < 50000; i++) {\n"
      "      foo(alloc);\n"
      "    }\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  B.boo(false);\n"
      "}\n"
      "mainA() {\n"
      "  B.boo(true);\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  // Compile "main".
  Invoke(root_library, "main");
  // Compile "mainA".
  Invoke(root_library, "mainA");
  // At this point B.boo should be optimized and inlined B.foo and B.choo.

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate 50,000 instances of A.
  Invoke(root_library, "mainA");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have 50,000 allocation samples.
    EXPECT_EQ(50000, profile.sample_count());
    {
      // Plain view: inlined frames collapsed into their caller's code.
      ProfileStackWalker walker(&profile);
      // We have two code objects: mainA and B.boo.
      EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT_EQ(50000, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Optimized] B.boo", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] mainA", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(!walker.Down());
    }
    {
      ProfileStackWalker walker(&profile, true);
      // Inline expansion should show us the complete call chain:
      // mainA -> B.boo -> B.foo -> B.choo.
      EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT_EQ(50000, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("B.choo", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("B.foo", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("B.boo", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
      EXPECT_STREQ("mainA", walker.CurrentName());
      EXPECT_EQ(50000, walker.CurrentInclusiveTicks());
      EXPECT_EQ(0, walker.CurrentExclusiveTicks());
      EXPECT(!walker.Down());
    }
  }
}
1552
// TODO(review): test name misspells "Boundary"; renaming would change the
// registered test id, so it is left as-is here.
ISOLATE_UNIT_TEST_CASE(Profiler_InliningIntervalBoundry) {
  // The PC of frames below the top frame is a call's return address,
  // which can belong to a different inlining interval than the call.
  // This test checks the profiler service takes this into account; see
  // ProfileBuilder::ProcessFrame.

  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  // Optimize 'a' only after the 20,000-iteration warm-up in mainNoAlloc.
  SetFlagScope<int> sfs(&FLAG_optimization_counter_threshold, 30000);
  SetFlagScope<int> sfs2(&FLAG_compilation_counter_threshold, 0);

  const char* kScript =
      "class A {\n"
      "}\n"
      "bool alloc = false;"
      "maybeAlloc() {\n"
      "  try {\n"
      "    if (alloc) new A();\n"
      "  } catch (e) {\n"
      "  }\n"
      "}\n"
      "right() => maybeAlloc();\n"
      "doNothing() {\n"
      "  try {\n"
      "  } catch (e) {\n"
      "  }\n"
      "}\n"
      "wrong() => doNothing();\n"
      "a() {\n"
      "  try {\n"
      "    right();\n"
      "    wrong();\n"
      "  } catch (e) {\n"
      "  }\n"
      "}\n"
      "mainNoAlloc() {\n"
      "  for (var i = 0; i < 20000; i++) {\n"
      "    a();\n"
      "  }\n"
      "}\n"
      "mainAlloc() {\n"
      "  alloc = true;\n"
      "  a();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  // Compile and optimize.
  Invoke(root_library, "mainNoAlloc");
  Invoke(root_library, "mainAlloc");

  // At this point a should be optimized and have inlined both right and wrong,
  // but not maybeAllocate or doNothing.
  Function& func = Function::Handle();
  func = GetFunction(root_library, "a");
  EXPECT(!func.is_inlinable());
  EXPECT(func.HasOptimizedCode());
  func = GetFunction(root_library, "right");
  EXPECT(func.is_inlinable());
  func = GetFunction(root_library, "wrong");
  EXPECT(func.is_inlinable());
  func = GetFunction(root_library, "doNothing");
  EXPECT(!func.is_inlinable());
  func = GetFunction(root_library, "maybeAlloc");
  EXPECT(!func.is_inlinable());

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have no allocation samples.
    EXPECT_EQ(0, profile.sample_count());
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  Invoke(root_library, "mainAlloc");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    // Inline expansion should show us the complete call chain:
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
    EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
    EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
    EXPECT(walker.Down());
    EXPECT_STREQ("maybeAlloc", walker.CurrentName());
    EXPECT(walker.Down());
    EXPECT_STREQ("right", walker.CurrentName());
    EXPECT(walker.Down());
    EXPECT_STREQ("a", walker.CurrentName());
    EXPECT(walker.Down());
    // NOTE(review): unlike the sibling tests there is no final
    // EXPECT(!walker.Down()) after mainAlloc — confirm whether frames below
    // mainAlloc are intentionally left unchecked here.
    EXPECT_STREQ("mainAlloc", walker.CurrentName());
  }
}
1668
// Builds a 20-frame-deep call chain ending in an allocation of A and
// verifies the profiler reconstructs the entire chain in one sample.
// MaxProfileDepthScope(32) ensures the whole chain fits in the sample
// (deep stacks are internally chained across multiple raw samples).
ISOLATE_UNIT_TEST_CASE(Profiler_ChainedSamples) {
  EnableProfiler();
  MaxProfileDepthScope mpds(32);
  DisableNativeProfileScope dnps;

  // Each sample holds 8 stack frames.
  // This chain is 20 stack frames deep.
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "}\n"
      "class B {\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "go() => init();\n"
      "init() => secondInit();\n"
      "secondInit() => apple();\n"
      "apple() => banana();\n"
      "banana() => cantaloupe();\n"
      "cantaloupe() => dog();\n"
      "dog() => elephant();\n"
      "elephant() => fred();\n"
      "fred() => granola();\n"
      "granola() => haystack();\n"
      "haystack() => ice();\n"
      "ice() => jeep();\n"
      "jeep() => kindle();\n"
      "kindle() => lemon();\n"
      "lemon() => mayo();\n"
      "mayo() => napkin();\n"
      "napkin() => orange();\n"
      "orange() => B.boo();\n"
      "main() {\n"
      "  return go();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());
  class_a.SetTraceAllocation(true);

  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have 1 allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile);

    // Walk from the allocation site back to main; every frame of the
    // 20-deep chain must be present, in order.
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    if (FLAG_enable_interpreter) {
      EXPECT_STREQ("[Bytecode] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] orange", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] napkin", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] mayo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] lemon", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] kindle", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] jeep", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] ice", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] haystack", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] granola", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] fred", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] elephant", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] dog", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] cantaloupe", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] banana", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] apple", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] secondInit", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] init", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] go", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Bytecode] main", walker.CurrentName());
      EXPECT(!walker.Down());
    } else {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] B.boo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] orange", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] napkin", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] mayo", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] lemon", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] kindle", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] jeep", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] ice", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] haystack", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] granola", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] fred", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] elephant", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] dog", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] cantaloupe", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] banana", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] apple", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] secondInit", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] init", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] go", walker.CurrentName());
      EXPECT(walker.Down());
      EXPECT_STREQ("[Unoptimized] main", walker.CurrentName());
      EXPECT(!walker.Down());
    }
  }
}
1820
// Verifies that an allocation sample in unoptimized code carries the
// correct source token for each frame (CurrentToken): the "A" constructor
// reference inside B.boo, and the "boo" call site inside main.
ISOLATE_UNIT_TEST_CASE(Profiler_BasicSourcePosition) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  Invoke(root_library, "main");

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    // Walk with inline expansion so frames report function + source token.
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    if (!FLAG_enable_interpreter) {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT_EQ(1, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
    }
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
1887
// Same as Profiler_BasicSourcePosition, but with "main" optimized (and
// B.boo inlined into it) before the allocation is sampled, verifying
// source tokens are still attributed correctly through inlined frames.
ISOLATE_UNIT_TEST_CASE(Profiler_BasicSourcePositionOptimized) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  // Optimize quickly.
  SetFlagScope<int> sfs(&FLAG_optimization_counter_threshold, 5);
  SetFlagScope<int> sfs2(&FLAG_compilation_counter_threshold, 0);
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  B.boo();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  const Function& main = Function::Handle(GetFunction(root_library, "main"));
  EXPECT(!main.IsNull());

  // Warm up function.
  // Loop until the optimizer kicks in (threshold is 5, so this is quick).
  while (true) {
    Invoke(root_library, "main");
    const Code& code = Code::Handle(main.CurrentCode());
    if (code.is_optimized()) {
      // Warmed up.
      break;
    }
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  // Still optimized.
  // (Enabling allocation tracing must not have deoptimized main.)
  const Code& code = Code::Handle(main.CurrentCode());
  EXPECT(code.is_optimized());

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    // Move down from the root.
    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
    EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
    EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
    EXPECT_EQ(1, walker.CurrentExclusiveTicks());
    EXPECT(walker.Down());
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
1971
// Checks source-position attribution through a mixed inline chain in
// unoptimized code. Expected stack, leaf first:
//   allocation stub -> B.boo -> B.oats -> C.fox -> C.bacon -> main.
ISOLATE_UNIT_TEST_CASE(Profiler_SourcePosition) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:never-inline')\n"
      "  static oats() {\n"
      "    return boo();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "class C {\n"
      "  @pragma('vm:never-inline') bacon() {\n"
      "    return fox();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') fox() {\n"
      "    return B.oats();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  new C()..bacon();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  Invoke(root_library, "main");

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have exactly one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    // The stub frame checks are skipped when running on the interpreter.
    if (!FLAG_enable_interpreter) {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT_EQ(1, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
    }
    // Each frame's CurrentToken is the source token being executed there.
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("B.oats", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.fox", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("oats", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.bacon", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("fox", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("bacon", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
2065
// Same stack expectations as Profiler_SourcePosition, but with `main`
// optimized first, so the prefer-inline frames (B.boo, C.fox) are
// reconstructed from inlining metadata rather than real frames.
ISOLATE_UNIT_TEST_CASE(Profiler_SourcePositionOptimized) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  // Optimize quickly.
  SetFlagScope<int> sfs(&FLAG_optimization_counter_threshold, 5);
  SetFlagScope<int> sfs2(&FLAG_compilation_counter_threshold, 0);

  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:never-inline')\n"
      "  static oats() {\n"
      "    return boo();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "class C {\n"
      "  @pragma('vm:never-inline') bacon() {\n"
      "    return fox();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') fox() {\n"
      "    return B.oats();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  new C()..bacon();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  const Function& main = Function::Handle(GetFunction(root_library, "main"));
  EXPECT(!main.IsNull());

  // Warm up function until the optimizer kicks in (threshold lowered above).
  while (true) {
    Invoke(root_library, "main");
    const Code& code = Code::Handle(main.CurrentCode());
    if (code.is_optimized()) {
      // Warmed up.
      break;
    }
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  // Still optimized.
  const Code& code = Code::Handle(main.CurrentCode());
  EXPECT(code.is_optimized());

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have exactly one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
    EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
    EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
    EXPECT_EQ(1, walker.CurrentExclusiveTicks());
    EXPECT(walker.Down());
    // Expected stack, leaf first: B.boo -> B.oats -> C.fox -> C.bacon -> main.
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("B.oats", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.fox", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("oats", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.bacon", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("fox", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("bacon", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
2176
// Checks that an inlined user-defined operator (`C.operator+`) shows up in
// the allocation stack trace with the correct source tokens. Expected stack,
// leaf first: stub -> B.boo -> B.oats -> C.fox -> C.+ -> C.bacon -> main.
ISOLATE_UNIT_TEST_CASE(Profiler_BinaryOperatorSourcePosition) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:never-inline')\n"
      "  static oats() {\n"
      "    return boo();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "class C {\n"
      "  @pragma('vm:never-inline') bacon() {\n"
      "    return this + this;\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') operator+(C other) {\n"
      "    return fox();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') fox() {\n"
      "    return B.oats();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  new C()..bacon();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  Invoke(root_library, "main");

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have exactly one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
    // The stub frame checks are skipped when running on the interpreter.
    if (!FLAG_enable_interpreter) {
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
      EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
      EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
      EXPECT_EQ(1, walker.CurrentExclusiveTicks());
      EXPECT(walker.Down());
    }
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("B.oats", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.fox", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("oats", walker.CurrentToken());
    EXPECT(walker.Down());
    // The operator frame is named "C.+" and its caller's token is "+".
    EXPECT_STREQ("C.+", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("fox", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.bacon", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("+", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("bacon", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
2278
// Same stack expectations as Profiler_BinaryOperatorSourcePosition, but with
// `main` optimized first, so the prefer-inline frames (including C.operator+)
// are reconstructed from inlining metadata rather than real frames.
ISOLATE_UNIT_TEST_CASE(Profiler_BinaryOperatorSourcePositionOptimized) {
  EnableProfiler();
  DisableNativeProfileScope dnps;
  DisableBackgroundCompilationScope dbcs;
  // Optimize quickly.
  SetFlagScope<int> sfs(&FLAG_optimization_counter_threshold, 5);
  SetFlagScope<int> sfs2(&FLAG_compilation_counter_threshold, 0);

  const char* kScript =
      "class A {\n"
      "  var a;\n"
      "  var b;\n"
      "  @pragma('vm:never-inline') A() { }\n"
      "}\n"
      "class B {\n"
      "  @pragma('vm:never-inline')\n"
      "  static oats() {\n"
      "    return boo();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline')\n"
      "  static boo() {\n"
      "    return new A();\n"
      "  }\n"
      "}\n"
      "class C {\n"
      "  @pragma('vm:never-inline') bacon() {\n"
      "    return this + this;\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') operator+(C other) {\n"
      "    return fox();\n"
      "  }\n"
      "  @pragma('vm:prefer-inline') fox() {\n"
      "    return B.oats();\n"
      "  }\n"
      "}\n"
      "main() {\n"
      "  new C()..bacon();\n"
      "}\n";

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  const Class& class_a = Class::Handle(GetClass(root_library, "A"));
  EXPECT(!class_a.IsNull());

  const Function& main = Function::Handle(GetFunction(root_library, "main"));
  EXPECT(!main.IsNull());

  // Warm up function until the optimizer kicks in (threshold lowered above).
  while (true) {
    Invoke(root_library, "main");
    const Code& code = Code::Handle(main.CurrentCode());
    if (code.is_optimized()) {
      // Warmed up.
      break;
    }
  }

  // Turn on allocation tracing for A.
  class_a.SetTraceAllocation(true);

  // Allocate one time.
  Invoke(root_library, "main");

  // Still optimized.
  const Code& code = Code::Handle(main.CurrentCode());
  EXPECT(code.is_optimized());

  {
    Thread* thread = Thread::Current();
    Isolate* isolate = thread->isolate();
    StackZone zone(thread);
    HANDLESCOPE(thread);
    Profile profile(isolate);
    AllocationFilter filter(isolate->main_port(), class_a.id());
    profile.Build(thread, &filter, Profiler::sample_buffer());
    // We should have exactly one allocation sample.
    EXPECT_EQ(1, profile.sample_count());
    ProfileStackWalker walker(&profile, true);

    EXPECT_STREQ("DRT_AllocateObject", walker.VMTagName());
#if defined(TARGET_ARCH_IA32)  // Alloc. stub not impl. for ia32.
    EXPECT_STREQ("[Stub] Allocate A", walker.CurrentName());
#else
    EXPECT_STREQ("[Stub] AllocateObjectSlow", walker.CurrentName());
#endif
    EXPECT_EQ(1, walker.CurrentExclusiveTicks());
    EXPECT(walker.Down());
    // Stack, leaf first: B.boo -> B.oats -> C.fox -> C.+ -> C.bacon -> main.
    EXPECT_STREQ("B.boo", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_STREQ("A", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("B.oats", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("boo", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.fox", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("oats", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.+", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("fox", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("C.bacon", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("+", walker.CurrentToken());
    EXPECT(walker.Down());
    EXPECT_STREQ("main", walker.CurrentName());
    EXPECT_EQ(1, walker.CurrentInclusiveTicks());
    EXPECT_EQ(0, walker.CurrentExclusiveTicks());
    EXPECT_STREQ("bacon", walker.CurrentToken());
    EXPECT(!walker.Down());
  }
}
2397
2398static void InsertFakeSample(SampleBuffer* sample_buffer, uword* pc_offsets) {
2399 ASSERT(sample_buffer != NULL);
2400 Isolate* isolate = Isolate::Current();
2401 Sample* sample = sample_buffer->ReserveSample();
2402 ASSERT(sample != NULL);
2403 sample->Init(isolate->main_port(), OS::GetCurrentMonotonicMicros(),
2404 OSThread::Current()->trace_id());
2405 sample->set_thread_task(Thread::kMutatorTask);
2406
2407 intptr_t i = 0;
2408 while (pc_offsets[i] != 0) {
2409 // When we collect a real stack trace, all PCs collected aside from the
2410 // executing one (i == 0) are actually return addresses. Return addresses
2411 // are one byte beyond the call instruction that is executing. The profiler
2412 // accounts for this and subtracts one from these addresses when querying
2413 // inline and token position ranges. To be consistent with real stack
2414 // traces, we add one byte to all PCs except the executing one.
2415 // See OffsetForPC in profiler_service.cc for more context.
2416 const intptr_t return_address_offset = i > 0 ? 1 : 0;
2417 sample->SetAt(i, pc_offsets[i] + return_address_offset);
2418 i++;
2419 }
2420 sample->SetAt(i, 0);
2421}
2422
2423static uword FindPCForTokenPosition(const Code& code, TokenPosition tp) {
2424 GrowableArray<const Function*> functions;
2425 GrowableArray<TokenPosition> token_positions;
2426 for (intptr_t pc_offset = 0; pc_offset < code.Size(); pc_offset++) {
2427 code.GetInlinedFunctionsAtInstruction(pc_offset, &functions,
2428 &token_positions);
2429 if (token_positions[0] == tp) {
2430 return code.PayloadStart() + pc_offset;
2431 }
2432 }
2433
2434 return 0;
2435}
2436
// Builds a kProfile source report from hand-crafted samples (inserted via
// InsertFakeSample) so the tick counts asserted against the JSON output are
// fully deterministic.
ISOLATE_UNIT_TEST_CASE(Profiler_GetSourceReport) {
  EnableProfiler();
  const char* kScript =
      "int doWork(i) => i * i;\n"
      "int main() {\n"
      "  int sum = 0;\n"
      "  for (int i = 0; i < 100; i++) {\n"
      "     sum += doWork(i);\n"
      "  }\n"
      "  return sum;\n"
      "}\n";

  // Token position of * in `i * i` (offset into kScript above).
  const TokenPosition squarePosition = TokenPosition(19);

  // Token position of the call to `doWork` (offset into kScript above).
  const TokenPosition callPosition = TokenPosition(95);

  DisableNativeProfileScope dnps;
  // Disable profiling for this thread so only the fake samples are seen.
  DisableThreadInterruptsScope dtis(Thread::Current());

  DisableBackgroundCompilationScope dbcs;

  SampleBuffer* sample_buffer = Profiler::sample_buffer();
  EXPECT(sample_buffer != NULL);

  const Library& root_library = Library::Handle(LoadTestScript(kScript));

  // Invoke main so that it gets compiled.
  Invoke(root_library, "main");

  {
    // Clear the profile for this isolate.
    ClearProfileVisitor cpv(Isolate::Current());
    sample_buffer->VisitSamples(&cpv);
  }

  // Query the code object for main and determine the PC at some token
  // positions.
  const Function& main = Function::Handle(GetFunction(root_library, "main"));
  EXPECT(!main.IsNull());

  const Function& do_work =
      Function::Handle(GetFunction(root_library, "doWork"));
  EXPECT(!do_work.IsNull());

  const Script& script = Script::Handle(main.script());
  EXPECT(!script.IsNull());

  const Code& main_code = Code::Handle(main.CurrentCode());
  EXPECT(!main_code.IsNull());

  const Code& do_work_code = Code::Handle(do_work.CurrentCode());
  EXPECT(!do_work_code.IsNull());

  // Dump code source map (debug aid when the lookups below fail).
  do_work_code.DumpSourcePositions();
  main_code.DumpSourcePositions();

  // Look up some source token position's pc.
  uword squarePositionPc = FindPCForTokenPosition(do_work_code, squarePosition);
  EXPECT(squarePositionPc != 0);

  uword callPositionPc = FindPCForTokenPosition(main_code, callPosition);
  EXPECT(callPositionPc != 0);

  // Look up some classifying token position's pc.
  uword controlFlowPc =
      FindPCForTokenPosition(do_work_code, TokenPosition::kControlFlow);
  EXPECT(controlFlowPc != 0);

  // Insert fake samples (leaf frame first, 0-terminated).

  // Sample 1:
  // squarePositionPc exclusive.
  // callPositionPc inclusive.
  uword sample1[] = {squarePositionPc,  // doWork.
                     callPositionPc,    // main.
                     0};

  // Sample 2:
  // squarePositionPc exclusive.
  uword sample2[] = {
      squarePositionPc,  // doWork.
      0,
  };

  // Sample 3:
  // controlFlowPc exclusive.
  // callPositionPc inclusive.
  uword sample3[] = {controlFlowPc,   // doWork.
                     callPositionPc,  // main.
                     0};

  InsertFakeSample(sample_buffer, &sample1[0]);
  InsertFakeSample(sample_buffer, &sample2[0]);
  InsertFakeSample(sample_buffer, &sample3[0]);

  // Generate source report for the range covering doWork through main.
  JSONStream js;
  {
    SourceReport sourceReport(SourceReport::kProfile);
    sourceReport.PrintJSON(&js, script, do_work.token_pos(),
                           main.end_token_pos());
  }

  // Verify positions in do_work.
  EXPECT_SUBSTRING("\"positions\":[\"ControlFlow\",19]", js.ToCString());
  // Verify exclusive ticks in do_work: 1 at ControlFlow, 2 at position 19.
  EXPECT_SUBSTRING("\"exclusiveTicks\":[1,2]", js.ToCString());
  // Verify inclusive ticks in do_work.
  EXPECT_SUBSTRING("\"inclusiveTicks\":[1,2]", js.ToCString());

  // Verify positions in main.
  EXPECT_SUBSTRING("\"positions\":[95]", js.ToCString());
  // Verify exclusive ticks in main: the call site is never the leaf frame.
  EXPECT_SUBSTRING("\"exclusiveTicks\":[0]", js.ToCString());
  // Verify inclusive ticks in main: samples 1 and 3 include the call site.
  EXPECT_SUBSTRING("\"inclusiveTicks\":[2]", js.ToCString());
}
2558
// Exercises ProfileCodeTable insertion and PC lookup. Ranges behave as
// half-open intervals [start, end): the end address itself does not match.
// Inserting a range that overlaps an existing entry merges into (extends)
// that entry instead of creating a new one.
ISOLATE_UNIT_TEST_CASE(Profiler_ProfileCodeTableTest) {
  Zone* Z = Thread::Current()->zone();

  ProfileCodeTable* table = new (Z) ProfileCodeTable();
  EXPECT_EQ(table->length(), 0);
  EXPECT_EQ(table->FindCodeForPC(42), static_cast<ProfileCode*>(NULL));

  int64_t timestamp = 0;
  const AbstractCode null_code(Code::null());

  // First entry: [50, 60). InsertCode returns the entry's table index.
  ProfileCode* code1 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 50, 60, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code1), 0);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(55), code1);
  EXPECT_EQ(table->FindCodeForPC(59), code1);
  EXPECT_EQ(table->FindCodeForPC(60), static_cast<ProfileCode*>(NULL));

  // Insert below all: [10, 20) lands at index 0.
  ProfileCode* code2 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 10, 20, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code2), 0);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(19), code2);
  EXPECT_EQ(table->FindCodeForPC(20), static_cast<ProfileCode*>(NULL));

  // Insert above all: [80, 90) lands at index 2.
  ProfileCode* code3 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 80, 90, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code3), 2);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(80), code3);
  EXPECT_EQ(table->FindCodeForPC(89), code3);
  EXPECT_EQ(table->FindCodeForPC(90), static_cast<ProfileCode*>(NULL));

  // Insert between: [65, 75) lands between code1 and code3.
  ProfileCode* code4 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 65, 75, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code4), 2);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(80), code3);
  EXPECT_EQ(table->FindCodeForPC(65), code4);
  EXPECT_EQ(table->FindCodeForPC(74), code4);
  EXPECT_EQ(table->FindCodeForPC(75), static_cast<ProfileCode*>(NULL));

  // Insert overlapping left: [15, 25) overlaps code2 [10, 20) and merges.
  ProfileCode* code5 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 15, 25, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code5), 0);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(80), code3);
  EXPECT_EQ(table->FindCodeForPC(65), code4);
  EXPECT_EQ(table->FindCodeForPC(15), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(24), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(25), static_cast<ProfileCode*>(NULL));

  // Insert overlapping right: [45, 55) overlaps code1 [50, 60) and merges.
  ProfileCode* code6 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 45, 55, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code6), 1);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(80), code3);
  EXPECT_EQ(table->FindCodeForPC(65), code4);
  EXPECT_EQ(table->FindCodeForPC(15), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(24), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(45), code1);  // Merged right.
  EXPECT_EQ(table->FindCodeForPC(54), code1);  // Merged right.
  EXPECT_EQ(table->FindCodeForPC(55), code1);

  // Insert overlapping both: [20, 50) touches code2 and code1 at once.
  ProfileCode* code7 = new (Z)
      ProfileCode(ProfileCode::kNativeCode, 20, 50, timestamp, null_code);
  EXPECT_EQ(table->InsertCode(code7), 0);
  EXPECT_EQ(table->FindCodeForPC(0), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(100), static_cast<ProfileCode*>(NULL));
  EXPECT_EQ(table->FindCodeForPC(50), code1);
  EXPECT_EQ(table->FindCodeForPC(10), code2);
  EXPECT_EQ(table->FindCodeForPC(80), code3);
  EXPECT_EQ(table->FindCodeForPC(65), code4);
  EXPECT_EQ(table->FindCodeForPC(15), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(24), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(45), code1);  // Merged right.
  EXPECT_EQ(table->FindCodeForPC(54), code1);  // Merged right.
  EXPECT_EQ(table->FindCodeForPC(20), code2);  // Merged left.
  EXPECT_EQ(table->FindCodeForPC(49), code1);  // Truncated.
  EXPECT_EQ(table->FindCodeForPC(50), code1);
}
2663
2664#endif // !PRODUCT
2665
2666} // namespace dart
2667