1 | // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/profiler_service.h" |
6 | |
7 | #include "platform/text_buffer.h" |
8 | #include "vm/growable_array.h" |
9 | #include "vm/hash_map.h" |
10 | #include "vm/log.h" |
11 | #include "vm/malloc_hooks.h" |
12 | #include "vm/native_symbol.h" |
13 | #include "vm/object.h" |
14 | #include "vm/os.h" |
15 | #include "vm/profiler.h" |
16 | #include "vm/reusable_handles.h" |
17 | #include "vm/scope_timer.h" |
18 | #include "vm/timeline.h" |
19 | |
20 | namespace dart { |
21 | |
22 | DECLARE_FLAG(int, max_profile_depth); |
23 | DECLARE_FLAG(int, profile_period); |
24 | DECLARE_FLAG(bool, profile_vm); |
25 | |
26 | #ifndef PRODUCT |
27 | |
// Tracks Code objects that were deoptimized but must be kept alive because
// profiler samples may still reference their instructions.
class DeoptimizedCodeSet : public ZoneAllocated {
 public:
  // Snapshots the isolate's existing deoptimized-code array into |previous_|
  // and allocates an empty |current_| array to receive the still-referenced
  // subset. If the isolate has no array, |current_| is null and Add() is a
  // no-op.
  explicit DeoptimizedCodeSet(Isolate* isolate)
      : previous_(
            GrowableObjectArray::ZoneHandle(isolate->deoptimized_code_array())),
        current_(GrowableObjectArray::ZoneHandle(
            previous_.IsNull() ? GrowableObjectArray::null()
                               : GrowableObjectArray::New())) {}

  // Marks |code| as still referenced: it is carried over into |current_| only
  // if it was present in |previous_| and has not already been carried over.
  void Add(const Code& code) {
    if (current_.IsNull()) {
      return;
    }
    if (!Contained(code, previous_) || Contained(code, current_)) {
      return;
    }
    current_.Add(code);
  }

  // Installs |current_| as the isolate's deoptimized-code array, releasing
  // any code that was only kept alive by |previous_|.
  void UpdateIsolate(Isolate* isolate) {
    intptr_t size_before = SizeOf(previous_);
    intptr_t size_after = SizeOf(current_);
    if ((size_before > 0) && FLAG_trace_profiler) {
      intptr_t length_before = previous_.Length();
      intptr_t length_after = current_.Length();
      OS::PrintErr(
          "Updating isolate deoptimized code array: "
          "%" Pd " -> %" Pd " [%" Pd " -> %" Pd "]\n",
          size_before, size_after, length_before, length_after);
    }
    isolate->set_deoptimized_code_array(current_);
  }

 private:
  // Returns true if |array| contains |code| (linear scan on raw pointers;
  // safepoints are excluded so the raw pointers stay valid).
  bool Contained(const Code& code, const GrowableObjectArray& array) {
    if (array.IsNull() || code.IsNull()) {
      return false;
    }
    NoSafepointScope no_safepoint_scope;
    for (intptr_t i = 0; i < array.Length(); i++) {
      if (code.raw() == array.At(i)) {
        return true;
      }
    }
    return false;
  }

  // Sums the instruction sizes of all Code objects in |array|.
  intptr_t SizeOf(const GrowableObjectArray& array) {
    if (array.IsNull()) {
      return 0;
    }
    Code& code = Code::ZoneHandle();
    intptr_t size = 0;
    for (intptr_t i = 0; i < array.Length(); i++) {
      code ^= array.At(i);
      ASSERT(!code.IsNull());
      size += code.Size();
    }
    return size;
  }

  // Array holding code that is being kept around only for the profiler.
  const GrowableObjectArray& previous_;
  // Array holding code that should continue to be kept around for the
  // profiler.
  const GrowableObjectArray& current_;
};
94 | |
// Tick counters for a single token position within a function; counters
// start at zero.
ProfileFunctionSourcePosition::ProfileFunctionSourcePosition(
    TokenPosition token_pos)
    : token_pos_(token_pos), exclusive_ticks_(0), inclusive_ticks_(0) {}
98 | |
99 | void ProfileFunctionSourcePosition::Tick(bool exclusive) { |
100 | if (exclusive) { |
101 | exclusive_ticks_++; |
102 | } else { |
103 | inclusive_ticks_++; |
104 | } |
105 | } |
106 | |
// Constructs a profile entry at slot |table_index|. |name| may be NULL for
// Dart functions (the name is then derived lazily from |function|); the
// synthetic kinds (native/tag/stub/unknown) pass an explicit |name| and a
// null |function|.
ProfileFunction::ProfileFunction(Kind kind,
                                 const char* name,
                                 const Function& function,
                                 const intptr_t table_index)
    : kind_(kind),
      name_(name),
      function_(Function::ZoneHandle(function.raw())),
      table_index_(table_index),
      profile_codes_(0),
      source_position_ticks_(0),
      exclusive_ticks_(0),
      inclusive_ticks_(0),
      inclusive_serial_(-1) {
  // Dart functions must carry a real Function object and a valid table slot.
  ASSERT((kind_ != kDartFunction) || !function_.IsNull());
  ASSERT((kind_ != kDartFunction) || (table_index_ >= 0));
  ASSERT(profile_codes_.length() == 0);
}
124 | |
125 | const char* ProfileFunction::Name() const { |
126 | if (name_ != NULL) { |
127 | return name_; |
128 | } |
129 | ASSERT(!function_.IsNull()); |
130 | const String& func_name = |
131 | String::Handle(function_.QualifiedUserVisibleName()); |
132 | return func_name.ToCString(); |
133 | } |
134 | |
// Returns the resolved URL of the script that defines this function, or
// NULL for synthetic functions and for scripts without a resolved URL.
const char* ProfileFunction::ResolvedScriptUrl() const {
  if (function_.IsNull()) {
    return NULL;
  }
  const Script& script = Script::Handle(function_.script());
  const String& uri = String::Handle(script.resolved_url());
  if (uri.IsNull()) {
    return NULL;
  }
  return uri.ToCString();
}
146 | |
147 | bool ProfileFunction::is_visible() const { |
148 | if (function_.IsNull()) { |
149 | // Some synthetic function. |
150 | return true; |
151 | } |
152 | return FLAG_show_invisible_frames || function_.is_visible(); |
153 | } |
154 | |
// Records one sample hit. Exclusive ticks count samples where this function
// was on top of the stack; the inclusive tick is guarded by
// |inclusive_serial| so each sample is counted at most once even if the
// function appears several times in that sample's stack (recursion).
void ProfileFunction::Tick(bool exclusive,
                           intptr_t inclusive_serial,
                           TokenPosition token_position) {
  if (exclusive) {
    exclusive_ticks_++;
    TickSourcePosition(token_position, exclusive);
  }
  // Fall through and tick inclusive count too.
  if (inclusive_serial_ == inclusive_serial) {
    // Already ticked inclusively for this sample.
    return;
  }
  inclusive_serial_ = inclusive_serial;
  inclusive_ticks_++;
  TickSourcePosition(token_position, false);
}
171 | |
172 | void ProfileFunction::TickSourcePosition(TokenPosition token_position, |
173 | bool exclusive) { |
174 | intptr_t i = 0; |
175 | for (; i < source_position_ticks_.length(); i++) { |
176 | ProfileFunctionSourcePosition& position = source_position_ticks_[i]; |
177 | if (position.token_pos().value() == token_position.value()) { |
178 | if (FLAG_trace_profiler_verbose) { |
179 | OS::PrintErr("Ticking source position %s %s\n" , |
180 | exclusive ? "exclusive" : "inclusive" , |
181 | token_position.ToCString()); |
182 | } |
183 | // Found existing position, tick it. |
184 | position.Tick(exclusive); |
185 | return; |
186 | } |
187 | if (position.token_pos().value() > token_position.value()) { |
188 | break; |
189 | } |
190 | } |
191 | |
192 | // Add new one, sorted by token position value. |
193 | ProfileFunctionSourcePosition pfsp(token_position); |
194 | if (FLAG_trace_profiler_verbose) { |
195 | OS::PrintErr("Ticking source position %s %s\n" , |
196 | exclusive ? "exclusive" : "inclusive" , |
197 | token_position.ToCString()); |
198 | } |
199 | pfsp.Tick(exclusive); |
200 | |
201 | if (i < source_position_ticks_.length()) { |
202 | source_position_ticks_.InsertAt(i, pfsp); |
203 | } else { |
204 | source_position_ticks_.Add(pfsp); |
205 | } |
206 | } |
207 | |
208 | const char* ProfileFunction::KindToCString(Kind kind) { |
209 | switch (kind) { |
210 | case kDartFunction: |
211 | return "Dart" ; |
212 | case kNativeFunction: |
213 | return "Native" ; |
214 | case kTagFunction: |
215 | return "Tag" ; |
216 | case kStubFunction: |
217 | return "Stub" ; |
218 | case kUnknownFunction: |
219 | return "Collected" ; |
220 | default: |
221 | UNIMPLEMENTED(); |
222 | return "" ; |
223 | } |
224 | } |
225 | |
// Emits a fake @Function description for a synthetic (non-Dart) function
// into an already-open JSON object.
void ProfileFunction::PrintToJSONObject(JSONObject* func) {
  func->AddProperty("type", "NativeFunction");
  func->AddProperty("name", name());
  func->AddProperty("_kind", KindToCString(kind()));
}
231 | |
// Appends this function's profile entry to |functions|: kind, tick counts,
// the function reference (real Dart Function or a synthetic stand-in), and
// the indexes of the code objects attributed to it.
void ProfileFunction::PrintToJSONArray(JSONArray* functions) {
  JSONObject obj(functions);
  obj.AddProperty("type", "ProfileFunction");
  obj.AddProperty("kind", KindToCString(kind()));
  obj.AddProperty("inclusiveTicks", inclusive_ticks());
  obj.AddProperty("exclusiveTicks", exclusive_ticks());
  obj.AddProperty("resolvedUrl", ResolvedScriptUrl());
  if (kind() == kDartFunction) {
    ASSERT(!function_.IsNull());
    obj.AddProperty("function", function_);
  } else {
    // Synthetic functions get a fabricated function object.
    JSONObject func(&obj, "function");
    PrintToJSONObject(&func);
  }
  {
    JSONArray codes(&obj, "_codes");
    for (intptr_t i = 0; i < profile_codes_.length(); i++) {
      intptr_t code_index = profile_codes_[i];
      codes.AddValue(code_index);
    }
  }
}
254 | |
255 | void ProfileFunction::AddProfileCode(intptr_t code_table_index) { |
256 | for (intptr_t i = 0; i < profile_codes_.length(); i++) { |
257 | if (profile_codes_[i] == code_table_index) { |
258 | return; |
259 | } |
260 | } |
261 | profile_codes_.Add(code_table_index); |
262 | } |
263 | |
264 | bool ProfileFunction::GetSinglePosition(ProfileFunctionSourcePosition* pfsp) { |
265 | if (pfsp == NULL) { |
266 | return false; |
267 | } |
268 | if (source_position_ticks_.length() != 1) { |
269 | return false; |
270 | } |
271 | *pfsp = source_position_ticks_[0]; |
272 | return true; |
273 | } |
274 | |
// Tick counters for a single pc inside a ProfileCode range.
ProfileCodeAddress::ProfileCodeAddress(uword pc)
    : pc_(pc), exclusive_ticks_(0), inclusive_ticks_(0) {}
277 | |
278 | void ProfileCodeAddress::Tick(bool exclusive) { |
279 | if (exclusive) { |
280 | exclusive_ticks_++; |
281 | } else { |
282 | inclusive_ticks_++; |
283 | } |
284 | } |
285 | |
// Constructs a profile entry covering the half-open pc range [start, end).
// |code| may wrap a null object for native/collected/tag ranges.
// NOTE(review): |timestamp| is not consumed here; compile_timestamp_ starts
// at 0 and appears to be set elsewhere — confirm against the header.
ProfileCode::ProfileCode(Kind kind,
                         uword start,
                         uword end,
                         int64_t timestamp,
                         const AbstractCode code)
    : kind_(kind),
      start_(start),
      end_(end),
      exclusive_ticks_(0),
      inclusive_ticks_(0),
      inclusive_serial_(-1),
      code_(code),
      name_(NULL),
      compile_timestamp_(0),
      function_(NULL),
      code_table_index_(-1),
      address_ticks_(0) {
  // Ranges must be non-empty.
  ASSERT(start_ < end_);
}
305 | |
306 | void ProfileCode::TruncateLower(uword start) { |
307 | if (start > start_) { |
308 | start_ = start; |
309 | } |
310 | ASSERT(start_ < end_); |
311 | } |
312 | |
313 | void ProfileCode::TruncateUpper(uword end) { |
314 | if (end < end_) { |
315 | end_ = end; |
316 | } |
317 | ASSERT(start_ < end_); |
318 | } |
319 | |
320 | void ProfileCode::ExpandLower(uword start) { |
321 | if (start < start_) { |
322 | start_ = start; |
323 | } |
324 | ASSERT(start_ < end_); |
325 | } |
326 | |
327 | void ProfileCode::ExpandUpper(uword end) { |
328 | if (end > end_) { |
329 | end_ = end; |
330 | } |
331 | ASSERT(start_ < end_); |
332 | } |
333 | |
334 | bool ProfileCode::Overlaps(const ProfileCode* other) const { |
335 | ASSERT(other != NULL); |
336 | return other->Contains(start_) || other->Contains(end_ - 1) || |
337 | Contains(other->start()) || Contains(other->end() - 1); |
338 | } |
339 | |
340 | bool ProfileCode::IsOptimizedDart() const { |
341 | return !code_.IsNull() && code_.is_optimized(); |
342 | } |
343 | |
344 | void ProfileCode::SetName(const char* name) { |
345 | if (name == NULL) { |
346 | name_ = NULL; |
347 | } |
348 | intptr_t len = strlen(name) + 1; |
349 | name_ = Thread::Current()->zone()->Alloc<char>(len); |
350 | strncpy(name_, name, len); |
351 | } |
352 | |
// Synthesizes a name of the form "<prefix> [start, end)" for code ranges
// that have no symbol name, then installs it via SetName().
void ProfileCode::GenerateAndSetSymbolName(const char* prefix) {
  const intptr_t kBuffSize = 512;
  char buff[kBuffSize];
  Utils::SNPrint(&buff[0], kBuffSize - 1, "%s [%" Px ", %" Px ")", prefix,
                 start(), end());
  SetName(buff);
}
360 | |
// Records one sample hit at |pc|. |serial| identifies the sample so the
// inclusive count is incremented at most once per sample even if this code
// appears multiple times in the sample's stack (recursion).
void ProfileCode::Tick(uword pc, bool exclusive, intptr_t serial) {
  // If exclusive is set, tick it.
  if (exclusive) {
    exclusive_ticks_++;
    TickAddress(pc, true);
  }
  // Fall through and tick inclusive count too.
  if (inclusive_serial_ == serial) {
    // Already gave inclusive tick for this sample.
    return;
  }
  inclusive_serial_ = serial;
  inclusive_ticks_++;
  TickAddress(pc, false);
}
376 | |
377 | void ProfileCode::TickAddress(uword pc, bool exclusive) { |
378 | const intptr_t length = address_ticks_.length(); |
379 | |
380 | intptr_t i = 0; |
381 | for (; i < length; i++) { |
382 | ProfileCodeAddress& entry = address_ticks_[i]; |
383 | if (entry.pc() == pc) { |
384 | // Tick the address entry. |
385 | entry.Tick(exclusive); |
386 | return; |
387 | } |
388 | if (entry.pc() > pc) { |
389 | break; |
390 | } |
391 | } |
392 | |
393 | // New address, add entry. |
394 | ProfileCodeAddress entry(pc); |
395 | |
396 | entry.Tick(exclusive); |
397 | |
398 | if (i < length) { |
399 | // Insert at i. |
400 | address_ticks_.InsertAt(i, entry); |
401 | } else { |
402 | // Add to end. |
403 | address_ticks_.Add(entry); |
404 | } |
405 | } |
406 | |
// Emits a fabricated @Code JSON description for a native-code range.
void ProfileCode::PrintNativeCode(JSONObject* profile_code_obj) {
  ASSERT(kind() == kNativeCode);
  JSONObject obj(profile_code_obj, "code");
  obj.AddProperty("type", "@Code");
  obj.AddProperty("kind", "Native");
  obj.AddProperty("name", name());
  obj.AddProperty("_optimized", false);
  obj.AddPropertyF("start", "%" Px "", start());
  obj.AddPropertyF("end", "%" Px "", end());
  {
    // Generate a fake function entry.
    JSONObject func(&obj, "function");
    ASSERT(function_ != NULL);
    function_->PrintToJSONObject(&func);
  }
}
423 | |
// Emits a fabricated @Code JSON description for a GC-collected code range.
void ProfileCode::PrintCollectedCode(JSONObject* profile_code_obj) {
  ASSERT(kind() == kCollectedCode);
  JSONObject obj(profile_code_obj, "code");
  obj.AddProperty("type", "@Code");
  obj.AddProperty("kind", "Collected");
  obj.AddProperty("name", name());
  obj.AddProperty("_optimized", false);
  obj.AddPropertyF("start", "%" Px "", start());
  obj.AddPropertyF("end", "%" Px "", end());
  {
    // Generate a fake function entry.
    JSONObject func(&obj, "function");
    ASSERT(function_ != NULL);
    function_->PrintToJSONObject(&func);
  }
}
440 | |
// Emits a fabricated @Code JSON description for a code range that was
// overwritten (reused) after the samples referencing it were taken. Reported
// to clients under the "Collected" kind.
void ProfileCode::PrintOverwrittenCode(JSONObject* profile_code_obj) {
  ASSERT(kind() == kReusedCode);
  JSONObject obj(profile_code_obj, "code");
  obj.AddProperty("type", "@Code");
  obj.AddProperty("kind", "Collected");
  obj.AddProperty("name", name());
  obj.AddProperty("_optimized", false);
  obj.AddPropertyF("start", "%" Px "", start());
  obj.AddPropertyF("end", "%" Px "", end());
  {
    // Generate a fake function entry.
    JSONObject func(&obj, "function");
    ASSERT(function_ != NULL);
    function_->PrintToJSONObject(&func);
  }
}
457 | |
// Emits a fabricated @Code JSON description for a VM/user tag pseudo-range.
void ProfileCode::PrintTagCode(JSONObject* profile_code_obj) {
  ASSERT(kind() == kTagCode);
  JSONObject obj(profile_code_obj, "code");
  obj.AddProperty("type", "@Code");
  obj.AddProperty("kind", "Tag");
  obj.AddProperty("name", name());
  obj.AddPropertyF("start", "%" Px "", start());
  obj.AddPropertyF("end", "%" Px "", end());
  obj.AddProperty("_optimized", false);
  {
    // Generate a fake function entry.
    JSONObject func(&obj, "function");
    ASSERT(function_ != NULL);
    function_->PrintToJSONObject(&func);
  }
}
474 | |
475 | const char* ProfileCode::KindToCString(Kind kind) { |
476 | switch (kind) { |
477 | case kDartCode: |
478 | return "Dart" ; |
479 | case kCollectedCode: |
480 | return "Collected" ; |
481 | case kNativeCode: |
482 | return "Native" ; |
483 | case kReusedCode: |
484 | return "Overwritten" ; |
485 | case kTagCode: |
486 | return "Tag" ; |
487 | } |
488 | UNREACHABLE(); |
489 | return NULL; |
490 | } |
491 | |
// Appends this code's profile entry to |codes|: kind, tick totals, the code
// reference (real Code for Dart, fabricated otherwise) and the flat
// [pc, exclusive, inclusive] tick triples.
void ProfileCode::PrintToJSONArray(JSONArray* codes) {
  JSONObject obj(codes);
  obj.AddProperty("kind", ProfileCode::KindToCString(kind()));
  obj.AddProperty("inclusiveTicks", inclusive_ticks());
  obj.AddProperty("exclusiveTicks", exclusive_ticks());
  if (kind() == kDartCode) {
    // Real Dart code: emit the actual Code object reference.
    ASSERT(!code_.IsNull());
    obj.AddProperty("code", *code_.handle());
  } else if (kind() == kCollectedCode) {
    PrintCollectedCode(&obj);
  } else if (kind() == kReusedCode) {
    PrintOverwrittenCode(&obj);
  } else if (kind() == kTagCode) {
    PrintTagCode(&obj);
  } else {
    ASSERT(kind() == kNativeCode);
    PrintNativeCode(&obj);
  }
  {
    // Ticks are a flat array of (pc, exclusive, inclusive) triples.
    JSONArray ticks(&obj, "ticks");
    for (intptr_t i = 0; i < address_ticks_.length(); i++) {
      const ProfileCodeAddress& entry = address_ticks_[i];
      ticks.AddValueF("%" Px "", entry.pc());
      ticks.AddValue(entry.exclusive_ticks());
      ticks.AddValue(entry.inclusive_ticks());
    }
  }
}
520 | |
// Interning table for ProfileFunction objects. Dart functions are
// deduplicated via a hash map; synthetic functions (tags, natives, stubs)
// are appended without deduplication. The table index assigned at creation
// is stable and used by the JSON output.
class ProfileFunctionTable : public ZoneAllocated {
 public:
  ProfileFunctionTable()
      : null_function_(Function::ZoneHandle()),
        unknown_function_(NULL),
        table_(8) {
    // Slot for frames whose Dart function could not be determined.
    unknown_function_ =
        Add(ProfileFunction::kUnknownFunction, "<unknown Dart function>");
  }

  // Returns the existing entry for |function| or creates a new one.
  ProfileFunction* LookupOrAdd(const Function& function) {
    ASSERT(!function.IsNull());
    ProfileFunction* profile_function = Lookup(function);
    if (profile_function != NULL) {
      return profile_function;
    }
    return Add(function);
  }

  // Returns the entry for |function|, or NULL if it has not been added.
  ProfileFunction* Lookup(const Function& function) {
    ASSERT(!function.IsNull());
    return function_hash_.LookupValue(&function);
  }

  // The canonical entry for unresolvable (collected/reused) code.
  ProfileFunction* GetUnknown() {
    ASSERT(unknown_function_ != NULL);
    return unknown_function_;
  }

  // No protection against being called more than once for the same tag_id.
  ProfileFunction* AddTag(uword tag_id, const char* name) {
    // TODO(johnmccutchan): Canonicalize ProfileFunctions for tags.
    return Add(ProfileFunction::kTagFunction, name);
  }

  // No protection against being called more than once for the same native
  // address.
  ProfileFunction* AddNative(uword start_address, const char* name) {
    // TODO(johnmccutchan): Canonicalize ProfileFunctions for natives.
    return Add(ProfileFunction::kNativeFunction, name);
  }

  // No protection against being called more than once for the same stub.
  ProfileFunction* AddStub(uword start_address, const char* name) {
    return Add(ProfileFunction::kStubFunction, name);
  }

  intptr_t length() const { return table_.length(); }

  ProfileFunction* At(intptr_t i) const {
    ASSERT(i >= 0);
    ASSERT(i < length());
    return table_[i];
  }

 private:
  // Appends a synthetic (non-Dart) function entry.
  ProfileFunction* Add(ProfileFunction::Kind kind, const char* name) {
    ASSERT(kind != ProfileFunction::kDartFunction);
    ASSERT(name != NULL);
    ProfileFunction* profile_function =
        new ProfileFunction(kind, name, null_function_, table_.length());
    table_.Add(profile_function);
    return profile_function;
  }

  // Appends a Dart function entry and registers it in the hash map.
  ProfileFunction* Add(const Function& function) {
    ASSERT(Lookup(function) == NULL);
    ProfileFunction* profile_function = new ProfileFunction(
        ProfileFunction::kDartFunction, NULL, function, table_.length());
    table_.Add(profile_function);
    function_hash_.Insert(profile_function);
    return profile_function;
  }

  // Needed for DirectChainedHashMap: keys are Function handles, values are
  // the ProfileFunction entries themselves.
  struct ProfileFunctionTableTrait {
    typedef ProfileFunction* Value;
    typedef const Function* Key;
    typedef ProfileFunction* Pair;

    static Key KeyOf(Pair kv) { return kv->function(); }

    static Value ValueOf(Pair kv) { return kv; }

    static inline intptr_t Hashcode(Key key) { return key->Hash(); }

    static inline bool IsKeyEqual(Pair kv, Key key) {
      // Compare identity of the underlying Function objects, not handles.
      return kv->function()->raw() == key->raw();
    }
  };

  const Function& null_function_;
  ProfileFunction* unknown_function_;
  ZoneGrowableArray<ProfileFunction*> table_;
  DirectChainedHashMap<ProfileFunctionTableTrait> function_hash_;
};
617 | |
// Resolves this code range to a ProfileFunction in |table| (creating one if
// needed), derives a display name when none is set, links the function to
// this code's table index, and caches the result in |function_|. Called once
// per code entry after the code table is built.
ProfileFunction* ProfileCode::SetFunctionAndName(ProfileFunctionTable* table) {
  ASSERT(function_ == NULL);

  ProfileFunction* function = NULL;
  if ((kind() == kReusedCode) || (kind() == kCollectedCode)) {
    if (name() == NULL) {
      // Lazily set generated name.
      GenerateAndSetSymbolName("[Collected]");
    }
    // Map these to a canonical unknown function.
    function = table->GetUnknown();
  } else if (kind() == kDartCode) {
    ASSERT(!code_.IsNull());
    const char* name = code_.QualifiedName();
    const Object& obj = Object::Handle(code_.owner());
    if (obj.IsFunction()) {
      function = table->LookupOrAdd(Function::Cast(obj));
    } else {
      // A stub.
      function = table->AddStub(start(), name);
    }
    SetName(name);
  } else if (kind() == kNativeCode) {
    if (name() == NULL) {
      // Lazily set generated name from the containing shared object, falling
      // back to the raw start address.
      const intptr_t kBuffSize = 512;
      char buff[kBuffSize];
      uword dso_base;
      char* dso_name;
      if (NativeSymbolResolver::LookupSharedObject(start(), &dso_base,
                                                   &dso_name)) {
        uword dso_offset = start() - dso_base;
        Utils::SNPrint(&buff[0], kBuffSize - 1, "[Native] %s+0x%" Px, dso_name,
                       dso_offset);
        NativeSymbolResolver::FreeSymbolName(dso_name);
      } else {
        Utils::SNPrint(&buff[0], kBuffSize - 1, "[Native] %" Px, start());
      }
      SetName(buff);
    }
    function = table->AddNative(start(), name());
  } else if (kind() == kTagCode) {
    if (name() == NULL) {
      // Tag ranges use start() as a tag id: user tag, VM tag, or one of the
      // well-known pseudo tags below.
      if (UserTags::IsUserTag(start())) {
        const char* tag_name = UserTags::TagName(start());
        ASSERT(tag_name != NULL);
        SetName(tag_name);
      } else if (VMTag::IsVMTag(start()) || VMTag::IsRuntimeEntryTag(start()) ||
                 VMTag::IsNativeEntryTag(start())) {
        const char* tag_name = VMTag::TagName(start());
        ASSERT(tag_name != NULL);
        SetName(tag_name);
      } else {
        switch (start()) {
          case VMTag::kRootTagId:
            SetName("Root");
            break;
          case VMTag::kTruncatedTagId:
            SetName("[Truncated]");
            break;
          case VMTag::kNoneCodeTagId:
            SetName("[No Code]");
            break;
          case VMTag::kOptimizedCodeTagId:
            SetName("[Optimized Code]");
            break;
          case VMTag::kUnoptimizedCodeTagId:
            SetName("[Unoptimized Code]");
            break;
          case VMTag::kNativeCodeTagId:
            SetName("[Native Code]");
            break;
          case VMTag::kInlineStartCodeTagId:
            SetName("[Inline Start]");
            break;
          case VMTag::kInlineEndCodeTagId:
            SetName("[Inline End]");
            break;
          default:
            UNIMPLEMENTED();
            break;
        }
      }
    }
    function = table->AddTag(start(), name());
  } else {
    UNREACHABLE();
  }
  ASSERT(function != NULL);

  // Link the function back to this code entry.
  function->AddProfileCode(code_table_index());

  function_ = function;
  return function_;
}
713 | |
// Binary search over the sorted, non-overlapping table for the entry whose
// [start, end) range contains |pc|. Returns its index, or -1 if none does.
intptr_t ProfileCodeTable::FindCodeIndexForPC(uword pc) const {
  intptr_t length = table_.length();
  if (length == 0) {
    return -1;  // Not found.
  }
  intptr_t lo = 0;
  intptr_t hi = length - 1;
  while (lo <= hi) {
    intptr_t mid = (hi - lo + 1) / 2 + lo;  // Avoids (lo + hi) overflow.
    ASSERT(mid >= lo);
    ASSERT(mid <= hi);
    ProfileCode* code = At(mid);
    if (code->Contains(pc)) {
      return mid;
    } else if (pc < code->start()) {
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }
  return -1;
}
736 | |
// Inserts |new_code| into the table, keeping entries sorted and
// non-overlapping: the new range is truncated against both neighbors, and
// adjacent native ranges are merged instead of inserted (native symbol sizes
// are unknown, so ranges grow as more samples arrive). Returns the table
// index of the surviving entry.
intptr_t ProfileCodeTable::InsertCode(ProfileCode* new_code) {
  const intptr_t length = table_.length();
  if (length == 0) {
    table_.Add(new_code);
    return length;
  }

  // Determine the correct place to insert or merge |new_code| into table.
  intptr_t lo = -1;
  intptr_t hi = -1;
  ProfileCode* lo_code = NULL;
  ProfileCode* hi_code = NULL;
  const uword pc = new_code->end() - 1;
  FindNeighbors(pc, &lo, &hi, &lo_code, &hi_code);
  ASSERT((lo_code != NULL) || (hi_code != NULL));

  if (lo != -1) {
    // Has left neighbor.
    new_code->TruncateLower(lo_code->end());
    ASSERT(!new_code->Overlaps(lo_code));
  }
  if (hi != -1) {
    // Has right neighbor.
    new_code->TruncateUpper(hi_code->start());
    ASSERT(!new_code->Overlaps(hi_code));
  }

  if ((lo != -1) && (lo_code->kind() == ProfileCode::kNativeCode) &&
      (new_code->kind() == ProfileCode::kNativeCode) &&
      (lo_code->end() == new_code->start())) {
    // Adjacent left neighbor of the same kind: merge.
    // (dladdr doesn't give us symbol size so processing more samples may see
    // more PCs we didn't previously know belonged to it.)
    lo_code->ExpandUpper(new_code->end());
    return lo;
  }

  if ((hi != -1) && (hi_code->kind() == ProfileCode::kNativeCode) &&
      (new_code->kind() == ProfileCode::kNativeCode) &&
      (new_code->end() == hi_code->start())) {
    // Adjacent right neighbor of the same kind: merge.
    // (dladdr doesn't give us symbol size so processing more samples may see
    // more PCs we didn't previously know belonged to it.)
    hi_code->ExpandLower(new_code->start());
    return hi;
  }

  // No merge possible: insert between (or before/after) the neighbors.
  intptr_t insert;
  if (lo == -1) {
    insert = 0;
  } else if (hi == -1) {
    insert = length;
  } else {
    insert = lo + 1;
  }
  table_.InsertAt(insert, new_code);
  return insert;
}
795 | |
// Locates the table entries bracketing |pc| via binary search. On return,
// |*lo|/|*lo_code| identify the candidate left neighbor and |*hi|/|*hi_code|
// the candidate right neighbor; -1/NULL means |pc| lies below the first
// entry or above the last one. The table must be non-empty.
void ProfileCodeTable::FindNeighbors(uword pc,
                                     intptr_t* lo,
                                     intptr_t* hi,
                                     ProfileCode** lo_code,
                                     ProfileCode** hi_code) const {
  ASSERT(table_.length() >= 1);

  intptr_t length = table_.length();

  if (pc < At(0)->start()) {
    // Lower than any existing code.
    *lo = -1;
    *lo_code = NULL;
    *hi = 0;
    *hi_code = At(*hi);
    return;
  }

  if (pc >= At(length - 1)->end()) {
    // Higher than any existing code.
    *lo = length - 1;
    *lo_code = At(*lo);
    *hi = -1;
    *hi_code = NULL;
    return;
  }

  // |pc| lies within the table's span: narrow [lo, hi] until adjacent.
  *lo = 0;
  *lo_code = At(*lo);
  *hi = length - 1;
  *hi_code = At(*hi);

  while ((*hi - *lo) > 1) {
    intptr_t mid = (*hi - *lo + 1) / 2 + *lo;
    ASSERT(*lo <= mid);
    ASSERT(*hi >= mid);
    ProfileCode* code = At(mid);
    if (code->end() <= pc) {
      // |mid| ends at or before |pc|: it becomes the new left bound.
      *lo = mid;
      *lo_code = code;
    }
    if (pc < code->end()) {
      // |mid| ends after |pc|: it becomes the new right bound.
      *hi = mid;
      *hi_code = code;
    }
  }
}
843 | |
844 | void ProfileCodeTable::VerifyOrder() { |
845 | const intptr_t length = table_.length(); |
846 | if (length == 0) { |
847 | return; |
848 | } |
849 | uword last = table_[0]->end(); |
850 | for (intptr_t i = 1; i < length; i++) { |
851 | ProfileCode* a = table_[i]; |
852 | ASSERT(last <= a->start()); |
853 | last = a->end(); |
854 | } |
855 | } |
856 | |
857 | void ProfileCodeTable::VerifyOverlap() { |
858 | const intptr_t length = table_.length(); |
859 | for (intptr_t i = 0; i < length; i++) { |
860 | ProfileCode* a = table_[i]; |
861 | for (intptr_t j = i + 1; j < length; j++) { |
862 | ProfileCode* b = table_[j]; |
863 | ASSERT(!a->Contains(b->start()) && !a->Contains(b->end() - 1) && |
864 | !b->Contains(a->start()) && !b->Contains(a->end() - 1)); |
865 | } |
866 | } |
867 | } |
868 | |
// Returns the inlining information for |pc| within |code|, consulting the
// cache first and decoding (then caching) on a miss. On return the three
// output pointers describe the inlined call chain at that pc; the inlined
// arrays are NULL when there is no inlining.
void ProfileCodeInlinedFunctionsCache::Get(
    uword pc,
    const Code& code,
    ProcessedSample* sample,
    intptr_t frame_index,
    // Outputs:
    GrowableArray<const Function*>** inlined_functions,
    GrowableArray<TokenPosition>** inlined_token_positions,
    TokenPosition* token_position) {
  const intptr_t offset = OffsetForPC(pc, code, sample, frame_index);
  if (FindInCache(pc, offset, inlined_functions, inlined_token_positions,
                  token_position)) {
    // Found in cache.
    return;
  }
  // Miss: decode and populate a cache entry (also fills the outputs).
  Add(pc, code, sample, frame_index, inlined_functions, inlined_token_positions,
      token_position);
}
887 | |
// Looks up (pc, offset) in the small fixed-size cache, starting from the
// last hit slot for locality. Fills the outputs and returns true on a hit.
bool ProfileCodeInlinedFunctionsCache::FindInCache(
    uword pc,
    intptr_t offset,
    GrowableArray<const Function*>** inlined_functions,
    GrowableArray<TokenPosition>** inlined_token_positions,
    TokenPosition* token_position) {
  // Simple linear scan.
  for (intptr_t i = 0; i < kCacheSize; i++) {
    intptr_t index = (last_hit_ + i) % kCacheSize;
    if ((cache_[index].pc == pc) && (cache_[index].offset == offset)) {
      // Hit.
      if (cache_[index].inlined_functions.length() == 0) {
        // No inlining at this pc.
        *inlined_functions = NULL;
        *inlined_token_positions = NULL;
      } else {
        *inlined_functions = &cache_[index].inlined_functions;
        *inlined_token_positions = &cache_[index].inlined_token_positions;
      }
      *token_position = cache_[index].token_position;
      cache_hit_++;
      last_hit_ = index;
      return true;
    }
  }
  cache_miss_++;
  return false;
}
915 | |
// Add to cache and fill in outputs. Evicts whatever occupies the next free
// slot, decodes the inlining interval for (pc, offset) from |code|, and
// writes the outputs (NULL arrays when there is no inlining at this pc).
void ProfileCodeInlinedFunctionsCache::Add(
    uword pc,
    const Code& code,
    ProcessedSample* sample,
    intptr_t frame_index,
    // Outputs:
    GrowableArray<const Function*>** inlined_functions,
    GrowableArray<TokenPosition>** inlined_token_positions,
    TokenPosition* token_position) {
  const intptr_t offset = OffsetForPC(pc, code, sample, frame_index);
  CacheEntry* cache_entry = &cache_[NextFreeIndex()];
  cache_entry->Reset();
  cache_entry->pc = pc;
  cache_entry->offset = offset;
  code.GetInlinedFunctionsAtInstruction(
      offset, &(cache_entry->inlined_functions),
      &(cache_entry->inlined_token_positions));
  if (cache_entry->inlined_functions.length() == 0) {
    // No inlining at this pc: cache and report an empty result.
    *inlined_functions = NULL;
    *inlined_token_positions = NULL;
    *token_position = cache_entry->token_position = TokenPosition();
    return;
  }

  // Write outputs.
  *inlined_functions = &(cache_entry->inlined_functions);
  *inlined_token_positions = &(cache_entry->inlined_token_positions);
  *token_position = cache_entry->token_position =
      cache_entry->inlined_token_positions[0];
}
947 | |
948 | intptr_t ProfileCodeInlinedFunctionsCache::OffsetForPC(uword pc, |
949 | const Code& code, |
950 | ProcessedSample* sample, |
951 | intptr_t frame_index) { |
952 | intptr_t offset = pc - code.PayloadStart(); |
953 | if (frame_index != 0) { |
954 | // The PC of frames below the top frame is a call's return address, |
955 | // which can belong to a different inlining interval than the call. |
956 | offset--; |
957 | } else if (sample->IsAllocationSample()) { |
958 | // Allocation samples skip the top frame, so the top frame's pc is |
959 | // also a call's return address. |
960 | offset--; |
961 | } else if (!sample->first_frame_executing()) { |
962 | // If the first frame wasn't executing code (i.e. we started to collect |
963 | // the stack trace at an exit frame), the top frame's pc is also a |
964 | // call's return address. |
965 | offset--; |
966 | } |
967 | return offset; |
968 | } |
969 | |
970 | class ProfileBuilder : public ValueObject { |
971 | public: |
972 | enum ProfileInfoKind { |
973 | kNone, |
974 | kOptimized, |
975 | kUnoptimized, |
976 | kNative, |
977 | kInlineStart, |
978 | kInlineFinish, |
979 | kNumProfileInfoKind, |
980 | }; |
981 | |
982 | ProfileBuilder(Thread* thread, |
983 | SampleFilter* filter, |
984 | SampleBuffer* sample_buffer, |
985 | Profile* profile) |
986 | : thread_(thread), |
987 | vm_isolate_(Dart::vm_isolate()), |
988 | filter_(filter), |
989 | sample_buffer_(sample_buffer), |
990 | profile_(profile), |
991 | deoptimized_code_(new DeoptimizedCodeSet(thread->isolate())), |
992 | null_code_(Code::null()), |
993 | null_function_(Function::ZoneHandle()), |
994 | inclusive_tree_(false), |
995 | inlined_functions_cache_(new ProfileCodeInlinedFunctionsCache()), |
996 | samples_(NULL), |
997 | info_kind_(kNone) { |
998 | ASSERT((sample_buffer_ == Profiler::sample_buffer()) || |
999 | (sample_buffer_ == Profiler::allocation_sample_buffer())); |
1000 | ASSERT(profile_ != NULL); |
1001 | } |
1002 | |
1003 | void Build() { |
1004 | ScopeTimer sw("ProfileBuilder::Build" , FLAG_trace_profiler); |
1005 | if (!FilterSamples()) { |
1006 | return; |
1007 | } |
1008 | |
1009 | Setup(); |
1010 | BuildCodeTable(); |
1011 | FinalizeCodeIndexes(); |
1012 | BuildFunctionTable(); |
1013 | PopulateFunctionTicks(); |
1014 | } |
1015 | |
1016 | private: |
1017 | // Returns true if |frame_index| in |sample| is using CPU. |
1018 | static bool IsExecutingFrame(ProcessedSample* sample, intptr_t frame_index) { |
1019 | return (frame_index == 0) && |
1020 | (sample->first_frame_executing() || sample->IsAllocationSample()); |
1021 | } |
1022 | |
1023 | void Setup() { |
1024 | profile_->live_code_ = new ProfileCodeTable(); |
1025 | profile_->dead_code_ = new ProfileCodeTable(); |
1026 | profile_->tag_code_ = new ProfileCodeTable(); |
1027 | profile_->functions_ = new ProfileFunctionTable(); |
1028 | // Register some synthetic tags. |
1029 | RegisterProfileCodeTag(VMTag::kRootTagId); |
1030 | RegisterProfileCodeTag(VMTag::kTruncatedTagId); |
1031 | RegisterProfileCodeTag(VMTag::kNoneCodeTagId); |
1032 | RegisterProfileCodeTag(VMTag::kOptimizedCodeTagId); |
1033 | RegisterProfileCodeTag(VMTag::kUnoptimizedCodeTagId); |
1034 | RegisterProfileCodeTag(VMTag::kNativeCodeTagId); |
1035 | RegisterProfileCodeTag(VMTag::kInlineStartCodeTagId); |
1036 | RegisterProfileCodeTag(VMTag::kInlineEndCodeTagId); |
1037 | } |
1038 | |
1039 | bool FilterSamples() { |
1040 | ScopeTimer sw("ProfileBuilder::FilterSamples" , FLAG_trace_profiler); |
1041 | ASSERT(sample_buffer_ != NULL); |
1042 | samples_ = sample_buffer_->BuildProcessedSampleBuffer(filter_); |
1043 | profile_->samples_ = samples_; |
1044 | profile_->sample_count_ = samples_->length(); |
1045 | return true; |
1046 | } |
1047 | |
1048 | void UpdateMinMaxTimes(int64_t timestamp) { |
1049 | profile_->min_time_ = |
1050 | timestamp < profile_->min_time_ ? timestamp : profile_->min_time_; |
1051 | profile_->max_time_ = |
1052 | timestamp > profile_->max_time_ ? timestamp : profile_->max_time_; |
1053 | } |
1054 | |
1055 | void SanitizeMinMaxTimes() { |
1056 | if ((profile_->min_time_ == kMaxInt64) && (profile_->max_time_ == 0)) { |
1057 | profile_->min_time_ = 0; |
1058 | profile_->max_time_ = 0; |
1059 | } |
1060 | } |
1061 | |
1062 | void BuildCodeTable() { |
1063 | ScopeTimer sw("ProfileBuilder::BuildCodeTable" , FLAG_trace_profiler); |
1064 | |
1065 | Isolate* isolate = thread_->isolate(); |
1066 | ASSERT(isolate != NULL); |
1067 | |
1068 | // Build the live code table eagerly by populating it with code objects |
1069 | // from the processed sample buffer. |
1070 | const CodeLookupTable& code_lookup_table = samples_->code_lookup_table(); |
1071 | for (intptr_t i = 0; i < code_lookup_table.length(); i++) { |
1072 | const CodeDescriptor* descriptor = code_lookup_table.At(i); |
1073 | ASSERT(descriptor != NULL); |
1074 | const AbstractCode code = descriptor->code(); |
1075 | RegisterLiveProfileCode(new ProfileCode( |
1076 | ProfileCode::kDartCode, code.PayloadStart(), |
1077 | code.PayloadStart() + code.Size(), code.compile_timestamp(), code)); |
1078 | } |
1079 | |
1080 | // Iterate over samples. |
1081 | for (intptr_t sample_index = 0; sample_index < samples_->length(); |
1082 | sample_index++) { |
1083 | ProcessedSample* sample = samples_->At(sample_index); |
1084 | const int64_t timestamp = sample->timestamp(); |
1085 | |
1086 | // This is our first pass over the sample buffer, use this as an |
1087 | // opportunity to determine the min and max time ranges of this profile. |
1088 | UpdateMinMaxTimes(timestamp); |
1089 | |
1090 | // Make sure VM tag exists. |
1091 | if (VMTag::IsNativeEntryTag(sample->vm_tag())) { |
1092 | RegisterProfileCodeTag(VMTag::kNativeTagId); |
1093 | } else if (VMTag::IsRuntimeEntryTag(sample->vm_tag())) { |
1094 | RegisterProfileCodeTag(VMTag::kRuntimeTagId); |
1095 | } |
1096 | RegisterProfileCodeTag(sample->vm_tag()); |
1097 | // Make sure user tag exists. |
1098 | RegisterProfileCodeTag(sample->user_tag()); |
1099 | |
1100 | // Make sure that a ProfileCode objects exist for all pcs in the sample |
1101 | // and tick each one. |
1102 | for (intptr_t frame_index = 0; frame_index < sample->length(); |
1103 | frame_index++) { |
1104 | const uword pc = sample->At(frame_index); |
1105 | ASSERT(pc != 0); |
1106 | ProfileCode* code = FindOrRegisterProfileCode(pc, timestamp); |
1107 | ASSERT(code != NULL); |
1108 | code->Tick(pc, IsExecutingFrame(sample, frame_index), sample_index); |
1109 | } |
1110 | |
1111 | TickExitFrame(sample->vm_tag(), sample_index, sample); |
1112 | } |
1113 | SanitizeMinMaxTimes(); |
1114 | } |
1115 | |
1116 | void FinalizeCodeIndexes() { |
1117 | ScopeTimer sw("ProfileBuilder::FinalizeCodeIndexes" , FLAG_trace_profiler); |
1118 | ProfileCodeTable* live_table = profile_->live_code_; |
1119 | ProfileCodeTable* dead_table = profile_->dead_code_; |
1120 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1121 | const intptr_t dead_code_index_offset = live_table->length(); |
1122 | const intptr_t tag_code_index_offset = |
1123 | dead_table->length() + dead_code_index_offset; |
1124 | |
1125 | profile_->dead_code_index_offset_ = dead_code_index_offset; |
1126 | profile_->tag_code_index_offset_ = tag_code_index_offset; |
1127 | |
1128 | for (intptr_t i = 0; i < live_table->length(); i++) { |
1129 | const intptr_t index = i; |
1130 | ProfileCode* code = live_table->At(i); |
1131 | ASSERT(code != NULL); |
1132 | code->set_code_table_index(index); |
1133 | } |
1134 | |
1135 | for (intptr_t i = 0; i < dead_table->length(); i++) { |
1136 | const intptr_t index = dead_code_index_offset + i; |
1137 | ProfileCode* code = dead_table->At(i); |
1138 | ASSERT(code != NULL); |
1139 | code->set_code_table_index(index); |
1140 | } |
1141 | |
1142 | for (intptr_t i = 0; i < tag_table->length(); i++) { |
1143 | const intptr_t index = tag_code_index_offset + i; |
1144 | ProfileCode* code = tag_table->At(i); |
1145 | ASSERT(code != NULL); |
1146 | code->set_code_table_index(index); |
1147 | } |
1148 | } |
1149 | |
1150 | void BuildFunctionTable() { |
1151 | ScopeTimer sw("ProfileBuilder::BuildFunctionTable" , FLAG_trace_profiler); |
1152 | ProfileCodeTable* live_table = profile_->live_code_; |
1153 | ProfileCodeTable* dead_table = profile_->dead_code_; |
1154 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1155 | ProfileFunctionTable* function_table = profile_->functions_; |
1156 | for (intptr_t i = 0; i < live_table->length(); i++) { |
1157 | ProfileCode* code = live_table->At(i); |
1158 | ASSERT(code != NULL); |
1159 | code->SetFunctionAndName(function_table); |
1160 | } |
1161 | |
1162 | for (intptr_t i = 0; i < dead_table->length(); i++) { |
1163 | ProfileCode* code = dead_table->At(i); |
1164 | ASSERT(code != NULL); |
1165 | code->SetFunctionAndName(function_table); |
1166 | } |
1167 | |
1168 | for (intptr_t i = 0; i < tag_table->length(); i++) { |
1169 | ProfileCode* code = tag_table->At(i); |
1170 | ASSERT(code != NULL); |
1171 | code->SetFunctionAndName(function_table); |
1172 | } |
1173 | } |
1174 | |
1175 | void PopulateFunctionTicks() { |
1176 | ScopeTimer sw("ProfileBuilder::PopulateFunctionTicks" , FLAG_trace_profiler); |
1177 | for (intptr_t sample_index = 0; sample_index < samples_->length(); |
1178 | sample_index++) { |
1179 | ProcessedSample* sample = samples_->At(sample_index); |
1180 | |
1181 | // Walk the sampled PCs. |
1182 | for (intptr_t frame_index = 0; frame_index < sample->length(); |
1183 | frame_index++) { |
1184 | ASSERT(sample->At(frame_index) != 0); |
1185 | ProcessFrame(sample_index, sample, frame_index); |
1186 | } |
1187 | if (sample->truncated()) { |
1188 | InclusiveTickTruncatedTag(sample); |
1189 | } |
1190 | } |
1191 | } |
1192 | |
1193 | void ProcessFrame(intptr_t sample_index, |
1194 | ProcessedSample* sample, |
1195 | intptr_t frame_index) { |
1196 | const uword pc = sample->At(frame_index); |
1197 | ProfileCode* profile_code = GetProfileCode(pc, sample->timestamp()); |
1198 | ProfileFunction* function = profile_code->function(); |
1199 | ASSERT(function != NULL); |
1200 | const intptr_t code_index = profile_code->code_table_index(); |
1201 | ASSERT(profile_code != NULL); |
1202 | |
1203 | GrowableArray<const Function*>* inlined_functions = NULL; |
1204 | GrowableArray<TokenPosition>* inlined_token_positions = NULL; |
1205 | TokenPosition token_position = TokenPosition::kNoSource; |
1206 | Code& code = Code::ZoneHandle(); |
1207 | if (profile_code->code().IsCode()) { |
1208 | code ^= profile_code->code().raw(); |
1209 | inlined_functions_cache_->Get(pc, code, sample, frame_index, |
1210 | &inlined_functions, |
1211 | &inlined_token_positions, &token_position); |
1212 | if (FLAG_trace_profiler_verbose && (inlined_functions != NULL)) { |
1213 | for (intptr_t i = 0; i < inlined_functions->length(); i++) { |
1214 | const String& name = |
1215 | String::Handle((*inlined_functions)[i]->QualifiedScrubbedName()); |
1216 | THR_Print("InlinedFunction[%" Pd "] = {%s, %s}\n" , i, |
1217 | name.ToCString(), |
1218 | (*inlined_token_positions)[i].ToCString()); |
1219 | } |
1220 | } |
1221 | } else if (profile_code->code().IsBytecode()) { |
1222 | // No inlining in bytecode. |
1223 | const Bytecode& bc = Bytecode::CheckedHandle(Thread::Current()->zone(), |
1224 | profile_code->code().raw()); |
1225 | token_position = bc.GetTokenIndexOfPC(pc); |
1226 | } |
1227 | |
1228 | if (code.IsNull() || (inlined_functions == NULL) || |
1229 | (inlined_functions->length() <= 1)) { |
1230 | ProcessFunction(sample_index, sample, frame_index, function, |
1231 | token_position, code_index); |
1232 | return; |
1233 | } |
1234 | |
1235 | if (!code.is_optimized()) { |
1236 | OS::PrintErr("Code that should be optimized is not. Please file a bug\n" ); |
1237 | OS::PrintErr("Code object: %s\n" , code.ToCString()); |
1238 | OS::PrintErr("Inlined functions length: %" Pd "\n" , |
1239 | inlined_functions->length()); |
1240 | for (intptr_t i = 0; i < inlined_functions->length(); i++) { |
1241 | OS::PrintErr("IF[%" Pd "] = %s\n" , i, |
1242 | (*inlined_functions)[i]->ToFullyQualifiedCString()); |
1243 | } |
1244 | } |
1245 | |
1246 | ASSERT(code.is_optimized()); |
1247 | |
1248 | // Append the inlined children. |
1249 | for (intptr_t i = inlined_functions->length() - 1; i >= 0; i--) { |
1250 | const Function* inlined_function = (*inlined_functions)[i]; |
1251 | ASSERT(inlined_function != NULL); |
1252 | ASSERT(!inlined_function->IsNull()); |
1253 | TokenPosition inlined_token_position = (*inlined_token_positions)[i]; |
1254 | ProcessInlinedFunction(sample_index, sample, frame_index + i, |
1255 | inlined_function, inlined_token_position, |
1256 | code_index); |
1257 | } |
1258 | } |
1259 | |
1260 | void ProcessInlinedFunction(intptr_t sample_index, |
1261 | ProcessedSample* sample, |
1262 | intptr_t frame_index, |
1263 | const Function* inlined_function, |
1264 | TokenPosition inlined_token_position, |
1265 | intptr_t code_index) { |
1266 | ProfileFunctionTable* function_table = profile_->functions_; |
1267 | ProfileFunction* function = function_table->LookupOrAdd(*inlined_function); |
1268 | ASSERT(function != NULL); |
1269 | ProcessFunction(sample_index, sample, frame_index, function, |
1270 | inlined_token_position, code_index); |
1271 | } |
1272 | |
1273 | bool ShouldTickNode(ProcessedSample* sample, intptr_t frame_index) { |
1274 | if (frame_index != 0) { |
1275 | return true; |
1276 | } |
1277 | // Only tick the first frame's node, if we are executing |
1278 | return IsExecutingFrame(sample, frame_index) || !FLAG_profile_vm; |
1279 | } |
1280 | |
1281 | void ProcessFunction(intptr_t sample_index, |
1282 | ProcessedSample* sample, |
1283 | intptr_t frame_index, |
1284 | ProfileFunction* function, |
1285 | TokenPosition token_position, |
1286 | intptr_t code_index) { |
1287 | if (!function->is_visible()) { |
1288 | return; |
1289 | } |
1290 | if (FLAG_trace_profiler_verbose) { |
1291 | THR_Print("S[%" Pd "]F[%" Pd "] %s %s 0x%" Px "\n" , sample_index, |
1292 | frame_index, function->Name(), token_position.ToCString(), |
1293 | sample->At(frame_index)); |
1294 | } |
1295 | function->Tick(IsExecutingFrame(sample, frame_index), sample_index, |
1296 | token_position); |
1297 | function->AddProfileCode(code_index); |
1298 | } |
1299 | |
1300 | // Tick the truncated tag's inclusive tick count. |
1301 | void InclusiveTickTruncatedTag(ProcessedSample* sample) { |
1302 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1303 | intptr_t index = tag_table->FindCodeIndexForPC(VMTag::kTruncatedTagId); |
1304 | ASSERT(index >= 0); |
1305 | ProfileCode* code = tag_table->At(index); |
1306 | code->IncInclusiveTicks(); |
1307 | ASSERT(code != NULL); |
1308 | ProfileFunction* function = code->function(); |
1309 | function->IncInclusiveTicks(); |
1310 | } |
1311 | |
1312 | uword ProfileInfoKindToVMTag(ProfileInfoKind kind) { |
1313 | switch (kind) { |
1314 | case kNone: |
1315 | return VMTag::kNoneCodeTagId; |
1316 | case kOptimized: |
1317 | return VMTag::kOptimizedCodeTagId; |
1318 | case kUnoptimized: |
1319 | return VMTag::kUnoptimizedCodeTagId; |
1320 | case kNative: |
1321 | return VMTag::kNativeCodeTagId; |
1322 | case kInlineStart: |
1323 | return VMTag::kInlineStartCodeTagId; |
1324 | case kInlineFinish: |
1325 | return VMTag::kInlineEndCodeTagId; |
1326 | default: |
1327 | UNIMPLEMENTED(); |
1328 | return VMTag::kInvalidTagId; |
1329 | } |
1330 | } |
1331 | |
1332 | void TickExitFrame(uword vm_tag, intptr_t serial, ProcessedSample* sample) { |
1333 | if (FLAG_profile_vm) { |
1334 | return; |
1335 | } |
1336 | if (!VMTag::IsExitFrameTag(vm_tag)) { |
1337 | return; |
1338 | } |
1339 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1340 | ProfileCode* code = tag_table->FindCodeForPC(vm_tag); |
1341 | ASSERT(code != NULL); |
1342 | code->Tick(vm_tag, true, serial); |
1343 | } |
1344 | |
1345 | void TickExitFrameFunction(uword vm_tag, intptr_t serial) { |
1346 | if (FLAG_profile_vm) { |
1347 | return; |
1348 | } |
1349 | if (!VMTag::IsExitFrameTag(vm_tag)) { |
1350 | return; |
1351 | } |
1352 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1353 | ProfileCode* code = tag_table->FindCodeForPC(vm_tag); |
1354 | ASSERT(code != NULL); |
1355 | ProfileFunction* function = code->function(); |
1356 | ASSERT(function != NULL); |
1357 | function->Tick(true, serial, TokenPosition::kNoSource); |
1358 | } |
1359 | |
1360 | intptr_t GetProfileCodeTagIndex(uword tag) { |
1361 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1362 | intptr_t index = tag_table->FindCodeIndexForPC(tag); |
1363 | ASSERT(index >= 0); |
1364 | ProfileCode* code = tag_table->At(index); |
1365 | ASSERT(code != NULL); |
1366 | return code->code_table_index(); |
1367 | } |
1368 | |
1369 | intptr_t GetProfileFunctionTagIndex(uword tag) { |
1370 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1371 | intptr_t index = tag_table->FindCodeIndexForPC(tag); |
1372 | ASSERT(index >= 0); |
1373 | ProfileCode* code = tag_table->At(index); |
1374 | ASSERT(code != NULL); |
1375 | ProfileFunction* function = code->function(); |
1376 | ASSERT(function != NULL); |
1377 | return function->table_index(); |
1378 | } |
1379 | |
1380 | intptr_t GetProfileCodeIndex(uword pc, int64_t timestamp) { |
1381 | return GetProfileCode(pc, timestamp)->code_table_index(); |
1382 | } |
1383 | |
1384 | ProfileCode* GetProfileCode(uword pc, int64_t timestamp) { |
1385 | return profile_->GetCodeFromPC(pc, timestamp); |
1386 | } |
1387 | |
1388 | void RegisterProfileCodeTag(uword tag) { |
1389 | if (tag == 0) { |
1390 | // No tag. |
1391 | return; |
1392 | } |
1393 | ProfileCodeTable* tag_table = profile_->tag_code_; |
1394 | intptr_t index = tag_table->FindCodeIndexForPC(tag); |
1395 | if (index >= 0) { |
1396 | // Already created. |
1397 | return; |
1398 | } |
1399 | ProfileCode* code = |
1400 | new ProfileCode(ProfileCode::kTagCode, tag, tag + 1, 0, null_code_); |
1401 | index = tag_table->InsertCode(code); |
1402 | ASSERT(index >= 0); |
1403 | } |
1404 | |
1405 | ProfileCode* CreateProfileCodeReused(uword pc) { |
1406 | ProfileCode* code = |
1407 | new ProfileCode(ProfileCode::kReusedCode, pc, pc + 1, 0, null_code_); |
1408 | return code; |
1409 | } |
1410 | |
1411 | bool IsPCInDartHeap(uword pc) { |
1412 | return vm_isolate_->heap()->CodeContains(pc) || |
1413 | thread_->isolate()->heap()->CodeContains(pc); |
1414 | } |
1415 | |
1416 | ProfileCode* FindOrRegisterNativeProfileCode(uword pc) { |
1417 | // Check if |pc| is already known in the live code table. |
1418 | ProfileCodeTable* live_table = profile_->live_code_; |
1419 | ProfileCode* profile_code = live_table->FindCodeForPC(pc); |
1420 | if (profile_code != NULL) { |
1421 | return profile_code; |
1422 | } |
1423 | |
1424 | // We haven't seen this pc yet. |
1425 | |
1426 | // Check NativeSymbolResolver for pc. |
1427 | uword native_start = 0; |
1428 | char* native_name = |
1429 | NativeSymbolResolver::LookupSymbolName(pc, &native_start); |
1430 | if (native_name == NULL) { |
1431 | // Failed to find a native symbol for pc. |
1432 | native_start = pc; |
1433 | } |
1434 | |
1435 | #if defined(HOST_ARCH_ARM) |
1436 | // The symbol for a Thumb function will be xxx1, but we may have samples |
1437 | // at function entry which will have pc xxx0. |
1438 | native_start &= ~1; |
1439 | #endif |
1440 | |
1441 | if (native_start > pc) { |
1442 | // Bogus lookup result. |
1443 | if (native_name != NULL) { |
1444 | NativeSymbolResolver::FreeSymbolName(native_name); |
1445 | native_name = NULL; |
1446 | } |
1447 | native_start = pc; |
1448 | } |
1449 | if ((pc - native_start) > (32 * KB)) { |
1450 | // Suspect lookup result. More likely dladdr going off the rails than a |
1451 | // jumbo function. |
1452 | if (native_name != NULL) { |
1453 | NativeSymbolResolver::FreeSymbolName(native_name); |
1454 | native_name = NULL; |
1455 | } |
1456 | native_start = pc; |
1457 | } |
1458 | |
1459 | ASSERT(pc >= native_start); |
1460 | ASSERT(pc < (pc + 1)); // Should not overflow. |
1461 | profile_code = new ProfileCode(ProfileCode::kNativeCode, native_start, |
1462 | pc + 1, 0, null_code_); |
1463 | if (native_name != NULL) { |
1464 | profile_code->SetName(native_name); |
1465 | NativeSymbolResolver::FreeSymbolName(native_name); |
1466 | } |
1467 | |
1468 | RegisterLiveProfileCode(profile_code); |
1469 | return profile_code; |
1470 | } |
1471 | |
1472 | void RegisterLiveProfileCode(ProfileCode* code) { |
1473 | ProfileCodeTable* live_table = profile_->live_code_; |
1474 | intptr_t index = live_table->InsertCode(code); |
1475 | ASSERT(index >= 0); |
1476 | } |
1477 | |
1478 | ProfileCode* FindOrRegisterDeadProfileCode(uword pc) { |
1479 | ProfileCodeTable* dead_table = profile_->dead_code_; |
1480 | |
1481 | ProfileCode* code = dead_table->FindCodeForPC(pc); |
1482 | if (code != NULL) { |
1483 | return code; |
1484 | } |
1485 | |
1486 | // Create a new dead code entry. |
1487 | intptr_t index = dead_table->InsertCode(CreateProfileCodeReused(pc)); |
1488 | ASSERT(index >= 0); |
1489 | return dead_table->At(index); |
1490 | } |
1491 | |
1492 | ProfileCode* FindOrRegisterProfileCode(uword pc, int64_t timestamp) { |
1493 | ProfileCodeTable* live_table = profile_->live_code_; |
1494 | ProfileCode* code = live_table->FindCodeForPC(pc); |
1495 | if ((code != NULL) && (code->compile_timestamp() <= timestamp)) { |
1496 | // Code was compiled before sample was taken. |
1497 | return code; |
1498 | } |
1499 | if ((code == NULL) && !IsPCInDartHeap(pc)) { |
1500 | // Not a PC from Dart code. Check with native code. |
1501 | return FindOrRegisterNativeProfileCode(pc); |
1502 | } |
1503 | // We either didn't find the code or it was compiled after the sample. |
1504 | return FindOrRegisterDeadProfileCode(pc); |
1505 | } |
1506 | |
1507 | Thread* thread_; |
1508 | Isolate* vm_isolate_; |
1509 | SampleFilter* filter_; |
1510 | SampleBuffer* sample_buffer_; |
1511 | Profile* profile_; |
1512 | DeoptimizedCodeSet* deoptimized_code_; |
1513 | const AbstractCode null_code_; |
1514 | const Function& null_function_; |
1515 | bool inclusive_tree_; |
1516 | ProfileCodeInlinedFunctionsCache* inlined_functions_cache_; |
1517 | ProcessedSampleBuffer* samples_; |
1518 | ProfileInfoKind info_kind_; |
1519 | }; // ProfileBuilder. |
1520 | |
// Construct an empty profile for |isolate|. All tables are allocated later
// by ProfileBuilder; min_time_/max_time_ start at sentinel values
// (kMaxInt64 / 0) so the first sample always narrows the time range.
Profile::Profile(Isolate* isolate)
    : isolate_(isolate),
      zone_(Thread::Current()->zone()),
      samples_(NULL),
      live_code_(NULL),
      dead_code_(NULL),
      tag_code_(NULL),
      functions_(NULL),
      dead_code_index_offset_(-1),
      tag_code_index_offset_(-1),
      min_time_(kMaxInt64),
      max_time_(0) {
  ASSERT(isolate_ != NULL);
}
1535 | |
1536 | void Profile::Build(Thread* thread, |
1537 | SampleFilter* filter, |
1538 | SampleBuffer* sample_buffer) { |
1539 | // Disable thread interrupts while processing the buffer. |
1540 | DisableThreadInterruptsScope dtis(thread); |
1541 | ThreadInterrupter::SampleBufferReaderScope scope; |
1542 | |
1543 | ProfileBuilder builder(thread, filter, sample_buffer, this); |
1544 | builder.Build(); |
1545 | } |
1546 | |
1547 | ProcessedSample* Profile::SampleAt(intptr_t index) { |
1548 | ASSERT(index >= 0); |
1549 | ASSERT(index < sample_count_); |
1550 | return samples_->At(index); |
1551 | } |
1552 | |
// Number of entries in the function table built by ProfileBuilder.
intptr_t Profile::NumFunctions() const {
  return functions_->length();
}
1556 | |
// Return the function at |index| in the function table.
ProfileFunction* Profile::GetFunction(intptr_t index) {
  ASSERT(functions_ != NULL);
  return functions_->At(index);
}
1561 | |
1562 | ProfileCode* Profile::GetCode(intptr_t index) { |
1563 | ASSERT(live_code_ != NULL); |
1564 | ASSERT(dead_code_ != NULL); |
1565 | ASSERT(tag_code_ != NULL); |
1566 | ASSERT(dead_code_index_offset_ >= 0); |
1567 | ASSERT(tag_code_index_offset_ >= 0); |
1568 | |
1569 | // Code indexes span three arrays. |
1570 | // 0 ... |live_code| |
1571 | // |live_code| ... |dead_code| |
1572 | // |dead_code| ... |tag_code| |
1573 | |
1574 | if (index < dead_code_index_offset_) { |
1575 | return live_code_->At(index); |
1576 | } |
1577 | |
1578 | if (index < tag_code_index_offset_) { |
1579 | index -= dead_code_index_offset_; |
1580 | return dead_code_->At(index); |
1581 | } |
1582 | |
1583 | index -= tag_code_index_offset_; |
1584 | return tag_code_->At(index); |
1585 | } |
1586 | |
1587 | ProfileCode* Profile::GetCodeFromPC(uword pc, int64_t timestamp) { |
1588 | intptr_t index = live_code_->FindCodeIndexForPC(pc); |
1589 | ProfileCode* code = NULL; |
1590 | if (index < 0) { |
1591 | index = dead_code_->FindCodeIndexForPC(pc); |
1592 | ASSERT(index >= 0); |
1593 | code = dead_code_->At(index); |
1594 | } else { |
1595 | code = live_code_->At(index); |
1596 | ASSERT(code != NULL); |
1597 | if (code->compile_timestamp() > timestamp) { |
1598 | // Code is newer than sample. Fall back to dead code table. |
1599 | index = dead_code_->FindCodeIndexForPC(pc); |
1600 | ASSERT(index >= 0); |
1601 | code = dead_code_->At(index); |
1602 | } |
1603 | } |
1604 | |
1605 | ASSERT(code != NULL); |
1606 | ASSERT(code->Contains(pc)); |
1607 | ASSERT(code->compile_timestamp() <= timestamp); |
1608 | return code; |
1609 | } |
1610 | |
1611 | void Profile::(JSONObject* obj) { |
1612 | intptr_t pid = OS::ProcessId(); |
1613 | |
1614 | obj->AddProperty("samplePeriod" , static_cast<intptr_t>(FLAG_profile_period)); |
1615 | obj->AddProperty("maxStackDepth" , |
1616 | static_cast<intptr_t>(FLAG_max_profile_depth)); |
1617 | obj->AddProperty("sampleCount" , sample_count()); |
1618 | obj->AddProperty("timespan" , MicrosecondsToSeconds(GetTimeSpan())); |
1619 | obj->AddPropertyTimeMicros("timeOriginMicros" , min_time()); |
1620 | obj->AddPropertyTimeMicros("timeExtentMicros" , GetTimeSpan()); |
1621 | obj->AddProperty64("pid" , pid); |
1622 | ProfilerCounters counters = Profiler::counters(); |
1623 | { |
1624 | JSONObject counts(obj, "_counters" ); |
1625 | counts.AddProperty64("bail_out_unknown_task" , |
1626 | counters.bail_out_unknown_task); |
1627 | counts.AddProperty64("bail_out_jump_to_exception_handler" , |
1628 | counters.bail_out_jump_to_exception_handler); |
1629 | counts.AddProperty64("bail_out_check_isolate" , |
1630 | counters.bail_out_check_isolate); |
1631 | counts.AddProperty64("single_frame_sample_deoptimizing" , |
1632 | counters.single_frame_sample_deoptimizing); |
1633 | counts.AddProperty64("single_frame_sample_register_check" , |
1634 | counters.single_frame_sample_register_check); |
1635 | counts.AddProperty64( |
1636 | "single_frame_sample_get_and_validate_stack_bounds" , |
1637 | counters.single_frame_sample_get_and_validate_stack_bounds); |
1638 | counts.AddProperty64("stack_walker_native" , counters.stack_walker_native); |
1639 | counts.AddProperty64("stack_walker_dart_exit" , |
1640 | counters.stack_walker_dart_exit); |
1641 | counts.AddProperty64("stack_walker_dart" , counters.stack_walker_dart); |
1642 | counts.AddProperty64("stack_walker_none" , counters.stack_walker_none); |
1643 | } |
1644 | } |
1645 | |
// Append the function-table index(es) for one sampled frame to |stack|,
// expanding optimized Dart code into its inlined functions (innermost
// last, since the loop below walks the inlining chain in reverse).
// |cache_| memoizes per-pc inlining lookups across frames.
void Profile::ProcessSampleFrameJSON(JSONArray* stack,
                                     ProfileCodeInlinedFunctionsCache* cache_,
                                     ProcessedSample* sample,
                                     intptr_t frame_index) {
  const uword pc = sample->At(frame_index);
  ProfileCode* profile_code = GetCodeFromPC(pc, sample->timestamp());
  ASSERT(profile_code != NULL);
  ProfileFunction* function = profile_code->function();
  ASSERT(function != NULL);

  // Don't show stubs in stack traces.
  if (!function->is_visible() ||
      (function->kind() == ProfileFunction::kStubFunction)) {
    return;
  }

  GrowableArray<const Function*>* inlined_functions = NULL;
  GrowableArray<TokenPosition>* inlined_token_positions = NULL;
  TokenPosition token_position = TokenPosition::kNoSource;
  Code& code = Code::ZoneHandle();

  if (profile_code->code().IsCode()) {
    // Dart code: consult the cache for functions inlined at this pc.
    code ^= profile_code->code().raw();
    cache_->Get(pc, code, sample, frame_index, &inlined_functions,
                &inlined_token_positions, &token_position);
    if (FLAG_trace_profiler_verbose && (inlined_functions != NULL)) {
      for (intptr_t i = 0; i < inlined_functions->length(); i++) {
        const String& name =
            String::Handle((*inlined_functions)[i]->QualifiedScrubbedName());
        THR_Print("InlinedFunction[%" Pd "] = {%s, %s}\n" , i, name.ToCString(),
                  (*inlined_token_positions)[i].ToCString());
      }
    }
  } else if (profile_code->code().IsBytecode()) {
    // No inlining in bytecode.
    const Bytecode& bc = Bytecode::CheckedHandle(Thread::Current()->zone(),
                                                 profile_code->code().raw());
    token_position = bc.GetTokenIndexOfPC(pc);
  }

  // No inlining information (or only the function itself): emit the
  // frame's own function and stop.
  if (code.IsNull() || (inlined_functions == NULL) ||
      (inlined_functions->length() <= 1)) {
    PrintFunctionFrameIndexJSON(stack, function);
    return;
  }

  // Inlining data on unoptimized code is unexpected; report loudly but
  // continue (the ASSERT below fires in debug builds).
  if (!code.is_optimized()) {
    OS::PrintErr("Code that should be optimized is not. Please file a bug\n" );
    OS::PrintErr("Code object: %s\n" , code.ToCString());
    OS::PrintErr("Inlined functions length: %" Pd "\n" ,
                 inlined_functions->length());
    for (intptr_t i = 0; i < inlined_functions->length(); i++) {
      OS::PrintErr("IF[%" Pd "] = %s\n" , i,
                   (*inlined_functions)[i]->ToFullyQualifiedCString());
    }
  }

  ASSERT(code.is_optimized());

  for (intptr_t i = inlined_functions->length() - 1; i >= 0; i--) {
    const Function* inlined_function = (*inlined_functions)[i];
    ASSERT(inlined_function != NULL);
    ASSERT(!inlined_function->IsNull());
    ProcessInlinedFunctionFrameJSON(stack, inlined_function);
  }
}
1712 | |
1713 | void Profile::ProcessInlinedFunctionFrameJSON( |
1714 | JSONArray* stack, |
1715 | const Function* inlined_function) { |
1716 | ProfileFunction* function = functions_->LookupOrAdd(*inlined_function); |
1717 | ASSERT(function != NULL); |
1718 | PrintFunctionFrameIndexJSON(stack, function); |
1719 | } |
1720 | |
// Frames are encoded in the samples JSON as indexes into the profile's
// function table.
void Profile::PrintFunctionFrameIndexJSON(JSONArray* stack,
                                          ProfileFunction* function) {
  stack->AddValue64(function->table_index());
}
1725 | |
1726 | void Profile::PrintCodeFrameIndexJSON(JSONArray* stack, |
1727 | ProcessedSample* sample, |
1728 | intptr_t frame_index) { |
1729 | ProfileCode* code = |
1730 | GetCodeFromPC(sample->At(frame_index), sample->timestamp()); |
1731 | const AbstractCode codeObj = code->code(); |
1732 | |
1733 | // Ignore stub code objects. |
1734 | if (codeObj.IsStubCode() || codeObj.IsAllocationStubCode() || |
1735 | codeObj.IsTypeTestStubCode()) { |
1736 | return; |
1737 | } |
1738 | stack->AddValue64(code->code_table_index()); |
1739 | } |
1740 | |
// Emit the "samples" array onto |obj|: one JSON object per processed
// sample with its metadata, a function-index "stack", and (optionally,
// when |code_samples| is set) a code-index "_codeStack".
void Profile::PrintSamplesJSON(JSONObject* obj, bool code_samples) {
  JSONArray samples(obj, "samples" );
  // Shared across samples so repeated pcs reuse their inlining lookups.
  auto* cache = new ProfileCodeInlinedFunctionsCache();
  for (intptr_t sample_index = 0; sample_index < samples_->length();
       sample_index++) {
    JSONObject sample_obj(&samples);
    ProcessedSample* sample = samples_->At(sample_index);
    sample_obj.AddProperty64("tid" , OSThread::ThreadIdToIntPtr(sample->tid()));
    sample_obj.AddPropertyTimeMicros("timestamp" , sample->timestamp());
    sample_obj.AddProperty("vmTag" , VMTag::TagName(sample->vm_tag()));
    if (VMTag::IsNativeEntryTag(sample->vm_tag())) {
      sample_obj.AddProperty("nativeEntryTag" , true);
    }
    if (VMTag::IsRuntimeEntryTag(sample->vm_tag())) {
      sample_obj.AddProperty("runtimeEntryTag" , true);
    }
    if (UserTags::IsUserTag(sample->user_tag())) {
      sample_obj.AddProperty("userTag" , UserTags::TagName(sample->user_tag()));
    }
    if (sample->truncated()) {
      sample_obj.AddProperty("truncated" , true);
    }
    if (sample->is_native_allocation_sample()) {
      sample_obj.AddProperty64("_nativeAllocationSizeBytes" ,
                               sample->native_allocation_size_bytes());
    }
    {
      JSONArray stack(&sample_obj, "stack" );
      // Walk the sampled PCs.
      for (intptr_t frame_index = 0; frame_index < sample->length();
           frame_index++) {
        ASSERT(sample->At(frame_index) != 0);
        ProcessSampleFrameJSON(&stack, cache, sample, frame_index);
      }
    }
    if (code_samples) {
      JSONArray stack(&sample_obj, "_codeStack" );
      for (intptr_t frame_index = 0; frame_index < sample->length();
           frame_index++) {
        ASSERT(sample->At(frame_index) != 0);
        PrintCodeFrameIndexJSON(&stack, sample, frame_index);
      }
    }
  }
}
1786 | |
1787 | ProfileFunction* Profile::FindFunction(const Function& function) { |
1788 | return (functions_ != NULL) ? functions_->Lookup(function) : NULL; |
1789 | } |
1790 | |
1791 | void Profile::PrintProfileJSON(JSONStream* stream, bool include_code_samples) { |
1792 | ScopeTimer sw("Profile::PrintProfileJSON" , FLAG_trace_profiler); |
1793 | JSONObject obj(stream); |
1794 | obj.AddProperty("type" , "CpuSamples" ); |
1795 | PrintHeaderJSON(&obj); |
1796 | if (include_code_samples) { |
1797 | JSONArray codes(&obj, "_codes" ); |
1798 | for (intptr_t i = 0; i < live_code_->length(); i++) { |
1799 | ProfileCode* code = live_code_->At(i); |
1800 | ASSERT(code != NULL); |
1801 | code->PrintToJSONArray(&codes); |
1802 | } |
1803 | for (intptr_t i = 0; i < dead_code_->length(); i++) { |
1804 | ProfileCode* code = dead_code_->At(i); |
1805 | ASSERT(code != NULL); |
1806 | code->PrintToJSONArray(&codes); |
1807 | } |
1808 | for (intptr_t i = 0; i < tag_code_->length(); i++) { |
1809 | ProfileCode* code = tag_code_->At(i); |
1810 | ASSERT(code != NULL); |
1811 | code->PrintToJSONArray(&codes); |
1812 | } |
1813 | } |
1814 | |
1815 | { |
1816 | JSONArray functions(&obj, "functions" ); |
1817 | for (intptr_t i = 0; i < functions_->length(); i++) { |
1818 | ProfileFunction* function = functions_->At(i); |
1819 | ASSERT(function != NULL); |
1820 | function->PrintToJSONArray(&functions); |
1821 | } |
1822 | } |
1823 | PrintSamplesJSON(&obj, include_code_samples); |
1824 | } |
1825 | |
1826 | void ProfilerService::PrintJSONImpl(Thread* thread, |
1827 | JSONStream* stream, |
1828 | SampleFilter* filter, |
1829 | SampleBuffer* sample_buffer, |
1830 | bool include_code_samples) { |
1831 | Isolate* isolate = thread->isolate(); |
1832 | |
1833 | // We should bail out in service.cc if the profiler is disabled. |
1834 | ASSERT(sample_buffer != NULL); |
1835 | |
1836 | StackZone zone(thread); |
1837 | HANDLESCOPE(thread); |
1838 | Profile profile(isolate); |
1839 | profile.Build(thread, filter, sample_buffer); |
1840 | profile.PrintProfileJSON(stream, include_code_samples); |
1841 | } |
1842 | |
1843 | class NoAllocationSampleFilter : public SampleFilter { |
1844 | public: |
1845 | NoAllocationSampleFilter(Dart_Port port, |
1846 | intptr_t thread_task_mask, |
1847 | int64_t time_origin_micros, |
1848 | int64_t time_extent_micros) |
1849 | : SampleFilter(port, |
1850 | thread_task_mask, |
1851 | time_origin_micros, |
1852 | time_extent_micros) {} |
1853 | |
1854 | bool FilterSample(Sample* sample) { return !sample->is_allocation_sample(); } |
1855 | }; |
1856 | |
1857 | void ProfilerService::PrintJSON(JSONStream* stream, |
1858 | int64_t time_origin_micros, |
1859 | int64_t time_extent_micros, |
1860 | bool include_code_samples) { |
1861 | Thread* thread = Thread::Current(); |
1862 | Isolate* isolate = thread->isolate(); |
1863 | NoAllocationSampleFilter filter(isolate->main_port(), Thread::kMutatorTask, |
1864 | time_origin_micros, time_extent_micros); |
1865 | PrintJSONImpl(thread, stream, &filter, Profiler::sample_buffer(), |
1866 | include_code_samples); |
1867 | } |
1868 | |
1869 | class ClassAllocationSampleFilter : public SampleFilter { |
1870 | public: |
1871 | ClassAllocationSampleFilter(Dart_Port port, |
1872 | const Class& cls, |
1873 | intptr_t thread_task_mask, |
1874 | int64_t time_origin_micros, |
1875 | int64_t time_extent_micros) |
1876 | : SampleFilter(port, |
1877 | thread_task_mask, |
1878 | time_origin_micros, |
1879 | time_extent_micros), |
1880 | cls_(Class::Handle(cls.raw())) { |
1881 | ASSERT(!cls_.IsNull()); |
1882 | } |
1883 | |
1884 | bool FilterSample(Sample* sample) { |
1885 | return sample->is_allocation_sample() && |
1886 | (sample->allocation_cid() == cls_.id()); |
1887 | } |
1888 | |
1889 | private: |
1890 | const Class& cls_; |
1891 | }; |
1892 | |
1893 | void ProfilerService::PrintAllocationJSON(JSONStream* stream, |
1894 | const Class& cls, |
1895 | int64_t time_origin_micros, |
1896 | int64_t time_extent_micros) { |
1897 | Thread* thread = Thread::Current(); |
1898 | Isolate* isolate = thread->isolate(); |
1899 | ClassAllocationSampleFilter filter(isolate->main_port(), cls, |
1900 | Thread::kMutatorTask, time_origin_micros, |
1901 | time_extent_micros); |
1902 | PrintJSONImpl(thread, stream, &filter, Profiler::sample_buffer(), true); |
1903 | } |
1904 | |
1905 | void ProfilerService::PrintNativeAllocationJSON(JSONStream* stream, |
1906 | int64_t time_origin_micros, |
1907 | int64_t time_extent_micros, |
1908 | bool include_code_samples) { |
1909 | Thread* thread = Thread::Current(); |
1910 | NativeAllocationSampleFilter filter(time_origin_micros, time_extent_micros); |
1911 | PrintJSONImpl(thread, stream, &filter, Profiler::allocation_sample_buffer(), |
1912 | include_code_samples); |
1913 | } |
1914 | |
1915 | void ProfilerService::ClearSamples() { |
1916 | SampleBuffer* sample_buffer = Profiler::sample_buffer(); |
1917 | if (sample_buffer == NULL) { |
1918 | return; |
1919 | } |
1920 | |
1921 | Thread* thread = Thread::Current(); |
1922 | Isolate* isolate = thread->isolate(); |
1923 | |
1924 | // Disable thread interrupts while processing the buffer. |
1925 | DisableThreadInterruptsScope dtis(thread); |
1926 | ThreadInterrupter::SampleBufferReaderScope scope; |
1927 | |
1928 | ClearProfileVisitor clear_profile(isolate); |
1929 | sample_buffer->VisitSamples(&clear_profile); |
1930 | } |
1931 | |
1932 | #endif // !PRODUCT |
1933 | |
1934 | } // namespace dart |
1935 | |