1 | // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/debugger.h" |
6 | |
7 | #include "include/dart_api.h" |
8 | |
9 | #include "vm/code_descriptors.h" |
10 | #include "vm/code_patcher.h" |
11 | #include "vm/compiler/api/deopt_id.h" |
12 | #include "vm/compiler/assembler/disassembler.h" |
13 | #include "vm/compiler/assembler/disassembler_kbc.h" |
14 | #include "vm/compiler/jit/compiler.h" |
15 | #include "vm/dart_entry.h" |
16 | #include "vm/flags.h" |
17 | #include "vm/globals.h" |
18 | #include "vm/interpreter.h" |
19 | #include "vm/isolate_reload.h" |
20 | #include "vm/json_stream.h" |
21 | #include "vm/kernel.h" |
22 | #include "vm/longjump.h" |
23 | #include "vm/message_handler.h" |
24 | #include "vm/object.h" |
25 | #include "vm/object_store.h" |
26 | #include "vm/os.h" |
27 | #include "vm/parser.h" |
28 | #include "vm/port.h" |
29 | #include "vm/runtime_entry.h" |
30 | #include "vm/service.h" |
31 | #include "vm/service_event.h" |
32 | #include "vm/service_isolate.h" |
33 | #include "vm/stack_frame.h" |
34 | #include "vm/stack_trace.h" |
35 | #include "vm/stub_code.h" |
36 | #include "vm/symbols.h" |
37 | #include "vm/thread_interrupter.h" |
38 | #include "vm/timeline.h" |
39 | #include "vm/token_position.h" |
40 | #include "vm/visitor.h" |
41 | |
42 | #if !defined(DART_PRECOMPILED_RUNTIME) |
43 | #include "vm/compiler/frontend/bytecode_reader.h" |
44 | #include "vm/deopt_instructions.h" |
45 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
46 | |
47 | namespace dart { |
48 | |
// Flags owned by the debugger.
DEFINE_FLAG(bool,
            trace_debugger_stacktrace,
            false,
            "Trace debugger stacktrace collection" );
DEFINE_FLAG(bool, trace_rewind, false, "Trace frame rewind" );
DEFINE_FLAG(bool, verbose_debug, false, "Verbose debugger messages" );

// Flags defined elsewhere in the VM that the debugger consults.
DECLARE_FLAG(bool, enable_interpreter);
DECLARE_FLAG(bool, trace_deoptimization);
DECLARE_FLAG(bool, warn_on_pause_with_no_debugger);
59 | |
60 | #ifndef PRODUCT |
61 | |
62 | // Create an unresolved breakpoint in given token range and script. |
// Create an unresolved breakpoint in given token range and script.
// The location stays unresolved (function_ is null) until SetResolved()
// pins it to a specific function and token position.
BreakpointLocation::BreakpointLocation(const Script& script,
                                       TokenPosition token_pos,
                                       TokenPosition end_token_pos,
                                       intptr_t requested_line_number,
                                       intptr_t requested_column_number)
    : script_(script.raw()),
      url_(script.url()),
      token_pos_(token_pos),
      end_token_pos_(end_token_pos),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      function_(Function::null()),  // Filled in by SetResolved().
      bytecode_token_pos_(TokenPosition::kNoSource),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(!script.IsNull());
  ASSERT(token_pos_.IsReal());
}
82 | |
83 | // Create a latent breakpoint at given url and line number. |
// Create a latent breakpoint at given url and line number.
// A latent location has no script yet (the library may not be loaded);
// it is identified only by url/line/column until a matching script appears.
BreakpointLocation::BreakpointLocation(const String& url,
                                       intptr_t requested_line_number,
                                       intptr_t requested_column_number)
    : script_(Script::null()),
      url_(url.raw()),
      token_pos_(TokenPosition::kNoSource),
      end_token_pos_(TokenPosition::kNoSource),
      next_(NULL),
      conditions_(NULL),
      requested_line_number_(requested_line_number),
      requested_column_number_(requested_column_number),
      function_(Function::null()),
      bytecode_token_pos_(TokenPosition::kNoSource),
      code_token_pos_(TokenPosition::kNoSource) {
  ASSERT(requested_line_number_ >= 0);
}
100 | |
101 | BreakpointLocation::~BreakpointLocation() { |
102 | Breakpoint* bpt = breakpoints(); |
103 | while (bpt != NULL) { |
104 | Breakpoint* temp = bpt; |
105 | bpt = bpt->next(); |
106 | delete temp; |
107 | } |
108 | } |
109 | |
// Returns true if at least one breakpoint is registered at this location.
bool BreakpointLocation::AnyEnabled() const {
  return breakpoints() != NULL;
}
113 | |
// Resolves this location to the exact |token_pos| inside |func|, recording
// whether resolution happened against bytecode or compiled code.
void BreakpointLocation::SetResolved(bool in_bytecode,
                                     const Function& func,
                                     TokenPosition token_pos) {
  ASSERT(!IsLatent());
  ASSERT(func.script() == script_);
  ASSERT((func.token_pos() <= token_pos) &&
         (token_pos <= func.end_token_pos()));
  ASSERT(func.is_debuggable());
  function_ = func.raw();
  token_pos_ = token_pos;
  end_token_pos_ = token_pos;  // A resolved location is a single position.
  if (in_bytecode) {
    bytecode_token_pos_ = token_pos;
  } else {
    code_token_pos_ = token_pos;
  }
}
131 | |
132 | // Returned resolved pos is either in code or in bytecode. |
133 | void BreakpointLocation::GetCodeLocation(Script* script, |
134 | TokenPosition* pos) const { |
135 | if (IsLatent()) { |
136 | *script = Script::null(); |
137 | *pos = TokenPosition::kNoSource; |
138 | } else { |
139 | *script = this->script(); |
140 | *pos = token_pos_; |
141 | } |
142 | } |
143 | |
// Re-targets this breakpoint to a different location (used when a latent
// location becomes concrete).
void Breakpoint::set_bpt_location(BreakpointLocation* new_bpt_location) {
  // Only latent breakpoints can be moved.
  ASSERT((new_bpt_location == NULL) || bpt_location_->IsLatent());
  bpt_location_ = new_bpt_location;
}
149 | |
// GC support: reports the raw object pointer held by this breakpoint
// (the per-closure target) to the visitor.
void Breakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&closure_));
}
153 | |
154 | void BreakpointLocation::VisitObjectPointers(ObjectPointerVisitor* visitor) { |
155 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&script_)); |
156 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&url_)); |
157 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&function_)); |
158 | |
159 | Breakpoint* bpt = conditions_; |
160 | while (bpt != NULL) { |
161 | bpt->VisitObjectPointers(visitor); |
162 | bpt = bpt->next(); |
163 | } |
164 | } |
165 | |
166 | void Breakpoint::PrintJSON(JSONStream* stream) { |
167 | JSONObject jsobj(stream); |
168 | jsobj.AddProperty("type" , "Breakpoint" ); |
169 | |
170 | jsobj.AddFixedServiceId("breakpoints/%" Pd "" , id()); |
171 | jsobj.AddProperty("breakpointNumber" , id()); |
172 | if (is_synthetic_async()) { |
173 | jsobj.AddProperty("isSyntheticAsyncContinuation" , is_synthetic_async()); |
174 | } |
175 | jsobj.AddProperty("resolved" , bpt_location_->IsResolved()); |
176 | if (bpt_location_->IsResolved()) { |
177 | jsobj.AddLocation(bpt_location_); |
178 | } else { |
179 | jsobj.AddUnresolvedLocation(bpt_location_); |
180 | } |
181 | } |
182 | |
// GC support: reports the raw object pointers held by this code breakpoint
// (installed code, bytecode, and the original patched-over value).
void CodeBreakpoint::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&code_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&bytecode_));
  visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&saved_value_));
}
188 | |
// Activation frame backed by compiled code. |deopt_frame| and
// |deopt_frame_offset| describe the materialized frame when the original
// frame was optimized and has been deoptimized for inspection.
ActivationFrame::ActivationFrame(uword pc,
                                 uword fp,
                                 uword sp,
                                 const Code& code,
                                 const Array& deopt_frame,
                                 intptr_t deopt_frame_offset,
                                 ActivationFrame::Kind kind)
    : pc_(pc),
      fp_(fp),
      sp_(sp),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle(code.raw())),
      bytecode_(Bytecode::ZoneHandle()),  // Not a bytecode frame.
      function_(Function::ZoneHandle(code.function())),
      live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
      token_pos_initialized_(false),  // Computed lazily by TokenPos().
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),    // Computed lazily by LineNumber().
      column_number_(-1),  // Computed lazily by ColumnNumber().
      context_level_(-1),  // Computed lazily by ContextLevel().
      deopt_frame_(Array::ZoneHandle(deopt_frame.raw())),
      deopt_frame_offset_(deopt_frame_offset),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  ASSERT(!function_.IsNull());
}
220 | |
#if !defined(DART_PRECOMPILED_RUNTIME)
// Activation frame backed by interpreted bytecode (JIT-with-interpreter
// configurations only).
ActivationFrame::ActivationFrame(uword pc,
                                 uword fp,
                                 uword sp,
                                 const Bytecode& bytecode,
                                 ActivationFrame::Kind kind)
    : pc_(pc),
      fp_(fp),
      sp_(sp),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),  // Not a compiled-code frame.
      bytecode_(Bytecode::ZoneHandle(bytecode.raw())),
      function_(Function::ZoneHandle(bytecode.function())),
      live_frame_((kind == kRegular) || (kind == kAsyncActivation)),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  // The frame of a bytecode stub has a null function. It may be encountered
  // when single stepping.
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
253 | |
// Placeholder activation frame of the given kind with no pc/fp/sp and no
// code or function attached (e.g. async-suspension marker frames).
ActivationFrame::ActivationFrame(Kind kind)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      bytecode_(Bytecode::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(kind == kRegular),
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kind),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {}
277 | |
// Non-live activation frame reconstructed from an asynchronous activation
// closure (used to synthesize awaiter frames in async stack traces).
ActivationFrame::ActivationFrame(const Closure& async_activation)
    : pc_(0),
      fp_(0),
      sp_(0),
      ctx_(Context::ZoneHandle()),
      code_(Code::ZoneHandle()),
      bytecode_(Bytecode::ZoneHandle()),
      function_(Function::ZoneHandle()),
      live_frame_(false),  // There is no frame on the stack for this.
      token_pos_initialized_(false),
      token_pos_(TokenPosition::kNoSource),
      try_index_(-1),
      deopt_id_(DeoptId::kNone),
      line_number_(-1),
      column_number_(-1),
      context_level_(-1),
      deopt_frame_(Array::ZoneHandle()),
      deopt_frame_offset_(0),
      kind_(kAsyncActivation),
      vars_initialized_(false),
      var_descriptors_(LocalVarDescriptors::ZoneHandle()),
      desc_indices_(8),
      pc_desc_(PcDescriptors::ZoneHandle()) {
  // Extract the function and the code from the asynchronous activation.
  function_ = async_activation.function();
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Prefer bytecode when the function only exists in bytecode form.
  if (!function_.HasCode() && function_.HasBytecode()) {
    bytecode_ = function_.bytecode();
  }
#endif
  if (bytecode_.IsNull()) {
    // Force-optimize functions should not be debuggable.
    ASSERT(!function_.ForceOptimize());
    function_.EnsureHasCompiledUnoptimizedCode();
    code_ = function_.unoptimized_code();
  }
  ctx_ = async_activation.context();
  ASSERT(fp_ == 0);
  ASSERT(!ctx_.IsNull());
}
318 | |
319 | bool Debugger::NeedsIsolateEvents() { |
320 | return !Isolate::IsVMInternalIsolate(isolate_) && |
321 | Service::isolate_stream.enabled(); |
322 | } |
323 | |
324 | bool Debugger::NeedsDebugEvents() { |
325 | ASSERT(!Isolate::IsVMInternalIsolate(isolate_)); |
326 | return FLAG_warn_on_pause_with_no_debugger || Service::debug_stream.enabled(); |
327 | } |
328 | |
// Forwards a non-pause service event to the VM service.
void Debugger::InvokeEventHandler(ServiceEvent* event) {
  ASSERT(!event->IsPause());  // For pause events, call Pause instead.
  Service::HandleEvent(event);
}
333 | |
// Pauses the isolate in response to an interrupt request.
ErrorPtr Debugger::PauseInterrupted() {
  return PauseRequest(ServiceEvent::kPauseInterrupted);
}
337 | |
// Pauses the isolate after a service request asked for a pause.
ErrorPtr Debugger::PausePostRequest() {
  return PauseRequest(ServiceEvent::kPausePostRequest);
}
341 | |
// Pauses the isolate with a service event of the given |kind|, runs the
// debug message loop, then resumes. Returns any sticky error (e.g. an
// unwind error raised while paused) so the caller can propagate it.
ErrorPtr Debugger::PauseRequest(ServiceEvent::EventKind kind) {
  if (ignore_breakpoints_ || IsPaused()) {
    // We don't let the isolate get interrupted if we are already
    // paused or ignoring breakpoints.
    return Thread::Current()->StealStickyError();
  }
  ServiceEvent event(isolate_, kind);
  DebuggerStackTrace* trace = CollectStackTrace();
  if (trace->Length() > 0) {
    event.set_top_frame(trace->FrameAt(0));
  }
  // Stack traces must be cached before Pause() so service requests issued
  // while paused can inspect them; cleared again once we resume.
  CacheStackTraces(trace, CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  resume_action_ = kContinue;
  Pause(&event);
  HandleSteppingRequest(trace);
  ClearCachedStackTraces();

  // If any error occurred while in the debug message loop, return it here.
  NoSafepointScope no_safepoint;
  ErrorPtr error = Thread::Current()->StealStickyError();
  ASSERT((error == Error::null()) || error->IsUnwindError());
  return error;
}
366 | |
367 | void Debugger::SendBreakpointEvent(ServiceEvent::EventKind kind, |
368 | Breakpoint* bpt) { |
369 | if (NeedsDebugEvents()) { |
370 | // TODO(turnidge): Currently we send single-shot breakpoint events |
371 | // to the vm service. Do we want to change this? |
372 | ServiceEvent event(isolate_, kind); |
373 | event.set_breakpoint(bpt); |
374 | InvokeEventHandler(&event); |
375 | } |
376 | } |
377 | |
// Prepends |bpt| to this location's breakpoint list, then asks the debugger
// to (re)install code breakpoints and to announce the addition.
void BreakpointLocation::AddBreakpoint(Breakpoint* bpt, Debugger* dbg) {
  bpt->set_next(breakpoints());
  set_breakpoints(bpt);

  dbg->SyncBreakpointLocation(this);
  dbg->SendBreakpointEvent(ServiceEvent::kBreakpointAdded, bpt);
}
385 | |
386 | Breakpoint* BreakpointLocation::AddRepeated(Debugger* dbg) { |
387 | Breakpoint* bpt = breakpoints(); |
388 | while (bpt != NULL) { |
389 | if (bpt->IsRepeated()) break; |
390 | bpt = bpt->next(); |
391 | } |
392 | if (bpt == NULL) { |
393 | bpt = new Breakpoint(dbg->nextId(), this); |
394 | bpt->SetIsRepeated(); |
395 | AddBreakpoint(bpt, dbg); |
396 | } |
397 | return bpt; |
398 | } |
399 | |
400 | Breakpoint* BreakpointLocation::AddSingleShot(Debugger* dbg) { |
401 | Breakpoint* bpt = breakpoints(); |
402 | while (bpt != NULL) { |
403 | if (bpt->IsSingleShot()) break; |
404 | bpt = bpt->next(); |
405 | } |
406 | if (bpt == NULL) { |
407 | bpt = new Breakpoint(dbg->nextId(), this); |
408 | bpt->SetIsSingleShot(); |
409 | AddBreakpoint(bpt, dbg); |
410 | } |
411 | return bpt; |
412 | } |
413 | |
414 | Breakpoint* BreakpointLocation::AddPerClosure(Debugger* dbg, |
415 | const Instance& closure, |
416 | bool for_over_await) { |
417 | Breakpoint* bpt = NULL; |
418 | // Do not reuse existing breakpoints for stepping over await clauses. |
419 | // A second async step-over command will set a new breakpoint before |
420 | // the existing one gets deleted when first async step-over resumes. |
421 | if (!for_over_await) { |
422 | bpt = breakpoints(); |
423 | while (bpt != NULL) { |
424 | if (bpt->IsPerClosure() && (bpt->closure() == closure.raw())) break; |
425 | bpt = bpt->next(); |
426 | } |
427 | } |
428 | if (bpt == NULL) { |
429 | bpt = new Breakpoint(dbg->nextId(), this); |
430 | bpt->SetIsPerClosure(closure); |
431 | bpt->set_is_synthetic_async(for_over_await); |
432 | AddBreakpoint(bpt, dbg); |
433 | } |
434 | return bpt; |
435 | } |
436 | |
437 | const char* Debugger::QualifiedFunctionName(const Function& func) { |
438 | const String& func_name = String::Handle(func.name()); |
439 | Class& func_class = Class::Handle(func.Owner()); |
440 | String& class_name = String::Handle(func_class.Name()); |
441 | |
442 | return OS::SCreate(Thread::Current()->zone(), "%s%s%s" , |
443 | func_class.IsTopLevel() ? "" : class_name.ToCString(), |
444 | func_class.IsTopLevel() ? "" : "." , func_name.ToCString()); |
445 | } |
446 | |
447 | // Returns true if the function |func| overlaps the token range |
448 | // [|token_pos|, |end_token_pos|] in |script|. |
449 | static bool FunctionOverlaps(const Function& func, |
450 | const Script& script, |
451 | TokenPosition token_pos, |
452 | TokenPosition end_token_pos) { |
453 | TokenPosition func_start = func.token_pos(); |
454 | if (((func_start <= token_pos) && (token_pos <= func.end_token_pos())) || |
455 | ((token_pos <= func_start) && (func_start <= end_token_pos))) { |
456 | // Check script equality second because it allocates |
457 | // handles as a side effect. |
458 | return func.script() == script.raw(); |
459 | } |
460 | return false; |
461 | } |
462 | |
// Returns true for compiler-synthesized functions that have no user-visible
// source to break in (implicit accessors, dispatchers, regexp functions,
// and zero-width functions such as implicit constructors).
static bool IsImplicitFunction(const Function& func) {
  switch (func.kind()) {
    case FunctionLayout::kImplicitGetter:
    case FunctionLayout::kImplicitSetter:
    case FunctionLayout::kImplicitStaticGetter:
    case FunctionLayout::kFieldInitializer:
    case FunctionLayout::kMethodExtractor:
    case FunctionLayout::kNoSuchMethodDispatcher:
    case FunctionLayout::kInvokeFieldDispatcher:
    case FunctionLayout::kIrregexpFunction:
      return true;
    default:
      if (func.token_pos() == func.end_token_pos()) {
        // |func| could be an implicit constructor for example.
        return true;
      }
  }
  return false;
}
482 | |
483 | bool Debugger::HasBreakpoint(const Function& func, Zone* zone) { |
484 | if (!func.HasCode() && !func.HasBytecode()) { |
485 | // If the function is not compiled yet, just check whether there |
486 | // is a user-defined breakpoint that falls into the token |
487 | // range of the function. This may be a false positive: the breakpoint |
488 | // might be inside a local closure. |
489 | Script& script = Script::Handle(zone); |
490 | BreakpointLocation* sbpt = breakpoint_locations_; |
491 | while (sbpt != NULL) { |
492 | script = sbpt->script(); |
493 | if (FunctionOverlaps(func, script, sbpt->token_pos(), |
494 | sbpt->end_token_pos())) { |
495 | return true; |
496 | } |
497 | sbpt = sbpt->next_; |
498 | } |
499 | return false; |
500 | } |
501 | CodeBreakpoint* cbpt = code_breakpoints_; |
502 | while (cbpt != NULL) { |
503 | if (func.raw() == cbpt->function()) { |
504 | return true; |
505 | } |
506 | cbpt = cbpt->next_; |
507 | } |
508 | return false; |
509 | } |
510 | |
511 | bool Debugger::HasBreakpoint(const Code& code) { |
512 | CodeBreakpoint* cbpt = code_breakpoints_; |
513 | while (cbpt != NULL) { |
514 | if (code.raw() == cbpt->code_) { |
515 | return true; |
516 | } |
517 | cbpt = cbpt->next_; |
518 | } |
519 | return false; |
520 | } |
521 | |
// Appends every breakpoint — both placed and latent — to |jsarr|.
void Debugger::PrintBreakpointsToJSONArray(JSONArray* jsarr) const {
  PrintBreakpointsListToJSONArray(breakpoint_locations_, jsarr);
  PrintBreakpointsListToJSONArray(latent_locations_, jsarr);
}
526 | |
527 | void Debugger::PrintBreakpointsListToJSONArray(BreakpointLocation* sbpt, |
528 | JSONArray* jsarr) const { |
529 | while (sbpt != NULL) { |
530 | Breakpoint* bpt = sbpt->breakpoints(); |
531 | while (bpt != NULL) { |
532 | jsarr->AddValue(bpt); |
533 | bpt = bpt->next(); |
534 | } |
535 | sbpt = sbpt->next_; |
536 | } |
537 | } |
538 | |
// Serializes the current exception-pause setting into |jsobj|.
void Debugger::PrintSettingsToJSONObject(JSONObject* jsobj) const {
  // This won't cut it when we support filtering by class, etc.
  switch (GetExceptionPauseInfo()) {
    case kNoPauseOnExceptions:
      jsobj->AddProperty("_exceptions" , "none" );
      break;
    case kPauseOnAllExceptions:
      jsobj->AddProperty("_exceptions" , "all" );
      break;
    case kPauseOnUnhandledExceptions:
      jsobj->AddProperty("_exceptions" , "unhandled" );
      break;
    default:
      UNREACHABLE();
  }
}
555 | |
556 | // If the current top Dart frame is interpreted, returns the fp of the caller |
557 | // in compiled code that invoked the interpreter, or 0 if not found. |
558 | // If the current top Dart frame is compiled, returns the fp of the caller in |
559 | // interpreted bytecode that invoked compiled code, or ULONG_MAX if not found. |
560 | // Returning compiled code fp 0 (or bytecode fp ULONG_MAX) as fp value insures |
561 | // that the fp will compare as a callee of any valid frame pointer of the same |
562 | // mode (compiled code or bytecode). |
// If the current top Dart frame is interpreted, returns the fp of the caller
// in compiled code that invoked the interpreter, or 0 if not found.
// If the current top Dart frame is compiled, returns the fp of the caller in
// interpreted bytecode that invoked compiled code, or ULONG_MAX if not found.
// Returning compiled code fp 0 (or bytecode fp ULONG_MAX) as fp value insures
// that the fp will compare as a callee of any valid frame pointer of the same
// mode (compiled code or bytecode).
static uword CrossCallerFp() {
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame;
  // Skip non-Dart frames to find the top-most Dart frame.
  do {
    frame = iterator.NextFrame();
    RELEASE_ASSERT(frame != nullptr);
  } while (!frame->IsDartFrame());
  const bool top_is_interpreted = frame->is_interpreted();
  // Walk down until we hit a Dart frame of the opposite execution mode.
  do {
    frame = iterator.NextFrame();
    if (frame == nullptr) {
      return top_is_interpreted ? 0 : ULONG_MAX;
    }
    if (!frame->IsDartFrame()) {
      continue;
    }
  } while (top_is_interpreted == frame->is_interpreted());
  return frame->fp();
}
584 | |
// Compares this frame's position on the stack with the frame identified by
// |other_fp|/|other_is_interpreted|: returns kSelf, kCallee, or kCaller.
// Interpreted (bytecode) and compiled frames live on different stacks, so
// cross-mode comparisons are routed through CrossCallerFp().
ActivationFrame::Relation ActivationFrame::CompareTo(
    uword other_fp,
    bool other_is_interpreted) const {
  if (fp() == other_fp) {
    ASSERT(IsInterpreted() == other_is_interpreted);
    return kSelf;
  }
  if (IsInterpreted()) {
    if (!other_is_interpreted) {
      // Instead of fp(), use the fp of the compiled frame that called into the
      // interpreter (CrossCallerFp).
      // Note that if CrossCallerFp == other_fp, it must compare as a caller.
      return IsCalleeFrameOf(other_fp, CrossCallerFp()) ? kCallee : kCaller;
    }
    return IsBytecodeCalleeFrameOf(other_fp, fp()) ? kCallee : kCaller;
  }
  if (other_is_interpreted) {
    // Instead of fp(), use the fp of the interpreted frame that called into
    // compiled code (CrossCallerFp).
    // Note that if CrossCallerFp == other_fp, it must compare as a caller.
    return IsBytecodeCalleeFrameOf(other_fp, CrossCallerFp()) ? kCallee
                                                              : kCaller;
  }
  return IsCalleeFrameOf(other_fp, fp()) ? kCallee : kCaller;
}
610 | |
// Returns this frame's function name qualified with its class, as a String.
StringPtr ActivationFrame::QualifiedFunctionName() {
  return String::New(Debugger::QualifiedFunctionName(function()));
}
614 | |
// Returns the url of the script this frame's function belongs to.
StringPtr ActivationFrame::SourceUrl() {
  const Script& script = Script::Handle(SourceScript());
  return script.url();
}
619 | |
// Returns the script this frame's function belongs to.
ScriptPtr ActivationFrame::SourceScript() {
  return function().script();
}
623 | |
// Returns the library that defines this frame's function (via the
// function's originating class).
LibraryPtr ActivationFrame::Library() {
  const Class& cls = Class::Handle(function().origin());
  return cls.library();
}
628 | |
629 | void ActivationFrame::GetPcDescriptors() { |
630 | ASSERT(!IsInterpreted()); // We need to set try_index_ simultaneously. |
631 | if (pc_desc_.IsNull()) { |
632 | pc_desc_ = code().pc_descriptors(); |
633 | ASSERT(!pc_desc_.IsNull()); |
634 | } |
635 | } |
636 | |
637 | // If not token_pos_initialized_, compute token_pos_, try_index_ and, |
638 | // if not IsInterpreted(), also compute deopt_id_. |
// If not token_pos_initialized_, compute token_pos_, try_index_ and,
// if not IsInterpreted(), also compute deopt_id_.
TokenPosition ActivationFrame::TokenPos() {
  if (!token_pos_initialized_) {
    token_pos_initialized_ = true;
    if (IsInterpreted()) {
      // Bytecode carries its own pc -> token/try-index mapping.
      token_pos_ = bytecode().GetTokenIndexOfPC(pc_);
      try_index_ = bytecode().GetTryIndexAtPc(pc_);
      return token_pos_;
    }
    token_pos_ = TokenPosition::kNoSource;
    GetPcDescriptors();
    // Scan the pc descriptors for an entry matching this frame's pc offset;
    // the first match (of any kind) supplies try index, token pos, deopt id.
    PcDescriptors::Iterator iter(pc_desc_, PcDescriptorsLayout::kAnyKind);
    const uword pc_offset = pc_ - code().PayloadStart();
    while (iter.MoveNext()) {
      if (iter.PcOffset() == pc_offset) {
        try_index_ = iter.TryIndex();
        token_pos_ = iter.TokenPos();
        deopt_id_ = iter.DeoptId();
        break;
      }
    }
  }
  return token_pos_;
}
662 | |
// Returns the try-block index at this frame's pc (computed lazily).
intptr_t ActivationFrame::TryIndex() {
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, try_index_.
  }
  return try_index_;
}
669 | |
// Returns the deopt id at this frame's pc (computed lazily).
// Only meaningful for compiled-code frames.
intptr_t ActivationFrame::DeoptId() {
  ASSERT(!IsInterpreted());
  if (!token_pos_initialized_) {
    TokenPos();  // Side effect: computes token_pos_initialized_, try_index_.
  }
  return deopt_id_;
}
677 | |
// Returns the 1-based source line of this frame's token position,
// or -1 if unavailable.
intptr_t ActivationFrame::LineNumber() {
  // Compute line number lazily since it causes scanning of the script.
  if ((line_number_ < 0) && TokenPos().IsSourcePosition()) {
    const TokenPosition token_pos = TokenPos().SourcePosition();
    const Script& script = Script::Handle(SourceScript());
    // NOTE(review): unlike ColumnNumber(), no script.HasSource() guard here —
    // confirm GetTokenLocation tolerates scripts without source.
    script.GetTokenLocation(token_pos, &line_number_, NULL);
  }
  return line_number_;
}
687 | |
// Returns the source column of this frame's token position, or -1 if the
// script has no source attached. Also fills in line_number_ on success.
intptr_t ActivationFrame::ColumnNumber() {
  // Compute column number lazily since it causes scanning of the script.
  if ((column_number_ < 0) && TokenPos().IsSourcePosition()) {
    const TokenPosition token_pos = TokenPos().SourcePosition();
    const Script& script = Script::Handle(SourceScript());
    if (script.HasSource()) {
      script.GetTokenLocation(token_pos, &line_number_, &column_number_);
    } else {
      column_number_ = -1;
    }
  }
  return column_number_;
}
701 | |
// Lazily caches this frame's local variable descriptors, compiling
// unoptimized code for the function first if necessary. Propagates any
// compilation error (does not return normally in that case).
void ActivationFrame::GetVarDescriptors() {
  if (var_descriptors_.IsNull()) {
    if (IsInterpreted()) {
      var_descriptors_ = bytecode().GetLocalVarDescriptors();
      ASSERT(!var_descriptors_.IsNull());
      return;
    }
    Code& unoptimized_code = Code::Handle(function().unoptimized_code());
    if (unoptimized_code.IsNull()) {
      // No unoptimized code yet: compile it so descriptors exist.
      Thread* thread = Thread::Current();
      Zone* zone = thread->zone();
      const Error& error = Error::Handle(
          zone, Compiler::EnsureUnoptimizedCode(thread, function()));
      if (!error.IsNull()) {
        Exceptions::PropagateError(error);
      }
      unoptimized_code = function().unoptimized_code();
    }
    ASSERT(!unoptimized_code.IsNull());
    var_descriptors_ = unoptimized_code.GetLocalVarDescriptors();
    ASSERT(!var_descriptors_.IsNull());
  }
}
725 | |
// Returns true if the debugger may stop in this frame.
bool ActivationFrame::IsDebuggable() const {
  // When stepping in bytecode stub, function is null.
  return !function().IsNull() && Debugger::IsDebuggable(function());
}
730 | |
// Fatal diagnostic path: dumps the frame's state, disassembly, descriptors,
// and the full stack to stderr, then aborts the process. Never returns.
void ActivationFrame::PrintDescriptorsError(const char* message) {
  OS::PrintErr("Bad descriptors: %s\n" , message);
  OS::PrintErr("function %s\n" , function().ToQualifiedCString());
  OS::PrintErr("pc_ %" Px "\n" , pc_);
  OS::PrintErr("deopt_id_ %" Px "\n" , deopt_id_);
  OS::PrintErr("context_level_ %" Px "\n" , context_level_);
  OS::PrintErr("token_pos_ %s\n" , token_pos_.ToCString());
  if (function().is_declared_in_bytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    KernelBytecodeDisassembler::Disassemble(function());
#else
    UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  }
  if (!IsInterpreted()) {
    DisassembleToStdout formatter;
    code().Disassemble(&formatter);
    PcDescriptors::Handle(code().pc_descriptors()).Print();
  }
  // Dump every frame on the current stack for context.
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != NULL) {
    OS::PrintErr("%s\n" , frame->ToCString());
    frame = frames.NextFrame();
  }
  OS::Abort();
}
760 | |
761 | // Calculate the context level at the current pc of the frame. |
// Calculate the context level at the current pc of the frame.
// For bytecode frames this scans the bytecode's local variables info for the
// innermost scope containing the pc; for compiled frames it looks up the
// kContextLevel entry covering the current deopt id in the var descriptors.
// Aborts (via PrintDescriptorsError) if the metadata is missing.
intptr_t ActivationFrame::ContextLevel() {
  ASSERT(live_frame_);
  const Context& ctx = GetSavedCurrentContext();
  if (context_level_ < 0 && !ctx.IsNull()) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      Thread* thread = Thread::Current();
      Zone* zone = thread->zone();
      const auto& bytecode = Bytecode::Handle(zone, function_.bytecode());
      if (!bytecode.HasLocalVariablesInfo()) {
        PrintDescriptorsError("Missing local variables info" );
      }
      intptr_t pc_offset = pc_ - bytecode.PayloadStart();
      // Look for innermost scope, i.e. with the highest context level.
      // Since scopes are ordered by StartPC(), the last scope which includes
      // pc_offset will be the innermost one.
      kernel::BytecodeLocalVariablesIterator local_vars(zone, bytecode);
      while (local_vars.MoveNext()) {
        if (local_vars.Kind() ==
            kernel::BytecodeLocalVariablesIterator::kScope) {
          if (local_vars.StartPC() > pc_offset) {
            break;  // Scopes are sorted; no later scope can contain pc.
          }
          if (pc_offset <= local_vars.EndPC()) {
            ASSERT(context_level_ <= local_vars.ContextLevel());
            context_level_ = local_vars.ContextLevel();
          }
        }
      }
      if (context_level_ < 0) {
        PrintDescriptorsError("Missing context level in local variables info" );
      }
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      ASSERT(!code_.is_optimized());
      GetVarDescriptors();
      intptr_t deopt_id = DeoptId();
      if (deopt_id == DeoptId::kNone) {
        PrintDescriptorsError("Missing deopt id" );
      }
      // Find the context-level entry whose [begin, end] deopt-id range
      // covers this frame's deopt id.
      intptr_t var_desc_len = var_descriptors_.Length();
      bool found = false;
      for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
        LocalVarDescriptorsLayout::VarInfo var_info;
        var_descriptors_.GetInfo(cur_idx, &var_info);
        const int8_t kind = var_info.kind();
        if ((kind == LocalVarDescriptorsLayout::kContextLevel) &&
            (deopt_id >= var_info.begin_pos.value()) &&
            (deopt_id <= var_info.end_pos.value())) {
          context_level_ = var_info.index();
          found = true;
          break;
        }
      }
      if (!found) {
        PrintDescriptorsError("Missing context level in var descriptors" );
      }
      ASSERT(context_level_ >= 0);
    }
  }
  return context_level_;
}
826 | |
// Looks up the async-machinery variable |name| (e.g. the completer) in this
// frame's variable descriptors and returns its current value, or null if
// the frame is not an async(-gen) closure or the variable is absent.
ObjectPtr ActivationFrame::GetAsyncContextVariable(const String& name) {
  if (!function_.IsAsyncClosure() && !function_.IsAsyncGenClosure()) {
    return Object::null();
  }
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    if (var_descriptors_.GetName(i) == name.raw()) {
      const int8_t kind = var_info.kind();
      if (!live_frame_) {
        // A reconstructed (non-live) frame has no stack slots to read.
        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
      }
      const auto variable_index = VariableIndex(var_info.index());
      if (kind == LocalVarDescriptorsLayout::kStackVar) {
        return GetStackVar(variable_index);
      } else {
        ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
        // Variable descriptors constructed from bytecode have all variables of
        // enclosing functions, even shadowed by the current function.
        // Pick the variable with the highest context level.
        if (var_info.scope_id > var_ctxt_level) {
          var_ctxt_level = var_info.scope_id;
          ctxt_slot = variable_index.value();
        }
      }
    }
  }
  if (var_ctxt_level >= 0) {
    if (!live_frame_) {
      ASSERT(!ctx_.IsNull());
      // Compiled code uses relative context levels, i.e. the frame context
      // level is always 0 on entry.
      // Bytecode uses absolute context levels, i.e. the frame context level
      // on entry must be calculated.
      const intptr_t frame_ctx_level =
          function().is_declared_in_bytecode() ? ctx_.GetLevel() : 0;
      return GetRelativeContextVar(var_ctxt_level, ctxt_slot, frame_ctx_level);
    }
    return GetContextVar(var_ctxt_level, ctxt_slot);
  }
  return Object::null();
}
873 | |
874 | ObjectPtr ActivationFrame::GetAsyncCompleter() { |
875 | return GetAsyncContextVariable(Symbols::AsyncCompleter()); |
876 | } |
877 | |
878 | ObjectPtr ActivationFrame::GetAsyncCompleterAwaiter(const Object& completer) { |
879 | DEBUG_ASSERT(Thread::Current()->TopErrorHandlerIsExitFrame()); |
880 | |
881 | Object& future = Object::Handle(); |
882 | const Class& completer_cls = Class::Handle(completer.clazz()); |
883 | ASSERT(!completer_cls.IsNull()); |
884 | const Function& future_getter = Function::Handle( |
885 | completer_cls.LookupGetterFunction(Symbols::CompleterFuture())); |
886 | ASSERT(!future_getter.IsNull()); |
887 | const Array& args = Array::Handle(Array::New(1)); |
888 | args.SetAt(0, Instance::Cast(completer)); |
889 | future = DartEntry::InvokeFunction(future_getter, args); |
890 | if (future.IsError()) { |
891 | Exceptions::PropagateError(Error::Cast(future)); |
892 | } |
893 | if (future.IsNull()) { |
894 | // The completer object may not be fully initialized yet. |
895 | return Object::null(); |
896 | } |
897 | const Class& future_cls = Class::Handle(future.clazz()); |
898 | ASSERT(!future_cls.IsNull()); |
899 | const Field& awaiter_field = Field::Handle( |
900 | future_cls.LookupInstanceFieldAllowPrivate(Symbols::_Awaiter())); |
901 | ASSERT(!awaiter_field.IsNull()); |
902 | return Instance::Cast(future).GetField(awaiter_field); |
903 | } |
904 | |
905 | ObjectPtr ActivationFrame::GetAsyncStreamControllerStream() { |
906 | return GetAsyncContextVariable(Symbols::ControllerStream()); |
907 | } |
908 | |
909 | ObjectPtr ActivationFrame::GetAsyncStreamControllerStreamAwaiter( |
910 | const Object& stream) { |
911 | const Class& stream_cls = Class::Handle(stream.clazz()); |
912 | ASSERT(!stream_cls.IsNull()); |
913 | const Class& stream_impl_cls = Class::Handle(stream_cls.SuperClass()); |
914 | const Field& awaiter_field = Field::Handle( |
915 | stream_impl_cls.LookupInstanceFieldAllowPrivate(Symbols::_Awaiter())); |
916 | ASSERT(!awaiter_field.IsNull()); |
917 | return Instance::Cast(stream).GetField(awaiter_field); |
918 | } |
919 | |
920 | ObjectPtr ActivationFrame::GetAsyncAwaiter() { |
921 | const Object& async_stream_controller_stream = |
922 | Object::Handle(GetAsyncStreamControllerStream()); |
923 | if (!async_stream_controller_stream.IsNull()) { |
924 | return GetAsyncStreamControllerStreamAwaiter( |
925 | async_stream_controller_stream); |
926 | } |
927 | const Object& completer = Object::Handle(GetAsyncCompleter()); |
928 | if (!completer.IsNull()) { |
929 | return GetAsyncCompleterAwaiter(completer); |
930 | } |
931 | return Object::null(); |
932 | } |
933 | |
934 | ObjectPtr ActivationFrame::GetCausalStack() { |
935 | return GetAsyncContextVariable(Symbols::AsyncStackTraceVar()); |
936 | } |
937 | |
// Returns true if this frame has a (user-written, for async functions)
// exception handler whose handled types match exc_obj. Walks outward
// through the try-block nesting via OuterTryIndex.
bool ActivationFrame::HandlesException(const Instance& exc_obj) {
  if ((kind_ == kAsyncSuspensionMarker) || (kind_ == kAsyncCausal)) {
    // These frames are historical.
    return false;
  }
  intptr_t try_index = TryIndex();
  if (try_index < 0) {
    // Not inside any try block.
    return false;
  }
  ExceptionHandlers& handlers = ExceptionHandlers::Handle();
  Array& handled_types = Array::Handle();
  AbstractType& type = Type::Handle();
  const bool is_async =
      function().IsAsyncClosure() || function().IsAsyncGenClosure();
  // Handler metadata comes from bytecode or compiled code, depending on
  // how this frame executes.
  if (IsInterpreted()) {
    handlers = bytecode().exception_handlers();
  } else {
    handlers = code().exception_handlers();
  }
  ASSERT(!handlers.IsNull());
  intptr_t num_handlers_checked = 0;
  while (try_index != kInvalidTryIndex) {
    // Detect circles in the exception handler data.
    num_handlers_checked++;
    ASSERT(num_handlers_checked <= handlers.num_entries());
    // Only consider user written handlers for async methods.
    if (!is_async || !handlers.IsGenerated(try_index)) {
      handled_types = handlers.GetHandledTypes(try_index);
      const intptr_t num_types = handled_types.Length();
      for (intptr_t k = 0; k < num_types; k++) {
        type ^= handled_types.At(k);
        ASSERT(!type.IsNull());
        // Uninstantiated types are not added to ExceptionHandlers data.
        ASSERT(type.IsInstantiated());
        if (type.IsDynamicType()) {
          // A 'catch (e)' clause handles everything.
          return true;
        }
        if (exc_obj.IsInstanceOf(type, Object::null_type_arguments(),
                                 Object::null_type_arguments())) {
          return true;
        }
      }
    }
    // Continue with the enclosing try block.
    try_index = handlers.OuterTryIndex(try_index);
  }
  return false;
}
985 | |
// Reads the value of the :await_jump_var context variable in this frame,
// i.e. the index of the yield point where the async function is suspended.
// Returns -1 if the variable is not present.
intptr_t ActivationFrame::GetAwaitJumpVariable() {
  GetVarDescriptors();
  intptr_t var_ctxt_level = -1;
  intptr_t ctxt_slot = -1;
  intptr_t var_desc_len = var_descriptors_.Length();
  intptr_t await_jump_var = -1;
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (var_descriptors_.GetName(i) == Symbols::AwaitJumpVar().raw()) {
      // :await_jump_var is always captured in a context.
      ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
      ASSERT(!ctx_.IsNull());
      // Variable descriptors constructed from bytecode have all variables of
      // enclosing functions, even shadowed by the current function.
      // Pick the :await_jump_var variable with the highest context level.
      if (var_info.scope_id > var_ctxt_level) {
        var_ctxt_level = var_info.scope_id;
        ctxt_slot = var_info.index();
      }
    }
  }
  if (var_ctxt_level >= 0) {
    // Found: the slot holds a Smi yield index.
    Object& await_jump_index = Object::Handle(ctx_.At(ctxt_slot));
    ASSERT(await_jump_index.IsSmi());
    await_jump_var = Smi::Cast(await_jump_index).Value();
  }
  return await_jump_var;
}
1015 | |
1016 | void ActivationFrame::() { |
1017 | // Attempt to determine the token pos and try index from the async closure. |
1018 | Thread* thread = Thread::Current(); |
1019 | Zone* zone = thread->zone(); |
1020 | |
1021 | ASSERT(function_.IsAsyncGenClosure() || function_.IsAsyncClosure()); |
1022 | // This should only be called on frames that aren't active on the stack. |
1023 | ASSERT(fp() == 0); |
1024 | |
1025 | if (function_.is_declared_in_bytecode()) { |
1026 | #if !defined(DART_PRECOMPILED_RUNTIME) |
1027 | const auto& bytecode = Bytecode::Handle(zone, function_.bytecode()); |
1028 | if (!bytecode.HasSourcePositions()) { |
1029 | return; |
1030 | } |
1031 | const intptr_t await_jump_var = GetAwaitJumpVariable(); |
1032 | if (await_jump_var < 0) { |
1033 | return; |
1034 | } |
1035 | // Yield points are counted from 1 (0 is reserved for normal entry). |
1036 | intptr_t yield_point_index = 1; |
1037 | kernel::BytecodeSourcePositionsIterator iter(zone, bytecode); |
1038 | while (iter.MoveNext()) { |
1039 | if (iter.IsYieldPoint()) { |
1040 | if (yield_point_index == await_jump_var) { |
1041 | token_pos_ = iter.TokenPos(); |
1042 | token_pos_initialized_ = true; |
1043 | const uword return_address = |
1044 | KernelBytecode::Next(bytecode.PayloadStart() + iter.PcOffset()); |
1045 | try_index_ = bytecode.GetTryIndexAtPc(return_address); |
1046 | return; |
1047 | } |
1048 | ++yield_point_index; |
1049 | } |
1050 | } |
1051 | return; |
1052 | #else |
1053 | UNREACHABLE(); |
1054 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
1055 | } |
1056 | |
1057 | ASSERT(!IsInterpreted()); |
1058 | const intptr_t await_jump_var = GetAwaitJumpVariable(); |
1059 | if (await_jump_var < 0) { |
1060 | return; |
1061 | } |
1062 | |
1063 | const auto& pc_descriptors = |
1064 | PcDescriptors::Handle(zone, code().pc_descriptors()); |
1065 | ASSERT(!pc_descriptors.IsNull()); |
1066 | PcDescriptors::Iterator it(pc_descriptors, PcDescriptorsLayout::kOther); |
1067 | while (it.MoveNext()) { |
1068 | if (it.YieldIndex() == await_jump_var) { |
1069 | try_index_ = it.TryIndex(); |
1070 | token_pos_ = it.TokenPos(); |
1071 | token_pos_initialized_ = true; |
1072 | return; |
1073 | } |
1074 | } |
1075 | } |
1076 | |
1077 | bool ActivationFrame::IsAsyncMachinery() const { |
1078 | if (function_.IsNull()) { |
1079 | ASSERT(IsInterpreted()); // This frame is a bytecode stub frame. |
1080 | return false; |
1081 | } |
1082 | Isolate* isolate = Isolate::Current(); |
1083 | if (function_.raw() == isolate->object_store()->complete_on_async_return()) { |
1084 | // We are completing an async function's completer. |
1085 | return true; |
1086 | } |
1087 | if (function_.Owner() == |
1088 | isolate->object_store()->async_star_stream_controller()) { |
1089 | // We are inside the async* stream controller code. |
1090 | return true; |
1091 | } |
1092 | return false; |
1093 | } |
1094 | |
// Get the saved current context of this activation. The context is located
// via the kSavedCurrentContext var descriptor entry and cached in ctx_.
const Context& ActivationFrame::GetSavedCurrentContext() {
  if (!ctx_.IsNull()) return ctx_;
  GetVarDescriptors();
  intptr_t var_desc_len = var_descriptors_.Length();
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < var_desc_len; i++) {
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(i, &var_info);
    const int8_t kind = var_info.kind();
    if (kind == LocalVarDescriptorsLayout::kSavedCurrentContext) {
      if (FLAG_trace_debugger_stacktrace) {
        OS::PrintErr("\tFound saved current ctx at index %d\n" ,
                     var_info.index());
      }
      const auto variable_index = VariableIndex(var_info.index());
      obj = GetStackVar(variable_index);
      if (obj.IsClosure()) {
        ASSERT(function().name() == Symbols::Call().raw());
        ASSERT(function().IsInvokeFieldDispatcher());
        // Closure.call frames.
        ctx_ = Closure::Cast(obj).context();
      } else if (obj.IsContext()) {
        ctx_ = Context::Cast(obj).raw();
      } else {
        // The slot may be null or marked optimized-out; report no context.
        ASSERT(obj.IsNull() || obj.raw() == Symbols::OptimizedOut().raw());
        ctx_ = Context::null();
      }
      return ctx_;
    }
  }
  // No saved-context entry found; ctx_ remains null.
  return ctx_;
}
1128 | |
1129 | ObjectPtr ActivationFrame::GetAsyncOperation() { |
1130 | if (function().name() == Symbols::AsyncOperation().raw()) { |
1131 | return GetParameter(0); |
1132 | } |
1133 | return Object::null(); |
1134 | } |
1135 | |
1136 | ActivationFrame* DebuggerStackTrace::GetHandlerFrame( |
1137 | const Instance& exc_obj) const { |
1138 | for (intptr_t frame_index = 0; frame_index < Length(); frame_index++) { |
1139 | ActivationFrame* frame = FrameAt(frame_index); |
1140 | if (FLAG_trace_debugger_stacktrace) { |
1141 | OS::PrintErr("GetHandlerFrame: #%04" Pd " %s" , frame_index, |
1142 | frame->ToCString()); |
1143 | } |
1144 | if (frame->HandlesException(exc_obj)) { |
1145 | return frame; |
1146 | } |
1147 | } |
1148 | return NULL; |
1149 | } |
1150 | |
// Populates desc_indices_ with the var-descriptor indices of all variables
// visible at this frame's current token position, resolving shadowing so
// each name appears at most once. Idempotent (guarded by vars_initialized_).
void ActivationFrame::GetDescIndices() {
  if (vars_initialized_) {
    return;
  }
  GetVarDescriptors();

  TokenPosition activation_token_pos = TokenPos();
  if (!activation_token_pos.IsDebugPause() || !live_frame_) {
    // We don't have a token position for this frame, so can't determine
    // which variables are visible.
    vars_initialized_ = true;
    return;
  }

  // Names collected so far, parallel to desc_indices_, used to detect
  // duplicate (shadowing) names.
  GrowableArray<String*> var_names(8);
  intptr_t var_desc_len = var_descriptors_.Length();
  for (intptr_t cur_idx = 0; cur_idx < var_desc_len; cur_idx++) {
    ASSERT(var_names.length() == desc_indices_.length());
    LocalVarDescriptorsLayout::VarInfo var_info;
    var_descriptors_.GetInfo(cur_idx, &var_info);
    const int8_t kind = var_info.kind();
    // Only stack and context variables are user-visible.
    if ((kind != LocalVarDescriptorsLayout::kStackVar) &&
        (kind != LocalVarDescriptorsLayout::kContextVar)) {
      continue;
    }
    if ((var_info.begin_pos <= activation_token_pos) &&
        (activation_token_pos <= var_info.end_pos)) {
      if ((kind == LocalVarDescriptorsLayout::kContextVar) &&
          (ContextLevel() < var_info.scope_id)) {
        // The variable is textually in scope but the context level
        // at the activation frame's PC is lower than the context
        // level of the variable. The context containing the variable
        // has already been removed from the chain. This can happen when we
        // break at a return statement, since the contexts get discarded
        // before the debugger gets called.
        continue;
      }
      // The current variable is textually in scope. Now check whether
      // there is another local variable with the same name that shadows
      // or is shadowed by this variable.
      String& var_name = String::Handle(var_descriptors_.GetName(cur_idx));
      intptr_t indices_len = desc_indices_.length();
      bool name_match_found = false;
      for (intptr_t i = 0; i < indices_len; i++) {
        if (var_name.Equals(*var_names[i])) {
          // Found two local variables with the same name. Now determine
          // which one is shadowed.
          name_match_found = true;
          LocalVarDescriptorsLayout::VarInfo i_var_info;
          var_descriptors_.GetInfo(desc_indices_[i], &i_var_info);
          if (i_var_info.begin_pos < var_info.begin_pos) {
            // The variable we found earlier is in an outer scope
            // and is shadowed by the current variable. Replace the
            // descriptor index of the previously found variable
            // with the descriptor index of the current variable.
            desc_indices_[i] = cur_idx;
          } else {
            // The variable we found earlier is in an inner scope
            // and shadows the current variable. Skip the current
            // variable. (Nothing to do.)
          }
          break;  // Stop looking for name matches.
        }
      }
      if (!name_match_found) {
        // No duplicate name found. Add the current descriptor index to the
        // list of visible variables.
        desc_indices_.Add(cur_idx);
        var_names.Add(&var_name);
      }
    }
  }
  vars_initialized_ = true;
}
1225 | |
1226 | intptr_t ActivationFrame::NumLocalVariables() { |
1227 | GetDescIndices(); |
1228 | return desc_indices_.length(); |
1229 | } |
1230 | |
1231 | DART_FORCE_INLINE static ObjectPtr GetVariableValue(uword addr) { |
1232 | return *reinterpret_cast<ObjectPtr*>(addr); |
1233 | } |
1234 | |
// Caution: GetParameter only works for fixed parameters.
// Reads the value of fixed parameter 'index' from this frame, handling the
// differing parameter layouts of interpreted vs. compiled frames and of
// functions with vs. without optional parameters.
ObjectPtr ActivationFrame::GetParameter(intptr_t index) {
  intptr_t num_parameters = function().num_fixed_parameters();
  ASSERT(0 <= index && index < num_parameters);

  if (IsInterpreted()) {
    if (function().NumOptionalParameters() > 0) {
      // Note that we do not access optional but only fixed parameters, hence
      // we do not need to replicate the logic of IndexFor() in bytecode reader.
      return GetVariableValue(fp() + index * kWordSize);
    } else {
      // No optionals: parameters live below the frame pointer.
      return GetVariableValue(
          fp() - (kKBCParamEndSlotFromFp + num_parameters - index) * kWordSize);
    }
  }

  if (function().NumOptionalParameters() > 0) {
    // If the function has optional parameters, the first positional parameter
    // can be in a number of places in the caller's frame depending on how many
    // were actually supplied at the call site, but they are copied to a fixed
    // place in the callee's frame.

    return GetVariableValue(LocalVarAddress(
        fp(), runtime_frame_layout.FrameSlotForVariableIndex(-index)));
  } else {
    // No optionals: parameters are read from the caller's frame area.
    intptr_t reverse_index = num_parameters - index;
    return GetVariableValue(ParamAddress(fp(), reverse_index));
  }
}
1264 | |
1265 | ObjectPtr ActivationFrame::GetClosure() { |
1266 | ASSERT(function().IsClosureFunction()); |
1267 | return GetParameter(0); |
1268 | } |
1269 | |
// Reads a stack variable from this frame, consulting the materialized
// deopt frame when one exists (i.e. when this frame was deoptimized).
ObjectPtr ActivationFrame::GetStackVar(VariableIndex variable_index) {
  if (IsInterpreted()) {
    intptr_t slot_index = -variable_index.value();
    if (slot_index < 0) {
      slot_index -= kKBCParamEndSlotFromFp;  // Accessing a parameter.
    }
    return GetVariableValue(fp() + slot_index * kWordSize);
  }
  const intptr_t slot_index =
      runtime_frame_layout.FrameSlotForVariableIndex(variable_index.value());
  if (deopt_frame_.IsNull()) {
    // Live compiled frame: read directly off the stack.
    return GetVariableValue(LocalVarAddress(fp(), slot_index));
  } else {
    // Deoptimized frame: read from the materialized deopt frame array.
    return deopt_frame_.At(LocalVarIndex(deopt_frame_offset_, slot_index));
  }
}
1286 | |
1287 | bool ActivationFrame::IsRewindable() const { |
1288 | if (deopt_frame_.IsNull()) { |
1289 | return true; |
1290 | } |
1291 | // TODO(turnidge): This is conservative. It looks at all values in |
1292 | // the deopt_frame_ even though some of them may correspond to other |
1293 | // inlined frames. |
1294 | Object& obj = Object::Handle(); |
1295 | for (int i = 0; i < deopt_frame_.Length(); i++) { |
1296 | obj = deopt_frame_.At(i); |
1297 | if (obj.raw() == Symbols::OptimizedOut().raw()) { |
1298 | return false; |
1299 | } |
1300 | } |
1301 | return true; |
1302 | } |
1303 | |
// Dumps diagnostic state (the mismatching levels, this frame, the saved
// context, the debugger stack trace, and all raw stack frames) when a
// context-variable lookup finds inconsistent context level data.
void ActivationFrame::PrintContextMismatchError(intptr_t ctx_slot,
                                                intptr_t frame_ctx_level,
                                                intptr_t var_ctx_level) {
  OS::PrintErr(
      "-------------------------\n"
      "Encountered context mismatch\n"
      "\tctx_slot: %" Pd
      "\n"
      "\tframe_ctx_level: %" Pd
      "\n"
      "\tvar_ctx_level: %" Pd "\n\n" ,
      ctx_slot, frame_ctx_level, var_ctx_level);

  OS::PrintErr(
      "-------------------------\n"
      "Current frame:\n%s\n" ,
      this->ToCString());

  OS::PrintErr(
      "-------------------------\n"
      "Context contents:\n" );
  const Context& ctx = GetSavedCurrentContext();
  ctx.Dump(8);

  OS::PrintErr(
      "-------------------------\n"
      "Debugger stack trace...\n\n" );
  DebuggerStackTrace* stack = Isolate::Current()->debugger()->StackTrace();
  intptr_t num_frames = stack->Length();
  for (intptr_t i = 0; i < num_frames; i++) {
    ActivationFrame* frame = stack->FrameAt(i);
    OS::PrintErr("#%04" Pd " %s" , i, frame->ToCString());
  }

  // Also dump every raw stack frame, including those the debugger filters.
  OS::PrintErr(
      "-------------------------\n"
      "All frames...\n\n" );
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = iterator.NextFrame();
  intptr_t num = 0;
  while ((frame != NULL)) {
    OS::PrintErr("#%04" Pd " %s\n" , num++, frame->ToCString());
    frame = iterator.NextFrame();
  }
}
1351 | |
// Returns, via out-parameters, the name, declaration position, visibility
// range, and current value of the i-th visible variable of this frame
// (index into desc_indices_, see GetDescIndices). All out-pointers must be
// non-null.
void ActivationFrame::VariableAt(intptr_t i,
                                 String* name,
                                 TokenPosition* declaration_token_pos,
                                 TokenPosition* visible_start_token_pos,
                                 TokenPosition* visible_end_token_pos,
                                 Object* value) {
  GetDescIndices();
  ASSERT(i < desc_indices_.length());
  intptr_t desc_index = desc_indices_[i];
  ASSERT(name != NULL);

  *name = var_descriptors_.GetName(desc_index);

  LocalVarDescriptorsLayout::VarInfo var_info;
  var_descriptors_.GetInfo(desc_index, &var_info);
  ASSERT(declaration_token_pos != NULL);
  *declaration_token_pos = var_info.declaration_pos;
  ASSERT(visible_start_token_pos != NULL);
  *visible_start_token_pos = var_info.begin_pos;
  ASSERT(visible_end_token_pos != NULL);
  *visible_end_token_pos = var_info.end_pos;
  ASSERT(value != NULL);
  const int8_t kind = var_info.kind();
  const auto variable_index = VariableIndex(var_info.index());
  // Fetch the value from the stack or from the context chain, depending on
  // where the variable lives.
  if (kind == LocalVarDescriptorsLayout::kStackVar) {
    *value = GetStackVar(variable_index);
  } else {
    ASSERT(kind == LocalVarDescriptorsLayout::kContextVar);
    *value = GetContextVar(var_info.scope_id, variable_index.value());
  }
}
1383 | |
1384 | ObjectPtr ActivationFrame::GetContextVar(intptr_t var_ctx_level, |
1385 | intptr_t ctx_slot) { |
1386 | // The context level at the PC/token index of this activation frame. |
1387 | intptr_t frame_ctx_level = ContextLevel(); |
1388 | |
1389 | return GetRelativeContextVar(var_ctx_level, ctx_slot, frame_ctx_level); |
1390 | } |
1391 | |
// Reads slot ctx_slot from the context that is (frame_ctx_level -
// var_ctx_level) parents up from this frame's saved context. Prints a
// diagnostic and returns null (or the OptimizedOut sentinel) when the
// context data is inconsistent or missing.
ObjectPtr ActivationFrame::GetRelativeContextVar(intptr_t var_ctx_level,
                                                 intptr_t ctx_slot,
                                                 intptr_t frame_ctx_level) {
  const Context& ctx = GetSavedCurrentContext();

  // It's possible that ctx was optimized out as no locals were captured by the
  // context. See issue #38182.
  if (ctx.IsNull()) {
    return Symbols::OptimizedOut().raw();
  }

  intptr_t level_diff = frame_ctx_level - var_ctx_level;
  if (level_diff == 0) {
    // Variable lives in the frame's own context.
    if ((ctx_slot < 0) || (ctx_slot >= ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT((ctx_slot >= 0) && (ctx_slot < ctx.num_variables()));
    return ctx.At(ctx_slot);
  } else if (level_diff > 0) {
    // Walk up the parent chain to the variable's context.
    Context& var_ctx = Context::Handle(ctx.raw());
    while (level_diff > 0 && !var_ctx.IsNull()) {
      level_diff--;
      var_ctx = var_ctx.parent();
    }
    if (var_ctx.IsNull() || (ctx_slot < 0) ||
        (ctx_slot >= var_ctx.num_variables())) {
      PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    }
    ASSERT(!var_ctx.IsNull());
    ASSERT((ctx_slot >= 0) && (ctx_slot < var_ctx.num_variables()));
    return var_ctx.At(ctx_slot);
  } else {
    // Variable context deeper than frame context: inconsistent data.
    PrintContextMismatchError(ctx_slot, frame_ctx_level, var_ctx_level);
    return Object::null();
  }
}
1428 | |
1429 | ArrayPtr ActivationFrame::GetLocalVariables() { |
1430 | GetDescIndices(); |
1431 | intptr_t num_variables = desc_indices_.length(); |
1432 | String& var_name = String::Handle(); |
1433 | Object& value = Instance::Handle(); |
1434 | const Array& list = Array::Handle(Array::New(2 * num_variables)); |
1435 | for (intptr_t i = 0; i < num_variables; i++) { |
1436 | TokenPosition ignore; |
1437 | VariableAt(i, &var_name, &ignore, &ignore, &ignore, &value); |
1438 | list.SetAt(2 * i, var_name); |
1439 | list.SetAt((2 * i) + 1, value); |
1440 | } |
1441 | return list.raw(); |
1442 | } |
1443 | |
1444 | ObjectPtr ActivationFrame::GetReceiver() { |
1445 | GetDescIndices(); |
1446 | intptr_t num_variables = desc_indices_.length(); |
1447 | String& var_name = String::Handle(); |
1448 | Instance& value = Instance::Handle(); |
1449 | for (intptr_t i = 0; i < num_variables; i++) { |
1450 | TokenPosition ignore; |
1451 | VariableAt(i, &var_name, &ignore, &ignore, &ignore, &value); |
1452 | if (var_name.Equals(Symbols::This())) { |
1453 | return value.raw(); |
1454 | } |
1455 | } |
1456 | return Symbols::OptimizedOut().raw(); |
1457 | } |
1458 | |
1459 | static bool IsSyntheticVariableName(const String& var_name) { |
1460 | return (var_name.Length() >= 1) && (var_name.CharAt(0) == ':'); |
1461 | } |
1462 | |
1463 | static bool IsPrivateVariableName(const String& var_name) { |
1464 | return (var_name.Length() >= 1) && (var_name.CharAt(0) == '_'); |
1465 | } |
1466 | |
1467 | ObjectPtr ActivationFrame::EvaluateCompiledExpression( |
1468 | const ExternalTypedData& kernel_buffer, |
1469 | const Array& type_definitions, |
1470 | const Array& arguments, |
1471 | const TypeArguments& type_arguments) { |
1472 | if (function().is_static()) { |
1473 | const Class& cls = Class::Handle(function().Owner()); |
1474 | return cls.EvaluateCompiledExpression(kernel_buffer, type_definitions, |
1475 | arguments, type_arguments); |
1476 | } else { |
1477 | const Object& receiver = Object::Handle(GetReceiver()); |
1478 | const Class& method_cls = Class::Handle(function().origin()); |
1479 | ASSERT(receiver.IsInstance() || receiver.IsNull()); |
1480 | if (!(receiver.IsInstance() || receiver.IsNull())) { |
1481 | return Object::null(); |
1482 | } |
1483 | const Instance& inst = Instance::Cast(receiver); |
1484 | return inst.EvaluateCompiledExpression( |
1485 | method_cls, kernel_buffer, type_definitions, arguments, type_arguments); |
1486 | } |
1487 | } |
1488 | |
// Collects this frame's visible variables into param_names/param_values
// (skipping 'this', synthetic names, and names already present), fills
// type_params_names with the names of all type parameters in scope, and
// returns the frame's function type arguments (null if unavailable).
TypeArgumentsPtr ActivationFrame::BuildParameters(
    const GrowableObjectArray& param_names,
    const GrowableObjectArray& param_values,
    const GrowableObjectArray& type_params_names) {
  GetDescIndices();
  bool type_arguments_available = false;
  String& name = String::Handle();
  String& existing_name = String::Handle();
  Object& value = Instance::Handle();
  TypeArguments& type_arguments = TypeArguments::Handle();
  intptr_t num_variables = desc_indices_.length();
  for (intptr_t i = 0; i < num_variables; i++) {
    TokenPosition ignore;
    VariableAt(i, &name, &ignore, &ignore, &ignore, &value);
    if (name.Equals(Symbols::FunctionTypeArgumentsVar())) {
      // The synthetic :function_type_arguments variable carries the frame's
      // type arguments.
      type_arguments_available = true;
      type_arguments ^= value.raw();
    } else if (!name.Equals(Symbols::This()) &&
               !IsSyntheticVariableName(name)) {
      if (IsPrivateVariableName(name)) {
        // Strip the library-private suffix so the name is usable in an
        // evaluated expression.
        name = Symbols::New(Thread::Current(), String::ScrubName(name));
      }
      bool conflict = false;
      for (intptr_t j = 0; j < param_names.Length(); j++) {
        existing_name ^= param_names.At(j);
        if (name.Equals(existing_name)) {
          conflict = true;
          break;
        }
      }
      // If local has the same name as a binding in the incoming scope, prefer
      // the one from the incoming scope, since it is logically a child scope
      // of the activation's current scope.
      if (!conflict) {
        param_names.Add(name);
        param_values.Add(value);
      }
    }
  }

  if ((function().IsGeneric() || function().HasGenericParent()) &&
      type_arguments_available) {
    intptr_t num_vars =
        function().NumTypeParameters() + function().NumParentTypeParameters();
    type_params_names.Grow(num_vars);
    type_params_names.SetLength(num_vars);
    TypeArguments& type_params = TypeArguments::Handle();
    TypeParameter& type_param = TypeParameter::Handle();
    Function& current = Function::Handle(function().raw());
    // Walk from the innermost function outward, writing each function's
    // type parameter names into its slice of the flat names array.
    intptr_t mapping_offset = num_vars;
    for (intptr_t i = 0; !current.IsNull(); i += current.NumTypeParameters(),
                  current = current.parent_function()) {
      type_params = current.type_parameters();
      intptr_t size = current.NumTypeParameters();
      ASSERT(mapping_offset >= size);
      mapping_offset -= size;
      for (intptr_t j = 0; j < size; ++j) {
        type_param = TypeParameter::RawCast(type_params.TypeAt(j));
        name = type_param.name();
        // Write the names in backwards in terms of chain of functions.
        // But keep the order of names within the same function. so they
        // match up with the order of the types in 'type_arguments'.
        // Index:0 1 2 3 ...
        // |Names in Grandparent| |Names in Parent| ..|Names in Child|
        type_params_names.SetAt(mapping_offset + j, name);
      }
    }
    if (!type_arguments.IsNull()) {
      if (type_arguments.Length() == 0) {
        // An empty (all-dynamic) vector: fill in explicit dynamic entries.
        for (intptr_t i = 0; i < num_vars; ++i) {
          type_arguments.SetTypeAt(i, Object::dynamic_type());
        }
      }
      ASSERT(type_arguments.Length() == num_vars);
    }
  }

  return type_arguments.raw();
}
1568 | |
// Returns a zone-allocated human-readable description of this frame. The
// format differs for frames without a function, live frames (with pc/fp/sp
// and context level), and non-live frames.
const char* ActivationFrame::ToCString() {
  if (function().IsNull()) {
    return Thread::Current()->zone()->PrintToString("[ Frame kind: %s]\n" ,
                                                    KindToCString(kind_));
  }
  const String& url = String::Handle(SourceUrl());
  intptr_t line = LineNumber();
  const char* func_name = function().ToFullyQualifiedCString();
  if (live_frame_) {
    return Thread::Current()->zone()->PrintToString(
        "[ Frame pc(0x%" Px " %s offset:0x%" Px ") fp(0x%" Px ") sp(0x%" Px
        ")\n"
        "\tfunction = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s\n"
        "\tcontext level = %" Pd " ]\n" ,
        pc(), IsInterpreted() ? "bytecode" : "code" ,
        // Report pc as an offset into the bytecode or code payload.
        pc() - (IsInterpreted() ? bytecode().PayloadStart()
                                : code().PayloadStart()),
        fp(), sp(), func_name, url.ToCString(), line, ctx_.ToCString(),
        ContextLevel());
  } else {
    // Non-live frames have no meaningful pc/fp/sp or context level.
    return Thread::Current()->zone()->PrintToString(
        "[ Frame %s function = %s\n"
        "\turl = %s\n"
        "\tline = %" Pd
        "\n"
        "\tcontext = %s]\n" ,
        IsInterpreted() ? "bytecode" : "code" , func_name, url.ToCString(), line,
        ctx_.ToCString());
  }
}
1603 | |
1604 | void ActivationFrame::PrintToJSONObject(JSONObject* jsobj) { |
1605 | if (kind_ == kRegular || kind_ == kAsyncActivation) { |
1606 | PrintToJSONObjectRegular(jsobj); |
1607 | } else if (kind_ == kAsyncCausal) { |
1608 | PrintToJSONObjectAsyncCausal(jsobj); |
1609 | } else if (kind_ == kAsyncSuspensionMarker) { |
1610 | PrintToJSONObjectAsyncSuspensionMarker(jsobj); |
1611 | } else { |
1612 | UNIMPLEMENTED(); |
1613 | } |
1614 | } |
1615 | |
// Serializes a regular (or async activation) frame to JSON: location,
// function, code, and the list of non-synthetic bound variables.
void ActivationFrame::PrintToJSONObjectRegular(JSONObject* jsobj) {
  const Script& script = Script::Handle(SourceScript());
  jsobj->AddProperty("type" , "Frame" );
  jsobj->AddProperty("kind" , KindToCString(kind_));
  const TokenPosition pos = TokenPos().SourcePosition();
  jsobj->AddLocation(script, pos);
  jsobj->AddProperty("function" , function());
  if (IsInterpreted()) {
    jsobj->AddProperty("code" , bytecode());
  } else {
    jsobj->AddProperty("code" , code());
  }
  {
    JSONArray jsvars(jsobj, "vars" );
    const int num_vars = NumLocalVariables();
    for (intptr_t v = 0; v < num_vars; v++) {
      String& var_name = String::Handle();
      Instance& var_value = Instance::Handle();
      TokenPosition declaration_token_pos;
      TokenPosition visible_start_token_pos;
      TokenPosition visible_end_token_pos;
      VariableAt(v, &var_name, &declaration_token_pos, &visible_start_token_pos,
                 &visible_end_token_pos, &var_value);
      // Synthetic variables (':'-prefixed) are internal; do not expose them.
      if (!IsSyntheticVariableName(var_name)) {
        JSONObject jsvar(&jsvars);
        jsvar.AddProperty("type" , "BoundVariable" );
        const char* scrubbed_var_name = String::ScrubName(var_name);
        jsvar.AddProperty("name" , scrubbed_var_name);
        jsvar.AddProperty("value" , var_value);
        // Where was the variable declared?
        jsvar.AddProperty("declarationTokenPos" , declaration_token_pos);
        // When the variable becomes visible to the scope.
        jsvar.AddProperty("scopeStartTokenPos" , visible_start_token_pos);
        // When the variable stops being visible to the scope.
        jsvar.AddProperty("scopeEndTokenPos" , visible_end_token_pos);
      }
    }
  }
}
1655 | |
1656 | void ActivationFrame::PrintToJSONObjectAsyncCausal(JSONObject* jsobj) { |
1657 | jsobj->AddProperty("type" , "Frame" ); |
1658 | jsobj->AddProperty("kind" , KindToCString(kind_)); |
1659 | const Script& script = Script::Handle(SourceScript()); |
1660 | const TokenPosition pos = TokenPos().SourcePosition(); |
1661 | jsobj->AddLocation(script, pos); |
1662 | jsobj->AddProperty("function" , function()); |
1663 | if (IsInterpreted()) { |
1664 | jsobj->AddProperty("code" , bytecode()); |
1665 | } else { |
1666 | jsobj->AddProperty("code" , code()); |
1667 | } |
1668 | } |
1669 | |
1670 | void ActivationFrame::PrintToJSONObjectAsyncSuspensionMarker( |
1671 | JSONObject* jsobj) { |
1672 | jsobj->AddProperty("type" , "Frame" ); |
1673 | jsobj->AddProperty("kind" , KindToCString(kind_)); |
1674 | jsobj->AddProperty("marker" , "AsynchronousSuspension" ); |
1675 | } |
1676 | |
1677 | static bool IsFunctionVisible(const Function& function) { |
1678 | return FLAG_show_invisible_frames || function.is_visible(); |
1679 | } |
1680 | |
1681 | void DebuggerStackTrace::AddActivation(ActivationFrame* frame) { |
1682 | if (IsFunctionVisible(frame->function())) { |
1683 | trace_.Add(frame); |
1684 | } |
1685 | } |
1686 | |
1687 | void DebuggerStackTrace::AddMarker(ActivationFrame::Kind marker) { |
1688 | ASSERT(marker == ActivationFrame::kAsyncSuspensionMarker); |
1689 | trace_.Add(new ActivationFrame(marker)); |
1690 | } |
1691 | |
// Appends an async-causal frame for compiled |code| at |pc|. Such frames are
// historical, not active, so fp/sp are 0 and there is no deopt frame.
void DebuggerStackTrace::AddAsyncCausalFrame(uword pc, const Code& code) {
  trace_.Add(new ActivationFrame(pc, 0, 0, code, Array::Handle(), 0,
                                 ActivationFrame::kAsyncCausal));
}
1696 | |
1697 | #if !defined(DART_PRECOMPILED_RUNTIME) |
// Appends an async-causal frame for interpreted |bytecode| at |pc|; like the
// Code overload, fp/sp are 0 because the frame is not live.
void DebuggerStackTrace::AddAsyncCausalFrame(uword pc,
                                             const Bytecode& bytecode) {
  trace_.Add(
      new ActivationFrame(pc, 0, 0, bytecode, ActivationFrame::kAsyncCausal));
}
1703 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
1704 | |
// Set of PC-descriptor kinds at which a code breakpoint may be patched into
// compiled code (see the CodeBreakpoint constructor's ASSERT).
const uint8_t kSafepointKind = PcDescriptorsLayout::kIcCall |
                               PcDescriptorsLayout::kUnoptStaticCall |
                               PcDescriptorsLayout::kRuntimeCall;
1708 | |
// Creates a breakpoint in compiled |code| at |pc|. The breakpoint starts out
// disabled; Enable() patches the code.
CodeBreakpoint::CodeBreakpoint(const Code& code,
                               TokenPosition token_pos,
                               uword pc,
                               PcDescriptorsLayout::Kind kind)
    : code_(code.raw()),
      bytecode_(Bytecode::null()),
      token_pos_(token_pos),
      pc_(pc),
      line_number_(-1),  // Computed lazily in LineNumber().
      is_enabled_(false),
      bpt_location_(NULL),
      next_(NULL),
      breakpoint_kind_(kind),
      saved_value_(Code::null()) {
  ASSERT(!code.IsNull());
  ASSERT(token_pos_.IsReal());
  ASSERT(pc_ != 0);
  // Code breakpoints may only be placed at call safepoints.
  ASSERT((breakpoint_kind_ & kSafepointKind) != 0);
}
1728 | |
// Creates a breakpoint in interpreted |bytecode| at |pc|. Only meaningful
// when the interpreter is enabled. Starts out disabled; Enable() sets the
// bytecode breakpoint.
CodeBreakpoint::CodeBreakpoint(const Bytecode& bytecode,
                               TokenPosition token_pos,
                               uword pc)
    : code_(Code::null()),
      bytecode_(bytecode.raw()),
      token_pos_(token_pos),
      pc_(pc),
      line_number_(-1),  // Computed lazily in LineNumber().
      is_enabled_(false),
      bpt_location_(NULL),
      next_(NULL),
      // Bytecode breakpoints are not tied to a particular safepoint kind.
      breakpoint_kind_(PcDescriptorsLayout::kAnyKind),
      saved_value_(Code::null()) {
  ASSERT(!bytecode.IsNull());
  ASSERT(FLAG_enable_interpreter);
  ASSERT(token_pos_.IsReal());
  ASSERT(pc_ != 0);
}
1747 | |
// The owner must Disable() the breakpoint before destroying it.
CodeBreakpoint::~CodeBreakpoint() {
  // Make sure we don't leave patched code behind.
  ASSERT(!IsEnabled());
// Poison the data so we catch use after free errors.
#ifdef DEBUG
  code_ = Code::null();
  bytecode_ = Bytecode::null();
  pc_ = 0ul;
  bpt_location_ = NULL;
  next_ = NULL;
  breakpoint_kind_ = PcDescriptorsLayout::kOther;
#endif
}
1761 | |
1762 | FunctionPtr CodeBreakpoint::function() const { |
1763 | if (IsInterpreted()) { |
1764 | ASSERT(Bytecode::Handle(bytecode_).function() != Function::null()); |
1765 | return Bytecode::Handle(bytecode_).function(); |
1766 | } else { |
1767 | return Code::Handle(code_).function(); |
1768 | } |
1769 | } |
1770 | |
1771 | ScriptPtr CodeBreakpoint::SourceCode() { |
1772 | const Function& func = Function::Handle(this->function()); |
1773 | return func.script(); |
1774 | } |
1775 | |
1776 | StringPtr CodeBreakpoint::SourceUrl() { |
1777 | const Script& script = Script::Handle(SourceCode()); |
1778 | return script.url(); |
1779 | } |
1780 | |
1781 | intptr_t CodeBreakpoint::LineNumber() { |
1782 | // Compute line number lazily since it causes scanning of the script. |
1783 | if (line_number_ < 0) { |
1784 | const Script& script = Script::Handle(SourceCode()); |
1785 | script.GetTokenLocation(token_pos_, &line_number_, NULL); |
1786 | } |
1787 | return line_number_; |
1788 | } |
1789 | |
// Activates the breakpoint by patching the target bytecode or machine code.
// No-op when already enabled.
void CodeBreakpoint::Enable() {
  if (!is_enabled_) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      SetBytecodeBreakpoint();
#else
      // No interpreter exists in the precompiled runtime.
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      PatchCode();
    }
  }
  ASSERT(is_enabled_);
}
1804 | |
// Deactivates the breakpoint, restoring the original bytecode or machine
// code. No-op when already disabled.
void CodeBreakpoint::Disable() {
  if (is_enabled_) {
    if (IsInterpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      UnsetBytecodeBreakpoint();
#else
      // No interpreter exists in the precompiled runtime.
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      RestoreCode();
    }
  }
  ASSERT(!is_enabled_);
}
1819 | |
// Constructs a debugger for |isolate| with no breakpoints, no cached stack
// traces, and breakpoint IDs starting at 1.
Debugger::Debugger(Isolate* isolate)
    : isolate_(isolate),
      next_id_(1),
      latent_locations_(NULL),
      breakpoint_locations_(NULL),
      code_breakpoints_(NULL),
      resume_action_(kContinue),
      resume_frame_index_(-1),
      post_deopt_frame_index_(-1),
      ignore_breakpoints_(false),
      pause_event_(NULL),
      stack_trace_(NULL),
      async_causal_stack_trace_(NULL),
      awaiter_stack_trace_(NULL),
      stepping_fp_(0),  // 0 means "no stepping frame registered".
      interpreted_stepping_(false),
      last_stepping_fp_(0),
      last_stepping_pos_(TokenPosition::kNoSource),
      async_stepping_fp_(0),
      interpreted_async_stepping_(false),
      top_frame_awaiter_(Object::null()),
      skip_next_step_(false),
      needs_breakpoint_cleanup_(false),
      synthetic_async_breakpoint_(NULL),
      exc_pause_info_(kNoPauseOnExceptions) {}
1845 | |
// Shutdown() must have run before destruction: no pause may be in progress
// and all breakpoint and stack-trace state must already be torn down.
Debugger::~Debugger() {
  ASSERT(!IsPaused());
  ASSERT(latent_locations_ == NULL);
  ASSERT(breakpoint_locations_ == NULL);
  ASSERT(code_breakpoints_ == NULL);
  ASSERT(stack_trace_ == NULL);
  ASSERT(async_causal_stack_trace_ == NULL);
  ASSERT(synthetic_async_breakpoint_ == NULL);
}
1855 | |
1856 | void Debugger::Shutdown() { |
1857 | // TODO(johnmccutchan): Do not create a debugger for isolates that don't need |
1858 | // them. Then, assert here that isolate_ is not one of those isolates. |
1859 | if (Isolate::IsVMInternalIsolate(isolate_)) { |
1860 | return; |
1861 | } |
1862 | while (breakpoint_locations_ != NULL) { |
1863 | BreakpointLocation* loc = breakpoint_locations_; |
1864 | breakpoint_locations_ = breakpoint_locations_->next(); |
1865 | delete loc; |
1866 | } |
1867 | while (latent_locations_ != NULL) { |
1868 | BreakpointLocation* loc = latent_locations_; |
1869 | latent_locations_ = latent_locations_->next(); |
1870 | delete loc; |
1871 | } |
1872 | while (code_breakpoints_ != NULL) { |
1873 | CodeBreakpoint* cbpt = code_breakpoints_; |
1874 | code_breakpoints_ = code_breakpoints_->next(); |
1875 | cbpt->Disable(); |
1876 | delete cbpt; |
1877 | } |
1878 | if (NeedsIsolateEvents()) { |
1879 | ServiceEvent event(isolate_, ServiceEvent::kIsolateExit); |
1880 | InvokeEventHandler(&event); |
1881 | } |
1882 | } |
1883 | |
// Hook invoked when the isolate becomes runnable; intentionally a no-op.
void Debugger::OnIsolateRunnable() {}
1885 | |
1886 | bool Debugger::SetupStepOverAsyncSuspension(const char** error) { |
1887 | ActivationFrame* top_frame = TopDartFrame(); |
1888 | if (!IsAtAsyncJump(top_frame)) { |
1889 | // Not at an async operation. |
1890 | if (error != nullptr) { |
1891 | *error = "Isolate must be paused at an async suspension point" ; |
1892 | } |
1893 | return false; |
1894 | } |
1895 | Object& closure = Object::Handle(top_frame->GetAsyncOperation()); |
1896 | ASSERT(!closure.IsNull()); |
1897 | ASSERT(closure.IsInstance()); |
1898 | ASSERT(Instance::Cast(closure).IsClosure()); |
1899 | Breakpoint* bpt = SetBreakpointAtActivation(Instance::Cast(closure), true); |
1900 | if (bpt == NULL) { |
1901 | // Unable to set the breakpoint. |
1902 | if (error != nullptr) { |
1903 | *error = "Unable to set breakpoint at async suspension point" ; |
1904 | } |
1905 | return false; |
1906 | } |
1907 | return true; |
1908 | } |
1909 | |
// Records the action to perform when the isolate resumes. |frame_index| is
// only meaningful for kStepRewind. Returns false (with *error set when
// provided) if the requested action cannot be carried out.
bool Debugger::SetResumeAction(ResumeAction action,
                               intptr_t frame_index,
                               const char** error) {
  if (error != nullptr) {
    *error = NULL;
  }
  resume_frame_index_ = -1;
  switch (action) {
    case kStepInto:
    case kStepOver:
    case kStepOut:
    case kContinue:
      resume_action_ = action;
      return true;
    case kStepRewind:
      // Rewinding is only allowed when the target frame can be rebuilt.
      if (!CanRewindFrame(frame_index, error)) {
        return false;
      }
      resume_action_ = kStepRewind;
      resume_frame_index_ = frame_index;
      return true;
    case kStepOverAsyncSuspension:
      // Implemented by setting a breakpoint on the awaiter, not by a
      // resume-action flag.
      return SetupStepOverAsyncSuspension(error);
    default:
      UNREACHABLE();
      return false;
  }
}
1938 | |
1939 | // Deoptimize all functions in the isolate. |
1940 | // TODO(hausner): Actually we only need to deoptimize those functions |
1941 | // that inline the function that contains the newly created breakpoint. |
1942 | // We currently don't have this info so we deoptimize all functions. |
1943 | void Debugger::DeoptimizeWorld() { |
1944 | #if defined(DART_PRECOMPILED_RUNTIME) |
1945 | UNREACHABLE(); |
1946 | #else |
1947 | BackgroundCompiler::Stop(isolate_); |
1948 | if (FLAG_trace_deoptimization) { |
1949 | THR_Print("Deopt for debugger\n" ); |
1950 | } |
1951 | isolate_->set_has_attempted_stepping(true); |
1952 | |
1953 | DeoptimizeFunctionsOnStack(); |
1954 | |
1955 | // Iterate over all classes, deoptimize functions. |
1956 | // TODO(hausner): Could possibly be combined with RemoveOptimizedCode() |
1957 | const ClassTable& class_table = *isolate_->class_table(); |
1958 | Zone* zone = Thread::Current()->zone(); |
1959 | CallSiteResetter resetter(zone); |
1960 | Class& cls = Class::Handle(zone); |
1961 | Array& functions = Array::Handle(zone); |
1962 | GrowableObjectArray& closures = GrowableObjectArray::Handle(zone); |
1963 | Function& function = Function::Handle(zone); |
1964 | Code& code = Code::Handle(zone); |
1965 | |
1966 | const intptr_t num_classes = class_table.NumCids(); |
1967 | const intptr_t num_tlc_classes = class_table.NumTopLevelCids(); |
1968 | for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) { |
1969 | const classid_t cid = |
1970 | i < num_classes ? i : ClassTable::CidFromTopLevelIndex(i - num_classes); |
1971 | if (class_table.HasValidClassAt(cid)) { |
1972 | cls = class_table.At(cid); |
1973 | |
1974 | // Disable optimized functions. |
1975 | functions = cls.functions(); |
1976 | if (!functions.IsNull()) { |
1977 | intptr_t num_functions = functions.Length(); |
1978 | for (intptr_t pos = 0; pos < num_functions; pos++) { |
1979 | function ^= functions.At(pos); |
1980 | ASSERT(!function.IsNull()); |
1981 | // Force-optimized functions don't have unoptimized code and can't |
1982 | // deoptimize. Their optimized codes are still valid. |
1983 | if (function.ForceOptimize()) { |
1984 | ASSERT(!function.HasImplicitClosureFunction()); |
1985 | continue; |
1986 | } |
1987 | if (function.HasOptimizedCode()) { |
1988 | function.SwitchToUnoptimizedCode(); |
1989 | } |
1990 | code = function.unoptimized_code(); |
1991 | if (!code.IsNull()) { |
1992 | resetter.ResetSwitchableCalls(code); |
1993 | } |
1994 | // Also disable any optimized implicit closure functions. |
1995 | if (function.HasImplicitClosureFunction()) { |
1996 | function = function.ImplicitClosureFunction(); |
1997 | if (function.HasOptimizedCode()) { |
1998 | function.SwitchToUnoptimizedCode(); |
1999 | } |
2000 | code = function.unoptimized_code(); |
2001 | if (!code.IsNull()) { |
2002 | resetter.ResetSwitchableCalls(code); |
2003 | } |
2004 | } |
2005 | } |
2006 | } |
2007 | } |
2008 | } |
2009 | |
2010 | // Disable optimized closure functions. |
2011 | closures = isolate_->object_store()->closure_functions(); |
2012 | const intptr_t num_closures = closures.Length(); |
2013 | for (intptr_t pos = 0; pos < num_closures; pos++) { |
2014 | function ^= closures.At(pos); |
2015 | ASSERT(!function.IsNull()); |
2016 | if (function.HasOptimizedCode()) { |
2017 | function.SwitchToUnoptimizedCode(); |
2018 | } |
2019 | code = function.unoptimized_code(); |
2020 | if (!code.IsNull()) { |
2021 | resetter.ResetSwitchableCalls(code); |
2022 | } |
2023 | } |
2024 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2025 | } |
2026 | |
// Propagates the single-stepping flag to the isolate and, when one exists,
// to the interpreter so both compiled and interpreted code honor it.
void Debugger::NotifySingleStepping(bool value) const {
  isolate_->set_single_step(value);
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Do not call Interpreter::Current(), which may allocate an interpreter.
  Interpreter* interpreter = Thread::Current()->interpreter();
  if (interpreter != nullptr) {
    // Do not reset is_debugging to false if bytecode debug breaks are enabled.
    interpreter->set_is_debugging(value || HasEnabledBytecodeBreakpoints());
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
2038 | |
2039 | ActivationFrame* Debugger::CollectDartFrame(Isolate* isolate, |
2040 | uword pc, |
2041 | StackFrame* frame, |
2042 | const Code& code, |
2043 | const Array& deopt_frame, |
2044 | intptr_t deopt_frame_offset, |
2045 | ActivationFrame::Kind kind) { |
2046 | ASSERT(code.ContainsInstructionAt(pc)); |
2047 | ActivationFrame* activation = |
2048 | new ActivationFrame(pc, frame->fp(), frame->sp(), code, deopt_frame, |
2049 | deopt_frame_offset, kind); |
2050 | if (FLAG_trace_debugger_stacktrace) { |
2051 | const Context& ctx = activation->GetSavedCurrentContext(); |
2052 | OS::PrintErr("\tUsing saved context: %s\n" , ctx.ToCString()); |
2053 | OS::PrintErr("\tLine number: %" Pd "\n" , activation->LineNumber()); |
2054 | } |
2055 | return activation; |
2056 | } |
2057 | |
2058 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2059 | ActivationFrame* Debugger::CollectDartFrame(Isolate* isolate, |
2060 | uword pc, |
2061 | StackFrame* frame, |
2062 | const Bytecode& bytecode, |
2063 | ActivationFrame::Kind kind) { |
2064 | ASSERT(bytecode.ContainsInstructionAt(pc)); |
2065 | ActivationFrame* activation = |
2066 | new ActivationFrame(pc, frame->fp(), frame->sp(), bytecode, kind); |
2067 | if (FLAG_trace_debugger_stacktrace) { |
2068 | const Context& ctx = activation->GetSavedCurrentContext(); |
2069 | OS::PrintErr("\tUsing saved context: %s\n" , ctx.ToCString()); |
2070 | OS::PrintErr("\tLine number: %" Pd "\n" , activation->LineNumber()); |
2071 | } |
2072 | return activation; |
2073 | } |
2074 | |
// Materializes the unoptimized-frame view of an optimized |frame| into an
// Array, without actually deoptimizing the running code. The returned array
// is what ActivationFrame uses to read locals out of optimized frames.
ArrayPtr Debugger::DeoptimizeToArray(Thread* thread,
                                     StackFrame* frame,
                                     const Code& code) {
  // Force-optimized code has no deopt info, so it cannot be handled here.
  ASSERT(code.is_optimized() && !code.is_force_optimized());
  Isolate* isolate = thread->isolate();
  // Create the DeoptContext for this deoptimization.
  DeoptContext* deopt_context =
      new DeoptContext(frame, code, DeoptContext::kDestIsAllocated, NULL, NULL,
                       true, false /* deoptimizing_code */);
  isolate->set_deopt_context(deopt_context);

  deopt_context->FillDestFrame();
  deopt_context->MaterializeDeferredObjects();
  const Array& dest_frame =
      Array::Handle(thread->zone(), deopt_context->DestFrameAsArray());

  // The context is only needed for the duration of the materialization.
  isolate->set_deopt_context(NULL);
  delete deopt_context;

  return dest_frame.raw();
}
2096 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2097 | |
// Walks the current thread's stack and builds a DebuggerStackTrace of all
// visible Dart frames, expanding inlined frames of optimized code via
// AppendCodeFrames.
DebuggerStackTrace* Debugger::CollectStackTrace() {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  Code& code = Code::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  Code& inlined_code = Code::Handle(zone);
  Array& deopt_frame = Array::Handle(zone);

  // NOTE: the frame advance lives in the for-header, so `continue` below
  // safely moves on to the next frame.
  for (StackFrame* frame = iterator.NextFrame(); frame != NULL;
       frame = iterator.NextFrame()) {
    ASSERT(frame->IsValid());
    if (FLAG_trace_debugger_stacktrace) {
      OS::PrintErr("CollectStackTrace: visiting frame:\n\t%s\n",
                   frame->ToCString());
    }
    if (frame->IsDartFrame()) {
      if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode = frame->LookupDartBytecode();
        if (bytecode.function() == Function::null()) {
          continue;  // Skip bytecode stub frame.
        }
        stack_trace->AddActivation(
            CollectDartFrame(isolate, frame->pc(), frame, bytecode));
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        code = frame->LookupDartCode();
        AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code,
                         &inlined_code, &deopt_frame);
      }
    }
  }
  return stack_trace;
}
2141 | |
// Appends the activation(s) for one compiled-code stack frame to
// |stack_trace|. For optimized code this expands inlined functions into
// separate activations (using a materialized deopt frame for locals);
// force-optimized frames are skipped since they cannot be inspected.
// |code|, |inlined_code|, and |deopt_frame| are caller-provided reusable
// handles.
void Debugger::AppendCodeFrames(Thread* thread,
                                Isolate* isolate,
                                Zone* zone,
                                DebuggerStackTrace* stack_trace,
                                StackFrame* frame,
                                Code* code,
                                Code* inlined_code,
                                Array* deopt_frame) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (code->is_optimized()) {
    if (code->is_force_optimized()) {
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, code->function());
        ASSERT(!function.IsNull());
        OS::PrintErr(
            "CollectStackTrace: skipping force-optimized function: %s\n",
            function.ToFullyQualifiedCString());
      }
      return;  // Skip frame of force-optimized (and non-debuggable) function.
    }
    // TODO(rmacnak): Use CodeSourceMap
    *deopt_frame = DeoptimizeToArray(thread, frame, *code);
    // One activation per inlined function, innermost first.
    for (InlinedFunctionsIterator it(*code, frame->pc()); !it.Done();
         it.Advance()) {
      *inlined_code = it.code();
      if (FLAG_trace_debugger_stacktrace) {
        const Function& function = Function::Handle(zone, it.function());
        ASSERT(!function.IsNull());
        OS::PrintErr("CollectStackTrace: visiting inlined function: %s\n",
                     function.ToFullyQualifiedCString());
      }
      intptr_t deopt_frame_offset = it.GetDeoptFpOffset();
      stack_trace->AddActivation(CollectDartFrame(isolate, it.pc(), frame,
                                                  *inlined_code, *deopt_frame,
                                                  deopt_frame_offset));
    }
    return;
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  // Unoptimized (or AOT) frame: a single activation, no deopt frame needed.
  stack_trace->AddActivation(CollectDartFrame(isolate, frame->pc(), frame,
                                              *code, Object::null_array(), 0));
}
2184 | |
2185 | DebuggerStackTrace* Debugger::CollectAsyncCausalStackTrace() { |
2186 | if (FLAG_lazy_async_stacks) { |
2187 | return CollectAsyncLazyStackTrace(); |
2188 | } |
2189 | if (!FLAG_causal_async_stacks) { |
2190 | return NULL; |
2191 | } |
2192 | Thread* thread = Thread::Current(); |
2193 | Zone* zone = thread->zone(); |
2194 | Isolate* isolate = thread->isolate(); |
2195 | DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8); |
2196 | |
2197 | Object& code_obj = Object::Handle(zone); |
2198 | Code& code = Code::Handle(zone); |
2199 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2200 | Bytecode& bytecode = Bytecode::Handle(zone); |
2201 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2202 | Smi& offset = Smi::Handle(); |
2203 | Code& inlined_code = Code::Handle(zone); |
2204 | Array& deopt_frame = Array::Handle(zone); |
2205 | |
2206 | Function& async_function = Function::Handle(zone); |
2207 | class StackTrace& async_stack_trace = StackTrace::Handle(zone); |
2208 | Array& async_code_array = Array::Handle(zone); |
2209 | Array& async_pc_offset_array = Array::Handle(zone); |
2210 | StackTraceUtils::ExtractAsyncStackTraceInfo( |
2211 | thread, &async_function, &async_stack_trace, &async_code_array, |
2212 | &async_pc_offset_array); |
2213 | |
2214 | if (async_function.IsNull()) { |
2215 | return NULL; |
2216 | } |
2217 | |
2218 | bool sync_async_end = false; |
2219 | intptr_t synchronous_stack_trace_length = |
2220 | StackTraceUtils::CountFrames(thread, 0, async_function, &sync_async_end); |
2221 | |
2222 | // Append the top frames from the synchronous stack trace, up until the active |
2223 | // asynchronous function. We truncate the remainder of the synchronous |
2224 | // stack trace because it contains activations that are part of the |
2225 | // asynchronous dispatch mechanisms. |
2226 | StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
2227 | Thread::Current(), |
2228 | StackFrameIterator::kNoCrossThreadIteration); |
2229 | StackFrame* frame = iterator.NextFrame(); |
2230 | while (synchronous_stack_trace_length > 0) { |
2231 | ASSERT(frame != NULL); |
2232 | if (frame->IsDartFrame()) { |
2233 | if (frame->is_interpreted()) { |
2234 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2235 | bytecode = frame->LookupDartBytecode(); |
2236 | if (bytecode.function() == Function::null()) { |
2237 | continue; // Skip bytecode stub frame. |
2238 | } |
2239 | stack_trace->AddActivation( |
2240 | CollectDartFrame(isolate, frame->pc(), frame, bytecode)); |
2241 | #else |
2242 | UNREACHABLE(); |
2243 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2244 | } else { |
2245 | code = frame->LookupDartCode(); |
2246 | AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code, |
2247 | &inlined_code, &deopt_frame); |
2248 | } |
2249 | synchronous_stack_trace_length--; |
2250 | } |
2251 | frame = iterator.NextFrame(); |
2252 | } |
2253 | |
2254 | // Now we append the asynchronous causal stack trace. These are not active |
2255 | // frames but a historical record of how this asynchronous function was |
2256 | // activated. |
2257 | |
2258 | intptr_t frame_skip = |
2259 | sync_async_end ? StackTrace::kSyncAsyncCroppedFrames : 0; |
2260 | while (!async_stack_trace.IsNull()) { |
2261 | for (intptr_t i = frame_skip; i < async_stack_trace.Length(); i++) { |
2262 | code_obj = async_stack_trace.CodeAtFrame(i); |
2263 | if (code_obj.IsNull()) { |
2264 | break; |
2265 | } |
2266 | if (code_obj.raw() == StubCode::AsynchronousGapMarker().raw()) { |
2267 | stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker); |
2268 | // The frame immediately below the asynchronous gap marker is the |
2269 | // identical to the frame above the marker. Skip the frame to enhance |
2270 | // the readability of the trace. |
2271 | i++; |
2272 | } else { |
2273 | offset = Smi::RawCast(async_stack_trace.PcOffsetAtFrame(i)); |
2274 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2275 | if (code_obj.IsBytecode()) { |
2276 | bytecode ^= code_obj.raw(); |
2277 | uword pc = bytecode.PayloadStart() + offset.Value(); |
2278 | stack_trace->AddAsyncCausalFrame(pc, bytecode); |
2279 | continue; |
2280 | } |
2281 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2282 | code ^= code_obj.raw(); |
2283 | uword pc = code.PayloadStart() + offset.Value(); |
2284 | if (code.is_optimized()) { |
2285 | for (InlinedFunctionsIterator it(code, pc); !it.Done(); |
2286 | it.Advance()) { |
2287 | inlined_code = it.code(); |
2288 | stack_trace->AddAsyncCausalFrame(it.pc(), inlined_code); |
2289 | } |
2290 | } else { |
2291 | stack_trace->AddAsyncCausalFrame(pc, code); |
2292 | } |
2293 | } |
2294 | } |
2295 | // Follow the link. |
2296 | frame_skip = async_stack_trace.skip_sync_start_in_parent_stack() |
2297 | ? StackTrace::kSyncAsyncCroppedFrames |
2298 | : 0; |
2299 | async_stack_trace = async_stack_trace.async_link(); |
2300 | } |
2301 | |
2302 | return stack_trace; |
2303 | } |
2304 | |
// Builds an async stack trace using the lazy-async-stacks mechanism: the
// shared frame walker collects the live synchronous frames via the
// |on_sync_frame| callback, then the recorded awaiter frames (code/pc-offset
// pairs) are appended, with gap markers translated into suspension markers.
// Returns nullptr when there is no async portion on the stack.
DebuggerStackTrace* Debugger::CollectAsyncLazyStackTrace() {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Isolate* isolate = thread->isolate();

  Code& code = Code::Handle(zone);
  Code& inlined_code = Code::Handle(zone);
  Smi& offset = Smi::Handle();
  Array& deopt_frame = Array::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  constexpr intptr_t kDefaultStackAllocation = 8;
  auto stack_trace = new DebuggerStackTrace(kDefaultStackAllocation);

  // Invoked by CollectFramesLazy for each live synchronous frame.
  std::function<void(StackFrame*)> on_sync_frame = [&](StackFrame* frame) {
    if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      bytecode = frame->LookupDartBytecode();
      stack_trace->AddActivation(
          CollectDartFrame(isolate, frame->pc(), frame, bytecode));
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    } else {
      code = frame->LookupDartCode();
      AppendCodeFrames(thread, isolate, zone, stack_trace, frame, &code,
                       &inlined_code, &deopt_frame);
    }
  };

  const auto& code_array = GrowableObjectArray::ZoneHandle(
      zone, GrowableObjectArray::New(kDefaultStackAllocation));
  const auto& pc_offset_array = GrowableObjectArray::ZoneHandle(
      zone, GrowableObjectArray::New(kDefaultStackAllocation));
  bool has_async = false;
  StackTraceUtils::CollectFramesLazy(thread, code_array, pc_offset_array,
                                     /*skip_frames=*/0, &on_sync_frame,
                                     &has_async);

  // A purely synchronous stack is not an async causal trace.
  if (!has_async) {
    return nullptr;
  }

  // Append the awaiter frames that follow the synchronous prefix already
  // added via on_sync_frame.
  const intptr_t length = code_array.Length();
  for (intptr_t i = stack_trace->Length(); i < length; ++i) {
    code ^= code_array.At(i);
    offset ^= pc_offset_array.At(i);
    if (code.raw() == StubCode::AsynchronousGapMarker().raw()) {
      stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker);
    } else {
      const uword absolute_pc = code.PayloadStart() + offset.Value();
      stack_trace->AddAsyncCausalFrame(absolute_pc, code);
    }
  }

  return stack_trace;
}
2364 | |
2365 | DebuggerStackTrace* Debugger::CollectAwaiterReturnStackTrace() { |
2366 | #if defined(DART_PRECOMPILED_RUNTIME) |
2367 | // Causal async stacks are not supported in the AOT runtime. |
2368 | ASSERT(!FLAG_async_debugger); |
2369 | return NULL; |
2370 | #else |
2371 | if (!FLAG_async_debugger) { |
2372 | return NULL; |
2373 | } |
2374 | |
2375 | Thread* thread = Thread::Current(); |
2376 | Zone* zone = thread->zone(); |
2377 | Isolate* isolate = thread->isolate(); |
2378 | DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8); |
2379 | |
2380 | StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
2381 | Thread::Current(), |
2382 | StackFrameIterator::kNoCrossThreadIteration); |
2383 | |
2384 | Object& code_object = Object::Handle(zone); |
2385 | Code& code = Code::Handle(zone); |
2386 | Bytecode& bytecode = Bytecode::Handle(zone); |
2387 | Smi& offset = Smi::Handle(zone); |
2388 | Function& function = Function::Handle(zone); |
2389 | Code& inlined_code = Code::Handle(zone); |
2390 | Closure& async_activation = Closure::Handle(zone); |
2391 | Object& next_async_activation = Object::Handle(zone); |
2392 | Array& deopt_frame = Array::Handle(zone); |
2393 | class StackTrace& async_stack_trace = StackTrace::Handle(zone); |
2394 | bool stack_has_async_function = false; |
2395 | |
2396 | // Number of frames we are trying to skip that form "sync async" entry. |
2397 | int skip_sync_async_frames_count = -1; |
2398 | String& function_name = String::Handle(zone); |
2399 | for (StackFrame* frame = iterator.NextFrame(); frame != NULL; |
2400 | frame = iterator.NextFrame()) { |
2401 | ASSERT(frame->IsValid()); |
2402 | if (FLAG_trace_debugger_stacktrace) { |
2403 | OS::PrintErr("CollectAwaiterReturnStackTrace: visiting frame:\n\t%s\n" , |
2404 | frame->ToCString()); |
2405 | } |
2406 | if (frame->IsDartFrame()) { |
2407 | if (frame->is_interpreted()) { |
2408 | bytecode = frame->LookupDartBytecode(); |
2409 | function = bytecode.function(); |
2410 | if (function.IsNull()) { |
2411 | continue; // Skip bytecode stub frame. |
2412 | } |
2413 | |
2414 | if (skip_sync_async_frames_count > 0) { |
2415 | function_name = function.QualifiedScrubbedName(); |
2416 | if (!StackTraceUtils::CheckAndSkipAsync(&skip_sync_async_frames_count, |
2417 | function_name)) { |
2418 | // Unexpected function in synchronous call of async function. |
2419 | break; |
2420 | } |
2421 | } |
2422 | |
2423 | if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) { |
2424 | ActivationFrame* activation = |
2425 | CollectDartFrame(isolate, frame->pc(), frame, bytecode, |
2426 | ActivationFrame::kAsyncActivation); |
2427 | ASSERT(activation != NULL); |
2428 | stack_trace->AddActivation(activation); |
2429 | stack_has_async_function = true; |
2430 | // Grab the awaiter. |
2431 | async_activation ^= activation->GetAsyncAwaiter(); |
2432 | async_stack_trace ^= activation->GetCausalStack(); |
2433 | // Interpreted bytecode does not invoke _ClosureCall(). |
2434 | // Skip _AsyncAwaitCompleterStart() only. |
2435 | skip_sync_async_frames_count = 1; |
2436 | } else { |
2437 | stack_trace->AddActivation( |
2438 | CollectDartFrame(isolate, frame->pc(), frame, bytecode)); |
2439 | } |
2440 | } else { |
2441 | code = frame->LookupDartCode(); |
2442 | if (code.is_optimized()) { |
2443 | if (code.is_force_optimized()) { |
2444 | if (FLAG_trace_debugger_stacktrace) { |
2445 | function = code.function(); |
2446 | ASSERT(!function.IsNull()); |
2447 | OS::PrintErr( |
2448 | "CollectAwaiterReturnStackTrace: " |
2449 | "skipping force-optimized function: %s\n" , |
2450 | function.ToFullyQualifiedCString()); |
2451 | } |
2452 | // Skip frame of force-optimized (and non-debuggable) function. |
2453 | continue; |
2454 | } |
2455 | deopt_frame = DeoptimizeToArray(thread, frame, code); |
2456 | bool found_async_awaiter = false; |
2457 | bool abort_attempt_to_navigate_through_sync_async = false; |
2458 | for (InlinedFunctionsIterator it(code, frame->pc()); !it.Done(); |
2459 | it.Advance()) { |
2460 | inlined_code = it.code(); |
2461 | function = it.function(); |
2462 | |
2463 | if (skip_sync_async_frames_count > 0) { |
2464 | function_name ^= function.QualifiedScrubbedName(); |
2465 | if (!StackTraceUtils::CheckAndSkipAsync( |
2466 | &skip_sync_async_frames_count, function_name)) { |
2467 | // Unexpected function in sync async call |
2468 | skip_sync_async_frames_count = -1; |
2469 | abort_attempt_to_navigate_through_sync_async = true; |
2470 | break; |
2471 | } |
2472 | } |
2473 | |
2474 | if (FLAG_trace_debugger_stacktrace) { |
2475 | ASSERT(!function.IsNull()); |
2476 | OS::PrintErr( |
2477 | "CollectAwaiterReturnStackTrace: " |
2478 | "visiting inlined function: %s\n " , |
2479 | function.ToFullyQualifiedCString()); |
2480 | } |
2481 | intptr_t deopt_frame_offset = it.GetDeoptFpOffset(); |
2482 | if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) { |
2483 | ActivationFrame* activation = CollectDartFrame( |
2484 | isolate, it.pc(), frame, inlined_code, deopt_frame, |
2485 | deopt_frame_offset, ActivationFrame::kAsyncActivation); |
2486 | ASSERT(activation != NULL); |
2487 | stack_trace->AddActivation(activation); |
2488 | stack_has_async_function = true; |
2489 | // Grab the awaiter. |
2490 | async_activation ^= activation->GetAsyncAwaiter(); |
2491 | found_async_awaiter = true; |
2492 | // async function might have been called synchronously, in which |
2493 | // case we need to keep going down the stack. |
2494 | // To determine how we are called we peek few more frames further |
2495 | // expecting to see Closure_call followed by |
2496 | // AsyncAwaitCompleter_start. |
2497 | // If we are able to see those functions we continue going down |
2498 | // thestack, if we are not, we break out of the loop as we are |
2499 | // not interested in exploring rest of the stack - there is only |
2500 | // dart-internal code left. |
2501 | skip_sync_async_frames_count = 2; |
2502 | } else { |
2503 | stack_trace->AddActivation( |
2504 | CollectDartFrame(isolate, it.pc(), frame, inlined_code, |
2505 | deopt_frame, deopt_frame_offset)); |
2506 | } |
2507 | } |
2508 | // Break out of outer loop. |
2509 | if (found_async_awaiter || |
2510 | abort_attempt_to_navigate_through_sync_async) { |
2511 | break; |
2512 | } |
2513 | } else { |
2514 | function = code.function(); |
2515 | |
2516 | if (skip_sync_async_frames_count > 0) { |
2517 | function_name ^= function.QualifiedScrubbedName(); |
2518 | if (!StackTraceUtils::CheckAndSkipAsync( |
2519 | &skip_sync_async_frames_count, function_name)) { |
2520 | // Unexpected function in synchronous call of async function. |
2521 | break; |
2522 | } |
2523 | } |
2524 | |
2525 | if (function.IsAsyncClosure() || function.IsAsyncGenClosure()) { |
2526 | ActivationFrame* activation = CollectDartFrame( |
2527 | isolate, frame->pc(), frame, code, Object::null_array(), 0, |
2528 | ActivationFrame::kAsyncActivation); |
2529 | ASSERT(activation != NULL); |
2530 | stack_trace->AddActivation(activation); |
2531 | stack_has_async_function = true; |
2532 | // Grab the awaiter. |
2533 | async_activation ^= activation->GetAsyncAwaiter(); |
2534 | async_stack_trace ^= activation->GetCausalStack(); |
2535 | // see comment regarding skipping frames of async functions called |
2536 | // synchronously above. |
2537 | skip_sync_async_frames_count = 2; |
2538 | } else { |
2539 | stack_trace->AddActivation(CollectDartFrame( |
2540 | isolate, frame->pc(), frame, code, Object::null_array(), 0)); |
2541 | } |
2542 | } |
2543 | } |
2544 | } |
2545 | } |
2546 | |
2547 | // If the stack doesn't have any async functions on it, return NULL. |
2548 | if (!stack_has_async_function) { |
2549 | return NULL; |
2550 | } |
2551 | |
2552 | // Append the awaiter return call stack. |
2553 | while (!async_activation.IsNull()) { |
2554 | ActivationFrame* activation = new (zone) ActivationFrame(async_activation); |
2555 | activation->ExtractTokenPositionFromAsyncClosure(); |
2556 | stack_trace->AddActivation(activation); |
2557 | if (FLAG_trace_debugger_stacktrace) { |
2558 | OS::PrintErr( |
2559 | "CollectAwaiterReturnStackTrace: visiting awaiter return " |
2560 | "closures:\n\t%s\n" , |
2561 | activation->function().ToFullyQualifiedCString()); |
2562 | } |
2563 | next_async_activation = activation->GetAsyncAwaiter(); |
2564 | if (next_async_activation.IsNull()) { |
2565 | // No more awaiters. Extract the causal stack trace (if it exists). |
2566 | async_stack_trace ^= activation->GetCausalStack(); |
2567 | break; |
2568 | } |
2569 | async_activation = Closure::RawCast(next_async_activation.raw()); |
2570 | } |
2571 | |
2572 | // Now we append the asynchronous causal stack trace. These are not active |
2573 | // frames but a historical record of how this asynchronous function was |
2574 | // activated. |
2575 | while (!async_stack_trace.IsNull()) { |
2576 | for (intptr_t i = 0; i < async_stack_trace.Length(); i++) { |
2577 | if (async_stack_trace.CodeAtFrame(i) == Code::null()) { |
2578 | // Incomplete OutOfMemory/StackOverflow trace OR array padding. |
2579 | break; |
2580 | } |
2581 | if (async_stack_trace.CodeAtFrame(i) == |
2582 | StubCode::AsynchronousGapMarker().raw()) { |
2583 | stack_trace->AddMarker(ActivationFrame::kAsyncSuspensionMarker); |
2584 | // The frame immediately below the asynchronous gap marker is the |
2585 | // identical to the frame above the marker. Skip the frame to enhance |
2586 | // the readability of the trace. |
2587 | i++; |
2588 | } else { |
2589 | code_object = async_stack_trace.CodeAtFrame(i); |
2590 | offset = Smi::RawCast(async_stack_trace.PcOffsetAtFrame(i)); |
2591 | if (code_object.IsBytecode()) { |
2592 | bytecode ^= code_object.raw(); |
2593 | if (FLAG_trace_debugger_stacktrace) { |
2594 | OS::PrintErr("CollectAwaiterReturnStackTrace: visiting frame %" Pd |
2595 | " in async causal stack trace:\n\t%s\n" , |
2596 | i, |
2597 | Function::Handle(bytecode.function()) |
2598 | .ToFullyQualifiedCString()); |
2599 | } |
2600 | uword pc = bytecode.PayloadStart() + offset.Value(); |
2601 | stack_trace->AddAsyncCausalFrame(pc, bytecode); |
2602 | } else { |
2603 | code ^= code_object.raw(); |
2604 | if (FLAG_trace_debugger_stacktrace) { |
2605 | OS::PrintErr( |
2606 | "CollectAwaiterReturnStackTrace: visiting frame %" Pd |
2607 | " in async causal stack trace:\n\t%s\n" , |
2608 | i, Function::Handle(code.function()).ToFullyQualifiedCString()); |
2609 | } |
2610 | uword pc = code.PayloadStart() + offset.Value(); |
2611 | if (code.is_optimized()) { |
2612 | for (InlinedFunctionsIterator it(code, pc); !it.Done(); |
2613 | it.Advance()) { |
2614 | inlined_code = it.code(); |
2615 | stack_trace->AddAsyncCausalFrame(it.pc(), inlined_code); |
2616 | } |
2617 | } else { |
2618 | stack_trace->AddAsyncCausalFrame(pc, code); |
2619 | } |
2620 | } |
2621 | } |
2622 | } |
2623 | // Follow the link. |
2624 | async_stack_trace = async_stack_trace.async_link(); |
2625 | } |
2626 | |
2627 | return stack_trace; |
2628 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
2629 | } |
2630 | |
2631 | ActivationFrame* Debugger::TopDartFrame() const { |
2632 | StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, |
2633 | Thread::Current(), |
2634 | StackFrameIterator::kNoCrossThreadIteration); |
2635 | StackFrame* frame; |
2636 | while (true) { |
2637 | frame = iterator.NextFrame(); |
2638 | RELEASE_ASSERT(frame != nullptr); |
2639 | if (!frame->IsDartFrame()) { |
2640 | continue; |
2641 | } |
2642 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2643 | if (frame->is_interpreted()) { |
2644 | Bytecode& bytecode = Bytecode::Handle(frame->LookupDartBytecode()); |
2645 | // Note that we do not skip bytecode stub frame (with a null function), |
2646 | // so that we can ignore a single stepping breakpoint in such a frame. |
2647 | // A bytecode stub contains a VM internal bytecode followed by a |
2648 | // ReturnTOS bytecode. The single step on the ReturnTOS bytecode |
2649 | // needs to be skipped. |
2650 | ActivationFrame* activation = |
2651 | new ActivationFrame(frame->pc(), frame->fp(), frame->sp(), bytecode); |
2652 | return activation; |
2653 | } |
2654 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2655 | Code& code = Code::Handle(frame->LookupDartCode()); |
2656 | ActivationFrame* activation = new ActivationFrame( |
2657 | frame->pc(), frame->fp(), frame->sp(), code, Object::null_array(), 0); |
2658 | return activation; |
2659 | } |
2660 | } |
2661 | |
2662 | DebuggerStackTrace* Debugger::StackTrace() { |
2663 | return (stack_trace_ != NULL) ? stack_trace_ : CollectStackTrace(); |
2664 | } |
2665 | |
// Always collects a fresh stack trace, bypassing any cached one.
DebuggerStackTrace* Debugger::CurrentStackTrace() {
  return CollectStackTrace();
}
2669 | |
2670 | DebuggerStackTrace* Debugger::AsyncCausalStackTrace() { |
2671 | return (async_causal_stack_trace_ != NULL) ? async_causal_stack_trace_ |
2672 | : CollectAsyncCausalStackTrace(); |
2673 | } |
2674 | |
// Always collects a fresh async causal stack trace, bypassing any cached one.
DebuggerStackTrace* Debugger::CurrentAsyncCausalStackTrace() {
  return CollectAsyncCausalStackTrace();
}
2678 | |
2679 | DebuggerStackTrace* Debugger::AwaiterStackTrace() { |
2680 | return (awaiter_stack_trace_ != NULL) ? awaiter_stack_trace_ |
2681 | : CollectAwaiterReturnStackTrace(); |
2682 | } |
2683 | |
// Always collects a fresh awaiter stack trace, bypassing any cached one.
DebuggerStackTrace* Debugger::CurrentAwaiterStackTrace() {
  return CollectAwaiterReturnStackTrace();
}
2687 | |
// Builds a DebuggerStackTrace from an already-recorded StackTrace object
// (e.g. one attached to an exception). The resulting activation frames
// carry zero fp/sp and no deopt info, since the stack is no longer live.
// Frames with invisible functions and empty slots are skipped; optimized
// frames are expanded into their inlined functions when requested.
DebuggerStackTrace* Debugger::StackTraceFrom(const class StackTrace& ex_trace) {
  DebuggerStackTrace* stack_trace = new DebuggerStackTrace(8);
  Function& function = Function::Handle();
  Object& code_object = Object::Handle();
  Code& code = Code::Handle();
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  // These frames describe a historical stack; there are no live frame
  // pointers or deopt state to attach.
  const uword fp = 0;
  const uword sp = 0;
  const Array& deopt_frame = Array::Handle();
  const intptr_t deopt_frame_offset = -1;

  for (intptr_t i = 0; i < ex_trace.Length(); i++) {
    code_object = ex_trace.CodeAtFrame(i);
    // Pre-allocated StackTraces may include empty slots, either (a) to indicate
    // where frames were omitted in the case a stack has more frames than the
    // pre-allocated trace (such as a stack overflow) or (b) because a stack has
    // fewer frames than the pre-allocated trace (such as memory exhaustion with
    // a shallow stack).
    if (!code_object.IsNull()) {
      if (code_object.IsBytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode ^= code_object.raw();
        function = bytecode.function();
        // Skip bytecode stub frames and frames with invisible function.
        if (!function.IsNull() && function.is_visible()) {
          ASSERT(function.raw() == bytecode.function());
          // The recorded pc offset is relative to the bytecode payload start.
          uword pc =
              bytecode.PayloadStart() + Smi::Value(ex_trace.PcOffsetAtFrame(i));
          ActivationFrame* activation =
              new ActivationFrame(pc, fp, sp, bytecode);
          stack_trace->AddActivation(activation);
        }
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        code ^= code_object.raw();
        ASSERT(code.IsFunctionCode());
        function = code.function();
        // Skip frames whose function is not visible to the debugger.
        if (function.is_visible()) {
          ASSERT(function.raw() == code.function());
          uword pc =
              code.PayloadStart() + Smi::Value(ex_trace.PcOffsetAtFrame(i));
          if (code.is_optimized() && ex_trace.expand_inlined()) {
            // Traverse inlined frames.
            for (InlinedFunctionsIterator it(code, pc); !it.Done();
                 it.Advance()) {
              function = it.function();
              code = it.code();
              ASSERT(function.raw() == code.function());
              uword pc = it.pc();
              ASSERT(pc != 0);
              ASSERT(code.PayloadStart() <= pc);
              ASSERT(pc < (code.PayloadStart() + code.Size()));

              ActivationFrame* activation = new ActivationFrame(
                  pc, fp, sp, code, deopt_frame, deopt_frame_offset);
              stack_trace->AddActivation(activation);
            }
          } else {
            ActivationFrame* activation = new ActivationFrame(
                pc, fp, sp, code, deopt_frame, deopt_frame_offset);
            stack_trace->AddActivation(activation);
          }
        }
      }
    }
  }
  return stack_trace;
}
2761 | |
2762 | void Debugger::SetExceptionPauseInfo(Dart_ExceptionPauseInfo pause_info) { |
2763 | ASSERT((pause_info == kNoPauseOnExceptions) || |
2764 | (pause_info == kPauseOnUnhandledExceptions) || |
2765 | (pause_info == kPauseOnAllExceptions)); |
2766 | exc_pause_info_ = pause_info; |
2767 | } |
2768 | |
// Returns the current exception pause mode (none / unhandled-only / all).
Dart_ExceptionPauseInfo Debugger::GetExceptionPauseInfo() const {
  return exc_pause_info_;
}
2772 | |
2773 | bool Debugger::ShouldPauseOnException(DebuggerStackTrace* stack_trace, |
2774 | const Instance& exception) { |
2775 | if (exc_pause_info_ == kNoPauseOnExceptions) { |
2776 | return false; |
2777 | } |
2778 | if (exc_pause_info_ == kPauseOnAllExceptions) { |
2779 | return true; |
2780 | } |
2781 | ASSERT(exc_pause_info_ == kPauseOnUnhandledExceptions); |
2782 | // Exceptions coming from invalid token positions should be skipped |
2783 | ActivationFrame* top_frame = stack_trace->FrameAt(0); |
2784 | if (!top_frame->TokenPos().IsReal() && top_frame->TryIndex() != -1) { |
2785 | return false; |
2786 | } |
2787 | ActivationFrame* handler_frame = stack_trace->GetHandlerFrame(exception); |
2788 | if (handler_frame == NULL) { |
2789 | // Did not find an exception handler that catches this exception. |
2790 | // Note that this check is not precise, since we can't check |
2791 | // uninstantiated types, i.e. types containing type parameters. |
2792 | // Thus, we may report an exception as unhandled when in fact |
2793 | // it will be caught once we unwind the stack. |
2794 | return true; |
2795 | } |
2796 | return false; |
2797 | } |
2798 | |
// Handles a thrown exception: decides whether the debugger should pause
// and, if so, reports a kPauseException service event and pauses the
// isolate until resumed. The cached stack traces are cleared afterwards.
void Debugger::PauseException(const Instance& exc) {
  if (FLAG_stress_async_stacks) {
    // Stress mode: exercise awaiter-stack collection on every exception.
    CollectAwaiterReturnStackTrace();
  }
  // We ignore this exception event when the VM is executing code invoked
  // by the debugger to evaluate variables values, when we see a nested
  // breakpoint or exception event, or if the debugger is not
  // interested in exception events.
  if (ignore_breakpoints_ || IsPaused() ||
      (exc_pause_info_ == kNoPauseOnExceptions)) {
    return;
  }
  DebuggerStackTrace* awaiter_stack_trace = CollectAwaiterReturnStackTrace();
  DebuggerStackTrace* stack_trace = CollectStackTrace();
  // Prefer the awaiter stack (when available) for the pause decision; it
  // reflects the logical asynchronous call chain rather than raw frames.
  if (awaiter_stack_trace != NULL) {
    if (!ShouldPauseOnException(awaiter_stack_trace, exc)) {
      return;
    }
  } else {
    if (!ShouldPauseOnException(stack_trace, exc)) {
      return;
    }
  }
  ServiceEvent event(isolate_, ServiceEvent::kPauseException);
  event.set_exception(&exc);
  if (stack_trace->Length() > 0) {
    event.set_top_frame(stack_trace->FrameAt(0));
  }
  // Cache all three trace flavors while paused so service requests see a
  // consistent view of the stack.
  CacheStackTraces(stack_trace, CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  Pause(&event);
  HandleSteppingRequest(stack_trace_);  // we may get a rewind request
  ClearCachedStackTraces();
}
2833 | |
// Helper to refine the resolved token pos.
//
// Considers the candidate safepoint token position 'pos' and updates the
// running best fit (*best_fit_pos / *best_column / *best_line /
// *best_token_pos) when 'pos' is preferable:
//  - when a column is requested, candidates whose token does not intersect
//    the requested column, or whose start column is worse than the current
//    best, are rejected;
//  - otherwise the lowest (first) token position wins.
// next_closest_token_position bounds how far the candidate token may
// extend (the next safepoint position after 'pos', or kMaxSource).
static void RefineBreakpointPos(const Script& script,
                                TokenPosition pos,
                                TokenPosition next_closest_token_position,
                                TokenPosition requested_token_pos,
                                TokenPosition last_token_pos,
                                intptr_t requested_column,
                                TokenPosition exact_token_pos,
                                TokenPosition* best_fit_pos,
                                intptr_t* best_column,
                                intptr_t* best_line,
                                TokenPosition* best_token_pos) {
  intptr_t token_start_column = -1;
  intptr_t token_line = -1;
  if (requested_column >= 0) {
    TokenPosition ignored;
    TokenPosition end_of_line_pos;
    script.GetTokenLocation(pos, &token_line, &token_start_column);
    script.TokenRangeAtLine(token_line, &ignored, &end_of_line_pos);
    // The candidate token is assumed to extend to the earlier of the end of
    // its line and the next safepoint position.
    TokenPosition token_end_pos =
        (end_of_line_pos < next_closest_token_position)
            ? end_of_line_pos
            : next_closest_token_position;

    if ((token_end_pos < exact_token_pos) ||
        (token_start_column > *best_column)) {
      // Prefer the token with the lowest column number compatible
      // with the requested column.
      return;
    }
  }

  // Prefer the lowest (first) token pos.
  if (pos < *best_fit_pos) {
    *best_fit_pos = pos;
    *best_line = token_line;  // Stays -1 when no column was requested.
    *best_column = token_start_column;
    // best_token_pos is only used when column number is specified.
    // NOTE(review): this adjusts the exact position backwards by the
    // column delta — presumably to land on the start of the best-fit
    // token; confirm against ResolveBreakpointPos' second pass.
    *best_token_pos = TokenPosition(exact_token_pos.value() -
                                    (requested_column - *best_column));
  }
}
2876 | |
2877 | // Returns the best fit token position for a breakpoint. |
2878 | // |
2879 | // Takes a range of tokens [requested_token_pos, last_token_pos] and |
2880 | // an optional column (requested_column). The range of tokens usually |
2881 | // represents one line of the program text, but can represent a larger |
2882 | // range on recursive calls. |
2883 | // |
2884 | // The best fit is found in two passes. |
2885 | // |
2886 | // The first pass finds a candidate token which: |
2887 | // |
2888 | // - is a safepoint, |
2889 | // - has the lowest column number compatible with the requested column |
2890 | // if a column has been specified, |
2891 | // and: |
2892 | // - has the lowest token position number which satisfies the above. |
2893 | // |
2894 | // When we consider a column number, we look for the token which |
2895 | // intersects the desired column. For example: |
2896 | // |
2897 | // 1 2 3 |
2898 | // 12345678901234567890 0 |
2899 | // |
2900 | // var x = function(function(y)); |
2901 | // ^ |
2902 | // |
// If we request a breakpoint at column 14, the lowest column number
// compatible with that would be column 11 (beginning of the
// 'function' token) in the example above.
2906 | // |
2907 | // Once this candidate token from the first pass is found, we then |
2908 | // have a second pass which considers only those tokens on the same |
2909 | // line as the candidate token. |
2910 | // |
2911 | // The second pass finds a best fit token which: |
2912 | // |
2913 | // - is a safepoint, |
2914 | // - has the same column number as the candidate token (perhaps |
2915 | // more than one token has the same column number), |
2916 | // and: |
2917 | // - has the lowest code address in the generated code. |
2918 | // |
2919 | // We prefer the lowest compiled code address, because this tends to |
2920 | // select the first subexpression on a line. For example in a line |
2921 | // with nested function calls f(g(x)), the call to g() will have a |
2922 | // lower compiled code address than the call to f(). |
2923 | // |
2924 | // If no best fit token can be found, the search is expanded, |
2925 | // searching through the rest of the current function by calling this |
2926 | // function recursively. |
2927 | // |
2928 | // TODO(turnidge): Given that we usually call this function with a |
2929 | // token range restricted to a single line, this could be a one-pass |
2930 | // algorithm, which would be simpler. I believe that it only needs |
2931 | // two passes to support the recursive try-the-whole-function case. |
2932 | // Rewrite this later, once there are more tests in place. |
2933 | TokenPosition Debugger::ResolveBreakpointPos(bool in_bytecode, |
2934 | const Function& func, |
2935 | TokenPosition requested_token_pos, |
2936 | TokenPosition last_token_pos, |
2937 | intptr_t requested_column, |
2938 | TokenPosition exact_token_pos) { |
2939 | ASSERT(!func.HasOptimizedCode()); |
2940 | |
2941 | if (requested_token_pos < func.token_pos()) { |
2942 | requested_token_pos = func.token_pos(); |
2943 | } |
2944 | if (last_token_pos > func.end_token_pos()) { |
2945 | last_token_pos = func.end_token_pos(); |
2946 | } |
2947 | |
2948 | Zone* zone = Thread::Current()->zone(); |
2949 | Script& script = Script::Handle(zone, func.script()); |
2950 | Code& code = Code::Handle(zone); |
2951 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2952 | Bytecode& bytecode = Bytecode::Handle(zone); |
2953 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2954 | PcDescriptors& desc = PcDescriptors::Handle(zone); |
2955 | if (in_bytecode) { |
2956 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2957 | ASSERT(func.HasBytecode()); |
2958 | bytecode = func.bytecode(); |
2959 | ASSERT(!bytecode.IsNull()); |
2960 | #else |
2961 | UNREACHABLE(); |
2962 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
2963 | } else { |
2964 | ASSERT(func.HasCode()); |
2965 | code = func.unoptimized_code(); |
2966 | ASSERT(!code.IsNull()); |
2967 | desc = code.pc_descriptors(); |
2968 | } |
2969 | |
2970 | // First pass: find the safe point which is closest to the beginning |
2971 | // of the given token range. |
2972 | TokenPosition best_fit_pos = TokenPosition::kMaxSource; |
2973 | intptr_t best_column = INT_MAX; |
2974 | intptr_t best_line = INT_MAX; |
2975 | // best_token_pos and exact_token_pos are only used |
2976 | // if column number is provided. |
2977 | TokenPosition best_token_pos = TokenPosition::kNoSource; |
2978 | |
2979 | if (in_bytecode) { |
2980 | #if !defined(DART_PRECOMPILED_RUNTIME) |
2981 | kernel::BytecodeSourcePositionsIterator iter(zone, bytecode); |
2982 | uword pc_offset = kUwordMax; |
2983 | TokenPosition pos = TokenPosition::kNoSource; |
2984 | // Ignore all possible breakpoint positions until the first DebugCheck |
2985 | // opcode of the function. |
2986 | const uword debug_check_pc = bytecode.GetFirstDebugCheckOpcodePc(); |
2987 | if (debug_check_pc != 0) { |
2988 | const uword debug_check_pc_offset = |
2989 | debug_check_pc - bytecode.PayloadStart(); |
2990 | while (iter.MoveNext()) { |
2991 | if (pc_offset != kUwordMax) { |
2992 | // Check that there is at least one 'debug checked' opcode in the last |
2993 | // source position range. |
2994 | uword pc = bytecode.GetDebugCheckedOpcodeReturnAddress( |
2995 | pc_offset, iter.PcOffset()); |
2996 | pc_offset = kUwordMax; |
2997 | if (pc != 0) { |
2998 | TokenPosition next_closest_token_position = |
2999 | TokenPosition::kMaxSource; |
3000 | if (requested_column >= 0) { |
3001 | kernel::BytecodeSourcePositionsIterator iter2(zone, bytecode); |
3002 | TokenPosition next_closest_token_position = |
3003 | TokenPosition::kMaxSource; |
3004 | while (iter2.MoveNext()) { |
3005 | const TokenPosition next = iter2.TokenPos(); |
3006 | if (next.IsReal() && next < next_closest_token_position && |
3007 | next > pos) { |
3008 | next_closest_token_position = next; |
3009 | } |
3010 | } |
3011 | } |
3012 | RefineBreakpointPos( |
3013 | script, pos, next_closest_token_position, requested_token_pos, |
3014 | last_token_pos, requested_column, exact_token_pos, |
3015 | &best_fit_pos, &best_column, &best_line, &best_token_pos); |
3016 | } |
3017 | } |
3018 | pos = iter.TokenPos(); |
3019 | if ((!pos.IsReal()) || (pos < requested_token_pos) || |
3020 | (pos > last_token_pos)) { |
3021 | // Token is not in the target range. |
3022 | continue; |
3023 | } |
3024 | pc_offset = iter.PcOffset(); |
3025 | if (pc_offset < debug_check_pc_offset) { |
3026 | // No breakpoints in prologue. |
3027 | pc_offset = debug_check_pc_offset; |
3028 | } |
3029 | } |
3030 | if (pc_offset != kUwordMax) { |
3031 | uword pc = bytecode.GetDebugCheckedOpcodeReturnAddress(pc_offset, |
3032 | bytecode.Size()); |
3033 | if (pc != 0) { |
3034 | RefineBreakpointPos(script, pos, TokenPosition::kMaxSource, |
3035 | requested_token_pos, last_token_pos, |
3036 | requested_column, exact_token_pos, &best_fit_pos, |
3037 | &best_column, &best_line, &best_token_pos); |
3038 | } |
3039 | } |
3040 | } |
3041 | #else |
3042 | UNREACHABLE(); |
3043 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
3044 | } else { |
3045 | PcDescriptors::Iterator iter(desc, kSafepointKind); |
3046 | while (iter.MoveNext()) { |
3047 | const TokenPosition pos = iter.TokenPos(); |
3048 | if ((!pos.IsReal()) || (pos < requested_token_pos) || |
3049 | (pos > last_token_pos)) { |
3050 | // Token is not in the target range. |
3051 | continue; |
3052 | } |
3053 | TokenPosition next_closest_token_position = TokenPosition::kMaxSource; |
3054 | if (requested_column >= 0) { |
3055 | // Find next closest safepoint |
3056 | PcDescriptors::Iterator iter2(desc, kSafepointKind); |
3057 | while (iter2.MoveNext()) { |
3058 | const TokenPosition next = iter2.TokenPos(); |
3059 | if (next < next_closest_token_position && next > pos) { |
3060 | next_closest_token_position = next; |
3061 | } |
3062 | } |
3063 | } |
3064 | RefineBreakpointPos(script, pos, next_closest_token_position, |
3065 | requested_token_pos, last_token_pos, requested_column, |
3066 | exact_token_pos, &best_fit_pos, &best_column, |
3067 | &best_line, &best_token_pos); |
3068 | } |
3069 | } |
3070 | |
3071 | // Second pass (if we found a safe point in the first pass). Find |
3072 | // the token on the line which is at the best fit column (if column |
3073 | // was specified) and has the lowest code address. |
3074 | if (best_fit_pos != TokenPosition::kMaxSource) { |
3075 | const Script& script = Script::Handle(zone, func.script()); |
3076 | const TokenPosition begin_pos = best_fit_pos; |
3077 | |
3078 | TokenPosition end_of_line_pos; |
3079 | if (best_line == -1) { |
3080 | script.GetTokenLocation(begin_pos, &best_line, NULL); |
3081 | } |
3082 | ASSERT(best_line > 0); |
3083 | TokenPosition ignored; |
3084 | script.TokenRangeAtLine(best_line, &ignored, &end_of_line_pos); |
3085 | if (end_of_line_pos < begin_pos) { |
3086 | end_of_line_pos = begin_pos; |
3087 | } |
3088 | |
3089 | uword lowest_pc_offset = kUwordMax; |
3090 | if (in_bytecode) { |
3091 | #if !defined(DART_PRECOMPILED_RUNTIME) |
3092 | kernel::BytecodeSourcePositionsIterator iter(zone, bytecode); |
3093 | while (iter.MoveNext()) { |
3094 | const TokenPosition pos = iter.TokenPos(); |
3095 | if (!pos.IsReal() || (pos < begin_pos) || (pos > end_of_line_pos)) { |
3096 | // Token is not on same line as best fit. |
3097 | continue; |
3098 | } |
3099 | |
3100 | if (requested_column >= 0) { |
3101 | if (pos != best_token_pos) { |
3102 | continue; |
3103 | } |
3104 | } |
3105 | |
3106 | // Prefer the lowest pc offset. |
3107 | if (iter.PcOffset() < lowest_pc_offset) { |
3108 | lowest_pc_offset = iter.PcOffset(); |
3109 | best_fit_pos = pos; |
3110 | } |
3111 | } |
3112 | #else |
3113 | UNREACHABLE(); |
3114 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
3115 | } else { |
3116 | PcDescriptors::Iterator iter(desc, kSafepointKind); |
3117 | while (iter.MoveNext()) { |
3118 | const TokenPosition pos = iter.TokenPos(); |
3119 | if (!pos.IsReal() || (pos < begin_pos) || (pos > end_of_line_pos)) { |
3120 | // Token is not on same line as best fit. |
3121 | continue; |
3122 | } |
3123 | |
3124 | if (requested_column >= 0) { |
3125 | if (pos != best_token_pos) { |
3126 | continue; |
3127 | } |
3128 | } |
3129 | |
3130 | // Prefer the lowest pc offset. |
3131 | if (iter.PcOffset() < lowest_pc_offset) { |
3132 | lowest_pc_offset = iter.PcOffset(); |
3133 | best_fit_pos = pos; |
3134 | } |
3135 | } |
3136 | } |
3137 | return best_fit_pos; |
3138 | } |
3139 | |
3140 | // We didn't find a safe point in the given token range. Try and |
3141 | // find a safe point in the remaining source code of the function. |
3142 | // Since we have moved to the next line of the function, we no |
3143 | // longer are requesting a specific column number. |
3144 | if (last_token_pos < func.end_token_pos()) { |
3145 | return ResolveBreakpointPos(in_bytecode, func, last_token_pos, |
3146 | func.end_token_pos(), -1 /* no column */, |
3147 | TokenPosition::kNoSource); |
3148 | } |
3149 | return TokenPosition::kNoSource; |
3150 | } |
3151 | |
3152 | void Debugger::MakeCodeBreakpointAt(const Function& func, |
3153 | BreakpointLocation* loc) { |
3154 | ASSERT(loc->token_pos_.IsReal()); |
3155 | ASSERT((loc != NULL) && loc->IsResolved()); |
3156 | ASSERT(!func.HasOptimizedCode()); |
3157 | ASSERT(func.HasCode() || func.HasBytecode()); |
3158 | #if !defined(DART_PRECOMPILED_RUNTIME) |
3159 | if (func.HasBytecode() && FLAG_enable_interpreter) { |
3160 | Bytecode& bytecode = Bytecode::Handle(func.bytecode()); |
3161 | ASSERT(!bytecode.IsNull()); |
3162 | uword pc = 0; |
3163 | if (bytecode.HasSourcePositions()) { |
3164 | kernel::BytecodeSourcePositionsIterator iter(Thread::Current()->zone(), |
3165 | bytecode); |
3166 | // Ignore all possible breakpoint positions until the first DebugCheck |
3167 | // opcode of the function. |
3168 | const uword debug_check_pc = bytecode.GetFirstDebugCheckOpcodePc(); |
3169 | if (debug_check_pc != 0) { |
3170 | const uword debug_check_pc_offset = |
3171 | debug_check_pc - bytecode.PayloadStart(); |
3172 | uword pc_offset = kUwordMax; |
3173 | while (iter.MoveNext()) { |
3174 | if (pc_offset != kUwordMax) { |
3175 | pc = bytecode.GetDebugCheckedOpcodeReturnAddress(pc_offset, |
3176 | iter.PcOffset()); |
3177 | pc_offset = kUwordMax; |
3178 | if (pc != 0) { |
3179 | // TODO(regis): We may want to find all PCs for a token position, |
3180 | // e.g. in the case of duplicated bytecode in finally clauses. |
3181 | break; |
3182 | } |
3183 | // This range does not contain a 'debug checked' opcode or the |
3184 | // first DebugCheck opcode of the function is not reached yet. |
3185 | } |
3186 | if (iter.TokenPos() == loc->token_pos_) { |
3187 | pc_offset = iter.PcOffset(); |
3188 | if (pc_offset < debug_check_pc_offset) { |
3189 | // No breakpoints in prologue. |
3190 | pc_offset = debug_check_pc_offset; |
3191 | } |
3192 | } |
3193 | } |
3194 | if (pc_offset != kUwordMax) { |
3195 | pc = bytecode.GetDebugCheckedOpcodeReturnAddress(pc_offset, |
3196 | bytecode.Size()); |
3197 | } |
3198 | } |
3199 | } |
3200 | if (pc != 0) { |
3201 | CodeBreakpoint* code_bpt = GetCodeBreakpoint(pc); |
3202 | if (code_bpt == NULL) { |
3203 | // No code breakpoint for this code exists; create one. |
3204 | code_bpt = new CodeBreakpoint(bytecode, loc->token_pos_, pc); |
3205 | if (FLAG_verbose_debug) { |
3206 | OS::PrintErr("Setting bytecode breakpoint at pos %s pc %#" Px |
3207 | " offset %#" Px "\n" , |
3208 | loc->token_pos_.ToCString(), pc, |
3209 | pc - bytecode.PayloadStart()); |
3210 | } |
3211 | RegisterCodeBreakpoint(code_bpt); |
3212 | } |
3213 | code_bpt->set_bpt_location(loc); |
3214 | if (loc->AnyEnabled()) { |
3215 | code_bpt->Enable(); |
3216 | } |
3217 | } |
3218 | } |
3219 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
3220 | if (func.HasCode()) { |
3221 | Code& code = Code::Handle(func.unoptimized_code()); |
3222 | ASSERT(!code.IsNull()); |
3223 | PcDescriptors& desc = PcDescriptors::Handle(code.pc_descriptors()); |
3224 | uword lowest_pc_offset = kUwordMax; |
3225 | PcDescriptorsLayout::Kind lowest_kind = PcDescriptorsLayout::kAnyKind; |
3226 | // Find the safe point with the lowest compiled code address |
3227 | // that maps to the token position of the source breakpoint. |
3228 | PcDescriptors::Iterator iter(desc, kSafepointKind); |
3229 | while (iter.MoveNext()) { |
3230 | if (iter.TokenPos() == loc->token_pos_) { |
3231 | if (iter.PcOffset() < lowest_pc_offset) { |
3232 | lowest_pc_offset = iter.PcOffset(); |
3233 | lowest_kind = iter.Kind(); |
3234 | } |
3235 | } |
3236 | } |
3237 | if (lowest_pc_offset != kUwordMax) { |
3238 | uword lowest_pc = code.PayloadStart() + lowest_pc_offset; |
3239 | CodeBreakpoint* code_bpt = GetCodeBreakpoint(lowest_pc); |
3240 | if (code_bpt == NULL) { |
3241 | // No code breakpoint for this code exists; create one. |
3242 | code_bpt = |
3243 | new CodeBreakpoint(code, loc->token_pos_, lowest_pc, lowest_kind); |
3244 | if (FLAG_verbose_debug) { |
3245 | OS::PrintErr("Setting code breakpoint at pos %s pc %#" Px |
3246 | " offset %#" Px "\n" , |
3247 | loc->token_pos_.ToCString(), lowest_pc, |
3248 | lowest_pc - code.PayloadStart()); |
3249 | } |
3250 | RegisterCodeBreakpoint(code_bpt); |
3251 | } |
3252 | code_bpt->set_bpt_location(loc); |
3253 | if (loc->AnyEnabled()) { |
3254 | code_bpt->Enable(); |
3255 | } |
3256 | } |
3257 | } |
3258 | } |
3259 | |
3260 | void Debugger::FindCompiledFunctions( |
3261 | const Script& script, |
3262 | TokenPosition start_pos, |
3263 | TokenPosition end_pos, |
3264 | GrowableObjectArray* bytecode_function_list, |
3265 | GrowableObjectArray* code_function_list) { |
3266 | Zone* zone = Thread::Current()->zone(); |
3267 | Class& cls = Class::Handle(zone); |
3268 | Array& functions = Array::Handle(zone); |
3269 | GrowableObjectArray& closures = GrowableObjectArray::Handle(zone); |
3270 | Function& function = Function::Handle(zone); |
3271 | |
3272 | closures = isolate_->object_store()->closure_functions(); |
3273 | const intptr_t num_closures = closures.Length(); |
3274 | for (intptr_t pos = 0; pos < num_closures; pos++) { |
3275 | function ^= closures.At(pos); |
3276 | ASSERT(!function.IsNull()); |
3277 | if ((function.token_pos() == start_pos) && |
3278 | (function.end_token_pos() == end_pos) && |
3279 | (function.script() == script.raw())) { |
3280 | if (function.is_debuggable()) { |
3281 | if (FLAG_enable_interpreter && function.HasBytecode()) { |
3282 | bytecode_function_list->Add(function); |
3283 | } |
3284 | if (function.HasCode()) { |
3285 | code_function_list->Add(function); |
3286 | } |
3287 | } |
3288 | if (function.HasImplicitClosureFunction()) { |
3289 | function = function.ImplicitClosureFunction(); |
3290 | if (function.is_debuggable()) { |
3291 | if (FLAG_enable_interpreter && function.HasBytecode()) { |
3292 | bytecode_function_list->Add(function); |
3293 | } |
3294 | if (function.HasCode()) { |
3295 | code_function_list->Add(function); |
3296 | } |
3297 | } |
3298 | } |
3299 | } |
3300 | } |
3301 | |
3302 | const ClassTable& class_table = *isolate_->class_table(); |
3303 | const intptr_t num_classes = class_table.NumCids(); |
3304 | const intptr_t num_tlc_classes = class_table.NumTopLevelCids(); |
3305 | for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) { |
3306 | const classid_t cid = |
3307 | i < num_classes ? i : ClassTable::CidFromTopLevelIndex(i - num_classes); |
3308 | if (class_table.HasValidClassAt(cid)) { |
3309 | cls = class_table.At(cid); |
3310 | // If the class is not finalized, e.g. if it hasn't been parsed |
3311 | // yet entirely, we can ignore it. If it contains a function with |
3312 | // an unresolved breakpoint, we will detect it if and when the |
3313 | // function gets compiled. |
3314 | if (!cls.is_finalized()) { |
3315 | continue; |
3316 | } |
3317 | // Note: we need to check the functions of this class even if |
3318 | // the class is defined in a different 'script'. There could |
3319 | // be mixin functions from the given script in this class. |
3320 | functions = cls.functions(); |
3321 | if (!functions.IsNull()) { |
3322 | const intptr_t num_functions = functions.Length(); |
3323 | for (intptr_t pos = 0; pos < num_functions; pos++) { |
3324 | function ^= functions.At(pos); |
3325 | ASSERT(!function.IsNull()); |
3326 | bool function_added = false; |
3327 | if (function.is_debuggable() && |
3328 | (function.HasCode() || |
3329 | (FLAG_enable_interpreter && function.HasBytecode())) && |
3330 | function.token_pos() == start_pos && |
3331 | function.end_token_pos() == end_pos && |
3332 | function.script() == script.raw()) { |
3333 | if (FLAG_enable_interpreter && function.HasBytecode()) { |
3334 | bytecode_function_list->Add(function); |
3335 | } |
3336 | if (function.HasCode()) { |
3337 | code_function_list->Add(function); |
3338 | } |
3339 | function_added = true; |
3340 | } |
3341 | if (function_added && function.HasImplicitClosureFunction()) { |
3342 | function = function.ImplicitClosureFunction(); |
3343 | if (function.is_debuggable()) { |
3344 | if (FLAG_enable_interpreter && function.HasBytecode()) { |
3345 | bytecode_function_list->Add(function); |
3346 | } |
3347 | if (function.HasCode()) { |
3348 | code_function_list->Add(function); |
3349 | } |
3350 | } |
3351 | } |
3352 | } |
3353 | } |
3354 | } |
3355 | } |
3356 | } |
3357 | |
3358 | static void SelectBestFit(Function* best_fit, Function* func) { |
3359 | if (best_fit->IsNull()) { |
3360 | *best_fit = func->raw(); |
3361 | } else { |
3362 | if ((func->token_pos() > best_fit->token_pos()) && |
3363 | ((func->end_token_pos() <= best_fit->end_token_pos()))) { |
3364 | *best_fit = func->raw(); |
3365 | } |
3366 | } |
3367 | } |
3368 | |
// Returns true if a best fit is found. A best fit can either be a function
// or a field. If it is a function, then the best fit function is returned
// in |best_fit|. If a best fit is a field, it means that a latent
// breakpoint can be set in the range |token_pos| to |last_token_pos|
// (|best_fit| is left Null in that case).
bool Debugger::FindBestFit(const Script& script,
                           TokenPosition token_pos,
                           TokenPosition last_token_pos,
                           Function* best_fit) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& cls = Class::Handle(zone);

  // A single script can belong to several libraries because of mixins.
  // Go through all libraries and for each that contains the script, try to find
  // a fit there.
  // Return the first fit found, but if a library doesn't contain a fit,
  // process the next one.
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, thread->isolate()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  for (int i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    ASSERT(!lib.IsNull());
    // Skip this library entirely unless |script| is one of its loaded
    // scripts.
    const Array& scripts = Array::Handle(zone, lib.LoadedScripts());
    bool lib_has_script = false;
    for (intptr_t j = 0; j < scripts.Length(); j++) {
      if (scripts.At(j) == script.raw()) {
        lib_has_script = true;
        break;
      }
    }
    if (!lib_has_script) {
      continue;
    }

    // Non-debuggable libraries cannot host breakpoints.
    if (!lib.IsDebuggable()) {
      if (FLAG_verbose_debug) {
        OS::PrintErr("Library '%s' has been marked as non-debuggable\n" ,
                     lib.ToCString());
      }
      continue;
    }
    const GrowableObjectArray& closures = GrowableObjectArray::Handle(
        zone, isolate_->object_store()->closure_functions());
    Array& functions = Array::Handle(zone);
    Function& function = Function::Handle(zone);
    Array& fields = Array::Handle(zone);
    Field& field = Field::Handle(zone);
    Error& error = Error::Handle(zone);

    // Pass 1: look through already-instantiated closures, keeping the
    // innermost one that overlaps the requested range.
    const intptr_t num_closures = closures.Length();
    for (intptr_t i = 0; i < num_closures; i++) {
      function ^= closures.At(i);
      if (FunctionOverlaps(function, script, token_pos, last_token_pos)) {
        // Select the inner most closure.
        SelectBestFit(best_fit, &function);
      }
    }
    if (!best_fit->IsNull()) {
      // The inner most closure found will be the best fit. Going
      // over class functions below will not help in any further
      // narrowing.
      return true;
    }

    // Pass 2: look through the functions of every class in this library
    // (both regular and top-level cids).
    const ClassTable& class_table = *isolate_->class_table();
    const intptr_t num_classes = class_table.NumCids();
    const intptr_t num_tlc_classes = class_table.NumTopLevelCids();
    for (intptr_t i = 1; i < num_classes + num_tlc_classes; i++) {
      const classid_t cid =
          i < num_classes ? i
                          : ClassTable::CidFromTopLevelIndex(i - num_classes);
      if (!class_table.HasValidClassAt(cid)) {
        continue;
      }
      cls = class_table.At(cid);
      // This class is relevant to us only if it belongs to the
      // library to which |script| belongs.
      if (cls.library() != lib.raw()) {
        continue;
      }
      // Parse class definition if not done yet.
      error = cls.EnsureIsFinalized(Thread::Current());
      if (!error.IsNull()) {
        // Ignore functions in this class.
        // TODO(hausner): Should we propagate this error? How?
        // EnsureIsFinalized only returns an error object if there
        // is no longjump base on the stack.
        continue;
      }
      functions = cls.functions();
      if (!functions.IsNull()) {
        const intptr_t num_functions = functions.Length();
        for (intptr_t pos = 0; pos < num_functions; pos++) {
          function ^= functions.At(pos);
          ASSERT(!function.IsNull());
          if (IsImplicitFunction(function)) {
            // Implicit functions do not have a user specifiable source
            // location.
            continue;
          }
          if (FunctionOverlaps(function, script, token_pos, last_token_pos)) {
            // Closures and inner functions within a class method are not
            // present in the functions of a class. Hence, we can return
            // right away as looking through other functions of a class
            // will not narrow down to any inner function/closure.
            *best_fit = function.raw();
            return true;
          }
        }
      }
      // If none of the functions in the class contain token_pos, then we
      // check if it falls within a function literal initializer of a field
      // that has not been initialized yet. If the field (and hence the
      // function literal initializer) has already been initialized, then
      // it would have been found above in the object store as a closure.
      fields = cls.fields();
      if (!fields.IsNull()) {
        const intptr_t num_fields = fields.Length();
        for (intptr_t pos = 0; pos < num_fields; pos++) {
          TokenPosition start;
          TokenPosition end;
          field ^= fields.At(pos);
          ASSERT(!field.IsNull());
          if (field.Script() != script.raw()) {
            // The field should be defined in the script we want to set
            // the breakpoint in.
            continue;
          }
          if (!field.has_nontrivial_initializer()) {
            continue;
          }
          // A field counts as a fit when the requested range touches the
          // initializer's source range; report success with a Null
          // |best_fit| so the caller registers a latent breakpoint.
          start = field.token_pos();
          end = field.end_token_pos();
          if ((start <= token_pos && token_pos <= end) ||
              (token_pos <= start && start <= last_token_pos)) {
            return true;
          }
        }
      }
    }
  }
  return false;
}
3513 | |
// Resolves a breakpoint position within the compiled |functions| (all in
// bytecode when |in_bytecode|, all in machine code otherwise), finds or
// creates the matching BreakpointLocation, marks it resolved, and installs
// code breakpoints in every listed function. Returns NULL if no real
// breakpoint position could be resolved.
BreakpointLocation* Debugger::SetCodeBreakpoints(
    bool in_bytecode,
    BreakpointLocation* loc,
    const Script& script,
    TokenPosition token_pos,
    TokenPosition last_token_pos,
    intptr_t requested_line,
    intptr_t requested_column,
    TokenPosition exact_token_pos,
    const GrowableObjectArray& functions) {
  ASSERT(!in_bytecode || FLAG_enable_interpreter);
  // Resolve against the first function; all entries share the same source
  // range (see FindCompiledFunctions).
  Function& function = Function::Handle();
  function ^= functions.At(0);
  TokenPosition breakpoint_pos =
      ResolveBreakpointPos(in_bytecode, function, token_pos, last_token_pos,
                           requested_column, exact_token_pos);
  if (!breakpoint_pos.IsReal()) {
    return NULL;
  }
  // Three-stage lookup; order matters. First, a location already resolved
  // at this position (in the matching representation).
  if (loc == NULL) {
    // Find an existing resolved breakpoint location.
    loc = GetBreakpointLocation(
        script, TokenPosition::kNoSource,
        /* requested_line = */ -1,
        /* requested_column = */ -1,
        in_bytecode ? breakpoint_pos : TokenPosition::kNoSource,
        !in_bytecode ? breakpoint_pos : TokenPosition::kNoSource);
  }
  // Second, an unresolved location registered for the requested position.
  if (loc == NULL) {
    // Find an existing unresolved breakpoint location.
    loc = GetBreakpointLocation(script, token_pos, requested_line,
                                requested_column);
  }
  // Third, register a brand new location.
  if (loc == NULL) {
    loc = new BreakpointLocation(script, breakpoint_pos, breakpoint_pos,
                                 requested_line, requested_column);
    RegisterBreakpointLocation(loc);
  }
  // A source breakpoint for this location may already exists, but it may
  // not yet be resolved in both bytecode and code.
  if (loc->IsResolved(in_bytecode)) {
    return loc;
  }
  loc->SetResolved(in_bytecode, function, breakpoint_pos);

  // Create code breakpoints for all compiled functions we found.
  Function& func = Function::Handle();
  const intptr_t num_functions = functions.Length();
  for (intptr_t i = 0; i < num_functions; i++) {
    func ^= functions.At(i);
    ASSERT((in_bytecode && func.HasBytecode()) ||
           (!in_bytecode && func.HasCode()));
    MakeCodeBreakpointAt(func, loc);
  }
  if (FLAG_verbose_debug) {
    intptr_t line_number;
    intptr_t column_number;
    script.GetTokenLocation(breakpoint_pos, &line_number, &column_number);
    OS::PrintErr("Resolved %s breakpoint for function '%s' at line %" Pd
                 " col %" Pd "\n" ,
                 in_bytecode ? "bytecode" : "code" ,
                 func.ToFullyQualifiedCString(), line_number, column_number);
  }
  return loc;
}
3579 | |
// Sets a breakpoint in |script| within [token_pos, last_token_pos].
// When |function| is given, the breakpoint targets that function directly;
// otherwise the best fitting function (or field initializer) is searched.
// If matching compiled code exists, the breakpoint is resolved immediately;
// otherwise an unresolved (pending) location is registered. Returns NULL
// when no suitable target could be found at all.
BreakpointLocation* Debugger::SetBreakpoint(const Script& script,
                                            TokenPosition token_pos,
                                            TokenPosition last_token_pos,
                                            intptr_t requested_line,
                                            intptr_t requested_column,
                                            const Function& function) {
  Function& func = Function::Handle();
  if (function.IsNull()) {
    if (!FindBestFit(script, token_pos, last_token_pos, &func)) {
      return NULL;
    }
    // If func was not set (still Null), the best fit is a field.
  } else {
    func = function.raw();
    if (!func.token_pos().IsReal()) {
      return NULL;  // Missing source positions in bytecode?
    }
  }
  if (!func.IsNull()) {
    // There may be more than one function object for a given function
    // in source code. There may be implicit closure functions, and
    // there may be copies of mixin functions. Collect all compiled
    // functions whose source code range matches exactly the best fit
    // function we found.
    GrowableObjectArray& bytecode_functions =
        GrowableObjectArray::Handle(GrowableObjectArray::New());
    GrowableObjectArray& code_functions =
        GrowableObjectArray::Handle(GrowableObjectArray::New());
    FindCompiledFunctions(script, func.token_pos(), func.end_token_pos(),
                          &bytecode_functions, &code_functions);

    if (bytecode_functions.Length() > 0 || code_functions.Length() > 0) {
      // One or more function object containing this breakpoint location
      // have already been compiled. We can resolve the breakpoint now.
      // If requested_column is larger than zero, [token_pos, last_token_pos]
      // governs one single line of code.
      TokenPosition exact_token_pos = TokenPosition(-1);
      if (token_pos != last_token_pos && requested_column >= 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        exact_token_pos =
            FindExactTokenPosition(script, token_pos, requested_column);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      }
      // Deoptimize so the new breakpoint also takes effect in code that
      // was previously optimized.
      DeoptimizeWorld();
      // Since source positions may differ in code and bytecode, process
      // breakpoints in bytecode and code separately.
      BreakpointLocation* loc = NULL;
      if (bytecode_functions.Length() > 0) {
        loc = SetCodeBreakpoints(true, loc, script, token_pos, last_token_pos,
                                 requested_line, requested_column,
                                 exact_token_pos, bytecode_functions);
      }
      if (code_functions.Length() > 0) {
        loc = SetCodeBreakpoints(false, loc, script, token_pos, last_token_pos,
                                 requested_line, requested_column,
                                 exact_token_pos, code_functions);
      }
      if (loc != NULL) {
        return loc;
      }
    }
  }
  // There is either an uncompiled function, or an uncompiled function literal
  // initializer of a field at |token_pos|. Hence, Register an unresolved
  // breakpoint.
  if (FLAG_verbose_debug) {
    intptr_t line_number;
    intptr_t column_number;
    script.GetTokenLocation(token_pos, &line_number, &column_number);
    if (func.IsNull()) {
      OS::PrintErr(
          "Registering pending breakpoint for "
          "an uncompiled function literal at line %" Pd " col %" Pd "\n" ,
          line_number, column_number);
    } else {
      OS::PrintErr(
          "Registering pending breakpoint for "
          "uncompiled function '%s' at line %" Pd " col %" Pd "\n" ,
          func.ToFullyQualifiedCString(), line_number, column_number);
    }
  }
  // Reuse an existing unresolved location for this position if present.
  BreakpointLocation* loc =
      GetBreakpointLocation(script, token_pos, -1, requested_column);
  if (loc == NULL) {
    loc = new BreakpointLocation(script, token_pos, last_token_pos,
                                 requested_line, requested_column);
    RegisterBreakpointLocation(loc);
  }
  return loc;
}
3670 | |
3671 | // Synchronize the enabled/disabled state of all code breakpoints |
3672 | // associated with the breakpoint location loc. |
3673 | void Debugger::SyncBreakpointLocation(BreakpointLocation* loc) { |
3674 | bool any_enabled = loc->AnyEnabled(); |
3675 | |
3676 | CodeBreakpoint* cbpt = code_breakpoints_; |
3677 | while (cbpt != NULL) { |
3678 | if (loc == cbpt->bpt_location()) { |
3679 | if (any_enabled) { |
3680 | cbpt->Enable(); |
3681 | } else { |
3682 | cbpt->Disable(); |
3683 | } |
3684 | } |
3685 | cbpt = cbpt->next(); |
3686 | } |
3687 | } |
3688 | |
3689 | Breakpoint* Debugger::SetBreakpointAtEntry(const Function& target_function, |
3690 | bool single_shot) { |
3691 | ASSERT(!target_function.IsNull()); |
3692 | // AsyncFunction is marked not debuggable. When target_function is an async |
3693 | // function, it is actually referring the inner async_op. Allow the |
3694 | // breakpoint to be set, it will get resolved correctly when inner async_op |
3695 | // gets compiled. |
3696 | if (!target_function.is_debuggable() && !target_function.IsAsyncFunction()) { |
3697 | return NULL; |
3698 | } |
3699 | const Script& script = Script::Handle(target_function.script()); |
3700 | BreakpointLocation* bpt_location = SetBreakpoint( |
3701 | script, target_function.token_pos(), target_function.end_token_pos(), -1, |
3702 | -1 /* no requested line/col */, target_function); |
3703 | if (bpt_location == NULL) { |
3704 | return NULL; |
3705 | } |
3706 | |
3707 | if (single_shot) { |
3708 | return bpt_location->AddSingleShot(this); |
3709 | } else { |
3710 | return bpt_location->AddRepeated(this); |
3711 | } |
3712 | } |
3713 | |
3714 | Breakpoint* Debugger::SetBreakpointAtActivation(const Instance& closure, |
3715 | bool for_over_await) { |
3716 | if (!closure.IsClosure()) { |
3717 | return NULL; |
3718 | } |
3719 | const Function& func = Function::Handle(Closure::Cast(closure).function()); |
3720 | const Script& script = Script::Handle(func.script()); |
3721 | BreakpointLocation* bpt_location = |
3722 | SetBreakpoint(script, func.token_pos(), func.end_token_pos(), -1, |
3723 | -1 /* no line/col */, func); |
3724 | return bpt_location->AddPerClosure(this, closure, for_over_await); |
3725 | } |
3726 | |
3727 | Breakpoint* Debugger::BreakpointAtActivation(const Instance& closure) { |
3728 | if (!closure.IsClosure()) { |
3729 | return NULL; |
3730 | } |
3731 | |
3732 | BreakpointLocation* loc = breakpoint_locations_; |
3733 | while (loc != NULL) { |
3734 | Breakpoint* bpt = loc->breakpoints(); |
3735 | while (bpt != NULL) { |
3736 | if (bpt->IsPerClosure()) { |
3737 | if (closure.raw() == bpt->closure()) { |
3738 | return bpt; |
3739 | } |
3740 | } |
3741 | bpt = bpt->next(); |
3742 | } |
3743 | loc = loc->next(); |
3744 | } |
3745 | |
3746 | return NULL; |
3747 | } |
3748 | |
3749 | Breakpoint* Debugger::SetBreakpointAtLine(const String& script_url, |
3750 | intptr_t line_number) { |
3751 | // Prevent future tests from calling this function in the wrong |
3752 | // execution state. If you hit this assert, consider using |
3753 | // Dart_SetBreakpoint instead. |
3754 | ASSERT(Thread::Current()->execution_state() == Thread::kThreadInVM); |
3755 | |
3756 | BreakpointLocation* loc = |
3757 | BreakpointLocationAtLineCol(script_url, line_number, -1 /* no column */); |
3758 | if (loc != NULL) { |
3759 | return loc->AddRepeated(this); |
3760 | } |
3761 | return NULL; |
3762 | } |
3763 | |
3764 | Breakpoint* Debugger::SetBreakpointAtLineCol(const String& script_url, |
3765 | intptr_t line_number, |
3766 | intptr_t column_number) { |
3767 | // Prevent future tests from calling this function in the wrong |
3768 | // execution state. If you hit this assert, consider using |
3769 | // Dart_SetBreakpoint instead. |
3770 | ASSERT(Thread::Current()->execution_state() == Thread::kThreadInVM); |
3771 | |
3772 | BreakpointLocation* loc = |
3773 | BreakpointLocationAtLineCol(script_url, line_number, column_number); |
3774 | if (loc != NULL) { |
3775 | return loc->AddRepeated(this); |
3776 | } |
3777 | return NULL; |
3778 | } |
3779 | |
// Finds or creates a breakpoint location for |script_url| at the given
// line/column. If no loaded script matches the url, a latent location is
// registered instead. Returns NULL when multiple scripts match the url or
// when the line holds no executable code.
BreakpointLocation* Debugger::BreakpointLocationAtLineCol(
    const String& script_url,
    intptr_t line_number,
    intptr_t column_number) {
  Zone* zone = Thread::Current()->zone();
  Library& lib = Library::Handle(zone);
  Script& script = Script::Handle(zone);
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(isolate_->object_store()->libraries());
  bool is_package = script_url.StartsWith(Symbols::PackageScheme());
  Script& script_for_lib = Script::Handle(zone);
  // Search every library for a script with the given url; bail out if two
  // distinct scripts match.
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    // Ensure that all top-level members are loaded so their scripts
    // are available for look up. When certain script only contains
    // top level functions, scripts could still be loaded correctly.
    lib.EnsureTopLevelClassIsFinalized();
    script_for_lib = lib.LookupScript(script_url, !is_package);
    if (!script_for_lib.IsNull()) {
      if (script.IsNull()) {
        script = script_for_lib.raw();
      } else if (script.raw() != script_for_lib.raw()) {
        if (FLAG_verbose_debug) {
          OS::PrintErr("Multiple scripts match url '%s'\n" ,
                       script_url.ToCString());
        }
        return NULL;
      }
    }
  }
  if (script.IsNull()) {
    // No script found with given url. Create a latent breakpoint which
    // will be set if the url is loaded later.
    BreakpointLocation* latent_bpt =
        GetLatentBreakpoint(script_url, line_number, column_number);
    if (FLAG_verbose_debug) {
      OS::PrintErr(
          "Set latent breakpoint in url '%s' at "
          "line %" Pd " col %" Pd "\n" ,
          script_url.ToCString(), line_number, column_number);
    }
    return latent_bpt;
  }
  // Map the line number to its token range in the script.
  TokenPosition first_token_idx, last_token_idx;
  script.TokenRangeAtLine(line_number, &first_token_idx, &last_token_idx);
  if (!first_token_idx.IsReal()) {
    // Script does not contain the given line number.
    if (FLAG_verbose_debug) {
      OS::PrintErr("Script '%s' does not contain line number %" Pd "\n" ,
                   script_url.ToCString(), line_number);
    }
    return NULL;
  } else if (!last_token_idx.IsReal()) {
    // Line does not contain any tokens.
    if (FLAG_verbose_debug) {
      OS::PrintErr("No executable code at line %" Pd " in '%s'\n" , line_number,
                   script_url.ToCString());
    }
    return NULL;
  }

  // Try successive starting tokens on the line until a breakpoint can be
  // set; SetBreakpoint may fail for earlier positions on the line.
  BreakpointLocation* loc = NULL;
  ASSERT(first_token_idx <= last_token_idx);
  while ((loc == NULL) && (first_token_idx <= last_token_idx)) {
    loc = SetBreakpoint(script, first_token_idx, last_token_idx, line_number,
                        column_number, Function::Handle());
    first_token_idx.Next();
  }
  if ((loc == NULL) && FLAG_verbose_debug) {
    OS::PrintErr("No executable code at line %" Pd " in '%s'\n" , line_number,
                 script_url.ToCString());
  }
  return loc;
}
3854 | |
// GC support: visit every raw object pointer the debugger retains.
3856 | void Debugger::VisitObjectPointers(ObjectPointerVisitor* visitor) { |
3857 | ASSERT(visitor != NULL); |
3858 | BreakpointLocation* loc = breakpoint_locations_; |
3859 | while (loc != NULL) { |
3860 | loc->VisitObjectPointers(visitor); |
3861 | loc = loc->next(); |
3862 | } |
3863 | loc = latent_locations_; |
3864 | while (loc != NULL) { |
3865 | loc->VisitObjectPointers(visitor); |
3866 | loc = loc->next(); |
3867 | } |
3868 | CodeBreakpoint* cbpt = code_breakpoints_; |
3869 | while (cbpt != NULL) { |
3870 | cbpt->VisitObjectPointers(visitor); |
3871 | cbpt = cbpt->next(); |
3872 | } |
3873 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&top_frame_awaiter_)); |
3874 | } |
3875 | |
// Blocks the isolate on a pause event: notifies the service protocol,
// hands control to the embedder's pause event handler, and sends a resume
// event afterwards (unless the isolate is unwinding).
void Debugger::Pause(ServiceEvent* event) {
  ASSERT(event->IsPause());      // Should call InvokeEventHandler instead.
  ASSERT(!ignore_breakpoints_);  // We shouldn't get here when ignoring bpts.
  ASSERT(!IsPaused());           // No recursive pausing.

  pause_event_ = event;
  pause_event_->UpdateTimestamp();

  // We are about to invoke the debugger's event handler. Disable
  // interrupts for this thread while waiting for debug commands over
  // the service protocol.
  {
    Thread* thread = Thread::Current();
    DisableThreadInterruptsScope dtis(thread);
    TIMELINE_DURATION(thread, Debugger, "Debugger Pause" );

    // Send the pause event.
    Service::HandleEvent(event);

    {
      // The embedder callback runs outside the VM; transition the thread
      // state for its duration.
      TransitionVMToNative transition(thread);
      isolate_->PauseEventHandler();
    }

    // Notify the service that we have resumed.
    const Error& error = Error::Handle(Thread::Current()->sticky_error());
    ASSERT(error.IsNull() || error.IsUnwindError() ||
           error.IsUnhandledException());

    // Only send a resume event when the isolate is not unwinding.
    if (!error.IsUnwindError()) {
      ServiceEvent resume_event(event->isolate(), ServiceEvent::kResume);
      resume_event.set_top_frame(event->top_frame());
      Service::HandleEvent(&resume_event);
    }
  }

  // Breakpoints unlinked while paused are cleaned up only now, after the
  // handler has finished using them.
  if (needs_breakpoint_cleanup_) {
    RemoveUnlinkedCodeBreakpoints();
  }
  pause_event_ = NULL;
}
3918 | |
// Puts the isolate into unconditional single-stepping: clears any stepping
// frame anchors, deoptimizes all code (so stepping also works where
// optimized code was running), and turns single stepping on.
void Debugger::EnterSingleStepMode() {
  ResetSteppingFramePointers();
  DeoptimizeWorld();
  NotifySingleStepping(true);
}
3924 | |
3925 | void Debugger::ResetSteppingFramePointers() { |
3926 | stepping_fp_ = 0; |
3927 | async_stepping_fp_ = 0; |
3928 | } |
3929 | |
3930 | bool Debugger::SteppedForSyntheticAsyncBreakpoint() const { |
3931 | return synthetic_async_breakpoint_ != NULL; |
3932 | } |
3933 | |
3934 | void Debugger::CleanupSyntheticAsyncBreakpoint() { |
3935 | if (synthetic_async_breakpoint_ != NULL) { |
3936 | RemoveBreakpoint(synthetic_async_breakpoint_->id()); |
3937 | synthetic_async_breakpoint_ = NULL; |
3938 | } |
3939 | } |
3940 | |
3941 | void Debugger::RememberTopFrameAwaiter() { |
3942 | if (!FLAG_async_debugger) { |
3943 | return; |
3944 | } |
3945 | if (stack_trace_->Length() > 0) { |
3946 | top_frame_awaiter_ = stack_trace_->FrameAt(0)->GetAsyncAwaiter(); |
3947 | } else { |
3948 | top_frame_awaiter_ = Object::null(); |
3949 | } |
3950 | } |
3951 | |
3952 | void Debugger::SetAsyncSteppingFramePointer(DebuggerStackTrace* stack_trace) { |
3953 | if (!FLAG_async_debugger) { |
3954 | return; |
3955 | } |
3956 | if ((stack_trace->Length()) > 0 && |
3957 | (stack_trace->FrameAt(0)->function().IsAsyncClosure() || |
3958 | stack_trace->FrameAt(0)->function().IsAsyncGenClosure())) { |
3959 | async_stepping_fp_ = stack_trace->FrameAt(0)->fp(); |
3960 | interpreted_async_stepping_ = stack_trace->FrameAt(0)->IsInterpreted(); |
3961 | } else { |
3962 | async_stepping_fp_ = 0; |
3963 | } |
3964 | } |
3965 | |
3966 | void Debugger::SetSyncSteppingFramePointer(DebuggerStackTrace* stack_trace) { |
3967 | if (stack_trace->Length() > 0) { |
3968 | stepping_fp_ = stack_trace->FrameAt(0)->fp(); |
3969 | interpreted_stepping_ = stack_trace->FrameAt(0)->IsInterpreted(); |
3970 | } else { |
3971 | stepping_fp_ = 0; |
3972 | } |
3973 | } |
3974 | |
// Prepares the VM for the pending resume action (step into/over/out or
// rewind): deoptimizes, enables single stepping, and sets the frame
// anchors that tell the stepping machinery when to stop.
void Debugger::HandleSteppingRequest(DebuggerStackTrace* stack_trace,
                                     bool skip_next_step) {
  ResetSteppingFramePointers();
  RememberTopFrameAwaiter();
  if (resume_action_ == kStepInto) {
    // When single stepping, we need to deoptimize because we might be
    // stepping into optimized code. This happens in particular if
    // the isolate has been interrupted, but can happen in other cases
    // as well. We need to deoptimize the world in case we are about
    // to call an optimized function.
    DeoptimizeWorld();
    NotifySingleStepping(true);
    skip_next_step_ = skip_next_step;
    SetAsyncSteppingFramePointer(stack_trace);
    if (FLAG_verbose_debug) {
      OS::PrintErr("HandleSteppingRequest- kStepInto\n" );
    }
  } else if (resume_action_ == kStepOver) {
    DeoptimizeWorld();
    NotifySingleStepping(true);
    skip_next_step_ = skip_next_step;
    // Anchor to the current frame so stepping stops only in this frame
    // (or its async continuation).
    SetSyncSteppingFramePointer(stack_trace);
    SetAsyncSteppingFramePointer(stack_trace);
    if (FLAG_verbose_debug) {
      OS::PrintErr("HandleSteppingRequest- kStepOver %" Px "\n" , stepping_fp_);
    }
  } else if (resume_action_ == kStepOut) {
    if (FLAG_async_debugger) {
      if (stack_trace->FrameAt(0)->function().IsAsyncClosure() ||
          stack_trace->FrameAt(0)->function().IsAsyncGenClosure()) {
        // Request to step out of an async/async* closure.
        const Object& async_op =
            Object::Handle(stack_trace->FrameAt(0)->GetAsyncAwaiter());
        if (!async_op.IsNull()) {
          // Step out to the awaiter.
          ASSERT(async_op.IsClosure());
          AsyncStepInto(Closure::Cast(async_op));
          if (FLAG_verbose_debug) {
            OS::PrintErr("HandleSteppingRequest- kContinue to async_op %s\n" ,
                         Function::Handle(Closure::Cast(async_op).function())
                             .ToFullyQualifiedCString());
          }
          return;
        }
      }
    }
    // Fall through to synchronous stepping.
    DeoptimizeWorld();
    NotifySingleStepping(true);
    // Find topmost caller that is debuggable.
    for (intptr_t i = 1; i < stack_trace->Length(); i++) {
      ActivationFrame* frame = stack_trace->FrameAt(i);
      if (frame->IsDebuggable()) {
        stepping_fp_ = frame->fp();
        interpreted_stepping_ = frame->IsInterpreted();
        break;
      }
    }
    if (FLAG_verbose_debug) {
      OS::PrintErr("HandleSteppingRequest- kStepOut %" Px "\n" , stepping_fp_);
    }
  } else if (resume_action_ == kStepRewind) {
    if (FLAG_trace_rewind) {
      OS::PrintErr("Rewinding to frame %" Pd "\n" , resume_frame_index_);
      OS::PrintErr(
          "-------------------------\n"
          "All frames...\n\n" );
      StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                                  Thread::Current(),
                                  StackFrameIterator::kNoCrossThreadIteration);
      StackFrame* frame = iterator.NextFrame();
      intptr_t num = 0;
      while ((frame != NULL)) {
        OS::PrintErr("#%04" Pd " %s\n" , num++, frame->ToCString());
        frame = iterator.NextFrame();
      }
    }
    // RewindToFrame does not return; it transfers control.
    RewindToFrame(resume_frame_index_);
    UNREACHABLE();
  }
}
4056 | |
4057 | void Debugger::CacheStackTraces(DebuggerStackTrace* stack_trace, |
4058 | DebuggerStackTrace* async_causal_stack_trace, |
4059 | DebuggerStackTrace* awaiter_stack_trace) { |
4060 | ASSERT(stack_trace_ == NULL); |
4061 | stack_trace_ = stack_trace; |
4062 | ASSERT(async_causal_stack_trace_ == NULL); |
4063 | async_causal_stack_trace_ = async_causal_stack_trace; |
4064 | ASSERT(awaiter_stack_trace_ == NULL); |
4065 | awaiter_stack_trace_ = awaiter_stack_trace; |
4066 | } |
4067 | |
4068 | void Debugger::ClearCachedStackTraces() { |
4069 | stack_trace_ = NULL; |
4070 | async_causal_stack_trace_ = NULL; |
4071 | awaiter_stack_trace_ = NULL; |
4072 | } |
4073 | |
4074 | static intptr_t FindNextRewindFrameIndex(DebuggerStackTrace* stack, |
4075 | intptr_t frame_index) { |
4076 | for (intptr_t i = frame_index + 1; i < stack->Length(); i++) { |
4077 | ActivationFrame* frame = stack->FrameAt(i); |
4078 | if (frame->IsRewindable()) { |
4079 | return i; |
4080 | } |
4081 | } |
4082 | return -1; |
4083 | } |
4084 | |
4085 | // Can the top frame be rewound? |
4086 | bool Debugger::CanRewindFrame(intptr_t frame_index, const char** error) const { |
4087 | // check rewind pc is found |
4088 | DebuggerStackTrace* stack = Isolate::Current()->debugger()->StackTrace(); |
4089 | intptr_t num_frames = stack->Length(); |
4090 | if (frame_index < 1 || frame_index >= num_frames) { |
4091 | if (error != nullptr) { |
4092 | *error = Thread::Current()->zone()->PrintToString( |
4093 | "Frame must be in bounds [1..%" Pd |
4094 | "]: " |
4095 | "saw %" Pd "" , |
4096 | num_frames - 1, frame_index); |
4097 | } |
4098 | return false; |
4099 | } |
4100 | ActivationFrame* frame = stack->FrameAt(frame_index); |
4101 | if (!frame->IsRewindable()) { |
4102 | intptr_t next_index = FindNextRewindFrameIndex(stack, frame_index); |
4103 | if (next_index > 0) { |
4104 | *error = Thread::Current()->zone()->PrintToString( |
4105 | "Cannot rewind to frame %" Pd |
4106 | " due to conflicting compiler " |
4107 | "optimizations. " |
4108 | "Run the vm with --no-prune-dead-locals to disallow these " |
4109 | "optimizations. " |
4110 | "Next valid rewind frame is %" Pd "." , |
4111 | frame_index, next_index); |
4112 | } else { |
4113 | *error = Thread::Current()->zone()->PrintToString( |
4114 | "Cannot rewind to frame %" Pd |
4115 | " due to conflicting compiler " |
4116 | "optimizations. " |
4117 | "Run the vm with --no-prune-dead-locals to disallow these " |
4118 | "optimizations." , |
4119 | frame_index); |
4120 | } |
4121 | return false; |
4122 | } |
4123 | return true; |
4124 | } |
4125 | |
// Given a return address, find the "rewind" pc, which is the pc
// before the corresponding call.
//
// Walks the pc descriptors of unoptimized |code|, tracking the most
// recent kRewind descriptor; when a call descriptor (kIcCall or
// kUnoptStaticCall) matching |return_address| shares that rewind's deopt
// id, the rewind pc is the answer. Returns 0 if no match is found.
static uword LookupRewindPc(const Code& code, uword return_address) {
  ASSERT(!code.is_optimized());
  ASSERT(code.ContainsInstructionAt(return_address));

  uword pc_offset = return_address - code.PayloadStart();
  const PcDescriptors& descriptors =
      PcDescriptors::Handle(code.pc_descriptors());
  PcDescriptors::Iterator iter(
      descriptors, PcDescriptorsLayout::kRewind | PcDescriptorsLayout::kIcCall |
                       PcDescriptorsLayout::kUnoptStaticCall);
  intptr_t rewind_deopt_id = -1;
  uword rewind_pc = 0;
  while (iter.MoveNext()) {
    if (iter.Kind() == PcDescriptorsLayout::kRewind) {
      // Remember the last rewind so we don't need to iterate twice.
      rewind_pc = code.PayloadStart() + iter.PcOffset();
      rewind_deopt_id = iter.DeoptId();
    }
    // A descriptor at the return address whose deopt id matches the most
    // recent rewind descriptor identifies the rewind target.
    if ((pc_offset == iter.PcOffset()) && (iter.DeoptId() == rewind_deopt_id)) {
      return rewind_pc;
    }
  }
  // No rewind point corresponds to this return address.
  return 0;
}
4152 | |
4153 | // Given a return address, find the "rewind" pc, which is the pc |
4154 | // before the corresponding call. |
4155 | static uword LookupRewindPc(const Bytecode& bytecode, uword return_address) { |
4156 | #if defined(DART_PRECOMPILED_RUNTIME) |
4157 | UNREACHABLE(); |
4158 | #else |
4159 | ASSERT(bytecode.ContainsInstructionAt(return_address)); |
4160 | uword pc = bytecode.PayloadStart(); |
4161 | const uword end_pc = pc + bytecode.Size(); |
4162 | while (pc < end_pc) { |
4163 | uword next_pc = KernelBytecode::Next(pc); |
4164 | if (next_pc == return_address) { |
4165 | return pc; |
4166 | } |
4167 | pc = next_pc; |
4168 | } |
4169 | return 0; |
4170 | #endif |
4171 | } |
4172 | |
// Rewinds execution to the frame at |frame_index|, dispatching to the
// interpreted / optimized / unoptimized rewind paths. On success control
// transfers out of the debugger and this function does not return.
// Frames are counted over visible Dart frames only (including inlined
// frames of optimized code), which presumably matches the indices of the
// debugger stack trace — TODO confirm against DebuggerStackTrace
// collection.
void Debugger::RewindToFrame(intptr_t frame_index) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Code& code = Code::Handle(zone);
#if !defined(DART_PRECOMPILED_RUNTIME)
  Bytecode& bytecode = Bytecode::Handle(zone);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  Function& function = Function::Handle(zone);

  // Find the requested frame.
  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  intptr_t current_frame = 0;
  for (StackFrame* frame = iterator.NextFrame(); frame != NULL;
       frame = iterator.NextFrame()) {
    ASSERT(frame->IsValid());
    if (frame->IsDartFrame()) {
      if (frame->is_interpreted()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        bytecode = frame->LookupDartBytecode();
        function = bytecode.function();
        if (function.IsNull() || !IsFunctionVisible(function)) {
          continue;  // Skip bytecode stub frame or invisible frame.
        }
        if (current_frame == frame_index) {
          // We are rewinding to an interpreted frame.
          RewindToInterpretedFrame(frame, bytecode);
          UNREACHABLE();
        }
        current_frame++;
#else
        UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      } else {
        code = frame->LookupDartCode();
        function = code.function();
        if (!IsFunctionVisible(function)) {
          continue;  // Invisible frames are not counted.
        }
        if (code.is_optimized()) {
          // Optimized code: each inlined function contributes one frame
          // index; rewinding deoptimizes down to |sub_index|.
          intptr_t sub_index = 0;
          for (InlinedFunctionsIterator it(code, frame->pc()); !it.Done();
               it.Advance()) {
            if (current_frame == frame_index) {
              RewindToOptimizedFrame(frame, code, sub_index);
              UNREACHABLE();
            }
            current_frame++;
            sub_index++;
          }
        } else {
          if (current_frame == frame_index) {
            // We are rewinding to an unoptimized frame.
            RewindToUnoptimizedFrame(frame, code);
            UNREACHABLE();
          }
          current_frame++;
        }
      }
    }
  }
  // The requested frame was not found on the stack.
  UNIMPLEMENTED();
}
4237 | |
// Transfers control to the pc before the current call in the unoptimized
// frame |frame|. Does not return.
void Debugger::RewindToUnoptimizedFrame(StackFrame* frame, const Code& code) {
  // We will be jumping out of the debugger rather than exiting this
  // function, so prepare the debugger state.
  ClearCachedStackTraces();
  resume_action_ = kContinue;
  resume_frame_index_ = -1;
  EnterSingleStepMode();

  uword rewind_pc = LookupRewindPc(code, frame->pc());
  if (FLAG_trace_rewind && rewind_pc == 0) {
    OS::PrintErr("Unable to find rewind pc for pc(%" Px ")\n" , frame->pc());
  }
  // A rewind pc must exist for a frame that passed CanRewindFrame.
  ASSERT(rewind_pc != 0);
  if (FLAG_trace_rewind) {
    OS::PrintErr(
        "===============================\n"
        "Rewinding to unoptimized frame:\n"
        "  rewind_pc(0x%" Px " offset:0x%" Px ") sp(0x%" Px ") fp(0x%" Px
        ")\n"
        "===============================\n" ,
        rewind_pc, rewind_pc - code.PayloadStart(), frame->sp(), frame->fp());
  }
  // Long-jump style transfer: unwinds to |frame| and resumes at rewind_pc.
  Exceptions::JumpToFrame(Thread::Current(), rewind_pc, frame->sp(),
                          frame->fp(), true /* clear lazy deopt at target */);
  UNREACHABLE();
}
4264 | |
// Starts a rewind into optimized code: records |sub_index| (the inlined
// frame to land in) in post_deopt_frame_index_, then jumps to the
// DeoptForRewind stub which deoptimizes the frame; the rewind is finished
// later in RewindPostDeopt(). Does not return.
void Debugger::RewindToOptimizedFrame(StackFrame* frame,
                                      const Code& optimized_code,
                                      intptr_t sub_index) {
  post_deopt_frame_index_ = sub_index;

  // We will be jumping out of the debugger rather than exiting this
  // function, so prepare the debugger state.
  ClearCachedStackTraces();
  resume_action_ = kContinue;
  resume_frame_index_ = -1;
  EnterSingleStepMode();

  if (FLAG_trace_rewind) {
    OS::PrintErr(
        "===============================\n"
        "Deoptimizing frame for rewind:\n"
        "  deopt_pc(0x%" Px ") sp(0x%" Px ") fp(0x%" Px
        ")\n"
        "===============================\n" ,
        frame->pc(), frame->sp(), frame->fp());
  }
  Thread* thread = Thread::Current();
  // The deopt stub needs the original pc to resume at after deopt.
  thread->set_resume_pc(frame->pc());
  uword deopt_stub_pc = StubCode::DeoptForRewind().EntryPoint();
  Exceptions::JumpToFrame(thread, deopt_stub_pc, frame->sp(), frame->fp(),
                          true /* clear lazy deopt at target */);
  UNREACHABLE();
}
4293 | |
// Transfers control to the bytecode pc before the current call in the
// interpreted frame |frame|. Does not return.
void Debugger::RewindToInterpretedFrame(StackFrame* frame,
                                        const Bytecode& bytecode) {
  // We will be jumping out of the debugger rather than exiting this
  // function, so prepare the debugger state.
  ClearCachedStackTraces();
  resume_action_ = kContinue;
  resume_frame_index_ = -1;
  EnterSingleStepMode();

  uword rewind_pc = LookupRewindPc(bytecode, frame->pc());
  if (FLAG_trace_rewind && rewind_pc == 0) {
    OS::PrintErr("Unable to find rewind pc for bytecode pc(%" Px ")\n" ,
                 frame->pc());
  }
  // A rewind pc must exist for a frame that passed CanRewindFrame.
  ASSERT(rewind_pc != 0);
  if (FLAG_trace_rewind) {
    OS::PrintErr(
        "===============================\n"
        "Rewinding to interpreted frame:\n"
        "  rewind_pc(0x%" Px " offset:0x%" Px ") sp(0x%" Px ") fp(0x%" Px
        ")\n"
        "===============================\n" ,
        rewind_pc, rewind_pc - bytecode.PayloadStart(), frame->sp(),
        frame->fp());
  }
  // Long-jump style transfer: unwinds to |frame| and resumes at rewind_pc.
  Exceptions::JumpToFrame(Thread::Current(), rewind_pc, frame->sp(),
                          frame->fp(), true /* clear lazy deopt at target */);
  UNREACHABLE();
}
4323 | |
// Finishes a rewind started by RewindToOptimizedFrame: after the
// DeoptForRewind stub has deoptimized the target, locates the (now
// unoptimized) Dart frame at the recorded post-deopt index and jumps to
// it. Does not return if the frame is found.
void Debugger::RewindPostDeopt() {
  intptr_t rewind_frame = post_deopt_frame_index_;
  post_deopt_frame_index_ = -1;
  if (FLAG_trace_rewind) {
    OS::PrintErr("Post deopt, jumping to frame %" Pd "\n" , rewind_frame);
    OS::PrintErr(
        "-------------------------\n"
        "All frames...\n\n" );
    StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                                Thread::Current(),
                                StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* frame = iterator.NextFrame();
    intptr_t num = 0;
    while ((frame != NULL)) {
      OS::PrintErr("#%04" Pd " %s\n" , num++, frame->ToCString());
      frame = iterator.NextFrame();
    }
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Code& code = Code::Handle(zone);

  StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames,
                              Thread::Current(),
                              StackFrameIterator::kNoCrossThreadIteration);
  intptr_t current_frame = 0;
  for (StackFrame* frame = iterator.NextFrame(); frame != NULL;
       frame = iterator.NextFrame()) {
    ASSERT(frame->IsValid());
    if (frame->IsDartFrame()) {
      code = frame->LookupDartCode();
      // Deopt has already happened, so only unoptimized code is expected.
      ASSERT(!code.is_optimized());
      if (current_frame == rewind_frame) {
        RewindToUnoptimizedFrame(frame, code);
        UNREACHABLE();
      }
      current_frame++;
    }
  }
  // NOTE(review): falls through silently if the frame is not found,
  // unlike RewindToFrame which hits UNIMPLEMENTED() — confirm intended.
}
4365 | |
4366 | // static |
4367 | bool Debugger::IsDebuggable(const Function& func) { |
4368 | if (!func.is_debuggable()) { |
4369 | return false; |
4370 | } |
4371 | const Class& cls = Class::Handle(func.Owner()); |
4372 | const Library& lib = Library::Handle(cls.library()); |
4373 | return lib.IsDebuggable(); |
4374 | } |
4375 | |
4376 | bool Debugger::IsDebugging(Thread* thread, const Function& func) { |
4377 | Debugger* debugger = thread->isolate()->debugger(); |
4378 | return debugger->IsStepping() || |
4379 | debugger->HasBreakpoint(func, thread->zone()); |
4380 | } |
4381 | |
// Transitions into the paused state and delivers a kPauseBreakpoint
// service event for |top_frame|. |bpt| is the breakpoint that was hit,
// or NULL for a stepping pause.
void Debugger::SignalPausedEvent(ActivationFrame* top_frame, Breakpoint* bpt) {
  // Reset stepping state before pausing so a stale request cannot fire.
  resume_action_ = kContinue;
  ResetSteppingFramePointers();
  NotifySingleStepping(false);
  ASSERT(!IsPaused());
  // A single-shot breakpoint is consumed by this hit: remove it so it
  // cannot trigger again, and report the event without a breakpoint.
  if ((bpt != NULL) && bpt->IsSingleShot()) {
    RemoveBreakpoint(bpt->id());
    bpt = NULL;
  }

  ServiceEvent event(isolate_, ServiceEvent::kPauseBreakpoint);
  event.set_top_frame(top_frame);
  event.set_breakpoint(bpt);
  event.set_at_async_jump(IsAtAsyncJump(top_frame));
  Pause(&event);
}
4398 | |
// Returns true if |top_frame| is an async/async* closure paused exactly
// at a yield point, determined from bytecode source positions or from
// code pc descriptors carrying a valid yield index.
bool Debugger::IsAtAsyncJump(ActivationFrame* top_frame) {
  Zone* zone = Thread::Current()->zone();
  Object& closure_or_null =
      Object::Handle(zone, top_frame->GetAsyncOperation());
  if (!closure_or_null.IsNull()) {
    // Only async machinery frames carry an async operation closure.
    ASSERT(top_frame->function().IsAsyncClosure() ||
           top_frame->function().IsAsyncGenClosure());
    ASSERT(closure_or_null.IsInstance());
    ASSERT(Instance::Cast(closure_or_null).IsClosure());
    if (top_frame->function().is_declared_in_bytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      // Bytecode path: scan the source position table for a yield point
      // at the frame's current token position.
      const auto& bytecode =
          Bytecode::Handle(zone, top_frame->function().bytecode());
      const TokenPosition token_pos = top_frame->TokenPos();
      kernel::BytecodeSourcePositionsIterator iter(zone, bytecode);
      while (iter.MoveNext()) {
        if (iter.IsYieldPoint() && (iter.TokenPos() == token_pos)) {
          return true;
        }
      }
      return false;
#else
      UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    }
    ASSERT(!top_frame->IsInterpreted());
    // Compiled-code path: look for a pc descriptor with a valid yield
    // index at the current token position.
    const auto& pc_descriptors =
        PcDescriptors::Handle(zone, top_frame->code().pc_descriptors());
    if (pc_descriptors.IsNull()) {
      return false;
    }
    const TokenPosition looking_for = top_frame->TokenPos();
    PcDescriptors::Iterator it(pc_descriptors, PcDescriptorsLayout::kOther);
    while (it.MoveNext()) {
      if (it.TokenPos() == looking_for &&
          it.YieldIndex() != PcDescriptorsLayout::kInvalidYieldIndex) {
        return true;
      }
    }
  }
  return false;
}
4441 | |
// Called on every single-step hit. Filters out uninteresting locations
// (callee frames, non-debuggable code, repeated token positions) and, if
// the step is interesting, pauses the isolate and services the next
// stepping request. Returns any sticky error raised while paused.
ErrorPtr Debugger::PauseStepping() {
  ASSERT(isolate_->single_step());
  // Don't pause recursively.
  if (IsPaused()) {
    return Error::null();
  }
  // skip_next_step_ is set when resuming at the same pc as a breakpoint.
  if (skip_next_step_) {
    skip_next_step_ = false;
    return Error::null();
  }

  // Check whether we are in a Dart function that the user is
  // interested in. If we saved the frame pointer of a stack frame
  // the user is interested in, we ignore the single step if we are
  // in a callee of that frame. Note that we assume that the stack
  // grows towards lower addresses.
  ActivationFrame* frame = TopDartFrame();
  ASSERT(frame != NULL);

  if (FLAG_async_debugger) {
    if ((async_stepping_fp_ != 0) && (top_frame_awaiter_ != Object::null())) {
      // Check if the user has single stepped out of an async function with
      // an awaiter. The first check handles the case of calling into the
      // async machinery as we finish the async function. The second check
      // handles the case of returning from an async function.
      const ActivationFrame::Relation relation =
          frame->CompareTo(async_stepping_fp_, interpreted_async_stepping_);
      const bool exited_async_function =
          (relation == ActivationFrame::kCallee && frame->IsAsyncMachinery()) ||
          relation == ActivationFrame::kCaller;
      if (exited_async_function) {
        // Step to the top frame awaiter.
        const Object& async_op = Object::Handle(top_frame_awaiter_);
        top_frame_awaiter_ = Object::null();
        AsyncStepInto(Closure::Cast(async_op));
        return Error::null();
      }
    }
  }

  if (stepping_fp_ != 0) {
    // There is an "interesting frame" set. Only pause at appropriate
    // locations in this frame.
    const ActivationFrame::Relation relation =
        frame->CompareTo(stepping_fp_, interpreted_stepping_);
    if (relation == ActivationFrame::kCallee) {
      // We are in a callee of the frame we're interested in.
      // Ignore this stepping break.
      return Error::null();
    } else if (relation == ActivationFrame::kCaller) {
      // We returned from the "interesting frame", there can be no more
      // stepping breaks for it. Pause at the next appropriate location
      // and let the user set the "interesting" frame again.
      ResetSteppingFramePointers();
    }
  }

  // Never pause inside non-debuggable code or at non-pause positions.
  if (!frame->IsDebuggable()) {
    return Error::null();
  }
  if (!frame->TokenPos().IsDebugPause()) {
    return Error::null();
  }

  if (frame->fp() == last_stepping_fp_ &&
      frame->TokenPos() == last_stepping_pos_) {
    // Do not stop multiple times for the same token position.
    // Several 'debug checked' opcodes may be issued in the same token range.
    return Error::null();
  }

  // In bytecode, do not stop before encountering the DebugCheck opcode.
  // Skip this check if we previously stopped in this frame.
  // If no DebugCheck was emitted, do not stop (InPrologue returns true).
  if (frame->IsInterpreted() && frame->fp() != last_stepping_fp_) {
    uword debug_check_pc = frame->bytecode().GetFirstDebugCheckOpcodePc();
    // Frame pc is return address, debug_check_pc is exact, so use '<=' in test.
    if (debug_check_pc == 0 || frame->pc() <= debug_check_pc) {
      return Error::null();
    }
  }

  // We are stopping in this frame at the token pos.
  last_stepping_fp_ = frame->fp();
  last_stepping_pos_ = frame->TokenPos();

  // If there is an active breakpoint at this pc, then we should have
  // already bailed out of this function in the skip_next_step_ test
  // above.
  ASSERT(!HasActiveBreakpoint(frame->pc()));

  if (FLAG_verbose_debug) {
    OS::PrintErr(
        ">>> single step break at %s:%" Pd ":%" Pd
        " (func %s token %s address %#" Px " offset %#" Px ")\n" ,
        String::Handle(frame->SourceUrl()).ToCString(), frame->LineNumber(),
        frame->ColumnNumber(),
        String::Handle(frame->QualifiedFunctionName()).ToCString(),
        frame->TokenPos().ToCString(), frame->pc(),
        frame->pc() - (frame->IsInterpreted() ? frame->bytecode().PayloadStart()
                                              : frame->code().PayloadStart()));
  }

  // Cache the traces for the duration of the pause, then pause and act on
  // the resume action chosen by the user.
  CacheStackTraces(CollectStackTrace(), CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  if (SteppedForSyntheticAsyncBreakpoint()) {
    CleanupSyntheticAsyncBreakpoint();
  }
  SignalPausedEvent(frame, NULL);
  HandleSteppingRequest(stack_trace_);
  ClearCachedStackTraces();

  // If any error occurred while in the debug message loop, return it here.
  return Thread::Current()->StealStickyError();
}
4557 | |
// Called when execution hits a code breakpoint. Determines which user
// breakpoint (if any) was hit; synthetic async breakpoints trigger a
// silent step-over instead of a pause. Returns any sticky error raised
// while paused.
ErrorPtr Debugger::PauseBreakpoint() {
  // We ignore this breakpoint when the VM is executing code invoked
  // by the debugger to evaluate variables values, or when we see a nested
  // breakpoint or exception event.
  if (ignore_breakpoints_ || IsPaused()) {
    return Error::null();
  }
  DebuggerStackTrace* stack_trace = CollectStackTrace();
  ASSERT(stack_trace->Length() > 0);
  ActivationFrame* top_frame = stack_trace->FrameAt(0);
  ASSERT(top_frame != NULL);
  CodeBreakpoint* cbpt = GetCodeBreakpoint(top_frame->pc());
  ASSERT(cbpt != NULL);

  // Ignore hits inside non-debuggable libraries.
  if (!Library::Handle(top_frame->Library()).IsDebuggable()) {
    return Error::null();
  }

  Breakpoint* bpt_hit = FindHitBreakpoint(cbpt->bpt_location_, top_frame);
  if (bpt_hit == NULL) {
    return Error::null();
  }

  if (bpt_hit->is_synthetic_async()) {
    // NOTE(review): this shadows and re-collects the outer stack_trace —
    // presumably deliberate to get a fresh trace; confirm before reusing.
    DebuggerStackTrace* stack_trace = CollectStackTrace();
    ASSERT(stack_trace->Length() > 0);
    CacheStackTraces(stack_trace, CollectAsyncCausalStackTrace(),
                     CollectAwaiterReturnStackTrace());

    // Hit a synthetic async breakpoint.
    if (FLAG_verbose_debug) {
      OS::PrintErr(
          ">>> hit synthetic breakpoint at %s:%" Pd
          " (func %s token %s address %#" Px " offset %#" Px ")\n" ,
          String::Handle(cbpt->SourceUrl()).ToCString(), cbpt->LineNumber(),
          String::Handle(top_frame->QualifiedFunctionName()).ToCString(),
          cbpt->token_pos().ToCString(), top_frame->pc(),
          top_frame->pc() - (top_frame->IsInterpreted()
                                 ? top_frame->bytecode().PayloadStart()
                                 : top_frame->code().PayloadStart()));
    }

    ASSERT(synthetic_async_breakpoint_ == NULL);
    synthetic_async_breakpoint_ = bpt_hit;
    bpt_hit = NULL;

    // We are at the entry of an async function.
    // We issue a step over to resume at the point after the await statement.
    SetResumeAction(kStepOver);
    // When we single step from a user breakpoint, our next stepping
    // point will be at the exact same pc. Skip it.
    HandleSteppingRequest(stack_trace_, true /* skip next step */);
    ClearCachedStackTraces();
    return Error::null();
  }

  if (FLAG_verbose_debug) {
    OS::PrintErr(">>> hit breakpoint %" Pd " at %s:%" Pd
                 " (func %s token %s address %#" Px " offset %#" Px ")\n" ,
                 bpt_hit->id(), String::Handle(cbpt->SourceUrl()).ToCString(),
                 cbpt->LineNumber(),
                 String::Handle(top_frame->QualifiedFunctionName()).ToCString(),
                 cbpt->token_pos().ToCString(), top_frame->pc(),
                 top_frame->pc() - (top_frame->IsInterpreted()
                                        ? top_frame->bytecode().PayloadStart()
                                        : top_frame->code().PayloadStart()));
  }

  // Pause on the user breakpoint and act on the chosen resume action.
  CacheStackTraces(stack_trace, CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  SignalPausedEvent(top_frame, bpt_hit);
  // When we single step from a user breakpoint, our next stepping
  // point will be at the exact same pc. Skip it.
  HandleSteppingRequest(stack_trace_, true /* skip next step */);
  ClearCachedStackTraces();

  // If any error occurred while in the debug message loop, return it here.
  return Thread::Current()->StealStickyError();
}
4637 | |
4638 | Breakpoint* Debugger::FindHitBreakpoint(BreakpointLocation* location, |
4639 | ActivationFrame* top_frame) { |
4640 | if (location == NULL) { |
4641 | return NULL; |
4642 | } |
4643 | // There may be more than one applicable breakpoint at this location, but we |
4644 | // will report only one as reached. If there is a single-shot breakpoint, we |
4645 | // favor it; then a closure-specific breakpoint ; then an general breakpoint. |
4646 | |
4647 | // First check for a single-shot breakpoint. |
4648 | Breakpoint* bpt = location->breakpoints(); |
4649 | while (bpt != NULL) { |
4650 | if (bpt->IsSingleShot()) { |
4651 | return bpt; |
4652 | } |
4653 | bpt = bpt->next(); |
4654 | } |
4655 | |
4656 | // Now check for a closure-specific breakpoint. |
4657 | bpt = location->breakpoints(); |
4658 | while (bpt != NULL) { |
4659 | if (bpt->IsPerClosure()) { |
4660 | Object& closure = Object::Handle(top_frame->GetClosure()); |
4661 | ASSERT(closure.IsInstance()); |
4662 | ASSERT(Instance::Cast(closure).IsClosure()); |
4663 | if (closure.raw() == bpt->closure()) { |
4664 | return bpt; |
4665 | } |
4666 | } |
4667 | bpt = bpt->next(); |
4668 | } |
4669 | |
4670 | // Finally, check for a general breakpoint. |
4671 | bpt = location->breakpoints(); |
4672 | while (bpt != NULL) { |
4673 | if (bpt->IsRepeated()) { |
4674 | return bpt; |
4675 | } |
4676 | bpt = bpt->next(); |
4677 | } |
4678 | |
4679 | return NULL; |
4680 | } |
4681 | |
// Implements the programmatic pause triggered from Dart code (the
// Developer_debugger native). |msg| is currently unused beyond the TODO.
void Debugger::PauseDeveloper(const String& msg) {
  // We ignore this breakpoint when the VM is executing code invoked
  // by the debugger to evaluate variables values, or when we see a nested
  // breakpoint or exception event.
  if (ignore_breakpoints_ || IsPaused()) {
    return;
  }

  DebuggerStackTrace* stack_trace = CollectStackTrace();
  ASSERT(stack_trace->Length() > 0);
  CacheStackTraces(stack_trace, CollectAsyncCausalStackTrace(),
                   CollectAwaiterReturnStackTrace());
  // TODO(johnmccutchan): Send |msg| to Observatory.

  // We are in the native call to Developer_debugger. the developer
  // gets a better experience by not seeing this call. To accomplish
  // this, we continue execution until the call exits (step out).
  SetResumeAction(kStepOut);
  HandleSteppingRequest(stack_trace_);
  ClearCachedStackTraces();
}
4703 | |
4704 | void Debugger::NotifyIsolateCreated() { |
4705 | if (NeedsIsolateEvents()) { |
4706 | ServiceEvent event(isolate_, ServiceEvent::kIsolateStart); |
4707 | InvokeEventHandler(&event); |
4708 | } |
4709 | } |
4710 | |
4711 | // Return innermost closure contained in 'function' that contains |
4712 | // the given token position. |
4713 | FunctionPtr Debugger::FindInnermostClosure(const Function& function, |
4714 | TokenPosition token_pos) { |
4715 | Zone* zone = Thread::Current()->zone(); |
4716 | const Script& outer_origin = Script::Handle(zone, function.script()); |
4717 | const GrowableObjectArray& closures = GrowableObjectArray::Handle( |
4718 | zone, Isolate::Current()->object_store()->closure_functions()); |
4719 | const intptr_t num_closures = closures.Length(); |
4720 | Function& closure = Function::Handle(zone); |
4721 | Function& best_fit = Function::Handle(zone); |
4722 | for (intptr_t i = 0; i < num_closures; i++) { |
4723 | closure ^= closures.At(i); |
4724 | if ((function.token_pos() < closure.token_pos()) && |
4725 | (closure.end_token_pos() < function.end_token_pos()) && |
4726 | (closure.token_pos() <= token_pos) && |
4727 | (token_pos <= closure.end_token_pos()) && |
4728 | (closure.script() == outer_origin.raw())) { |
4729 | SelectBestFit(&best_fit, &closure); |
4730 | } |
4731 | } |
4732 | return best_fit.raw(); |
4733 | } |
4734 | |
#if !defined(DART_PRECOMPILED_RUNTIME)
// On single line of code with given column number,
// Calculate exact tokenPosition
//
// |start_of_line| is the token position of the line's first token;
// the exact position is obtained by offsetting it by the distance from
// that token's column to |column_number|.
TokenPosition Debugger::FindExactTokenPosition(const Script& script,
                                               TokenPosition start_of_line,
                                               intptr_t column_number) {
  intptr_t line = -1;
  intptr_t col = -1;
  Zone* zone = Thread::Current()->zone();
  kernel::KernelLineStartsReader line_starts_reader(
      TypedData::Handle(zone, script.line_starts()), zone);
  // Recover the (line, col) of start_of_line from the kernel line-starts
  // table; only |col| is used below.
  line_starts_reader.LocationForPosition(start_of_line.value(), &line, &col);
  return TokenPosition(start_of_line.value() + (column_number - col));
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
4750 | |
// Called when |func| has just been compiled (bytecode_loaded == false)
// or had its bytecode loaded (bytecode_loaded == true). Resolves pending
// source breakpoints that fall within |func| and installs code
// breakpoints for them.
void Debugger::HandleCodeChange(bool bytecode_loaded, const Function& func) {
  if (breakpoint_locations_ == NULL) {
    // Return with minimal overhead if there are no breakpoints.
    return;
  }
  if (bytecode_loaded && !FLAG_enable_interpreter) {
    // We do not set breakpoints in bytecode if the interpreter is not used.
    return;
  }
  if (!func.is_debuggable()) {
    // Nothing to do if the function is not debuggable. If there is
    // a pending breakpoint in an inner function (that is debuggable),
    // we'll resolve the breakpoint when the inner function is compiled.
    return;
  }
  // Iterate over all source breakpoints to check whether breakpoints
  // need to be set in the newly compiled function.
  Zone* zone = Thread::Current()->zone();
  Script& script = Script::Handle(zone);
  for (BreakpointLocation* loc = breakpoint_locations_; loc != NULL;
       loc = loc->next()) {
    script = loc->script();
    if (FunctionOverlaps(func, script, loc->token_pos(),
                         loc->end_token_pos())) {
      TokenPosition token_pos = loc->token_pos();
      TokenPosition end_token_pos = loc->end_token_pos();
      if (token_pos != end_token_pos && loc->requested_column_number() >= 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
        // Narrow down the token position range to a single value
        // if requested column number is provided so that inner
        // Closure won't be missed.
        token_pos = FindExactTokenPosition(script, token_pos,
                                           loc->requested_column_number());
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
      }
      // If the position lies inside a closure nested in |func|, the
      // breakpoint belongs to that closure, not to |func| itself.
      const Function& inner_function =
          Function::Handle(zone, FindInnermostClosure(func, token_pos));
      if (!inner_function.IsNull()) {
        if (bytecode_loaded) {
          // func's bytecode was just loaded.
          // If func is a closure and has an inner closure, the inner closure
          // may not have been loaded yet.
          if (inner_function.HasBytecode()) {
            ASSERT(loc->IsResolved(bytecode_loaded));
          } else {
            if (FLAG_verbose_debug) {
              OS::PrintErr(
                  "Pending breakpoint remains unresolved in "
                  "inner bytecode function '%s'\n" ,
                  inner_function.ToFullyQualifiedCString());
            }
          }
          continue;
        } else {
          // func was just compiled.
          // The local function of a function we just compiled cannot
          // be compiled already.
          ASSERT(!inner_function.HasCode());
          if (FLAG_verbose_debug) {
            OS::PrintErr(
                "Pending breakpoint remains unresolved in "
                "inner function '%s'\n" ,
                inner_function.ToFullyQualifiedCString());
          }
          continue;
        }

        // NOTE(review): unreachable — both branches above end in
        // 'continue'; the TODO/ASSERT below never execute.
        // TODO(hausner): What should we do if function is optimized?
        // Can we deoptimize the function?
        ASSERT(!func.HasOptimizedCode());
      }

      // There is no local function within func that contains the
      // breakpoint token position. Resolve the breakpoint if necessary
      // and set the code breakpoints.
      const bool resolved_in_bytecode =
          !bytecode_loaded && loc->IsResolved(/* in_bytecode = */ true);
      if (!loc->IsResolved(bytecode_loaded)) {
        // Resolve source breakpoint in the newly compiled function.
        TokenPosition bp_pos = ResolveBreakpointPos(
            bytecode_loaded, func, loc->token_pos(), loc->end_token_pos(),
            loc->requested_column_number(), token_pos);
        if (!bp_pos.IsDebugPause()) {
          if (FLAG_verbose_debug) {
            OS::PrintErr("Failed resolving breakpoint for function '%s'\n" ,
                         func.ToFullyQualifiedCString());
          }
          continue;
        }
        TokenPosition requested_pos = loc->token_pos();
        TokenPosition requested_end_pos = loc->end_token_pos();
        loc->SetResolved(bytecode_loaded, func, bp_pos);
        // Notify the service of each breakpoint that became resolved.
        Breakpoint* bpt = loc->breakpoints();
        while (bpt != NULL) {
          if (FLAG_verbose_debug) {
            OS::PrintErr(
                "Resolved breakpoint %" Pd
                " to pos %s, function '%s' (requested range %s-%s, "
                "requested col %" Pd ")\n" ,
                bpt->id(), loc->token_pos().ToCString(),
                func.ToFullyQualifiedCString(), requested_pos.ToCString(),
                requested_end_pos.ToCString(), loc->requested_column_number());
          }
          // Do not signal resolution in code if already signaled resolution
          // in bytecode.
          if (!resolved_in_bytecode) {
            SendBreakpointEvent(ServiceEvent::kBreakpointResolved, bpt);
          }
          bpt = bpt->next();
        }
      }
      ASSERT(loc->IsResolved(bytecode_loaded));
      if (FLAG_verbose_debug) {
        Breakpoint* bpt = loc->breakpoints();
        while (bpt != NULL) {
          OS::PrintErr("Setting breakpoint %" Pd " for %s '%s'\n" , bpt->id(),
                       func.IsClosureFunction() ? "closure" : "function" ,
                       func.ToFullyQualifiedCString());
          bpt = bpt->next();
        }
      }
      // Install the actual machine-level breakpoint(s) for this location.
      MakeCodeBreakpointAt(func, loc);
    }
  }
}
4876 | |
// Called when the isolate is done loading libraries. Tries to convert
// latent breakpoints (requested by url/line before the target script was
// loaded) into regular unresolved source breakpoints, now that matching
// scripts may exist in the loaded libraries.
void Debugger::NotifyDoneLoading() {
  if (latent_locations_ == NULL) {
    // Common, fast path.
    return;
  }
  Zone* zone = Thread::Current()->zone();
  Library& lib = Library::Handle(zone);
  Script& script = Script::Handle(zone);
  String& url = String::Handle(zone);
  BreakpointLocation* loc = latent_locations_;
  BreakpointLocation* prev_loc = NULL;
  const GrowableObjectArray& libs =
      GrowableObjectArray::Handle(isolate_->object_store()->libraries());
  // Walk the singly-linked latent list. Matched entries are unlinked inside
  // the inner loop (which also advances |loc|); |loc| only advances at the
  // bottom when no library matched.
  while (loc != NULL) {
    url = loc->url();
    bool found_match = false;
    bool is_package = url.StartsWith(Symbols::PackageScheme());
    for (intptr_t i = 0; i < libs.Length(); i++) {
      lib ^= libs.At(i);
      // Second argument allows partial url matching for non-package urls.
      script = lib.LookupScript(url, !is_package);
      if (!script.IsNull()) {
        // Found a script with matching url for this latent breakpoint.
        // Unlink the latent breakpoint from the list.
        found_match = true;
        BreakpointLocation* matched_loc = loc;
        loc = loc->next();
        if (prev_loc == NULL) {
          latent_locations_ = loc;
        } else {
          prev_loc->set_next(loc);
        }
        // Now find the token range at the requested line and make a
        // new unresolved source breakpoint.
        intptr_t line_number = matched_loc->requested_line_number();
        intptr_t column_number = matched_loc->requested_column_number();
        ASSERT(line_number >= 0);
        TokenPosition first_token_pos, last_token_pos;
        script.TokenRangeAtLine(line_number, &first_token_pos, &last_token_pos);
        if (!first_token_pos.IsDebugPause() || !last_token_pos.IsDebugPause()) {
          // Script does not contain the given line number or there are no
          // tokens on the line. Drop the breakpoint silently.
          Breakpoint* bpt = matched_loc->breakpoints();
          while (bpt != NULL) {
            if (FLAG_verbose_debug) {
              OS::PrintErr("No code found at line %" Pd
                           ": "
                           "dropping latent breakpoint %" Pd " in '%s'\n" ,
                           line_number, bpt->id(), url.ToCString());
            }
            Breakpoint* prev = bpt;
            bpt = bpt->next();
            delete prev;
          }
          delete matched_loc;
        } else {
          // We don't expect to already have a breakpoint for this location.
          // If there is one, assert in debug build but silently drop
          // the latent breakpoint in release build.
          BreakpointLocation* existing_loc =
              GetBreakpointLocation(script, first_token_pos, -1, column_number);
          ASSERT(existing_loc == NULL);
          if (existing_loc == NULL) {
            // Create and register a new source breakpoint for the
            // latent breakpoint.
            BreakpointLocation* unresolved_loc =
                new BreakpointLocation(script, first_token_pos, last_token_pos,
                                       line_number, column_number);
            RegisterBreakpointLocation(unresolved_loc);

            // Move breakpoints over.
            Breakpoint* bpt = matched_loc->breakpoints();
            unresolved_loc->set_breakpoints(bpt);
            matched_loc->set_breakpoints(NULL);
            while (bpt != NULL) {
              bpt->set_bpt_location(unresolved_loc);
              if (FLAG_verbose_debug) {
                OS::PrintErr(
                    "Converted latent breakpoint "
                    "%" Pd " in '%s' at line %" Pd " col %" Pd "\n" ,
                    bpt->id(), url.ToCString(), line_number, column_number);
              }
              bpt = bpt->next();
            }
            SyncBreakpointLocation(unresolved_loc);
          }
          // NOTE(review): if existing_loc was non-NULL in a release build,
          // matched_loc's Breakpoint objects appear to be dropped without
          // being deleted here — presumed acceptable/unreachable; confirm.
          delete matched_loc;
          // Break out of the iteration over loaded libraries. If the
          // same url has been loaded into more than one library, we
          // only set a breakpoint in the first one.
          // TODO(hausner): There is one possible pitfall here.
          // If the user sets a latent breakpoint using a partial url that
          // ends up matching more than one script, the breakpoint might
          // get set in the wrong script.
          // It would be better if we could warn the user if multiple
          // scripts are matching.
          break;
        }
      }
    }
    if (!found_match) {
      // No matching url found in any of the libraries.
      if (FLAG_verbose_debug) {
        Breakpoint* bpt = loc->breakpoints();
        while (bpt != NULL) {
          OS::PrintErr(
              "No match found for latent breakpoint id "
              "%" Pd " with url '%s'\n" ,
              bpt->id(), url.ToCString());
          bpt = bpt->next();
        }
      }
      loc = loc->next();
    }
  }
}
4992 | |
4993 | // TODO(hausner): Could potentially make this faster by checking |
4994 | // whether the call target at pc is a debugger stub. |
4995 | bool Debugger::HasActiveBreakpoint(uword pc) { |
4996 | CodeBreakpoint* cbpt = GetCodeBreakpoint(pc); |
4997 | return (cbpt != NULL) && (cbpt->IsEnabled()); |
4998 | } |
4999 | |
5000 | CodeBreakpoint* Debugger::GetCodeBreakpoint(uword breakpoint_address) { |
5001 | CodeBreakpoint* cbpt = code_breakpoints_; |
5002 | while (cbpt != NULL) { |
5003 | if (cbpt->pc() == breakpoint_address) { |
5004 | return cbpt; |
5005 | } |
5006 | cbpt = cbpt->next(); |
5007 | } |
5008 | return NULL; |
5009 | } |
5010 | |
5011 | CodePtr Debugger::GetPatchedStubAddress(uword breakpoint_address) { |
5012 | CodeBreakpoint* cbpt = GetCodeBreakpoint(breakpoint_address); |
5013 | if (cbpt != NULL) { |
5014 | return cbpt->OrigStubAddress(); |
5015 | } |
5016 | UNREACHABLE(); |
5017 | return Code::null(); |
5018 | } |
5019 | |
5020 | // Remove and delete the source breakpoint bpt and its associated |
5021 | // code breakpoints. |
5022 | void Debugger::RemoveBreakpoint(intptr_t bp_id) { |
5023 | if (RemoveBreakpointFromTheList(bp_id, &breakpoint_locations_)) { |
5024 | return; |
5025 | } |
5026 | RemoveBreakpointFromTheList(bp_id, &latent_locations_); |
5027 | } |
5028 | |
// Remove and delete the source breakpoint bpt and its associated
// code breakpoints. Returns true, if breakpoint was found and removed,
// returns false, if breakpoint was not found.
bool Debugger::RemoveBreakpointFromTheList(intptr_t bp_id,
                                           BreakpointLocation** list) {
  // Outer walk: every location in |list|; inner walk: every breakpoint at
  // that location. Both keep a trailing |prev| pointer for unlinking.
  BreakpointLocation* prev_loc = NULL;
  BreakpointLocation* curr_loc = *list;
  while (curr_loc != NULL) {
    Breakpoint* prev_bpt = NULL;
    Breakpoint* curr_bpt = curr_loc->breakpoints();
    while (curr_bpt != NULL) {
      if (curr_bpt->id() == bp_id) {
        // Unlink the breakpoint from its location's list (head or interior).
        if (prev_bpt == NULL) {
          curr_loc->set_breakpoints(curr_bpt->next());
        } else {
          prev_bpt->set_next(curr_bpt->next());
        }

        // Send event to client before the breakpoint's fields are
        // poisoned and deleted.
        SendBreakpointEvent(ServiceEvent::kBreakpointRemoved, curr_bpt);

        curr_bpt->set_next(NULL);
        curr_bpt->set_bpt_location(NULL);
        // Remove possible references to the breakpoint.
        if (pause_event_ != NULL && pause_event_->breakpoint() == curr_bpt) {
          pause_event_->set_breakpoint(NULL);
        }
        if (synthetic_async_breakpoint_ == curr_bpt) {
          synthetic_async_breakpoint_ = NULL;
        }
        delete curr_bpt;
        curr_bpt = NULL;

        // Delete the breakpoint location object if there are no more
        // breakpoints at that location.
        if (curr_loc->breakpoints() == NULL) {
          if (prev_loc == NULL) {
            *list = curr_loc->next();
          } else {
            prev_loc->set_next(curr_loc->next());
          }

          if (!curr_loc->IsLatent()) {
            // Remove references from code breakpoints to this breakpoint
            // location and disable them.
            // Latent breakpoint locations won't have code breakpoints.
            UnlinkCodeBreakpoints(curr_loc);
          }
          BreakpointLocation* next_loc = curr_loc->next();
          delete curr_loc;
          curr_loc = next_loc;
        }

        // The code breakpoints will be deleted when the VM resumes
        // after the pause event.
        return true;
      }

      prev_bpt = curr_bpt;
      curr_bpt = curr_bpt->next();
    }
    prev_loc = curr_loc;
    curr_loc = curr_loc->next();
  }
  // breakpoint with bp_id does not exist, nothing to do.
  return false;
}
5097 | |
5098 | // Unlink code breakpoints from the given breakpoint location. |
5099 | // They will later be deleted when control returns from the pause event |
5100 | // callback. Also, disable the breakpoint so it no longer fires if it |
5101 | // should be hit before it gets deleted. |
5102 | void Debugger::UnlinkCodeBreakpoints(BreakpointLocation* bpt_location) { |
5103 | ASSERT(bpt_location != NULL); |
5104 | CodeBreakpoint* curr_bpt = code_breakpoints_; |
5105 | while (curr_bpt != NULL) { |
5106 | if (curr_bpt->bpt_location() == bpt_location) { |
5107 | curr_bpt->Disable(); |
5108 | curr_bpt->set_bpt_location(NULL); |
5109 | needs_breakpoint_cleanup_ = true; |
5110 | } |
5111 | curr_bpt = curr_bpt->next(); |
5112 | } |
5113 | } |
5114 | |
5115 | // Remove and delete unlinked code breakpoints, i.e. breakpoints that |
5116 | // are not associated with a breakpoint location. |
5117 | void Debugger::RemoveUnlinkedCodeBreakpoints() { |
5118 | CodeBreakpoint* prev_bpt = NULL; |
5119 | CodeBreakpoint* curr_bpt = code_breakpoints_; |
5120 | while (curr_bpt != NULL) { |
5121 | if (curr_bpt->bpt_location() == NULL) { |
5122 | if (prev_bpt == NULL) { |
5123 | code_breakpoints_ = code_breakpoints_->next(); |
5124 | } else { |
5125 | prev_bpt->set_next(curr_bpt->next()); |
5126 | } |
5127 | CodeBreakpoint* temp_bpt = curr_bpt; |
5128 | curr_bpt = curr_bpt->next(); |
5129 | temp_bpt->Disable(); |
5130 | delete temp_bpt; |
5131 | } else { |
5132 | prev_bpt = curr_bpt; |
5133 | curr_bpt = curr_bpt->next(); |
5134 | } |
5135 | } |
5136 | needs_breakpoint_cleanup_ = false; |
5137 | } |
5138 | |
5139 | BreakpointLocation* Debugger::GetBreakpointLocation( |
5140 | const Script& script, |
5141 | TokenPosition token_pos, |
5142 | intptr_t requested_line, |
5143 | intptr_t requested_column, |
5144 | TokenPosition bytecode_token_pos, |
5145 | TokenPosition code_token_pos) { |
5146 | BreakpointLocation* loc = breakpoint_locations_; |
5147 | while (loc != NULL) { |
5148 | if (loc->script_ == script.raw() && |
5149 | (!token_pos.IsReal() || (loc->token_pos_ == token_pos)) && |
5150 | ((requested_line == -1) || |
5151 | (loc->requested_line_number_ == requested_line)) && |
5152 | ((requested_column == -1) || |
5153 | (loc->requested_column_number_ == requested_column)) && |
5154 | (!bytecode_token_pos.IsReal() || |
5155 | (loc->bytecode_token_pos_ == bytecode_token_pos)) && |
5156 | (!code_token_pos.IsReal() || |
5157 | (loc->code_token_pos_ == code_token_pos))) { |
5158 | return loc; |
5159 | } |
5160 | loc = loc->next(); |
5161 | } |
5162 | return NULL; |
5163 | } |
5164 | |
5165 | Breakpoint* Debugger::GetBreakpointById(intptr_t id) { |
5166 | Breakpoint* bpt = GetBreakpointByIdInTheList(id, breakpoint_locations_); |
5167 | if (bpt != NULL) { |
5168 | return bpt; |
5169 | } |
5170 | return GetBreakpointByIdInTheList(id, latent_locations_); |
5171 | } |
5172 | |
5173 | Breakpoint* Debugger::GetBreakpointByIdInTheList(intptr_t id, |
5174 | BreakpointLocation* list) { |
5175 | BreakpointLocation* loc = list; |
5176 | while (loc != NULL) { |
5177 | Breakpoint* bpt = loc->breakpoints(); |
5178 | while (bpt != NULL) { |
5179 | if (bpt->id() == id) { |
5180 | return bpt; |
5181 | } |
5182 | bpt = bpt->next(); |
5183 | } |
5184 | loc = loc->next(); |
5185 | } |
5186 | return NULL; |
5187 | } |
5188 | |
5189 | void Debugger::MaybeAsyncStepInto(const Closure& async_op) { |
5190 | if (FLAG_async_debugger && IsSingleStepping()) { |
5191 | // We are single stepping, set a breakpoint on the closure activation |
5192 | // and resume execution so we can hit the breakpoint. |
5193 | AsyncStepInto(async_op); |
5194 | } |
5195 | } |
5196 | |
// Sets a breakpoint on the activation of |async_op| and then resumes
// execution, so the isolate runs until that activation is reached.
void Debugger::AsyncStepInto(const Closure& async_op) {
  // Second argument presumably marks the breakpoint as single-shot /
  // debugger-internal — confirm against SetBreakpointAtActivation's decl.
  SetBreakpointAtActivation(async_op, true);
  Continue();
}
5201 | |
// Resumes normal execution: sets the resume action to kContinue and
// clears all single-stepping state so no further pauses are requested.
void Debugger::Continue() {
  SetResumeAction(kContinue);
  ResetSteppingFramePointers();
  NotifySingleStepping(false);
}
5207 | |
5208 | BreakpointLocation* Debugger::GetLatentBreakpoint(const String& url, |
5209 | intptr_t line, |
5210 | intptr_t column) { |
5211 | BreakpointLocation* loc = latent_locations_; |
5212 | String& bpt_url = String::Handle(); |
5213 | while (loc != NULL) { |
5214 | bpt_url = loc->url(); |
5215 | if (bpt_url.Equals(url) && (loc->requested_line_number() == line) && |
5216 | (loc->requested_column_number() == column)) { |
5217 | return loc; |
5218 | } |
5219 | loc = loc->next(); |
5220 | } |
5221 | // No breakpoint for this location requested. Allocate new one. |
5222 | loc = new BreakpointLocation(url, line, column); |
5223 | loc->set_next(latent_locations_); |
5224 | latent_locations_ = loc; |
5225 | return loc; |
5226 | } |
5227 | |
// Links |loc| at the head of the debugger's source breakpoint location
// list. |loc| must not already be linked into any list.
void Debugger::RegisterBreakpointLocation(BreakpointLocation* loc) {
  ASSERT(loc->next() == NULL);
  loc->set_next(breakpoint_locations_);
  breakpoint_locations_ = loc;
}
5233 | |
// Links |cbpt| at the head of the debugger's code breakpoint list.
// |cbpt| must not already be linked into any list.
void Debugger::RegisterCodeBreakpoint(CodeBreakpoint* cbpt) {
  ASSERT(cbpt->next() == NULL);
  cbpt->set_next(code_breakpoints_);
  code_breakpoints_ = cbpt;
}
5239 | |
5240 | #endif // !PRODUCT |
5241 | |
5242 | } // namespace dart |
5243 | |