// Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/stack_trace.h"

#include "vm/dart_api_impl.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/frontend/bytecode_reader.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

// Keep in sync with
// sdk/lib/async/stream_controller.dart:_StreamController._STATE_SUBSCRIBED.
const intptr_t kStreamController_StateSubscribed = 1;

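// Scans the argument slots starting at [last_object_in_caller] for the
// :async_op closure whose function matches [function]. [is_interpreted]
// flips the scan direction, since the interpreter (KBC) grows its stack
// upwards rather than downwards.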
ClosurePtr FindClosureInFrame(ObjectPtr* last_object_in_caller,
                              const Function& function,
                              bool is_interpreted) {
  NoSafepointScope nsp;

  // The callee has function signature
  //   :async_op([result, exception, stack])
  // So we are guaranteed to
  //   a) have only tagged arguments on the stack until we find the :async_op
  //      closure, and
  //   b) find the async closure.
  auto& closure = Closure::Handle();
  for (intptr_t i = 0; i < 4; i++) {
    // KBC builds the stack upwards instead of the usual downwards stack.
    ObjectPtr arg = last_object_in_caller[(is_interpreted ? -i : i)];
    if (arg->IsHeapObject() && arg->GetClassId() == kClosureCid) {
      closure = Closure::RawCast(arg);
      if (closure.function() == function.raw()) {
        return closure.raw();
      }
    }
  }
  UNREACHABLE();
}

// Finds the current yield index from an async closure.
// Async closures contain a variable, :await_jump_var, that holds the index
// into the async wrapper.
intptr_t GetYieldIndex(const Closure& receiver_closure) {
  const auto& function = Function::Handle(receiver_closure.function());
  if (!function.IsAsyncClosure() && !function.IsAsyncGenClosure()) {
    return PcDescriptorsLayout::kInvalidYieldIndex;
  }
  const auto& await_jump_var =
      Object::Handle(Context::Handle(receiver_closure.context())
                         .At(Context::kAwaitJumpVarIndex));
  ASSERT(await_jump_var.IsSmi());
  return Smi::Cast(await_jump_var).Value();
}

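// Returns the PC offset recorded in [pc_descs] for the given [yield_index],
// or 0 if the index is kInvalidYieldIndex.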
intptr_t FindPcOffset(const PcDescriptors& pc_descs, intptr_t yield_index) {
  if (yield_index == PcDescriptorsLayout::kInvalidYieldIndex) {
    return 0;
  }
  PcDescriptors::Iterator iter(pc_descs, PcDescriptorsLayout::kAnyKind);
  while (iter.MoveNext()) {
    if (iter.YieldIndex() == yield_index) {
      return iter.PcOffset();
    }
  }
  UNREACHABLE();  // If we cannot find it we have a bug.
}

#if !defined(DART_PRECOMPILED_RUNTIME)
intptr_t FindPcOffset(const Bytecode& bytecode, intptr_t yield_index) {
  if (yield_index == PcDescriptorsLayout::kInvalidYieldIndex) {
    return 0;
  }
  if (!bytecode.HasSourcePositions()) {
    return 0;
  }
  intptr_t last_yield_point = 0;
  kernel::BytecodeSourcePositionsIterator iter(Thread::Current()->zone(),
                                               bytecode);
  while (iter.MoveNext()) {
    if (iter.IsYieldPoint()) {
      last_yield_point++;
    }
    if (last_yield_point == yield_index) {
      return iter.PcOffset();
    }
  }
  UNREACHABLE();  // If we cannot find it we have a bug.
}
#endif

// Helper class for finding the closure of the caller.
// This is done via the _AsyncAwaitCompleter, which holds a _Future whose
// _resultOrListeners field holds a _FutureListener, which in turn holds the
// callback.
class CallerClosureFinder {
 public:
  // An instance caches the class and field references so the lookups don't
  // have to be repeated for every frame in the stack.
  explicit CallerClosureFinder(Zone* zone)
      : receiver_context_(Context::Handle(zone)),
        receiver_function_(Function::Handle(zone)),
        parent_function_(Function::Handle(zone)),
        context_entry_(Object::Handle(zone)),
        is_sync(Object::Handle(zone)),
        future_(Object::Handle(zone)),
        listener_(Object::Handle(zone)),
        callback_(Object::Handle(zone)),
        controller_(Object::Handle(zone)),
        state_(Object::Handle(zone)),
        var_data_(Object::Handle(zone)),
        callback_instance_(Object::Handle(zone)),
        future_impl_class(Class::Handle(zone)),
        async_await_completer_class(Class::Handle(zone)),
        future_listener_class(Class::Handle(zone)),
        async_start_stream_controller_class(Class::Handle(zone)),
        stream_controller_class(Class::Handle(zone)),
        async_stream_controller_class(Class::Handle(zone)),
        controller_subscription_class(Class::Handle(zone)),
        buffering_stream_subscription_class(Class::Handle(zone)),
        stream_iterator_class(Class::Handle(zone)),
        completer_is_sync_field(Field::Handle(zone)),
        completer_future_field(Field::Handle(zone)),
        future_result_or_listeners_field(Field::Handle(zone)),
        callback_field(Field::Handle(zone)),
        controller_controller_field(Field::Handle(zone)),
        var_data_field(Field::Handle(zone)),
        state_field(Field::Handle(zone)),
        on_data_field(Field::Handle(zone)),
        state_data_field(Field::Handle(zone)) {
    const auto& async_lib = Library::Handle(zone, Library::AsyncLibrary());
    // Look up classes:
    // - async:
    future_impl_class =
        async_lib.LookupClassAllowPrivate(Symbols::FutureImpl());
    ASSERT(!future_impl_class.IsNull());
    async_await_completer_class =
        async_lib.LookupClassAllowPrivate(Symbols::_AsyncAwaitCompleter());
    ASSERT(!async_await_completer_class.IsNull());
    future_listener_class =
        async_lib.LookupClassAllowPrivate(Symbols::_FutureListener());
    ASSERT(!future_listener_class.IsNull());
    // - async*:
    async_start_stream_controller_class = async_lib.LookupClassAllowPrivate(
        Symbols::_AsyncStarStreamController());
    ASSERT(!async_start_stream_controller_class.IsNull());
    stream_controller_class =
        async_lib.LookupClassAllowPrivate(Symbols::_StreamController());
    ASSERT(!stream_controller_class.IsNull());
    async_stream_controller_class =
        async_lib.LookupClassAllowPrivate(Symbols::_AsyncStreamController());
    ASSERT(!async_stream_controller_class.IsNull());
    controller_subscription_class =
        async_lib.LookupClassAllowPrivate(Symbols::_ControllerSubscription());
    ASSERT(!controller_subscription_class.IsNull());
    buffering_stream_subscription_class = async_lib.LookupClassAllowPrivate(
        Symbols::_BufferingStreamSubscription());
    ASSERT(!buffering_stream_subscription_class.IsNull());
    stream_iterator_class =
        async_lib.LookupClassAllowPrivate(Symbols::_StreamIterator());
    ASSERT(!stream_iterator_class.IsNull());

    // Look up fields:
    // - async:
    completer_is_sync_field =
        async_await_completer_class.LookupFieldAllowPrivate(Symbols::isSync());
    ASSERT(!completer_is_sync_field.IsNull());
    completer_future_field =
        async_await_completer_class.LookupFieldAllowPrivate(Symbols::_future());
    ASSERT(!completer_future_field.IsNull());
    future_result_or_listeners_field =
        future_impl_class.LookupFieldAllowPrivate(
            Symbols::_resultOrListeners());
    ASSERT(!future_result_or_listeners_field.IsNull());
    callback_field =
        future_listener_class.LookupFieldAllowPrivate(Symbols::callback());
    ASSERT(!callback_field.IsNull());
    // - async*:
    controller_controller_field =
        async_start_stream_controller_class.LookupFieldAllowPrivate(
            Symbols::controller());
    ASSERT(!controller_controller_field.IsNull());
    state_field =
        stream_controller_class.LookupFieldAllowPrivate(Symbols::_state());
    ASSERT(!state_field.IsNull());
    var_data_field =
        stream_controller_class.LookupFieldAllowPrivate(Symbols::_varData());
    ASSERT(!var_data_field.IsNull());
    on_data_field = buffering_stream_subscription_class.LookupFieldAllowPrivate(
        Symbols::_onData());
    ASSERT(!on_data_field.IsNull());
    state_data_field =
        stream_iterator_class.LookupFieldAllowPrivate(Symbols::_stateData());
    ASSERT(!state_data_field.IsNull());
  }

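  // Returns the callback closure of the _FutureListener registered on
  // [future_], or Closure::null() if the future's _resultOrListeners does not
  // hold such a listener or its callback is null.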
  ClosurePtr GetCallerInFutureImpl(const Object& future_) {
    ASSERT(!future_.IsNull());
    ASSERT(future_.GetClassId() == future_impl_class.id());

    listener_ =
        Instance::Cast(future_).GetField(future_result_or_listeners_field);
    if (listener_.GetClassId() != future_listener_class.id()) {
      return Closure::null();
    }

    callback_ = Instance::Cast(listener_).GetField(callback_field);
    // This happens e.g. for: await f().catchError(..);
    if (callback_.IsNull()) {
      return Closure::null();
    }
    ASSERT(callback_.IsClosure());

    return Closure::Cast(callback_).raw();
  }

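  // For an async closure: follows the captured _AsyncAwaitCompleter to its
  // _future and returns the awaiting callback registered on that future.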
  ClosurePtr FindCallerInAsyncClosure(const Context& receiver_context) {
    context_entry_ = receiver_context.At(Context::kAsyncCompleterIndex);
    ASSERT(context_entry_.IsInstance());
    ASSERT(context_entry_.GetClassId() == async_await_completer_class.id());

    const Instance& completer = Instance::Cast(context_entry_);
    future_ = completer.GetField(completer_future_field);
    return GetCallerInFutureImpl(future_);
  }

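  // For an async* closure: follows the captured _AsyncStarStreamController to
  // its _StreamController and, if the stream is subscribed, to the
  // subscription's _onData callback (or, for await-for, through the
  // _StreamIterator to the awaiting future's callback).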
  ClosurePtr FindCallerInAsyncGenClosure(const Context& receiver_context) {
    context_entry_ = receiver_context.At(Context::kControllerIndex);
    ASSERT(context_entry_.IsInstance());
    ASSERT(context_entry_.GetClassId() ==
           async_start_stream_controller_class.id());

    const Instance& controller = Instance::Cast(context_entry_);
    controller_ = controller.GetField(controller_controller_field);
    ASSERT(!controller_.IsNull());
    ASSERT(controller_.GetClassId() == async_stream_controller_class.id());

    state_ = Instance::Cast(controller_).GetField(state_field);
    ASSERT(state_.IsSmi());
    if (Smi::Cast(state_).Value() != kStreamController_StateSubscribed) {
      return Closure::null();
    }

    // _StreamController._varData
    var_data_ = Instance::Cast(controller_).GetField(var_data_field);
    ASSERT(var_data_.GetClassId() == controller_subscription_class.id());

    // _ControllerSubscription<T> / _BufferingStreamSubscription<T>._onData
    callback_ = Instance::Cast(var_data_).GetField(on_data_field);
    ASSERT(callback_.IsClosure());

    // If this is not the "_StreamIterator._onData" tear-off, we return the
    // callback we found.
    receiver_function_ = Closure::Cast(callback_).function();
    if (!receiver_function_.IsImplicitInstanceClosureFunction() ||
        receiver_function_.Owner() != stream_iterator_class.raw()) {
      return Closure::Cast(callback_).raw();
    }

    // All implicit closure functions (tear-offs) have the "this" receiver
    // captured.
    receiver_context_ = Closure::Cast(callback_).context();
    ASSERT(receiver_context_.num_variables() == 1);
    callback_instance_ = receiver_context_.At(0);
    ASSERT(callback_instance_.IsInstance());

    // If the async* stream is await-for'd:
    if (callback_instance_.GetClassId() == stream_iterator_class.id()) {
      // _StreamIterator._stateData
      future_ = Instance::Cast(callback_instance_).GetField(state_data_field);
      return GetCallerInFutureImpl(future_);
    }

    UNREACHABLE();  // If no onData is found we have a bug.
  }

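  // Finds the closure that awaits [receiver_closure]: dispatches on whether
  // the receiver is an async closure, an async* closure, or one of the
  // recognized local closures inside Future.timeout / Future.wait.
  // Returns Closure::null() if no awaiter can be found.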
  ClosurePtr FindCaller(const Closure& receiver_closure) {
    receiver_function_ = receiver_closure.function();
    receiver_context_ = receiver_closure.context();

    if (receiver_function_.IsAsyncClosure()) {
      return FindCallerInAsyncClosure(receiver_context_);
    } else if (receiver_function_.IsAsyncGenClosure()) {
      return FindCallerInAsyncGenClosure(receiver_context_);
    } else if (receiver_function_.IsLocalFunction()) {
      parent_function_ = receiver_function_.parent_function();
      if (parent_function_.recognized_kind() ==
          MethodRecognizer::kFutureTimeout) {
        context_entry_ =
            receiver_context_.At(Context::kFutureTimeoutFutureIndex);
        return GetCallerInFutureImpl(context_entry_);
      } else if (parent_function_.recognized_kind() ==
                 MethodRecognizer::kFutureWait) {
        receiver_context_ = receiver_context_.parent();
        ASSERT(!receiver_context_.IsNull());
        context_entry_ = receiver_context_.At(Context::kFutureWaitFutureIndex);
        return GetCallerInFutureImpl(context_entry_);
      }
    }

    return Closure::null();
  }

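  // Returns true if [receiver_closure]'s async machinery has already
  // suspended at least once: async* closures always count as running
  // asynchronously, while async closures are checked via
  // _AsyncAwaitCompleter.isSync.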
  bool IsRunningAsync(const Closure& receiver_closure) {
    receiver_function_ = receiver_closure.function();
    receiver_context_ = receiver_closure.context();

    // async* functions are never started synchronously; they only start
    // running after the first `listen()` call on the returned `Stream`.
    if (receiver_function_.IsAsyncGenClosure()) {
      return true;
    }
    ASSERT(receiver_function_.IsAsyncClosure());

    context_entry_ = receiver_context_.At(Context::kAsyncCompleterIndex);
    ASSERT(context_entry_.IsInstance());
    ASSERT(context_entry_.GetClassId() == async_await_completer_class.id());

    const Instance& completer = Instance::Cast(context_entry_);
    is_sync = completer.GetField(completer_is_sync_field);
    ASSERT(!is_sync.IsNull());
    ASSERT(is_sync.IsBool());
    // _AsyncAwaitCompleter.isSync indicates whether the future should be
    // completed asynchronously or synchronously, based on whether the
    // function has yielded yet. isSync is true when the :async_op is running
    // asynchronously.
    return Bool::Cast(is_sync).value();
  }

 private:
  Context& receiver_context_;
  Function& receiver_function_;
  Function& parent_function_;

  Object& context_entry_;
  Object& is_sync;
  Object& future_;
  Object& listener_;
  Object& callback_;
  Object& controller_;
  Object& state_;
  Object& var_data_;
  Object& callback_instance_;

  Class& future_impl_class;
  Class& async_await_completer_class;
  Class& future_listener_class;
  Class& async_start_stream_controller_class;
  Class& stream_controller_class;
  Class& async_stream_controller_class;
  Class& controller_subscription_class;
  Class& buffering_stream_subscription_class;
  Class& stream_iterator_class;

  Field& completer_is_sync_field;
  Field& completer_future_field;
  Field& future_result_or_listeners_field;
  Field& callback_field;
  Field& controller_controller_field;
  Field& var_data_field;
  Field& state_field;
  Field& on_data_field;
  Field& state_data_field;
};

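// Lazily collects the frames of the current stack trace: synchronous Dart
// frames are walked directly, and once a suspended async/async* closure is
// found, the remaining awaiters are discovered through CallerClosureFinder,
// with AsynchronousGapMarker entries injected between them.
//
// Illustrative call (a sketch only; the surrounding setup is assumed and not
// taken from an actual call site):
//
//   const auto& code_array =
//       GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
//   const auto& pc_offset_array =
//       GrowableObjectArray::Handle(zone, GrowableObjectArray::New());
//   bool has_async = false;
//   StackTraceUtils::CollectFramesLazy(thread, code_array, pc_offset_array,
//                                      /*skip_frames=*/0,
//                                      /*on_sync_frames=*/nullptr,
//                                      &has_async);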
void StackTraceUtils::CollectFramesLazy(
    Thread* thread,
    const GrowableObjectArray& code_array,
    const GrowableObjectArray& pc_offset_array,
    int skip_frames,
    std::function<void(StackFrame*)>* on_sync_frames,
    bool* has_async) {
  if (has_async != nullptr) {
    *has_async = false;
  }
  Zone* zone = thread->zone();
  DartFrameIterator frames(thread,
                           StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();

  // If e.g. the isolate is paused before executing anything, we might not get
  // any frames at all. Bail:
  if (frame == nullptr) {
    return;
  }

  auto& function = Function::Handle(zone);
  auto& code = Code::Handle(zone);
  auto& bytecode = Bytecode::Handle(zone);
  auto& offset = Smi::Handle(zone);

  auto& closure = Closure::Handle(zone);
  CallerClosureFinder caller_closure_finder(zone);
  auto& pc_descs = PcDescriptors::Handle();

  for (; frame != nullptr; frame = frames.NextFrame()) {
    if (skip_frames > 0) {
      skip_frames--;
      continue;
    }

    if (frame->is_interpreted()) {
      bytecode = frame->LookupDartBytecode();
      ASSERT(!bytecode.IsNull());
      function = bytecode.function();
      if (function.IsNull()) {
        continue;
      }
      RELEASE_ASSERT(function.raw() == frame->LookupDartFunction());
    } else {
      function = frame->LookupDartFunction();
    }

    // Add the current synchronous frame.
    if (frame->is_interpreted()) {
      code_array.Add(bytecode);
      const intptr_t pc_offset = frame->pc() - bytecode.PayloadStart();
      ASSERT(pc_offset > 0 && pc_offset <= bytecode.Size());
      offset = Smi::New(pc_offset);
    } else {
      code = frame->LookupDartCode();
      ASSERT(function.raw() == code.function());
      code_array.Add(code);
      const intptr_t pc_offset = frame->pc() - code.PayloadStart();
      ASSERT(pc_offset > 0 && pc_offset <= code.Size());
      offset = Smi::New(pc_offset);
    }
    pc_offset_array.Add(offset);
    if (on_sync_frames != nullptr) {
      (*on_sync_frames)(frame);
    }

    // Either continue the loop (sync-async case) or find all await'ers and
    // return.
    if (!function.IsNull() &&
        (function.IsAsyncClosure() || function.IsAsyncGenClosure())) {
      if (has_async != nullptr) {
        *has_async = true;
      }

      // Next, look up the caller's closure on the stack and walk backwards
      // through the yields.
      ObjectPtr* last_caller_obj =
          reinterpret_cast<ObjectPtr*>(frame->GetCallerSp());
      closure = FindClosureInFrame(last_caller_obj, function,
                                   frame->is_interpreted());

      // If this async function hasn't yielded yet, we're still dealing with a
      // normal stack. Continue to the next frame as usual.
      if (!caller_closure_finder.IsRunningAsync(closure)) {
        continue;
      }

      // Inject async suspension marker.
      code_array.Add(StubCode::AsynchronousGapMarker());
      offset = Smi::New(0);
      pc_offset_array.Add(offset);

      // Skip: Already handled this frame's function above.
      closure = caller_closure_finder.FindCaller(closure);

      for (; !closure.IsNull();
           closure = caller_closure_finder.FindCaller(closure)) {
        function = closure.function();
        // In hot-reload test mode we sometimes have to do this:
        if (!function.HasCode() && !function.HasBytecode()) {
          function.EnsureHasCode();
        }
        if (function.HasBytecode()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
          bytecode = function.bytecode();
          code_array.Add(bytecode);
          offset = Smi::New(FindPcOffset(bytecode, GetYieldIndex(closure)));
#else
          UNREACHABLE();
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
        } else if (function.HasCode()) {
          code = function.CurrentCode();
          code_array.Add(code);
          pc_descs = code.pc_descriptors();
          offset = Smi::New(FindPcOffset(pc_descs, GetYieldIndex(closure)));
        } else {
          UNREACHABLE();
        }
        // Unlike other sources of PC offsets, the offset may be 0 here if we
        // reach a non-async closure receiving the yielded value.
        ASSERT(offset.Value() >= 0);
        pc_offset_array.Add(offset);

        // Inject async suspension marker.
        code_array.Add(StubCode::AsynchronousGapMarker());
        offset = Smi::New(0);
        pc_offset_array.Add(offset);
      }

      // Ignore the rest of the stack; already unwound all async calls.
      return;
    }
  }

  return;
}

// Count the number of frames that are on the stack.
intptr_t StackTraceUtils::CountFrames(Thread* thread,
                                      int skip_frames,
                                      const Function& async_function,
                                      bool* sync_async_end) {
  Zone* zone = thread->zone();
  intptr_t frame_count = 0;
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, thread,
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  ASSERT(frame != NULL);  // We expect to find a dart invocation frame.
  Function& function = Function::Handle(zone);
  Code& code = Code::Handle(zone);
  Bytecode& bytecode = Bytecode::Handle(zone);
  String& function_name = String::Handle(zone);
  const bool async_function_is_null = async_function.IsNull();
  int sync_async_gap_frames = -1;
  ASSERT(async_function_is_null || sync_async_end != NULL);
  for (; frame != NULL && sync_async_gap_frames != 0;
       frame = frames.NextFrame()) {
    if (!frame->IsDartFrame()) {
      continue;
    }
    if (skip_frames > 0) {
      skip_frames--;
      continue;
    }
    if (frame->is_interpreted()) {
      bytecode = frame->LookupDartBytecode();
      function = bytecode.function();
      if (function.IsNull()) continue;
    } else {
      code = frame->LookupDartCode();
      function = code.function();
    }
    const bool function_is_null = function.IsNull();
    if (!function_is_null && sync_async_gap_frames > 0) {
      function_name = function.QualifiedScrubbedName();
      if (!CheckAndSkipAsync(&sync_async_gap_frames, function_name)) {
        *sync_async_end = false;
        return frame_count;
      }
    } else {
      frame_count++;
    }
    if (!async_function_is_null && !function_is_null &&
        (async_function.raw() == function.parent_function())) {
      sync_async_gap_frames = kSyncAsyncFrameGap;
    }
  }
  if (!async_function_is_null) {
    *sync_async_end = sync_async_gap_frames == 0;
  }
  return frame_count;
}

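// Collects up to [count] Dart frames into [code_array]/[pc_offset_array],
// starting at [array_offset] and skipping the first [skip_frames] Dart
// frames. Returns the number of frames actually collected.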
intptr_t StackTraceUtils::CollectFrames(Thread* thread,
                                        const Array& code_array,
                                        const Array& pc_offset_array,
                                        intptr_t array_offset,
                                        intptr_t count,
                                        int skip_frames) {
  Zone* zone = thread->zone();
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, thread,
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  ASSERT(frame != NULL);  // We expect to find a dart invocation frame.
  Function& function = Function::Handle(zone);
  Code& code = Code::Handle(zone);
  Bytecode& bytecode = Bytecode::Handle(zone);
  Smi& offset = Smi::Handle(zone);
  intptr_t collected_frames_count = 0;
  for (; (frame != NULL) && (collected_frames_count < count);
       frame = frames.NextFrame()) {
    if (!frame->IsDartFrame()) {
      continue;
    }
    if (skip_frames > 0) {
      skip_frames--;
      continue;
    }
    if (frame->is_interpreted()) {
      bytecode = frame->LookupDartBytecode();
      function = bytecode.function();
      if (function.IsNull()) {
        continue;
      }
      offset = Smi::New(frame->pc() - bytecode.PayloadStart());
      code_array.SetAt(array_offset, bytecode);
    } else {
      code = frame->LookupDartCode();
      offset = Smi::New(frame->pc() - code.PayloadStart());
      code_array.SetAt(array_offset, code);
    }
    pc_offset_array.SetAt(array_offset, offset);
    array_offset++;
    collected_frames_count++;
  }
  return collected_frames_count;
}

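// Extracts the async function and the code/pc-offset arrays from the
// thread's current async stack trace, if any. Returns the length of that
// async stack trace, or 0 if the thread has none.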
intptr_t StackTraceUtils::ExtractAsyncStackTraceInfo(
    Thread* thread,
    Function* async_function,
    StackTrace* async_stack_trace_out,
    Array* async_code_array,
    Array* async_pc_offset_array) {
  if (thread->async_stack_trace() == StackTrace::null()) {
    return 0;
  }
  *async_stack_trace_out = thread->async_stack_trace();
  ASSERT(!async_stack_trace_out->IsNull());
  const StackTrace& async_stack_trace =
      StackTrace::Handle(thread->async_stack_trace());
  const intptr_t async_stack_trace_length = async_stack_trace.Length();
  // At least two entries (0: gap marker, 1: async function).
  ASSERT(async_stack_trace_length >= 2);
  // Validate the structure of this stack trace.
  *async_code_array = async_stack_trace.code_array();
  ASSERT(!async_code_array->IsNull());
  *async_pc_offset_array = async_stack_trace.pc_offset_array();
  ASSERT(!async_pc_offset_array->IsNull());
  // We start with the asynchronous gap marker.
  ASSERT(async_code_array->At(0) != Code::null());
  ASSERT(async_code_array->At(0) == StubCode::AsynchronousGapMarker().raw());
  const Object& code_object = Object::Handle(async_code_array->At(1));
  if (code_object.IsCode()) {
    *async_function = Code::Cast(code_object).function();
  } else {
    ASSERT(code_object.IsBytecode());
    *async_function = Bytecode::Cast(code_object).function();
  }
  ASSERT(!async_function->IsNull());
  ASSERT(async_function->IsAsyncFunction() ||
         async_function->IsAsyncGenerator());
  return async_stack_trace_length;
}

}  // namespace dart