1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/code_descriptors.h"
6
7#include "vm/compiler/api/deopt_id.h"
8#include "vm/log.h"
9#include "vm/object_store.h"
10#include "vm/zone_text_buffer.h"
11
12namespace dart {
13
void DescriptorList::AddDescriptor(PcDescriptorsLayout::Kind kind,
                                   intptr_t pc_offset,
                                   intptr_t deopt_id,
                                   TokenPosition token_pos,
                                   intptr_t try_index,
                                   intptr_t yield_index) {
  // yield index 0 is reserved for normal entry.
  RELEASE_ASSERT(yield_index != 0);

  // Every descriptor must carry something useful: either it is a kind that
  // is meaningful without a deopt id, or it has a valid yield index, or a
  // real deopt id.
  ASSERT((kind == PcDescriptorsLayout::kRuntimeCall) ||
         (kind == PcDescriptorsLayout::kBSSRelocation) ||
         (kind == PcDescriptorsLayout::kOther) ||
         (yield_index != PcDescriptorsLayout::kInvalidYieldIndex) ||
         (deopt_id != DeoptId::kNone));

  // When precompiling, we only use pc descriptors for exceptions,
  // relocations and yield indices.
  if (!FLAG_precompiled_mode || try_index != -1 ||
      yield_index != PcDescriptorsLayout::kInvalidYieldIndex ||
      kind == PcDescriptorsLayout::kBSSRelocation) {
    const int32_t kind_and_metadata =
        PcDescriptorsLayout::KindAndMetadata::Encode(kind, try_index,
                                                     yield_index);

    // Fields are delta-encoded against the previous entry to keep the
    // stream compact; prev_* members track the running baselines.
    PcDescriptors::EncodeInteger(&encoded_data_, kind_and_metadata);
    PcDescriptors::EncodeInteger(&encoded_data_, pc_offset - prev_pc_offset);
    prev_pc_offset = pc_offset;

    if (!FLAG_precompiled_mode) {
      // Deopt ids and token positions are only recorded for the JIT.
      PcDescriptors::EncodeInteger(&encoded_data_, deopt_id - prev_deopt_id);
      PcDescriptors::EncodeInteger(&encoded_data_,
                                   token_pos.value() - prev_token_pos);
      prev_deopt_id = deopt_id;
      prev_token_pos = token_pos.value();
    }
  }
}
51
52PcDescriptorsPtr DescriptorList::FinalizePcDescriptors(uword entry_point) {
53 if (encoded_data_.length() == 0) {
54 return Object::empty_descriptors().raw();
55 }
56 return PcDescriptors::New(&encoded_data_);
57}
58
59// Encode unsigned integer |value| in LEB128 format and store into |data|.
60void CompressedStackMapsBuilder::EncodeLEB128(GrowableArray<uint8_t>* data,
61 uintptr_t value) {
62 while (true) {
63 uint8_t part = value & 0x7f;
64 value >>= 7;
65 if (value != 0) part |= 0x80;
66 data->Add(part);
67 if (value == 0) break;
68 }
69}
70
// Appends a stack map entry for the code at |pc_offset|. |bitmap| holds the
// spill slot bits first, followed by the non-spill slot bits.
void CompressedStackMapsBuilder::AddEntry(intptr_t pc_offset,
                                          BitmapBuilder* bitmap,
                                          intptr_t spill_slot_bit_count) {
  ASSERT(bitmap != nullptr);
  // Entries must be added in strictly increasing PC order so the delta
  // below is always positive.
  ASSERT(pc_offset > last_pc_offset_);
  ASSERT(spill_slot_bit_count >= 0 && spill_slot_bit_count <= bitmap->Length());
  auto const pc_delta = pc_offset - last_pc_offset_;
  auto const non_spill_slot_bit_count = bitmap->Length() - spill_slot_bit_count;
  // Entry layout: PC delta, spill bit count, non-spill bit count, then the
  // raw bitmap bytes.
  EncodeLEB128(&encoded_bytes_, pc_delta);
  EncodeLEB128(&encoded_bytes_, spill_slot_bit_count);
  EncodeLEB128(&encoded_bytes_, non_spill_slot_bit_count);
  bitmap->AppendAsBytesTo(&encoded_bytes_);
  last_pc_offset_ = pc_offset;
}
85
86CompressedStackMapsPtr CompressedStackMapsBuilder::Finalize() const {
87 if (encoded_bytes_.length() == 0) return CompressedStackMaps::null();
88 return CompressedStackMaps::NewInlined(encoded_bytes_);
89}
90
// |maps| must not itself be the global table. When |maps| stores only
// offsets into the global table, |global_table| supplies the bit payloads.
CompressedStackMapsIterator::CompressedStackMapsIterator(
    const CompressedStackMaps& maps,
    const CompressedStackMaps& global_table)
    : maps_(maps),
      bits_container_(maps_.UsesGlobalTable() ? global_table : maps_) {
  ASSERT(!maps_.IsGlobalTable());
  ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
}
99
// Convenience constructor: fetches the canonical global table from the
// isolate's object store, but only when |maps| actually needs it.
CompressedStackMapsIterator::CompressedStackMapsIterator(
    const CompressedStackMaps& maps)
    : CompressedStackMapsIterator(
          maps,
          // Only look up the global table if the map will end up using it.
          maps.UsesGlobalTable() ? CompressedStackMaps::Handle(
                                       Thread::Current()
                                           ->isolate()
                                           ->object_store()
                                           ->canonicalized_stack_map_entries())
                                 : Object::null_compressed_stack_maps()) {}
111
112CompressedStackMapsIterator::CompressedStackMapsIterator(
113 const CompressedStackMapsIterator& it)
114 : maps_(it.maps_),
115 bits_container_(it.bits_container_),
116 next_offset_(it.next_offset_),
117 current_pc_offset_(it.current_pc_offset_),
118 current_global_table_offset_(it.current_global_table_offset_),
119 current_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
120 current_non_spill_slot_bit_count_(it.current_spill_slot_bit_count_),
121 current_bits_offset_(it.current_bits_offset_) {}
122
// Decode unsigned integer in LEB128 format from the payload of |maps| and
// update |byte_index|.
uintptr_t CompressedStackMapsIterator::DecodeLEB128(
    const CompressedStackMaps& maps,
    uintptr_t* byte_index) {
  uword shift = 0;
  uintptr_t value = 0;
  uint8_t part = 0;
  do {
    ASSERT(*byte_index < maps.payload_size());
    part = maps.PayloadByte((*byte_index)++);
    // Low 7 bits are payload; the high bit marks a continuation byte.
    value |= static_cast<uintptr_t>(part & 0x7f) << shift;
    shift += 7;
  } while ((part & 0x80) != 0);

  return value;
}
140
// Advances the iterator to the next entry. Returns false when exhausted.
// For global-table-backed maps only the PC offset and table offset are
// decoded eagerly; the bit counts and bitmap offset are loaded lazily.
bool CompressedStackMapsIterator::MoveNext() {
  // Empty CompressedStackMaps are represented as null values.
  if (maps_.IsNull() || next_offset_ >= maps_.payload_size()) return false;
  uintptr_t offset = next_offset_;

  auto const pc_delta = DecodeLEB128(maps_, &offset);
  // Accumulated PC offsets must stay within 32 bits.
  ASSERT(pc_delta <= (kMaxUint32 - current_pc_offset_));
  current_pc_offset_ += pc_delta;

  // Table-using CSMs have a table offset after the PC offset delta, whereas
  // the post-delta part of inlined entries has the same information as
  // global table entries.
  if (maps_.UsesGlobalTable()) {
    current_global_table_offset_ = DecodeLEB128(maps_, &offset);
    ASSERT(current_global_table_offset_ < bits_container_.payload_size());

    // Since generally we only use entries in the GC and the GC only needs
    // the rest of the entry information if the PC offset matches, we lazily
    // load and cache the information stored in the global object when it is
    // actually requested.
    current_spill_slot_bit_count_ = -1;
    current_non_spill_slot_bit_count_ = -1;
    current_bits_offset_ = -1;
  } else {
    current_spill_slot_bit_count_ = DecodeLEB128(maps_, &offset);
    ASSERT(current_spill_slot_bit_count_ >= 0);

    current_non_spill_slot_bit_count_ = DecodeLEB128(maps_, &offset);
    ASSERT(current_non_spill_slot_bit_count_ >= 0);

    const auto stackmap_bits =
        current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
    const uintptr_t stackmap_size =
        Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
    // The bitmap bytes for this entry must fit inside the payload.
    ASSERT(stackmap_size <= (maps_.payload_size() - offset));

    current_bits_offset_ = offset;
    offset += stackmap_size;
  }

  next_offset_ = offset;
  return true;
}
184
185intptr_t CompressedStackMapsIterator::Length() {
186 EnsureFullyLoadedEntry();
187 return current_spill_slot_bit_count_ + current_non_spill_slot_bit_count_;
188}
189intptr_t CompressedStackMapsIterator::SpillSlotBitCount() {
190 EnsureFullyLoadedEntry();
191 return current_spill_slot_bit_count_;
192}
193
194bool CompressedStackMapsIterator::IsObject(intptr_t bit_index) {
195 EnsureFullyLoadedEntry();
196 ASSERT(!bits_container_.IsNull());
197 ASSERT(bit_index >= 0 && bit_index < Length());
198 const intptr_t byte_index = bit_index >> kBitsPerByteLog2;
199 const intptr_t bit_remainder = bit_index & (kBitsPerByte - 1);
200 uint8_t byte_mask = 1U << bit_remainder;
201 const intptr_t byte_offset = current_bits_offset_ + byte_index;
202 return (bits_container_.PayloadByte(byte_offset) & byte_mask) != 0;
203}
204
// Loads the current entry's bit counts and bitmap offset from the global
// table record at current_global_table_offset_ (deferred by MoveNext()).
void CompressedStackMapsIterator::LazyLoadGlobalTableEntry() {
  ASSERT(maps_.UsesGlobalTable() && bits_container_.IsGlobalTable());
  ASSERT(HasLoadedEntry());
  ASSERT(current_global_table_offset_ < bits_container_.payload_size());

  uintptr_t offset = current_global_table_offset_;
  current_spill_slot_bit_count_ = DecodeLEB128(bits_container_, &offset);
  ASSERT(current_spill_slot_bit_count_ >= 0);

  current_non_spill_slot_bit_count_ = DecodeLEB128(bits_container_, &offset);
  ASSERT(current_non_spill_slot_bit_count_ >= 0);

  const auto stackmap_bits = Length();
  const uintptr_t stackmap_size =
      Utils::RoundUp(stackmap_bits, kBitsPerByte) >> kBitsPerByteLog2;
  // The bitmap bytes must fit inside the global table's payload.
  ASSERT(stackmap_size <= (bits_container_.payload_size() - offset));

  current_bits_offset_ = offset;
}
224
// Renders all remaining entries (including the current one, if loaded) as
// "pc_offset: bits" lines. Iterates on a copy so *this is left untouched.
const char* CompressedStackMapsIterator::ToCString(Zone* zone) const {
  ZoneTextBuffer b(zone, 100);
  CompressedStackMapsIterator it(*this);
  // If we haven't loaded an entry yet, do so (but don't skip the current
  // one if we have!)
  if (!it.HasLoadedEntry()) {
    if (!it.MoveNext()) return b.buffer();
  }
  bool first_entry = true;
  do {
    if (first_entry) {
      first_entry = false;
    } else {
      b.AddString("\n");
    }
    b.Printf("0x%08x: ", it.pc_offset());
    // One character per bitmap bit: '1' = tagged object slot.
    for (intptr_t i = 0, n = it.Length(); i < n; i++) {
      b.AddString(it.IsObject(i) ? "1" : "0");
    }
  } while (it.MoveNext());
  return b.buffer();
}
247
248const char* CompressedStackMapsIterator::ToCString() const {
249 return ToCString(Thread::Current()->zone());
250}
251
// Packs the collected handler entries into an ExceptionHandlers object,
// substituting placeholder data for handlers that were never reached.
ExceptionHandlersPtr ExceptionHandlerList::FinalizeExceptionHandlers(
    uword entry_point) const {
  intptr_t num_handlers = Length();
  if (num_handlers == 0) {
    return Object::empty_exception_handlers().raw();
  }
  const ExceptionHandlers& handlers =
      ExceptionHandlers::Handle(ExceptionHandlers::New(num_handlers));
  for (intptr_t i = 0; i < num_handlers; i++) {
    // Assert that every element in the array has been initialized.
    if (list_[i].handler_types == NULL) {
      // Unreachable handler, entry not computed.
      // Initialize it to some meaningful value.
      const bool has_catch_all = false;
      // Check it is uninitialized.
      ASSERT((list_[i].outer_try_index == -1) &&
             (list_[i].pc_offset == ExceptionHandlers::kInvalidPcOffset));
      handlers.SetHandlerInfo(i, list_[i].outer_try_index, list_[i].pc_offset,
                              list_[i].needs_stacktrace, has_catch_all,
                              list_[i].is_generated);
      handlers.SetHandledTypes(i, Array::empty_array());
    } else {
      const bool has_catch_all = ContainsCatchAllType(*list_[i].handler_types);
      handlers.SetHandlerInfo(i, list_[i].outer_try_index, list_[i].pc_offset,
                              list_[i].needs_stacktrace, has_catch_all,
                              list_[i].is_generated);
      handlers.SetHandledTypes(i, *list_[i].handler_types);
    }
  }
  return handlers.raw();
}
283
284static uint8_t* ZoneAllocator(uint8_t* ptr,
285 intptr_t old_size,
286 intptr_t new_size) {
287 Zone* zone = Thread::Current()->zone();
288 return zone->Realloc<uint8_t>(ptr, old_size, new_size);
289}
290
291#if !defined(DART_PRECOMPILED_RUNTIME)
292class CatchEntryMovesMapBuilder::TrieNode : public ZoneAllocated {
293 public:
294 TrieNode() : move_(), entry_state_offset_(-1) {}
295 TrieNode(CatchEntryMove move, intptr_t index)
296 : move_(move), entry_state_offset_(index) {}
297
298 intptr_t Offset() { return entry_state_offset_; }
299
300 TrieNode* Insert(TrieNode* node) {
301 children_.Add(node);
302 return node;
303 }
304
305 TrieNode* Follow(CatchEntryMove next) {
306 for (intptr_t i = 0; i < children_.length(); i++) {
307 if (children_[i]->move_ == next) return children_[i];
308 }
309 return NULL;
310 }
311
312 private:
313 CatchEntryMove move_;
314 const intptr_t entry_state_offset_;
315 GrowableArray<TrieNode*> children_;
316};
317
// The trie root represents the empty suffix shared by all mappings; the
// write stream grows in the current zone via ZoneAllocator.
CatchEntryMovesMapBuilder::CatchEntryMovesMapBuilder()
    : zone_(Thread::Current()->zone()),
      root_(new TrieNode()),
      current_pc_offset_(0),
      buffer_(NULL),
      stream_(&buffer_, ZoneAllocator, 64) {}
324
// Buffers |move| for inclusion in the mapping currently being built.
void CatchEntryMovesMapBuilder::Append(const CatchEntryMove& move) {
  moves_.Add(move);
}
328
// Starts a new mapping for the code at |pc_offset|, discarding any moves
// buffered for a previous (unfinished) mapping.
void CatchEntryMovesMapBuilder::NewMapping(intptr_t pc_offset) {
  moves_.Clear();
  current_pc_offset_ = pc_offset;
}
333
// Serializes the buffered moves for the current PC offset, sharing the
// longest move suffix already present in the trie with earlier mappings.
void CatchEntryMovesMapBuilder::EndMapping() {
  intptr_t suffix_length = 0;
  TrieNode* suffix = root_;
  // Find the largest common suffix, get the last node of the path.
  for (intptr_t i = moves_.length() - 1; i >= 0; i--) {
    TrieNode* n = suffix->Follow(moves_[i]);
    if (n == NULL) break;
    suffix_length++;
    suffix = n;
  }
  intptr_t length = moves_.length() - suffix_length;
  intptr_t current_offset = stream_.bytes_written();

  // Record header: pc offset, number of moves written inline, number of
  // moves shared via the suffix, and the offset of the record holding them.
  typedef WriteStream::Raw<sizeof(intptr_t), intptr_t> Writer;
  Writer::Write(&stream_, current_pc_offset_);
  Writer::Write(&stream_, length);
  Writer::Write(&stream_, suffix_length);
  Writer::Write(&stream_, suffix->Offset());

  // Write the unshared part, adding it to the trie.
  TrieNode* node = suffix;
  for (intptr_t i = length - 1; i >= 0; i--) {
    moves_[i].WriteTo(&stream_);

    // Each new trie node records this record's start offset so future
    // mappings can reference it as their suffix.
    TrieNode* child = new (zone_) TrieNode(moves_[i], current_offset);
    node->Insert(child);
    node = child;
  }
}
363
364TypedDataPtr CatchEntryMovesMapBuilder::FinalizeCatchEntryMovesMap() {
365 TypedData& td = TypedData::Handle(TypedData::New(
366 kTypedDataInt8ArrayCid, stream_.bytes_written(), Heap::kOld));
367 NoSafepointScope no_safepoint;
368 uint8_t* dest = reinterpret_cast<uint8_t*>(td.DataAddr(0));
369 uint8_t* src = stream_.buffer();
370 for (intptr_t i = 0; i < stream_.bytes_written(); i++) {
371 dest[i] = src[i];
372 }
373 return td.raw();
374}
375#endif // !defined(DART_PRECOMPILED_RUNTIME)
376
// Position seeded for the root function and for each newly pushed inlined
// function before any kChangePosition opcode is read or written.
const TokenPosition CodeSourceMapBuilder::kInitialPosition =
    TokenPosition(TokenPosition::kDartCodeProloguePos);
379
// The builder keeps two parallel states: "buffered" (what the emitted code
// looks like so far) and "written" (what has been serialized as opcodes);
// Flush* methods reconcile the two.
CodeSourceMapBuilder::CodeSourceMapBuilder(
    bool stack_traces_only,
    const GrowableArray<intptr_t>& caller_inline_id,
    const GrowableArray<TokenPosition>& inline_id_to_token_pos,
    const GrowableArray<const Function*>& inline_id_to_function)
    : buffered_pc_offset_(0),
      buffered_inline_id_stack_(),
      buffered_token_pos_stack_(),
      written_pc_offset_(0),
      written_inline_id_stack_(),
      written_token_pos_stack_(),
      caller_inline_id_(caller_inline_id),
      inline_id_to_token_pos_(inline_id_to_token_pos),
      inline_id_to_function_(inline_id_to_function),
      inlined_functions_(
          GrowableObjectArray::Handle(GrowableObjectArray::New(Heap::kOld))),
      buffer_(NULL),
      stream_(&buffer_, ZoneAllocator, 64),
      stack_traces_only_(stack_traces_only) {
  // Both stacks start with the root function (inline id 0) at the initial
  // prologue position.
  buffered_inline_id_stack_.Add(0);
  buffered_token_pos_stack_.Add(kInitialPosition);
  written_inline_id_stack_.Add(0);
  written_token_pos_stack_.Add(kInitialPosition);
}
404
// Brings the written state fully up to date with the buffered state.
// Order matters: flushing the stack may itself emit position changes.
void CodeSourceMapBuilder::FlushBuffer() {
  FlushBufferStack();
  FlushBufferPosition();
  FlushBufferPC();
}
410
// Emits the minimal pop/push opcode sequence so the written inline-function
// stack matches the buffered one, reusing the deepest common prefix.
void CodeSourceMapBuilder::FlushBufferStack() {
  for (intptr_t i = buffered_inline_id_stack_.length() - 1; i >= 0; i--) {
    intptr_t buffered_id = buffered_inline_id_stack_[i];
    if (i < written_inline_id_stack_.length()) {
      intptr_t written_id = written_inline_id_stack_[i];
      if (buffered_id == written_id) {
        // i is the top-most position where the buffered and written stack
        // match.
        while (written_inline_id_stack_.length() > i + 1) {
          WritePop();
        }
        for (intptr_t j = i + 1; j < buffered_inline_id_stack_.length(); j++) {
          // Sync the caller's position (the call site) before pushing the
          // callee.
          TokenPosition buffered_pos = buffered_token_pos_stack_[j - 1];
          TokenPosition written_pos = written_token_pos_stack_[j - 1];
          if (buffered_pos != written_pos) {
            WriteChangePosition(buffered_pos);
          }
          WritePush(buffered_inline_id_stack_[j]);
        }
        return;
      }
    }
  }
  // Both stacks share the root (id 0) at index 0, so a match always exists.
  UNREACHABLE();
}
436
437void CodeSourceMapBuilder::FlushBufferPosition() {
438 ASSERT(buffered_token_pos_stack_.length() ==
439 written_token_pos_stack_.length());
440
441 intptr_t top = buffered_token_pos_stack_.length() - 1;
442 TokenPosition buffered_pos = buffered_token_pos_stack_[top];
443 TokenPosition written_pos = written_token_pos_stack_[top];
444 if (buffered_pos != written_pos) {
445 WriteChangePosition(buffered_pos);
446 }
447}
448
// Emits a kAdvancePC opcode when the buffered PC is ahead of the written PC.
void CodeSourceMapBuilder::FlushBufferPC() {
  if (buffered_pc_offset_ != written_pc_offset_) {
    WriteAdvancePC(buffered_pc_offset_ - written_pc_offset_);
  }
}
454
// Updates the buffered inline-function stack to reflect that the code being
// emitted at |pc_offset| belongs to |inline_id|.
void CodeSourceMapBuilder::StartInliningInterval(int32_t pc_offset,
                                                 intptr_t inline_id) {
  if (buffered_inline_id_stack_.Last() == inline_id) {
    // No change in function stack.
    return;
  }
  if (inline_id == -1) {
    // Basic blocking missing an inline_id.
    return;
  }

  if (!stack_traces_only_) {
    FlushBuffer();
  }

  // Find a minimal set of pops and pushes to bring us to the new function
  // stack.

  // Pop to a common ancestor.
  intptr_t common_parent = inline_id;
  while (!IsOnBufferedStack(common_parent)) {
    common_parent = caller_inline_id_[common_parent];
  }
  while (buffered_inline_id_stack_.Last() != common_parent) {
    BufferPop();
  }

  // Push to the new top-of-stack function.
  GrowableArray<intptr_t> to_push;
  intptr_t id = inline_id;
  while (id != common_parent) {
    to_push.Add(id);
    id = caller_inline_id_[id];
  }
  // Walk from the ancestor down to inline_id (to_push is in reverse order).
  for (intptr_t i = to_push.length() - 1; i >= 0; i--) {
    intptr_t callee_id = to_push[i];
    TokenPosition call_token;
    if (callee_id != 0) {
      // TODO(rmacnak): Should make this array line up with the others.
      call_token = inline_id_to_token_pos_[callee_id - 1];
    } else {
      // The root function (id 0) is never pushed: it is always the stack's
      // base entry.
      UNREACHABLE();
    }

    // Report caller as at the position of the call.
    BufferChangePosition(call_token);

    BufferPush(callee_id);
  }
}
505
// Intentionally empty: ranges are delimited solely by EndCodeSourceRange.
void CodeSourceMapBuilder::BeginCodeSourceRange(int32_t pc_offset) {}
507
// Closes the instruction range ending at |pc_offset|, attributing it to
// source position |pos|.
void CodeSourceMapBuilder::EndCodeSourceRange(int32_t pc_offset,
                                              TokenPosition pos) {
  if (pc_offset == buffered_pc_offset_) {
    return;  // Empty intermediate instruction.
  }
  if (pos != buffered_token_pos_stack_.Last()) {
    // Serialize the old position before buffering the new one (unless we
    // only record enough for stack traces).
    if (!stack_traces_only_) {
      FlushBuffer();
    }
    BufferChangePosition(pos);
  }
  BufferAdvancePC(pc_offset - buffered_pc_offset_);
}
521
// For descriptors at which an exception may be thrown, flush the buffered
// state so unwinding sees an exact source position at this PC.
void CodeSourceMapBuilder::NoteDescriptor(PcDescriptorsLayout::Kind kind,
                                          int32_t pc_offset,
                                          TokenPosition pos) {
  // NOTE(review): assumes PcDescriptorsLayout kinds are disjoint single-bit
  // values so the mask test below is valid — confirm against the layout.
  const uint8_t kCanThrow =
      PcDescriptorsLayout::kIcCall | PcDescriptorsLayout::kUnoptStaticCall |
      PcDescriptorsLayout::kRuntimeCall | PcDescriptorsLayout::kOther;
  if ((kind & kCanThrow) != 0) {
    BufferChangePosition(pos);
    BufferAdvancePC(pc_offset - buffered_pc_offset_);
    FlushBuffer();
  }
}
534
// Records a null check at |pc_offset| with |name_index| identifying the
// checked name; flushes first so the written stream is exact at this PC.
void CodeSourceMapBuilder::NoteNullCheck(int32_t pc_offset,
                                         TokenPosition pos,
                                         intptr_t name_index) {
  BufferChangePosition(pos);
  BufferAdvancePC(pc_offset - buffered_pc_offset_);
  FlushBuffer();
  WriteNullCheck(name_index);
}
543
// Returns the index of |inline_id|'s function within inlined_functions_,
// appending it first if it is not yet present.
intptr_t CodeSourceMapBuilder::GetFunctionId(intptr_t inline_id) {
  const Function& function = *inline_id_to_function_[inline_id];
  // Linear scan — presumably the set of distinct inlined functions stays
  // small; confirm if this ever shows up in compile-time profiles.
  for (intptr_t i = 0; i < inlined_functions_.Length(); i++) {
    if (inlined_functions_.At(i) == function.raw()) {
      return i;
    }
  }
  // Do not include the null function, which is used to mean a range of no
  // function.
  RELEASE_ASSERT(!function.IsNull());
  inlined_functions_.Add(function, Heap::kOld);
  return inlined_functions_.Length() - 1;
}
555
556ArrayPtr CodeSourceMapBuilder::InliningIdToFunction() {
557 if (inlined_functions_.Length() == 0) {
558 return Object::empty_array().raw();
559 }
560 return Array::MakeFixedLength(inlined_functions_);
561}
562
// Finishes the opcode stream and copies it into a new CodeSourceMap.
CodeSourceMapPtr CodeSourceMapBuilder::Finalize() {
  if (!stack_traces_only_) {
    FlushBuffer();
  }
  intptr_t length = stream_.bytes_written();
  const CodeSourceMap& map = CodeSourceMap::Handle(CodeSourceMap::New(length));
  // map.Data() is a raw interior pointer; the GC must not move the object
  // during the copy.
  NoSafepointScope no_safepoint;
  memmove(map.Data(), buffer_, length);
  return map.raw();
}
573
// Emits a kChangePosition opcode. In precompiled mode the payload is a line
// number (plus a column in dwarf-stack-traces mode) rather than the raw
// token position value.
void CodeSourceMapBuilder::WriteChangePosition(TokenPosition pos) {
  stream_.Write<uint8_t>(kChangePosition);
  intptr_t position_or_line = pos.value();
#if defined(DART_PRECOMPILER)
  intptr_t column = TokenPosition::kNoSourcePos;
  if (FLAG_precompiled_mode) {
    // Don't use the raw position value directly in precompiled mode. Instead,
    // use the value of kNoSource as a fallback when no line or column
    // information is found.
    position_or_line = TokenPosition::kNoSourcePos;
    intptr_t inline_id = buffered_inline_id_stack_.Last();
    if (inline_id < inline_id_to_function_.length()) {
      const Function* function = inline_id_to_function_[inline_id];
      Script& script = Script::Handle(function->script());
      script.GetTokenLocationUsingLineStarts(pos.SourcePosition(),
                                             &position_or_line, &column);
    }
  }
#endif
  stream_.Write<int32_t>(position_or_line);
#if defined(DART_PRECOMPILER)
  // For non-symbolic stack traces, the CodeSourceMaps are not serialized,
  // so we need not worry about increasing snapshot size by including more
  // information here.
  if (FLAG_dwarf_stack_traces_mode) {
    stream_.Write<int32_t>(column);
  }
#endif
  written_token_pos_stack_.Last() = pos;
}
604
// Reconstructs the inlined-function stack and per-frame token positions at
// |pc_offset| by replaying the opcode stream from the beginning.
void CodeSourceMapReader::GetInlinedFunctionsAt(
    int32_t pc_offset,
    GrowableArray<const Function*>* function_stack,
    GrowableArray<TokenPosition>* token_positions) {
  function_stack->Clear();
  token_positions->Clear();

  NoSafepointScope no_safepoint;
  ReadStream stream(map_.Data(), map_.Length());

  int32_t current_pc_offset = 0;
  function_stack->Add(&root_);
  token_positions->Add(CodeSourceMapBuilder::kInitialPosition);

  while (stream.PendingBytes() > 0) {
    uint8_t opcode = stream.Read<uint8_t>();
    switch (opcode) {
      case CodeSourceMapBuilder::kChangePosition: {
        (*token_positions)[token_positions->length() - 1] =
            ReadPosition(&stream);
        break;
      }
      case CodeSourceMapBuilder::kAdvancePC: {
        int32_t delta = stream.Read<int32_t>();
        current_pc_offset += delta;
        // Once we pass the requested PC, the current stacks are the answer.
        if (current_pc_offset > pc_offset) {
          return;
        }
        break;
      }
      case CodeSourceMapBuilder::kPushFunction: {
        int32_t func = stream.Read<int32_t>();
        function_stack->Add(
            &Function::Handle(Function::RawCast(functions_.At(func))));
        token_positions->Add(CodeSourceMapBuilder::kInitialPosition);
        break;
      }
      case CodeSourceMapBuilder::kPopFunction: {
        // We never pop the root function.
        ASSERT(function_stack->length() > 1);
        ASSERT(token_positions->length() > 1);
        function_stack->RemoveLast();
        token_positions->RemoveLast();
        break;
      }
      case CodeSourceMapBuilder::kNullCheck: {
        // Null-check name index: irrelevant here, skip it.
        stream.Read<int32_t>();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
}
659
660#ifndef PRODUCT
// Emits the inlining information as JSON: the table of inlined functions
// plus one [start, end, inline ids...] array per PC interval.
void CodeSourceMapReader::PrintJSONInlineIntervals(JSONObject* jsobj) {
  {
    JSONArray inlined_functions(jsobj, "_inlinedFunctions");
    Function& function = Function::Handle();
    for (intptr_t i = 0; i < functions_.Length(); i++) {
      function ^= functions_.At(i);
      ASSERT(!function.IsNull());
      inlined_functions.AddValue(function);
    }
  }

  GrowableArray<intptr_t> function_stack;
  JSONArray inline_intervals(jsobj, "_inlinedIntervals");
  NoSafepointScope no_safepoint;
  ReadStream stream(map_.Data(), map_.Length());

  int32_t current_pc_offset = 0;
  // Replay the opcode stream; indices into the table above, starting at the
  // root (0).
  function_stack.Add(0);

  while (stream.PendingBytes() > 0) {
    uint8_t opcode = stream.Read<uint8_t>();
    switch (opcode) {
      case CodeSourceMapBuilder::kChangePosition: {
        // Positions are irrelevant for interval output; consume and ignore.
        ReadPosition(&stream);
        break;
      }
      case CodeSourceMapBuilder::kAdvancePC: {
        int32_t delta = stream.Read<int32_t>();
        // Format: [start, end, inline functions...]
        JSONArray inline_interval(&inline_intervals);
        inline_interval.AddValue(static_cast<intptr_t>(current_pc_offset));
        inline_interval.AddValue(
            static_cast<intptr_t>(current_pc_offset + delta - 1));
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          inline_interval.AddValue(function_stack[i]);
        }
        current_pc_offset += delta;
        break;
      }
      case CodeSourceMapBuilder::kPushFunction: {
        int32_t func = stream.Read<int32_t>();
        function_stack.Add(func);
        break;
      }
      case CodeSourceMapBuilder::kPopFunction: {
        // We never pop the root function.
        ASSERT(function_stack.length() > 1);
        function_stack.RemoveLast();
        break;
      }
      case CodeSourceMapBuilder::kNullCheck: {
        // Skip the null-check name index.
        stream.Read<int32_t>();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
}
720#endif // !PRODUCT
721
// Prints, for each PC interval starting at absolute address |start|, the
// stack of functions inlined over that interval.
void CodeSourceMapReader::DumpInlineIntervals(uword start) {
  GrowableArray<const Function*> function_stack;
  LogBlock lb;
  NoSafepointScope no_safepoint;
  ReadStream stream(map_.Data(), map_.Length());

  int32_t current_pc_offset = 0;
  function_stack.Add(&root_);

  THR_Print("Inline intervals for function '%s' {\n",
            root_.ToFullyQualifiedCString());
  while (stream.PendingBytes() > 0) {
    uint8_t opcode = stream.Read<uint8_t>();
    switch (opcode) {
      case CodeSourceMapBuilder::kChangePosition: {
        // Positions are not shown in this dump; consume and ignore.
        ReadPosition(&stream);
        break;
      }
      case CodeSourceMapBuilder::kAdvancePC: {
        int32_t delta = stream.Read<int32_t>();
        THR_Print("%" Px "-%" Px ": ", start + current_pc_offset,
                  start + current_pc_offset + delta - 1);
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          THR_Print("%s ", function_stack[i]->ToCString());
        }
        THR_Print("\n");
        current_pc_offset += delta;
        break;
      }
      case CodeSourceMapBuilder::kPushFunction: {
        int32_t func = stream.Read<int32_t>();
        function_stack.Add(
            &Function::Handle(Function::RawCast(functions_.At(func))));
        break;
      }
      case CodeSourceMapBuilder::kPopFunction: {
        // We never pop the root function.
        ASSERT(function_stack.length() > 1);
        function_stack.RemoveLast();
        break;
      }
      case CodeSourceMapBuilder::kNullCheck: {
        // Skip the null-check name index.
        stream.Read<int32_t>();
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  THR_Print("}\n");
}
773
// Like DumpInlineIntervals, but also prints the token position of every
// frame in the inline stack and any null-check records.
void CodeSourceMapReader::DumpSourcePositions(uword start) {
  GrowableArray<const Function*> function_stack;
  GrowableArray<TokenPosition> token_positions;
  LogBlock lb;
  NoSafepointScope no_safepoint;
  ReadStream stream(map_.Data(), map_.Length());

  int32_t current_pc_offset = 0;
  function_stack.Add(&root_);
  token_positions.Add(CodeSourceMapBuilder::kInitialPosition);

  THR_Print("Source positions for function '%s' {\n",
            root_.ToFullyQualifiedCString());
  while (stream.PendingBytes() > 0) {
    uint8_t opcode = stream.Read<uint8_t>();
    switch (opcode) {
      case CodeSourceMapBuilder::kChangePosition: {
        token_positions[token_positions.length() - 1] = ReadPosition(&stream);
        break;
      }
      case CodeSourceMapBuilder::kAdvancePC: {
        int32_t delta = stream.Read<int32_t>();
        THR_Print("%" Px "-%" Px ": ", start + current_pc_offset,
                  start + current_pc_offset + delta - 1);
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          THR_Print("%s@%" Pd " ", function_stack[i]->ToCString(),
                    token_positions[i].value());
        }
        THR_Print("\n");
        current_pc_offset += delta;
        break;
      }
      case CodeSourceMapBuilder::kPushFunction: {
        int32_t func = stream.Read<int32_t>();
        function_stack.Add(
            &Function::Handle(Function::RawCast(functions_.At(func))));
        token_positions.Add(CodeSourceMapBuilder::kInitialPosition);
        break;
      }
      case CodeSourceMapBuilder::kPopFunction: {
        // We never pop the root function.
        ASSERT(function_stack.length() > 1);
        ASSERT(token_positions.length() > 1);
        function_stack.RemoveLast();
        token_positions.RemoveLast();
        break;
      }
      case CodeSourceMapBuilder::kNullCheck: {
        const intptr_t name_index = stream.Read<int32_t>();
        THR_Print("%" Px "-%" Px ": null check PP#%" Pd "\n",
                  start + current_pc_offset, start + current_pc_offset,
                  name_index);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  THR_Print("}\n");
}
834
// Returns the name index of the null-check record at exactly |pc_offset|.
// A matching record must exist; asserts (release) if the stream walks past
// the requested PC or ends without finding one.
intptr_t CodeSourceMapReader::GetNullCheckNameIndexAt(int32_t pc_offset) {
  NoSafepointScope no_safepoint;
  ReadStream stream(map_.Data(), map_.Length());

  int32_t current_pc_offset = 0;

  while (stream.PendingBytes() > 0) {
    uint8_t opcode = stream.Read<uint8_t>();
    switch (opcode) {
      case CodeSourceMapBuilder::kChangePosition: {
        // Positions are irrelevant for this lookup; consume and ignore.
        ReadPosition(&stream);
        break;
      }
      case CodeSourceMapBuilder::kAdvancePC: {
        int32_t delta = stream.Read<int32_t>();
        current_pc_offset += delta;
        // The requested PC must not be overshot before its record is found.
        RELEASE_ASSERT(current_pc_offset <= pc_offset);
        break;
      }
      case CodeSourceMapBuilder::kPushFunction: {
        stream.Read<int32_t>();
        break;
      }
      case CodeSourceMapBuilder::kPopFunction: {
        break;
      }
      case CodeSourceMapBuilder::kNullCheck: {
        const int32_t name_index = stream.Read<int32_t>();
        if (current_pc_offset == pc_offset) {
          return name_index;
        }
        break;
      }
      default:
        UNREACHABLE();
    }
  }

  UNREACHABLE();
  return -1;
}
876
// Reads a position payload (a line number or raw token position, matching
// WriteChangePosition) from |stream|, discarding any column information.
TokenPosition CodeSourceMapReader::ReadPosition(ReadStream* stream) {
  const intptr_t line = stream->Read<int32_t>();
#if defined(DART_PRECOMPILER)
  // The special handling for non-symbolic stack trace mode only needs to
  // happen in the precompiler, because those CSMs are not serialized in
  // precompiled snapshots.
  if (FLAG_dwarf_stack_traces_mode) {
    stream->Read<int32_t>();  // Discard the column information.
  }
#endif
  return TokenPosition(line);
}
889
890} // namespace dart
891