1 | // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #ifndef RUNTIME_VM_CODE_DESCRIPTORS_H_ |
6 | #define RUNTIME_VM_CODE_DESCRIPTORS_H_ |
7 | |
8 | #include "vm/datastream.h" |
9 | #include "vm/globals.h" |
10 | #include "vm/growable_array.h" |
11 | #include "vm/log.h" |
12 | #include "vm/object.h" |
13 | #include "vm/runtime_entry.h" |
14 | |
15 | namespace dart { |
16 | |
17 | static const intptr_t kInvalidTryIndex = -1; |
18 | |
19 | class DescriptorList : public ZoneAllocated { |
20 | public: |
  explicit DescriptorList(intptr_t initial_capacity)
      : encoded_data_(initial_capacity),
        prev_pc_offset_(0),
        prev_deopt_id_(0),
        prev_token_pos_(0) {}
26 | |
27 | ~DescriptorList() {} |
28 | |
29 | void AddDescriptor(PcDescriptorsLayout::Kind kind, |
30 | intptr_t pc_offset, |
31 | intptr_t deopt_id, |
32 | TokenPosition token_pos, |
33 | intptr_t try_index, |
34 | intptr_t yield_index); |
35 | |
36 | PcDescriptorsPtr FinalizePcDescriptors(uword entry_point); |
37 | |
38 | private: |
39 | GrowableArray<uint8_t> encoded_data_; |
40 | |
  intptr_t prev_pc_offset_;
  intptr_t prev_deopt_id_;
  intptr_t prev_token_pos_;
44 | |
45 | DISALLOW_COPY_AND_ASSIGN(DescriptorList); |
46 | }; |
47 | |
48 | class CompressedStackMapsBuilder : public ZoneAllocated { |
49 | public: |
50 | CompressedStackMapsBuilder() : encoded_bytes_() {} |
51 | |
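  // Encodes [value] as unsigned LEB128 and appends the bytes to [data]: the
  // value is split into 7-bit groups starting from the least significant
  // bit, and the high bit of each emitted byte marks a continuation. For
  // example, 300 (0b1'0010'1100) encodes to the two bytes 0xAC, 0x02.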
52 | static void EncodeLEB128(GrowableArray<uint8_t>* data, uintptr_t value); |
53 | |
54 | void AddEntry(intptr_t pc_offset, |
55 | BitmapBuilder* bitmap, |
56 | intptr_t spill_slot_bit_count); |
57 | |
58 | CompressedStackMapsPtr Finalize() const; |
59 | |
60 | private: |
61 | intptr_t last_pc_offset_ = 0; |
62 | GrowableArray<uint8_t> encoded_bytes_; |
63 | DISALLOW_COPY_AND_ASSIGN(CompressedStackMapsBuilder); |
64 | }; |
65 | |
66 | class CompressedStackMapsIterator : public ValueObject { |
67 | public: |
68 | // We use the null value to represent CompressedStackMaps with no |
69 | // entries, so any CompressedStackMaps arguments to constructors can be null. |
70 | CompressedStackMapsIterator(const CompressedStackMaps& maps, |
71 | const CompressedStackMaps& global_table); |
72 | explicit CompressedStackMapsIterator(const CompressedStackMaps& maps); |
73 | |
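  // Creates an iterator with the same views and current position as [it].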
74 | explicit CompressedStackMapsIterator(const CompressedStackMapsIterator& it); |
75 | |
76 | // Loads the next entry from [maps_], if any. If [maps_] is the null |
77 | // value, this always returns false. |
78 | bool MoveNext(); |
79 | |
80 | // Finds the entry with the given PC offset starting at the current |
81 | // position of the iterator. If [maps_] is the null value, this always |
82 | // returns false. |
83 | bool Find(uint32_t pc_offset) { |
    // We should never have an entry with a PC offset of 0 inside a
    // non-empty CSM, so fail fast in that case.
86 | if (pc_offset == 0) return false; |
87 | do { |
88 | if (current_pc_offset_ >= pc_offset) break; |
89 | } while (MoveNext()); |
90 | return current_pc_offset_ == pc_offset; |
91 | } |
92 | |
93 | // Methods for accessing parts of an entry should not be called until |
94 | // a successful MoveNext() or Find() call has been made. |
95 | |
96 | uint32_t pc_offset() const { |
97 | ASSERT(HasLoadedEntry()); |
98 | return current_pc_offset_; |
99 | } |
100 | // We lazily load and cache information from the global table if the |
101 | // CSM uses it, so these methods cannot be const. |
102 | intptr_t Length(); |
103 | intptr_t SpillSlotBitCount(); |
104 | bool IsObject(intptr_t bit_offset); |
105 | |
106 | void EnsureFullyLoadedEntry() { |
107 | ASSERT(HasLoadedEntry()); |
108 | if (current_spill_slot_bit_count_ < 0) { |
109 | LazyLoadGlobalTableEntry(); |
110 | } |
111 | ASSERT(current_spill_slot_bit_count_ >= 0); |
112 | } |
113 | |
114 | const char* ToCString(Zone* zone) const; |
115 | const char* ToCString() const; |
116 | |
117 | private: |
118 | static uintptr_t DecodeLEB128(const CompressedStackMaps& data, |
119 | uintptr_t* byte_index); |
120 | bool HasLoadedEntry() const { return next_offset_ > 0; } |
121 | void LazyLoadGlobalTableEntry(); |
122 | |
123 | const CompressedStackMaps& maps_; |
124 | const CompressedStackMaps& bits_container_; |
125 | |
126 | uintptr_t next_offset_ = 0; |
127 | uint32_t current_pc_offset_ = 0; |
128 | // Only used when looking up non-PC information in the global table. |
129 | uintptr_t current_global_table_offset_ = 0; |
130 | intptr_t current_spill_slot_bit_count_ = -1; |
131 | intptr_t current_non_spill_slot_bit_count_ = -1; |
132 | intptr_t current_bits_offset_ = -1; |
133 | |
134 | friend class StackMapEntry; |
135 | }; |
136 | |
137 | class ExceptionHandlerList : public ZoneAllocated { |
138 | public: |
139 | struct HandlerDesc { |
140 | intptr_t outer_try_index; // Try block in which this try block is nested. |
141 | intptr_t pc_offset; // Handler PC offset value. |
142 | bool is_generated; // False if this is directly from Dart code. |
143 | const Array* handler_types; // Catch clause guards. |
144 | bool needs_stacktrace; |
145 | }; |
146 | |
147 | ExceptionHandlerList() : list_() {} |
148 | |
149 | intptr_t Length() const { return list_.length(); } |
150 | |
151 | void AddPlaceHolder() { |
    HandlerDesc data;
    data.outer_try_index = -1;
    data.pc_offset = ExceptionHandlers::kInvalidPcOffset;
    data.is_generated = true;
    data.handler_types = nullptr;
    data.needs_stacktrace = false;
158 | list_.Add(data); |
159 | } |
160 | |
161 | void AddHandler(intptr_t try_index, |
162 | intptr_t outer_try_index, |
163 | intptr_t pc_offset, |
164 | bool is_generated, |
165 | const Array& handler_types, |
166 | bool needs_stacktrace) { |
167 | ASSERT(try_index >= 0); |
168 | while (Length() <= try_index) { |
169 | AddPlaceHolder(); |
170 | } |
171 | list_[try_index].outer_try_index = outer_try_index; |
172 | ASSERT(list_[try_index].pc_offset == ExceptionHandlers::kInvalidPcOffset); |
173 | list_[try_index].pc_offset = pc_offset; |
174 | list_[try_index].is_generated = is_generated; |
175 | ASSERT(handler_types.IsZoneHandle()); |
176 | list_[try_index].handler_types = &handler_types; |
177 | list_[try_index].needs_stacktrace |= needs_stacktrace; |
178 | } |
179 | |
  // Called by rethrows to mark their enclosing handlers.
181 | void SetNeedsStackTrace(intptr_t try_index) { |
182 | // Rethrows can be generated outside a try by the compiler. |
183 | if (try_index == kInvalidTryIndex) { |
184 | return; |
185 | } |
186 | ASSERT(try_index >= 0); |
187 | while (Length() <= try_index) { |
188 | AddPlaceHolder(); |
189 | } |
190 | list_[try_index].needs_stacktrace = true; |
191 | } |
192 | |
193 | static bool ContainsCatchAllType(const Array& array) { |
194 | auto& type = AbstractType::Handle(); |
195 | for (intptr_t i = 0; i < array.Length(); i++) { |
196 | type ^= array.At(i); |
197 | if (type.IsCatchAllType()) { |
198 | return true; |
199 | } |
200 | } |
201 | return false; |
202 | } |
203 | |
204 | ExceptionHandlersPtr FinalizeExceptionHandlers(uword entry_point) const; |
205 | |
206 | private: |
  GrowableArray<HandlerDesc> list_;
208 | DISALLOW_COPY_AND_ASSIGN(ExceptionHandlerList); |
209 | }; |
210 | |
211 | #if !defined(DART_PRECOMPILED_RUNTIME) |
// Used to construct the CatchEntryMoves map in AOT compilation.
213 | class CatchEntryMovesMapBuilder : public ZoneAllocated { |
214 | public: |
215 | CatchEntryMovesMapBuilder(); |
216 | |
217 | void NewMapping(intptr_t pc_offset); |
218 | void Append(const CatchEntryMove& move); |
219 | void EndMapping(); |
220 | TypedDataPtr FinalizeCatchEntryMovesMap(); |
221 | |
222 | private: |
223 | class TrieNode; |
224 | |
225 | Zone* zone_; |
226 | TrieNode* root_; |
227 | intptr_t current_pc_offset_; |
228 | GrowableArray<CatchEntryMove> moves_; |
229 | uint8_t* buffer_; |
230 | WriteStream stream_; |
231 | |
232 | DISALLOW_COPY_AND_ASSIGN(CatchEntryMovesMapBuilder); |
233 | }; |
234 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
235 | |
236 | // A CodeSourceMap maps from pc offsets to a stack of inlined functions and |
237 | // their positions. This is encoded as a little bytecode that pushes and pops |
238 | // functions and changes the top function's position as the PC advances. |
239 | // Decoding happens by running this bytecode until we reach the desired PC. |
240 | // |
// The implementation keeps track of two sets of state: one written to the byte
// stream and one that is buffered. In JIT mode, this buffering effectively
// gives us a peephole optimization that merges adjacent advance-PC bytecodes.
// In AOT mode, it allows us to skip encoding our position until we reach a PC
// where we might throw.
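//
// For example, the source positions of an inlined call might be encoded
// (illustratively) as the bytecode sequence:
//
//   kChangePosition <position>   // Set the current function's position.
//   kAdvancePC <distance>        // Cover the caller's instructions.
//   kPushFunction <function-id>  // Enter the inlined callee.
//   kAdvancePC <distance>        // Cover the callee's instructions.
//   kPopFunction                 // Return to the caller's context.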
246 | class CodeSourceMapBuilder : public ZoneAllocated { |
247 | public: |
248 | CodeSourceMapBuilder( |
249 | bool stack_traces_only, |
250 | const GrowableArray<intptr_t>& caller_inline_id, |
251 | const GrowableArray<TokenPosition>& inline_id_to_token_pos, |
252 | const GrowableArray<const Function*>& inline_id_to_function); |
253 | |
  // The position at which a function implicitly starts, both for the root
  // function and immediately after a push bytecode. We use the classifying
  // position kDartCodePrologue since it is the most common.
257 | static const TokenPosition kInitialPosition; |
258 | |
259 | static const uint8_t kChangePosition = 0; |
260 | static const uint8_t kAdvancePC = 1; |
261 | static const uint8_t kPushFunction = 2; |
262 | static const uint8_t kPopFunction = 3; |
263 | static const uint8_t kNullCheck = 4; |
264 | |
265 | void StartInliningInterval(int32_t pc_offset, intptr_t inline_id); |
266 | void BeginCodeSourceRange(int32_t pc_offset); |
267 | void EndCodeSourceRange(int32_t pc_offset, TokenPosition pos); |
268 | void NoteDescriptor(PcDescriptorsLayout::Kind kind, |
269 | int32_t pc_offset, |
270 | TokenPosition pos); |
271 | void NoteNullCheck(int32_t pc_offset, TokenPosition pos, intptr_t name_index); |
272 | |
273 | ArrayPtr InliningIdToFunction(); |
274 | CodeSourceMapPtr Finalize(); |
275 | |
276 | private: |
277 | intptr_t GetFunctionId(intptr_t inline_id); |
278 | |
279 | void BufferChangePosition(TokenPosition pos) { |
280 | buffered_token_pos_stack_.Last() = pos; |
281 | } |
282 | void WriteChangePosition(TokenPosition pos); |
283 | void BufferAdvancePC(int32_t distance) { buffered_pc_offset_ += distance; } |
284 | void WriteAdvancePC(int32_t distance) { |
285 | stream_.Write<uint8_t>(kAdvancePC); |
286 | stream_.Write<int32_t>(distance); |
287 | written_pc_offset_ += distance; |
288 | } |
289 | void BufferPush(intptr_t inline_id) { |
290 | buffered_inline_id_stack_.Add(inline_id); |
291 | buffered_token_pos_stack_.Add(kInitialPosition); |
292 | } |
293 | void WritePush(intptr_t inline_id) { |
294 | stream_.Write<uint8_t>(kPushFunction); |
295 | stream_.Write<int32_t>(GetFunctionId(inline_id)); |
296 | written_inline_id_stack_.Add(inline_id); |
297 | written_token_pos_stack_.Add(kInitialPosition); |
298 | } |
299 | void BufferPop() { |
300 | buffered_inline_id_stack_.RemoveLast(); |
301 | buffered_token_pos_stack_.RemoveLast(); |
302 | } |
303 | void WritePop() { |
304 | stream_.Write<uint8_t>(kPopFunction); |
305 | written_inline_id_stack_.RemoveLast(); |
306 | written_token_pos_stack_.RemoveLast(); |
307 | } |
308 | void WriteNullCheck(int32_t name_index) { |
309 | stream_.Write<uint8_t>(kNullCheck); |
310 | stream_.Write<int32_t>(name_index); |
311 | } |
312 | |
313 | void FlushBuffer(); |
314 | void FlushBufferStack(); |
315 | void FlushBufferPosition(); |
316 | void FlushBufferPC(); |
317 | |
318 | bool IsOnBufferedStack(intptr_t inline_id) { |
319 | for (intptr_t i = 0; i < buffered_inline_id_stack_.length(); i++) { |
320 | if (buffered_inline_id_stack_[i] == inline_id) return true; |
321 | } |
322 | return false; |
323 | } |
324 | |
325 | intptr_t buffered_pc_offset_; |
326 | GrowableArray<intptr_t> buffered_inline_id_stack_; |
327 | GrowableArray<TokenPosition> buffered_token_pos_stack_; |
328 | |
329 | intptr_t written_pc_offset_; |
330 | GrowableArray<intptr_t> written_inline_id_stack_; |
331 | GrowableArray<TokenPosition> written_token_pos_stack_; |
332 | |
333 | const GrowableArray<intptr_t>& caller_inline_id_; |
334 | const GrowableArray<TokenPosition>& inline_id_to_token_pos_; |
335 | const GrowableArray<const Function*>& inline_id_to_function_; |
336 | |
337 | const GrowableObjectArray& inlined_functions_; |
338 | |
339 | uint8_t* buffer_; |
340 | WriteStream stream_; |
341 | |
342 | const bool stack_traces_only_; |
343 | |
344 | DISALLOW_COPY_AND_ASSIGN(CodeSourceMapBuilder); |
345 | }; |
346 | |
347 | class CodeSourceMapReader : public ValueObject { |
348 | public: |
349 | CodeSourceMapReader(const CodeSourceMap& map, |
350 | const Array& functions, |
351 | const Function& root) |
352 | : map_(map), functions_(functions), root_(root) {} |
353 | |
354 | void GetInlinedFunctionsAt(int32_t pc_offset, |
355 | GrowableArray<const Function*>* function_stack, |
356 | GrowableArray<TokenPosition>* token_positions); |
357 | NOT_IN_PRODUCT(void PrintJSONInlineIntervals(JSONObject* jsobj)); |
358 | void DumpInlineIntervals(uword start); |
359 | void DumpSourcePositions(uword start); |
360 | |
361 | intptr_t GetNullCheckNameIndexAt(int32_t pc_offset); |
362 | |
363 | private: |
  // Reads a TokenPosition value from a CSM, handling the different encoding
  // used when non-symbolic stack traces are enabled.
366 | static TokenPosition ReadPosition(ReadStream* stream); |
367 | |
368 | const CodeSourceMap& map_; |
369 | const Array& functions_; |
370 | const Function& root_; |
371 | |
372 | DISALLOW_COPY_AND_ASSIGN(CodeSourceMapReader); |
373 | }; |
374 | |
375 | } // namespace dart |
376 | |
377 | #endif // RUNTIME_VM_CODE_DESCRIPTORS_H_ |
378 | |