1 | // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/exceptions.h" |
6 | |
7 | #include "platform/address_sanitizer.h" |
8 | |
9 | #include "lib/stacktrace.h" |
10 | |
11 | #include "vm/dart_api_impl.h" |
12 | #include "vm/dart_entry.h" |
13 | #include "vm/datastream.h" |
14 | #include "vm/debugger.h" |
15 | #include "vm/deopt_instructions.h" |
16 | #include "vm/flags.h" |
17 | #include "vm/log.h" |
18 | #include "vm/longjump.h" |
19 | #include "vm/object.h" |
20 | #include "vm/object_store.h" |
21 | #include "vm/stack_frame.h" |
22 | #include "vm/stub_code.h" |
23 | #include "vm/symbols.h" |
24 | |
25 | namespace dart { |
26 | |
27 | DECLARE_FLAG(bool, enable_interpreter); |
28 | DECLARE_FLAG(bool, trace_deoptimization); |
29 | DEFINE_FLAG(bool, |
30 | print_stacktrace_at_throw, |
31 | false, |
32 | "Prints a stack trace everytime a throw occurs." ); |
33 | |
34 | class StackTraceBuilder : public ValueObject { |
35 | public: |
36 | StackTraceBuilder() {} |
37 | virtual ~StackTraceBuilder() {} |
38 | |
39 | virtual void AddFrame(const Object& code, const Smi& offset) = 0; |
40 | }; |
41 | |
42 | class RegularStackTraceBuilder : public StackTraceBuilder { |
43 | public: |
44 | explicit RegularStackTraceBuilder(Zone* zone) |
45 | : code_list_( |
46 | GrowableObjectArray::Handle(zone, GrowableObjectArray::New())), |
47 | pc_offset_list_( |
48 | GrowableObjectArray::Handle(zone, GrowableObjectArray::New())) {} |
49 | ~RegularStackTraceBuilder() {} |
50 | |
51 | const GrowableObjectArray& code_list() const { return code_list_; } |
52 | const GrowableObjectArray& pc_offset_list() const { return pc_offset_list_; } |
53 | |
54 | virtual void AddFrame(const Object& code, const Smi& offset) { |
55 | code_list_.Add(code); |
56 | pc_offset_list_.Add(offset); |
57 | } |
58 | |
59 | private: |
60 | const GrowableObjectArray& code_list_; |
61 | const GrowableObjectArray& pc_offset_list_; |
62 | |
63 | DISALLOW_COPY_AND_ASSIGN(RegularStackTraceBuilder); |
64 | }; |
65 | |
66 | class PreallocatedStackTraceBuilder : public StackTraceBuilder { |
67 | public: |
68 | explicit PreallocatedStackTraceBuilder(const Instance& stacktrace) |
69 | : stacktrace_(StackTrace::Cast(stacktrace)), |
70 | cur_index_(0), |
71 | dropped_frames_(0) { |
72 | ASSERT( |
73 | stacktrace_.raw() == |
74 | Isolate::Current()->isolate_object_store()->preallocated_stack_trace()); |
75 | } |
76 | ~PreallocatedStackTraceBuilder() {} |
77 | |
78 | virtual void AddFrame(const Object& code, const Smi& offset); |
79 | |
80 | private: |
81 | static const int kNumTopframes = StackTrace::kPreallocatedStackdepth / 2; |
82 | |
83 | const StackTrace& stacktrace_; |
84 | intptr_t cur_index_; |
85 | intptr_t dropped_frames_; |
86 | |
87 | DISALLOW_COPY_AND_ASSIGN(PreallocatedStackTraceBuilder); |
88 | }; |
89 | |
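   | // Handles overflow of the preallocated stack trace: the first slots keep |
   | // the innermost frames as originally captured, one slot is nulled out to |
   | // mark the overflow (its pc offset encodes the number of dropped frames), |
   | // and the remaining slots form a sliding window over the outermost frames |
   | // as they continue to arrive. |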
90 | void PreallocatedStackTraceBuilder::AddFrame(const Object& code, |
91 | const Smi& offset) { |
92 | if (cur_index_ >= StackTrace::kPreallocatedStackdepth) { |
93 | // The number of frames is overflowing the preallocated stack trace object. |
94 | Object& frame_code = Object::Handle(); |
95 | Smi& frame_offset = Smi::Handle(); |
96 | intptr_t start = StackTrace::kPreallocatedStackdepth - (kNumTopframes - 1); |
97 | intptr_t null_slot = start - 2; |
98 | // We are going to drop one frame. |
99 | dropped_frames_++; |
100 | // Add an empty slot to indicate the overflow so that the toString |
101 | // method can account for the overflow. |
102 | if (stacktrace_.CodeAtFrame(null_slot) != Code::null()) { |
103 | stacktrace_.SetCodeAtFrame(null_slot, frame_code); |
104 | // We drop an extra frame here too. |
105 | dropped_frames_++; |
106 | } |
107 | // Encode the number of dropped frames into the pc offset. |
108 | frame_offset = Smi::New(dropped_frames_); |
109 | stacktrace_.SetPcOffsetAtFrame(null_slot, frame_offset); |
110 | // Move frames one slot down so that we can accommodate the new frame. |
111 | for (intptr_t i = start; i < StackTrace::kPreallocatedStackdepth; i++) { |
112 | intptr_t prev = (i - 1); |
113 | frame_code = stacktrace_.CodeAtFrame(i); |
114 | frame_offset = stacktrace_.PcOffsetAtFrame(i); |
115 | stacktrace_.SetCodeAtFrame(prev, frame_code); |
116 | stacktrace_.SetPcOffsetAtFrame(prev, frame_offset); |
117 | } |
118 | cur_index_ = (StackTrace::kPreallocatedStackdepth - 1); |
119 | } |
120 | stacktrace_.SetCodeAtFrame(cur_index_, code); |
121 | stacktrace_.SetPcOffsetAtFrame(cur_index_, offset); |
122 | cur_index_ += 1; |
123 | } |
124 | |
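   | // Walks the stack from the throw point outwards and records a |
   | // (code or bytecode, pc offset) pair for every Dart frame via the builder. |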
125 | static void BuildStackTrace(StackTraceBuilder* builder) { |
126 | StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, |
127 | Thread::Current(), |
128 | StackFrameIterator::kNoCrossThreadIteration); |
129 | StackFrame* frame = frames.NextFrame(); |
130 | ASSERT(frame != NULL); // We expect to find a dart invocation frame. |
131 | Code& code = Code::Handle(); |
132 | Bytecode& bytecode = Bytecode::Handle(); |
133 | Smi& offset = Smi::Handle(); |
134 | for (; frame != NULL; frame = frames.NextFrame()) { |
135 | if (!frame->IsDartFrame()) { |
136 | continue; |
137 | } |
138 | if (frame->is_interpreted()) { |
139 | bytecode = frame->LookupDartBytecode(); |
140 | ASSERT(bytecode.ContainsInstructionAt(frame->pc())); |
141 | if (bytecode.function() == Function::null()) { |
142 | continue; |
143 | } |
144 | offset = Smi::New(frame->pc() - bytecode.PayloadStart()); |
145 | builder->AddFrame(bytecode, offset); |
146 | } else { |
147 | code = frame->LookupDartCode(); |
148 | ASSERT(code.ContainsInstructionAt(frame->pc())); |
149 | offset = Smi::New(frame->pc() - code.PayloadStart()); |
150 | builder->AddFrame(code, offset); |
151 | } |
152 | } |
153 | } |
154 | |
155 | class ExceptionHandlerFinder : public StackResource { |
156 | public: |
157 | explicit ExceptionHandlerFinder(Thread* thread) |
158 | : StackResource(thread), thread_(thread) {} |
159 | |
160 | // Iterate through the stack frames and try to find a frame with an |
161 | // exception handler. Once found, set the pc, sp and fp so that execution |
162 | // can continue in that frame. Sets 'needs_stacktrace' if there is no |
163 | // catch-all handler or if a stack-trace is specified in the catch. |
164 | bool Find() { |
165 | StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, |
166 | Thread::Current(), |
167 | StackFrameIterator::kNoCrossThreadIteration); |
168 | StackFrame* frame = frames.NextFrame(); |
169 | if (frame == NULL) return false; // No Dart frame. |
170 | handler_pc_set_ = false; |
171 | needs_stacktrace = false; |
172 | bool is_catch_all = false; |
173 | uword temp_handler_pc = kUwordMax; |
174 | bool is_optimized = false; |
175 | code_ = NULL; |
176 | catch_entry_moves_cache_ = thread_->isolate()->catch_entry_moves_cache(); |
177 | |
178 | while (!frame->IsEntryFrame()) { |
179 | if (frame->IsDartFrame()) { |
180 | if (frame->FindExceptionHandler(thread_, &temp_handler_pc, |
181 | &needs_stacktrace, &is_catch_all, |
182 | &is_optimized)) { |
183 | if (!handler_pc_set_) { |
184 | handler_pc_set_ = true; |
185 | handler_pc = temp_handler_pc; |
186 | handler_sp = frame->sp(); |
187 | handler_fp = frame->fp(); |
188 | if (is_optimized) { |
189 | pc_ = frame->pc(); |
190 | code_ = &Code::Handle(frame->LookupDartCode()); |
191 | CatchEntryMovesRefPtr* cached_catch_entry_moves = |
192 | catch_entry_moves_cache_->Lookup(pc_); |
193 | if (cached_catch_entry_moves != NULL) { |
194 | cached_catch_entry_moves_ = *cached_catch_entry_moves; |
195 | } |
196 | if (cached_catch_entry_moves_.IsEmpty()) { |
197 | #if defined(DART_PRECOMPILED_RUNTIME) |
198 | // Only AOT mode is supported. |
199 | ReadCompressedCatchEntryMoves(); |
200 | #elif defined(DART_PRECOMPILER) |
201 | // Both AOT and JIT modes are supported. |
202 | if (FLAG_precompiled_mode) { |
203 | ReadCompressedCatchEntryMoves(); |
204 | } else { |
205 | GetCatchEntryMovesFromDeopt(code_->num_variables(), frame); |
206 | } |
207 | #else |
208 | // Only JIT mode is supported. |
209 | ASSERT(!FLAG_precompiled_mode); |
210 | GetCatchEntryMovesFromDeopt(code_->num_variables(), frame); |
211 | #endif |
212 | } |
213 | } |
214 | } |
215 | if (needs_stacktrace || is_catch_all) { |
216 | return true; |
217 | } |
218 | } |
219 | } // if frame->IsDartFrame |
220 | frame = frames.NextFrame(); |
221 | ASSERT(frame != NULL); |
222 | } // while !frame->IsEntryFrame |
223 | ASSERT(frame->IsEntryFrame()); |
224 | if (!handler_pc_set_) { |
225 | handler_pc = frame->pc(); |
226 | handler_sp = frame->sp(); |
227 | handler_fp = frame->fp(); |
228 | } |
229 | // No catch-all encountered, needs stacktrace. |
230 | needs_stacktrace = true; |
231 | return handler_pc_set_; |
232 | } |
233 | |
234 | // When entering a catch block in optimized code, we need to execute the |
235 | // catch entry moves that morph the state of the frame into what the |
236 | // catch entry expects. |
237 | void PrepareFrameForCatchEntry() { |
238 | if (code_ == nullptr || !code_->is_optimized()) { |
239 | return; |
240 | } |
241 | |
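   | // If Find() had to decode the moves because they were not cached yet, |
   | // cache them for future throws through the same pc; otherwise reuse the |
   | // cached copy. |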
242 | if (cached_catch_entry_moves_.IsEmpty()) { |
243 | catch_entry_moves_cache_->Insert( |
244 | pc_, CatchEntryMovesRefPtr(catch_entry_moves_)); |
245 | } else { |
246 | catch_entry_moves_ = &cached_catch_entry_moves_.moves(); |
247 | } |
248 | |
249 | ExecuteCatchEntryMoves(*catch_entry_moves_); |
250 | } |
251 | |
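   | // Materializes all source values first (this may allocate, e.g. when |
   | // boxing doubles or SIMD values) and only then stores them into the |
   | // destination slots under a NoSafepointScope, so the GC never observes a |
   | // partially updated frame. |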
252 | void ExecuteCatchEntryMoves(const CatchEntryMoves& moves) { |
253 | Zone* zone = Thread::Current()->zone(); |
254 | auto& value = Object::Handle(zone); |
255 | auto& dst_values = Array::Handle(zone, Array::New(moves.count())); |
256 | |
257 | uword fp = handler_fp; |
258 | ObjectPool* pool = nullptr; |
259 | for (int j = 0; j < moves.count(); j++) { |
260 | const CatchEntryMove& move = moves.At(j); |
261 | |
262 | switch (move.source_kind()) { |
263 | case CatchEntryMove::SourceKind::kConstant: |
264 | if (pool == nullptr) { |
265 | pool = &ObjectPool::Handle(code_->GetObjectPool()); |
266 | } |
267 | value = pool->ObjectAt(move.src_slot()); |
268 | break; |
269 | |
270 | case CatchEntryMove::SourceKind::kTaggedSlot: |
271 | value = *TaggedSlotAt(fp, move.src_slot()); |
272 | break; |
273 | |
274 | case CatchEntryMove::SourceKind::kDoubleSlot: |
275 | value = Double::New(*SlotAt<double>(fp, move.src_slot())); |
276 | break; |
277 | |
278 | case CatchEntryMove::SourceKind::kFloat32x4Slot: |
279 | value = Float32x4::New(*SlotAt<simd128_value_t>(fp, move.src_slot())); |
280 | break; |
281 | |
282 | case CatchEntryMove::SourceKind::kFloat64x2Slot: |
283 | value = Float64x2::New(*SlotAt<simd128_value_t>(fp, move.src_slot())); |
284 | break; |
285 | |
286 | case CatchEntryMove::SourceKind::kInt32x4Slot: |
287 | value = Int32x4::New(*SlotAt<simd128_value_t>(fp, move.src_slot())); |
288 | break; |
289 | |
290 | case CatchEntryMove::SourceKind::kInt64PairSlot: |
291 | value = Integer::New( |
292 | Utils::LowHighTo64Bits(*SlotAt<uint32_t>(fp, move.src_lo_slot()), |
293 | *SlotAt<int32_t>(fp, move.src_hi_slot()))); |
294 | break; |
295 | |
296 | case CatchEntryMove::SourceKind::kInt64Slot: |
297 | value = Integer::New(*SlotAt<int64_t>(fp, move.src_slot())); |
298 | break; |
299 | |
300 | case CatchEntryMove::SourceKind::kInt32Slot: |
301 | value = Integer::New(*SlotAt<int32_t>(fp, move.src_slot())); |
302 | break; |
303 | |
304 | case CatchEntryMove::SourceKind::kUint32Slot: |
305 | value = Integer::New(*SlotAt<uint32_t>(fp, move.src_slot())); |
306 | break; |
307 | |
308 | default: |
309 | UNREACHABLE(); |
310 | } |
311 | |
312 | dst_values.SetAt(j, value); |
313 | } |
314 | |
315 | { |
316 | NoSafepointScope no_safepoint_scope; |
317 | |
318 | for (int j = 0; j < moves.count(); j++) { |
319 | const CatchEntryMove& move = moves.At(j); |
320 | value = dst_values.At(j); |
321 | *TaggedSlotAt(fp, move.dest_slot()) = value.raw(); |
322 | } |
323 | } |
324 | } |
325 | |
326 | #if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) |
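   | // Reads the catch entry moves for the current pc from the compressed map |
   | // that the precompiler stored alongside the Code. |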
327 | void ReadCompressedCatchEntryMoves() { |
328 | const intptr_t pc_offset = pc_ - code_->PayloadStart(); |
329 | const auto& td = TypedData::Handle(code_->catch_entry_moves_maps()); |
330 | |
331 | CatchEntryMovesMapReader reader(td); |
332 | catch_entry_moves_ = reader.ReadMovesForPcOffset(pc_offset); |
333 | } |
334 | #endif // defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER) |
335 | |
336 | #if !defined(DART_PRECOMPILED_RUNTIME) |
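   | // Without a precomputed map (JIT mode), reconstruct the catch entry moves |
   | // from the deoptimization instructions of the optimized code. |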
337 | void GetCatchEntryMovesFromDeopt(intptr_t num_vars, StackFrame* frame) { |
338 | Isolate* isolate = thread_->isolate(); |
339 | DeoptContext* deopt_context = |
340 | new DeoptContext(frame, *code_, DeoptContext::kDestIsAllocated, NULL, |
341 | NULL, true, false /* deoptimizing_code */); |
342 | isolate->set_deopt_context(deopt_context); |
343 | |
344 | catch_entry_moves_ = deopt_context->ToCatchEntryMoves(num_vars); |
345 | |
346 | isolate->set_deopt_context(NULL); |
347 | delete deopt_context; |
348 | } |
349 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
350 | |
351 | bool needs_stacktrace; |
352 | uword handler_pc; |
353 | uword handler_sp; |
354 | uword handler_fp; |
355 | |
356 | private: |
357 | template <typename T> |
358 | static T* SlotAt(uword fp, int stack_slot) { |
359 | const intptr_t frame_slot = |
360 | runtime_frame_layout.FrameSlotForVariableIndex(-stack_slot); |
361 | return reinterpret_cast<T*>(fp + frame_slot * kWordSize); |
362 | } |
363 | |
364 | static ObjectPtr* TaggedSlotAt(uword fp, int stack_slot) { |
365 | return SlotAt<ObjectPtr>(fp, stack_slot); |
366 | } |
367 | |
368 | typedef ReadStream::Raw<sizeof(intptr_t), intptr_t> Reader; |
369 | Thread* thread_; |
370 | Code* code_; |
371 | bool handler_pc_set_; |
372 | intptr_t pc_; // Current pc in the handler frame. |
373 | |
374 | const CatchEntryMoves* catch_entry_moves_ = nullptr; |
375 | CatchEntryMovesCache* catch_entry_moves_cache_ = nullptr; |
376 | CatchEntryMovesRefPtr cached_catch_entry_moves_; |
377 | }; |
378 | |
379 | CatchEntryMove CatchEntryMove::ReadFrom(ReadStream* stream) { |
380 | using Reader = ReadStream::Raw<sizeof(int32_t), int32_t>; |
381 | const int32_t src = Reader::Read(stream); |
382 | const int32_t dest_and_kind = Reader::Read(stream); |
383 | return CatchEntryMove(src, dest_and_kind); |
384 | } |
385 | |
386 | #if !defined(DART_PRECOMPILED_RUNTIME) |
387 | void CatchEntryMove::WriteTo(WriteStream* stream) { |
388 | using Writer = WriteStream::Raw<sizeof(int32_t), int32_t>; |
389 | Writer::Write(stream, src_); |
390 | Writer::Write(stream, dest_and_kind_); |
391 | } |
392 | #endif |
393 | |
394 | #if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER) |
395 | const char* CatchEntryMove::ToCString() const { |
396 | char from[256]; |
397 | |
398 | switch (source_kind()) { |
399 | case SourceKind::kConstant: |
400 | Utils::SNPrint(from, ARRAY_SIZE(from), "pp[%" Pd "]" , src_slot()); |
401 | break; |
402 | |
403 | case SourceKind::kTaggedSlot: |
404 | Utils::SNPrint(from, ARRAY_SIZE(from), "fp[%" Pd "]" , src_slot()); |
405 | break; |
406 | |
407 | case SourceKind::kDoubleSlot: |
408 | Utils::SNPrint(from, ARRAY_SIZE(from), "f64 [fp + %" Pd "]" , |
409 | src_slot() * compiler::target::kWordSize); |
410 | break; |
411 | |
412 | case SourceKind::kFloat32x4Slot: |
413 | Utils::SNPrint(from, ARRAY_SIZE(from), "f32x4 [fp + %" Pd "]" , |
414 | src_slot() * compiler::target::kWordSize); |
415 | break; |
416 | |
417 | case SourceKind::kFloat64x2Slot: |
418 | Utils::SNPrint(from, ARRAY_SIZE(from), "f64x2 [fp + %" Pd "]" , |
419 | src_slot() * compiler::target::kWordSize); |
420 | break; |
421 | |
422 | case SourceKind::kInt32x4Slot: |
423 | Utils::SNPrint(from, ARRAY_SIZE(from), "i32x4 [fp + %" Pd "]" , |
424 | src_slot() * compiler::target::kWordSize); |
425 | break; |
426 | |
427 | case SourceKind::kInt64PairSlot: |
428 | Utils::SNPrint(from, ARRAY_SIZE(from), |
429 | "i64 ([fp + %" Pd "], [fp + %" Pd "])" , |
430 | src_lo_slot() * compiler::target::kWordSize, |
431 | src_hi_slot() * compiler::target::kWordSize); |
432 | break; |
433 | |
434 | case SourceKind::kInt64Slot: |
435 | Utils::SNPrint(from, ARRAY_SIZE(from), "i64 [fp + %" Pd "]" , |
436 | src_slot() * compiler::target::kWordSize); |
437 | break; |
438 | |
439 | case SourceKind::kInt32Slot: |
440 | Utils::SNPrint(from, ARRAY_SIZE(from), "i32 [fp + %" Pd "]" , |
441 | src_slot() * compiler::target::kWordSize); |
442 | break; |
443 | |
444 | case SourceKind::kUint32Slot: |
445 | Utils::SNPrint(from, ARRAY_SIZE(from), "u32 [fp + %" Pd "]" , |
446 | src_slot() * compiler::target::kWordSize); |
447 | break; |
448 | |
449 | default: |
450 | UNREACHABLE(); |
451 | } |
452 | |
453 | return Thread::Current()->zone()->PrintToString("fp[%" Pd "] <- %s", |
454 | dest_slot(), from); |
455 | } |
456 | |
457 | void CatchEntryMovesMapReader::PrintEntries() { |
458 | NoSafepointScope no_safepoint; |
459 | |
460 | using Reader = ReadStream::Raw<sizeof(intptr_t), intptr_t>; |
461 | |
462 | ReadStream stream(static_cast<uint8_t*>(bytes_.DataAddr(0)), bytes_.Length()); |
463 | |
464 | while (stream.PendingBytes() > 0) { |
465 | const intptr_t stream_position = stream.Position(); |
466 | const intptr_t target_pc_offset = Reader::Read(&stream); |
467 | const intptr_t prefix_length = Reader::Read(&stream); |
468 | const intptr_t suffix_length = Reader::Read(&stream); |
469 | const intptr_t length = prefix_length + suffix_length; |
470 | Reader::Read(&stream); // Skip suffix_offset |
471 | for (intptr_t j = 0; j < prefix_length; j++) { |
472 | CatchEntryMove::ReadFrom(&stream); |
473 | } |
474 | |
475 | ReadStream inner_stream(static_cast<uint8_t*>(bytes_.DataAddr(0)), |
476 | bytes_.Length()); |
477 | CatchEntryMoves* moves = ReadCompressedCatchEntryMovesSuffix( |
478 | &inner_stream, stream_position, length); |
479 | THR_Print(" [code+0x%08" Px "]: (% " Pd " moves)\n" , target_pc_offset, |
480 | moves->count()); |
481 | for (intptr_t i = 0; i < moves->count(); i++) { |
482 | THR_Print(" %s\n" , moves->At(i).ToCString()); |
483 | } |
484 | CatchEntryMoves::Free(moves); |
485 | } |
486 | } |
487 | #endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER) |
488 | |
489 | CatchEntryMoves* CatchEntryMovesMapReader::ReadMovesForPcOffset( |
490 | intptr_t pc_offset) { |
491 | NoSafepointScope no_safepoint; |
492 | |
493 | ReadStream stream(static_cast<uint8_t*>(bytes_.DataAddr(0)), bytes_.Length()); |
494 | |
495 | intptr_t position = 0; |
496 | intptr_t length = 0; |
497 | FindEntryForPc(&stream, pc_offset, &position, &length); |
498 | |
499 | return ReadCompressedCatchEntryMovesSuffix(&stream, position, length); |
500 | } |
501 | |
502 | void CatchEntryMovesMapReader::FindEntryForPc(ReadStream* stream, |
503 | intptr_t pc_offset, |
504 | intptr_t* position, |
505 | intptr_t* length) { |
506 | using Reader = ReadStream::Raw<sizeof(intptr_t), intptr_t>; |
507 | |
508 | while (stream->PendingBytes() > 0) { |
509 | const intptr_t stream_position = stream->Position(); |
510 | const intptr_t target_pc_offset = Reader::Read(stream); |
511 | const intptr_t prefix_length = Reader::Read(stream); |
512 | const intptr_t suffix_length = Reader::Read(stream); |
513 | Reader::Read(stream); // Skip suffix_offset |
514 | if (pc_offset == target_pc_offset) { |
515 | *position = stream_position; |
516 | *length = prefix_length + suffix_length; |
517 | return; |
518 | } |
519 | |
520 | // Skip the prefix moves. |
521 | for (intptr_t j = 0; j < prefix_length; j++) { |
522 | CatchEntryMove::ReadFrom(stream); |
523 | } |
524 | } |
525 | |
526 | UNREACHABLE(); |
527 | } |
528 | |
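   | // Entries in the moves map share suffixes: each entry stores only a prefix |
   | // of its moves inline, together with the offset of another entry whose |
   | // moves form the remaining suffix. Follow that chain of suffix offsets |
   | // until 'length' moves have been materialized. |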
529 | CatchEntryMoves* CatchEntryMovesMapReader::ReadCompressedCatchEntryMovesSuffix( |
530 | ReadStream* stream, |
531 | intptr_t offset, |
532 | intptr_t length) { |
533 | using Reader = ReadStream::Raw<sizeof(intptr_t), intptr_t>; |
534 | |
535 | CatchEntryMoves* moves = CatchEntryMoves::Allocate(length); |
536 | |
537 | intptr_t remaining_length = length; |
538 | |
539 | intptr_t moves_offset = 0; |
540 | while (remaining_length > 0) { |
541 | stream->SetPosition(offset); |
542 | Reader::Read(stream); // skip pc_offset |
543 | Reader::Read(stream); // skip prefix length |
544 | const intptr_t suffix_length = Reader::Read(stream); |
545 | const intptr_t suffix_offset = Reader::Read(stream); |
546 | const intptr_t to_read = remaining_length - suffix_length; |
547 | if (to_read > 0) { |
548 | for (int j = 0; j < to_read; j++) { |
549 | // The prefix moves are written back to front, so fill them in reverse. |
550 | moves->At(moves_offset + to_read - j - 1) = |
551 | CatchEntryMove::ReadFrom(stream); |
552 | } |
553 | remaining_length -= to_read; |
554 | moves_offset += to_read; |
555 | } |
556 | offset = suffix_offset; |
557 | } |
558 | |
559 | return moves; |
560 | } |
561 | |
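   | // Unwinds to the closest entry frame. Used when propagating a VM Error, |
   | // which is returned to the C++ caller rather than thrown to a Dart handler. |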
562 | static void FindErrorHandler(uword* handler_pc, |
563 | uword* handler_sp, |
564 | uword* handler_fp) { |
565 | StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, |
566 | Thread::Current(), |
567 | StackFrameIterator::kNoCrossThreadIteration); |
568 | StackFrame* frame = frames.NextFrame(); |
569 | ASSERT(frame != NULL); |
570 | while (!frame->IsEntryFrame()) { |
571 | frame = frames.NextFrame(); |
572 | ASSERT(frame != NULL); |
573 | } |
574 | ASSERT(frame->IsEntryFrame()); |
575 | *handler_pc = frame->pc(); |
576 | *handler_sp = frame->sp(); |
577 | *handler_fp = frame->fp(); |
578 | } |
579 | |
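   | // If the frame being unwound into is scheduled for lazy deoptimization, |
   | // the throw must not resume at the catch handler directly: record the |
   | // handler pc in the pending deopt entry and resume at the lazy deopt stub, |
   | // which deoptimizes the frame and then continues at the handler. |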
580 | static uword RemapExceptionPCForDeopt(Thread* thread, |
581 | uword program_counter, |
582 | uword frame_pointer) { |
583 | MallocGrowableArray<PendingLazyDeopt>* pending_deopts = |
584 | thread->isolate()->pending_deopts(); |
585 | if (pending_deopts->length() > 0) { |
586 | // Check if the target frame is scheduled for lazy deopt. |
587 | for (intptr_t i = 0; i < pending_deopts->length(); i++) { |
588 | if ((*pending_deopts)[i].fp() == frame_pointer) { |
589 | // Deopt should now resume in the catch handler instead of after the |
590 | // call. |
591 | (*pending_deopts)[i].set_pc(program_counter); |
592 | |
593 | // Jump to the deopt stub instead of the catch handler. |
594 | program_counter = StubCode::DeoptimizeLazyFromThrow().EntryPoint(); |
595 | if (FLAG_trace_deoptimization) { |
596 | THR_Print("Throwing to frame scheduled for lazy deopt fp=%" Pp "\n" , |
597 | frame_pointer); |
598 | |
599 | #if defined(DEBUG) |
600 | // Ensure the frame references optimized code. |
601 | ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>( |
602 | frame_pointer + runtime_frame_layout.code_from_fp * kWordSize)); |
603 | Code& code = Code::Handle(Code::RawCast(pc_marker)); |
604 | ASSERT(code.is_optimized() && !code.is_force_optimized()); |
605 | #endif |
606 | } |
607 | break; |
608 | } |
609 | } |
610 | } |
611 | return program_counter; |
612 | } |
613 | |
614 | static void ClearLazyDeopts(Thread* thread, uword frame_pointer) { |
615 | MallocGrowableArray<PendingLazyDeopt>* pending_deopts = |
616 | thread->isolate()->pending_deopts(); |
617 | if (pending_deopts->length() > 0) { |
618 | // We may be jumping over frames scheduled for lazy deopt. Remove these |
619 | // frames from the pending deopt table, but only after unmarking them so |
620 | // any stack walk that happens before the stack is unwound will still work. |
621 | { |
622 | DartFrameIterator frames(thread, |
623 | StackFrameIterator::kNoCrossThreadIteration); |
624 | for (StackFrame* frame = frames.NextFrame(); frame != nullptr; |
625 | frame = frames.NextFrame()) { |
626 | if (frame->is_interpreted()) { |
627 | continue; |
628 | } else if (frame->fp() >= frame_pointer) { |
629 | break; |
630 | } |
631 | if (frame->IsMarkedForLazyDeopt()) { |
632 | frame->UnmarkForLazyDeopt(); |
633 | } |
634 | } |
635 | } |
636 | |
637 | #if defined(DEBUG) |
638 | ValidateFrames(); |
639 | #endif |
640 | |
641 | for (intptr_t i = 0; i < pending_deopts->length(); i++) { |
642 | if ((*pending_deopts)[i].fp() < frame_pointer) { |
643 | if (FLAG_trace_deoptimization) { |
644 | THR_Print( |
645 | "Lazy deopt skipped due to throw for " |
646 | "fp=%" Pp ", pc=%" Pp "\n" , |
647 | (*pending_deopts)[i].fp(), (*pending_deopts)[i].pc()); |
648 | } |
649 | pending_deopts->RemoveAt(i--); |
650 | } |
651 | } |
652 | |
653 | #if defined(DEBUG) |
654 | ValidateFrames(); |
655 | #endif |
656 | } |
657 | } |
658 | |
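   | // Stashes the exception and stack trace on the thread and transfers |
   | // control to the RunExceptionHandler stub in the handler's frame. |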
659 | static void JumpToExceptionHandler(Thread* thread, |
660 | uword program_counter, |
661 | uword stack_pointer, |
662 | uword frame_pointer, |
663 | const Object& exception_object, |
664 | const Object& stacktrace_object) { |
665 | uword remapped_pc = |
666 | RemapExceptionPCForDeopt(thread, program_counter, frame_pointer); |
667 | thread->set_active_exception(exception_object); |
668 | thread->set_active_stacktrace(stacktrace_object); |
669 | thread->set_resume_pc(remapped_pc); |
670 | uword run_exception_pc = StubCode::RunExceptionHandler().EntryPoint(); |
671 | Exceptions::JumpToFrame(thread, run_exception_pc, stack_pointer, |
672 | frame_pointer, false /* do not clear deopt */); |
673 | } |
674 | |
675 | NO_SANITIZE_SAFE_STACK // This function manipulates the safestack pointer. |
676 | void Exceptions::JumpToFrame(Thread* thread, |
677 | uword program_counter, |
678 | uword stack_pointer, |
679 | uword frame_pointer, |
680 | bool clear_deopt_at_target) { |
681 | #if !defined(DART_PRECOMPILED_RUNTIME) |
682 | // TODO(regis): We still possibly need to unwind interpreter frames if they |
683 | // are callee frames of the C++ frame handling the exception. |
684 | if (FLAG_enable_interpreter) { |
685 | Interpreter* interpreter = thread->interpreter(); |
686 | if ((interpreter != NULL) && interpreter->HasFrame(frame_pointer)) { |
687 | interpreter->JumpToFrame(program_counter, stack_pointer, frame_pointer, |
688 | thread); |
689 | } |
690 | } |
691 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
692 | |
693 | const uword fp_for_clearing = |
694 | (clear_deopt_at_target ? frame_pointer + 1 : frame_pointer); |
695 | ClearLazyDeopts(thread, fp_for_clearing); |
696 | |
697 | #if defined(USING_SIMULATOR) |
698 | // Unwinding the C++ frames and destroying their stack resources is done by |
699 | // the simulator, because the target stack_pointer is a simulated stack |
700 | // pointer and not the C++ stack pointer. |
701 | |
702 | // Continue simulating at the given pc in the given frame after setting up the |
703 | // exception object in the kExceptionObjectReg register and the stacktrace |
704 | // object (may be raw null) in the kStackTraceObjectReg register. |
705 | |
706 | Simulator::Current()->JumpToFrame(program_counter, stack_pointer, |
707 | frame_pointer, thread); |
708 | #else |
709 | |
710 | // Prepare for unwinding frames by destroying all the stack resources |
711 | // in the previous frames. |
712 | StackResource::Unwind(thread); |
713 | |
714 | // Call a stub to set up the exception object in kExceptionObjectReg, |
715 | // to set up the stacktrace object in kStackTraceObjectReg, and to |
716 | // continue execution at the given pc in the given frame. |
717 | typedef void (*ExcpHandler)(uword, uword, uword, Thread*); |
718 | ExcpHandler func = |
719 | reinterpret_cast<ExcpHandler>(StubCode::JumpToFrame().EntryPoint()); |
720 | |
721 | // Unpoison the stack before we tear it down in the generated stub code. |
722 | uword current_sp = OSThread::GetCurrentStackPointer() - 1024; |
723 | ASAN_UNPOISON(reinterpret_cast<void*>(current_sp), |
724 | stack_pointer - current_sp); |
725 | |
726 | // We are jumping over C++ frames, so we have to set the safestack pointer |
727 | // back to what it was when we entered the runtime from Dart code. |
728 | #if defined(USING_SAFE_STACK) |
729 | const uword saved_ssp = thread->saved_safestack_limit(); |
730 | OSThread::SetCurrentSafestackPointer(saved_ssp); |
731 | #endif |
732 | |
733 | #if defined(USING_SHADOW_CALL_STACK) |
734 | // The shadow call stack register will be restored by the JumpToFrame stub. |
735 | #endif |
736 | |
737 | func(program_counter, stack_pointer, frame_pointer, thread); |
738 | #endif |
739 | UNREACHABLE(); |
740 | } |
741 | |
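   | // Returns the '_stackTrace' field of 'class Error' if 'instance' extends |
   | // Error, otherwise Field::null(). |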
742 | static FieldPtr LookupStackTraceField(const Instance& instance) { |
743 | if (instance.GetClassId() < kNumPredefinedCids) { |
744 | // 'class Error' is not a predefined class, so instances of predefined classes cannot be Errors. |
745 | return Field::null(); |
746 | } |
747 | Thread* thread = Thread::Current(); |
748 | Zone* zone = thread->zone(); |
749 | Isolate* isolate = thread->isolate(); |
750 | Class& error_class = |
751 | Class::Handle(zone, isolate->object_store()->error_class()); |
752 | if (error_class.IsNull()) { |
753 | const Library& core_lib = Library::Handle(zone, Library::CoreLibrary()); |
754 | error_class = core_lib.LookupClass(Symbols::Error()); |
755 | ASSERT(!error_class.IsNull()); |
756 | isolate->object_store()->set_error_class(error_class); |
757 | } |
758 | // If instance class extends 'class Error' return '_stackTrace' field. |
759 | Class& test_class = Class::Handle(zone, instance.clazz()); |
760 | AbstractType& type = AbstractType::Handle(zone, AbstractType::null()); |
761 | while (true) { |
762 | if (test_class.raw() == error_class.raw()) { |
763 | return error_class.LookupInstanceFieldAllowPrivate( |
764 | Symbols::_stackTrace()); |
765 | } |
766 | type = test_class.super_type(); |
767 | if (type.IsNull()) return Field::null(); |
768 | test_class = type.type_class(); |
769 | } |
770 | UNREACHABLE(); |
771 | return Field::null(); |
772 | } |
773 | |
774 | StackTracePtr Exceptions::CurrentStackTrace() { |
775 | return GetStackTraceForException(); |
776 | } |
777 | |
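   | // Finds the nearest Dart exception handler, attaches a stack trace to the |
   | // exception if one is needed (reusing the preallocated trace for OOM and |
   | // stack overflow), and transfers control either to the handler or, if none |
   | // exists, back to the entry frame with an UnhandledException error. |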
778 | DART_NORETURN |
779 | static void ThrowExceptionHelper(Thread* thread, |
780 | const Instance& incoming_exception, |
781 | const Instance& existing_stacktrace, |
782 | const bool is_rethrow) { |
783 | DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame()); |
784 | Zone* zone = thread->zone(); |
785 | Isolate* isolate = thread->isolate(); |
786 | #if !defined(PRODUCT) |
787 | // Do not notify debugger on stack overflow and out of memory exceptions. |
788 | // The VM would crash when the debugger calls back into the VM to |
789 | // get values of variables. |
790 | if (incoming_exception.raw() != isolate->object_store()->out_of_memory() && |
791 | incoming_exception.raw() != isolate->object_store()->stack_overflow()) { |
792 | isolate->debugger()->PauseException(incoming_exception); |
793 | } |
794 | #endif |
795 | bool use_preallocated_stacktrace = false; |
796 | Instance& exception = Instance::Handle(zone, incoming_exception.raw()); |
797 | if (exception.IsNull()) { |
798 | exception ^= |
799 | Exceptions::Create(Exceptions::kNullThrown, Object::empty_array()); |
800 | } else if (exception.raw() == isolate->object_store()->out_of_memory() || |
801 | exception.raw() == isolate->object_store()->stack_overflow()) { |
802 | use_preallocated_stacktrace = true; |
803 | } |
804 | // Find the exception handler and determine if the handler needs a |
805 | // stacktrace. |
806 | ExceptionHandlerFinder finder(thread); |
807 | bool handler_exists = finder.Find(); |
808 | uword handler_pc = finder.handler_pc; |
809 | uword handler_sp = finder.handler_sp; |
810 | uword handler_fp = finder.handler_fp; |
811 | bool handler_needs_stacktrace = finder.needs_stacktrace; |
812 | Instance& stacktrace = Instance::Handle(zone); |
813 | if (use_preallocated_stacktrace) { |
814 | if (handler_pc == 0) { |
815 | // No Dart frame. |
816 | ASSERT(incoming_exception.raw() == |
817 | isolate->object_store()->out_of_memory()); |
818 | const UnhandledException& error = UnhandledException::Handle( |
819 | zone, |
820 | isolate->isolate_object_store()->preallocated_unhandled_exception()); |
821 | thread->long_jump_base()->Jump(1, error); |
822 | UNREACHABLE(); |
823 | } |
824 | stacktrace = isolate->isolate_object_store()->preallocated_stack_trace(); |
825 | PreallocatedStackTraceBuilder frame_builder(stacktrace); |
826 | ASSERT(existing_stacktrace.IsNull() || |
827 | (existing_stacktrace.raw() == stacktrace.raw())); |
828 | ASSERT(existing_stacktrace.IsNull() || is_rethrow); |
829 | if (handler_needs_stacktrace && existing_stacktrace.IsNull()) { |
830 | BuildStackTrace(&frame_builder); |
831 | } |
832 | } else { |
833 | if (!existing_stacktrace.IsNull()) { |
834 | // If we have an existing stack trace then this had better be a rethrow. The |
835 | // reverse is not necessarily true (e.g. Dart_PropagateError can cause a |
836 | // rethrow to be called without an existing stacktrace). |
837 | ASSERT(is_rethrow); |
838 | stacktrace = existing_stacktrace.raw(); |
839 | } else { |
840 | // Get stacktrace field of class Error to determine whether we have a |
841 | // subclass of Error which carries around its stack trace. |
842 | const Field& stacktrace_field = |
843 | Field::Handle(zone, LookupStackTraceField(exception)); |
844 | if (!stacktrace_field.IsNull() || handler_needs_stacktrace) { |
845 | // Collect the stacktrace if needed. |
846 | ASSERT(existing_stacktrace.IsNull()); |
847 | stacktrace = Exceptions::CurrentStackTrace(); |
848 | // If we have an Error object, then set its stackTrace field only if it is |
849 | // not yet initialized. |
850 | if (!stacktrace_field.IsNull() && |
851 | (exception.GetField(stacktrace_field) == Object::null())) { |
852 | exception.SetField(stacktrace_field, stacktrace); |
853 | } |
854 | } |
855 | } |
856 | } |
857 | // We expect to find a handler_pc: even if the exception is unhandled, |
858 | // we expect to at least have the Dart entry frame on the stack, as |
859 | // Exceptions::Throw should happen only after a Dart invocation has |
860 | // been done. |
861 | ASSERT(handler_pc != 0); |
862 | |
863 | if (FLAG_print_stacktrace_at_throw) { |
864 | THR_Print("Exception '%s' thrown:\n" , exception.ToCString()); |
865 | THR_Print("%s\n" , stacktrace.ToCString()); |
866 | } |
867 | if (handler_exists) { |
868 | finder.PrepareFrameForCatchEntry(); |
869 | // Found a dart handler for the exception, jump to it. |
870 | JumpToExceptionHandler(thread, handler_pc, handler_sp, handler_fp, |
871 | exception, stacktrace); |
872 | } else { |
873 | // No dart exception handler found in this invocation sequence, |
874 | // so we create an unhandled exception object and return to the |
875 | // invocation stub so that it returns this unhandled exception |
876 | // object. The C++ code which invoked this dart sequence can check |
877 | // and do the appropriate thing (rethrow the exception to the |
878 | // dart invocation sequence above it, print diagnostics and terminate |
879 | // the isolate etc.). This can happen in the compiler, which is not |
880 | // allowed to allocate in new space, so we pass the kOld argument. |
881 | const UnhandledException& unhandled_exception = UnhandledException::Handle( |
882 | zone, UnhandledException::New(exception, stacktrace, Heap::kOld)); |
883 | stacktrace = StackTrace::null(); |
884 | JumpToExceptionHandler(thread, handler_pc, handler_sp, handler_fp, |
885 | unhandled_exception, stacktrace); |
886 | } |
887 | UNREACHABLE(); |
888 | } |
889 | |
890 | // Static helpers for allocating, initializing, and throwing an error instance. |
891 | |
892 | // Return the script of the Dart function that called the native entry or the |
893 | // runtime entry. The frame iterator points to the callee. |
894 | ScriptPtr Exceptions::GetCallerScript(DartFrameIterator* iterator) { |
895 | StackFrame* caller_frame = iterator->NextFrame(); |
896 | ASSERT(caller_frame != NULL && caller_frame->IsDartFrame()); |
897 | const Function& caller = Function::Handle(caller_frame->LookupDartFunction()); |
898 | #if defined(DART_PRECOMPILED_RUNTIME) |
899 | if (caller.IsNull()) return Script::null(); |
900 | #else |
901 | ASSERT(!caller.IsNull()); |
902 | #endif |
903 | return caller.script(); |
904 | } |
905 | |
906 | // Allocate a new instance of the given class name. |
907 | // TODO(hausner): Rename this NewCoreInstance to call out the fact that |
908 | // the class name is resolved in the core library implicitly? |
909 | InstancePtr Exceptions::NewInstance(const char* class_name) { |
910 | Thread* thread = Thread::Current(); |
911 | Zone* zone = thread->zone(); |
912 | const String& cls_name = |
913 | String::Handle(zone, Symbols::New(thread, class_name)); |
914 | const Library& core_lib = Library::Handle(Library::CoreLibrary()); |
915 | // No ambiguity error expected: passing NULL. |
916 | Class& cls = Class::Handle(core_lib.LookupClass(cls_name)); |
917 | ASSERT(!cls.IsNull()); |
918 | // There are no parameterized error types, so no need to set type arguments. |
919 | return Instance::New(cls); |
920 | } |
921 | |
922 | // Allocate, initialize, and throw a TypeError or CastError. A CastError is |
923 | // thrown only for a failed type cast (i.e. when dst_name is Symbols::InTypeCast). |
924 | void Exceptions::CreateAndThrowTypeError(TokenPosition location, |
925 | const AbstractType& src_type, |
926 | const AbstractType& dst_type, |
927 | const String& dst_name) { |
928 | ASSERT(!dst_name.IsNull()); // Pass Symbols::Empty() instead. |
929 | Thread* thread = Thread::Current(); |
930 | Zone* zone = thread->zone(); |
931 | const Array& args = Array::Handle(zone, Array::New(4)); |
932 | |
933 | ExceptionType exception_type = |
934 | (dst_name.raw() == Symbols::InTypeCast().raw()) ? kCast : kType; |
935 | |
936 | DartFrameIterator iterator(thread, |
937 | StackFrameIterator::kNoCrossThreadIteration); |
938 | const Script& script = Script::Handle(zone, GetCallerScript(&iterator)); |
939 | const String& url = String::Handle( |
940 | zone, script.IsNull() ? Symbols::OptimizedOut().raw() : script.url()); |
941 | intptr_t line = -1; |
942 | intptr_t column = -1; |
943 | if (!script.IsNull() && location.IsReal()) { |
944 | script.GetTokenLocation(location, &line, &column); |
945 | } |
946 | // Initialize '_url', '_line', and '_column' arguments. |
947 | args.SetAt(0, url); |
948 | args.SetAt(1, Smi::Handle(zone, Smi::New(line))); |
949 | args.SetAt(2, Smi::Handle(zone, Smi::New(column))); |
950 | |
951 | // Construct '_errorMsg'. |
952 | const GrowableObjectArray& pieces = |
953 | GrowableObjectArray::Handle(zone, GrowableObjectArray::New(20)); |
954 | |
955 | if (!dst_type.IsNull()) { |
956 | // Describe the type error. |
957 | if (!src_type.IsNull()) { |
958 | pieces.Add(Symbols::TypeQuote()); |
959 | pieces.Add(String::Handle(zone, src_type.UserVisibleName())); |
960 | pieces.Add(Symbols::QuoteIsNotASubtypeOf()); |
961 | } |
962 | pieces.Add(Symbols::TypeQuote()); |
963 | pieces.Add(String::Handle(zone, dst_type.UserVisibleName())); |
964 | pieces.Add(Symbols::SingleQuote()); |
965 | if (exception_type == kCast) { |
966 | pieces.Add(dst_name); |
967 | } else if (dst_name.Length() > 0) { |
968 | pieces.Add(Symbols::SpaceOfSpace()); |
969 | pieces.Add(Symbols::SingleQuote()); |
970 | pieces.Add(dst_name); |
971 | pieces.Add(Symbols::SingleQuote()); |
972 | } |
973 | // Print ambiguous URIs of src and dst types. |
974 | URIs uris(zone, 12); |
975 | if (!src_type.IsNull()) { |
976 | src_type.EnumerateURIs(&uris); |
977 | } |
978 | if (!dst_type.IsDynamicType() && !dst_type.IsVoidType() && |
979 | !dst_type.IsNeverType()) { |
980 | dst_type.EnumerateURIs(&uris); |
981 | } |
982 | const String& formatted_uris = |
983 | String::Handle(zone, AbstractType::PrintURIs(&uris)); |
984 | if (formatted_uris.Length() > 0) { |
985 | pieces.Add(Symbols::SpaceWhereNewLine()); |
986 | pieces.Add(formatted_uris); |
987 | } |
988 | } |
989 | const Array& arr = Array::Handle(zone, Array::MakeFixedLength(pieces)); |
990 | const String& error_msg = String::Handle(zone, String::ConcatAll(arr)); |
991 | args.SetAt(3, error_msg); |
992 | |
993 | // Type errors in the core library may be difficult to diagnose. |
994 | // Print type error information before throwing the error when debugging. |
995 | if (FLAG_print_stacktrace_at_throw) { |
996 | THR_Print("'%s': Failed type check: line %" Pd " pos %" Pd ": " , |
997 | String::Handle(zone, script.url()).ToCString(), line, column); |
998 | THR_Print("%s\n" , error_msg.ToCString()); |
999 | } |
1000 | |
1001 | // Throw TypeError or CastError instance. |
1002 | Exceptions::ThrowByType(exception_type, args); |
1003 | UNREACHABLE(); |
1004 | } |
1005 | |
1006 | void Exceptions::Throw(Thread* thread, const Instance& exception) { |
1007 | // Null object is a valid exception object. |
1008 | ThrowExceptionHelper(thread, exception, StackTrace::Handle(thread->zone()), |
1009 | false); |
1010 | } |
1011 | |
1012 | void Exceptions::ReThrow(Thread* thread, |
1013 | const Instance& exception, |
1014 | const Instance& stacktrace) { |
1015 | // Null object is a valid exception object. |
1016 | ThrowExceptionHelper(thread, exception, stacktrace, true); |
1017 | } |
1018 | |
1019 | void Exceptions::PropagateError(const Error& error) { |
1020 | ASSERT(!error.IsNull()); |
1021 | Thread* thread = Thread::Current(); |
1022 | DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame()); |
1023 | Zone* zone = thread->zone(); |
1024 | if (error.IsUnhandledException()) { |
1025 | // If the error object represents an unhandled exception, then |
1026 | // rethrow the exception in the normal fashion. |
1027 | const UnhandledException& uhe = UnhandledException::Cast(error); |
1028 | const Instance& exc = Instance::Handle(zone, uhe.exception()); |
1029 | const Instance& stk = Instance::Handle(zone, uhe.stacktrace()); |
1030 | Exceptions::ReThrow(thread, exc, stk); |
1031 | } else { |
1032 | // Return to the invocation stub and return this error object. The |
1033 | // C++ code which invoked this dart sequence can check and do the |
1034 | // appropriate thing. |
1035 | uword handler_pc = 0; |
1036 | uword handler_sp = 0; |
1037 | uword handler_fp = 0; |
1038 | FindErrorHandler(&handler_pc, &handler_sp, &handler_fp); |
1039 | JumpToExceptionHandler(thread, handler_pc, handler_sp, handler_fp, error, |
1040 | StackTrace::Handle(zone)); // Null stacktrace. |
1041 | } |
1042 | UNREACHABLE(); |
1043 | } |
1044 | |
1045 | void Exceptions::PropagateToEntry(const Error& error) { |
1046 | Thread* thread = Thread::Current(); |
1047 | Zone* zone = thread->zone(); |
1048 | ASSERT(thread->top_exit_frame_info() != 0); |
1049 | Instance& stacktrace = Instance::Handle(zone); |
1050 | if (error.IsUnhandledException()) { |
1051 | const UnhandledException& uhe = UnhandledException::Cast(error); |
1052 | stacktrace = uhe.stacktrace(); |
1053 | } else { |
1054 | stacktrace = Exceptions::CurrentStackTrace(); |
1055 | } |
1056 | uword handler_pc = 0; |
1057 | uword handler_sp = 0; |
1058 | uword handler_fp = 0; |
1059 | FindErrorHandler(&handler_pc, &handler_sp, &handler_fp); |
1060 | JumpToExceptionHandler(thread, handler_pc, handler_sp, handler_fp, error, |
1061 | stacktrace); |
1062 | UNREACHABLE(); |
1063 | } |
1064 | |
1065 | void Exceptions::ThrowByType(ExceptionType type, const Array& arguments) { |
1066 | Thread* thread = Thread::Current(); |
1067 | const Object& result = |
1068 | Object::Handle(thread->zone(), Create(type, arguments)); |
1069 | if (result.IsError()) { |
1070 | // We got an error while constructing the exception object. |
1071 | // Propagate the error instead of throwing the exception. |
1072 | PropagateError(Error::Cast(result)); |
1073 | } else { |
1074 | ASSERT(result.IsInstance()); |
1075 | Throw(thread, Instance::Cast(result)); |
1076 | } |
1077 | } |
1078 | |
1079 | void Exceptions::ThrowOOM() { |
1080 | Thread* thread = Thread::Current(); |
1081 | Isolate* isolate = thread->isolate(); |
1082 | const Instance& oom = Instance::Handle( |
1083 | thread->zone(), isolate->object_store()->out_of_memory()); |
1084 | Throw(thread, oom); |
1085 | } |
1086 | |
1087 | void Exceptions::ThrowStackOverflow() { |
1088 | Thread* thread = Thread::Current(); |
1089 | Isolate* isolate = thread->isolate(); |
1090 | const Instance& stack_overflow = Instance::Handle( |
1091 | thread->zone(), isolate->object_store()->stack_overflow()); |
1092 | Throw(thread, stack_overflow); |
1093 | } |
1094 | |
1095 | void Exceptions::ThrowArgumentError(const Instance& arg) { |
1096 | const Array& args = Array::Handle(Array::New(1)); |
1097 | args.SetAt(0, arg); |
1098 | Exceptions::ThrowByType(Exceptions::kArgument, args); |
1099 | } |
1100 | |
1101 | void Exceptions::ThrowRangeError(const char* argument_name, |
1102 | const Integer& argument_value, |
1103 | intptr_t expected_from, |
1104 | intptr_t expected_to) { |
1105 | const Array& args = Array::Handle(Array::New(4)); |
1106 | args.SetAt(0, argument_value); |
1107 | args.SetAt(1, Integer::Handle(Integer::New(expected_from))); |
1108 | args.SetAt(2, Integer::Handle(Integer::New(expected_to))); |
1109 | args.SetAt(3, String::Handle(String::New(argument_name))); |
1110 | Exceptions::ThrowByType(Exceptions::kRange, args); |
1111 | } |
1112 | |
1113 | void Exceptions::ThrowUnsupportedError(const char* msg) { |
1114 | const Array& args = Array::Handle(Array::New(1)); |
1115 | args.SetAt(0, String::Handle(String::New(msg))); |
1116 | Exceptions::ThrowByType(Exceptions::kUnsupported, args); |
1117 | } |
1118 | |
1119 | void Exceptions::ThrowCompileTimeError(const LanguageError& error) { |
1120 | const Array& args = Array::Handle(Array::New(1)); |
1121 | args.SetAt(0, String::Handle(error.FormatMessage())); |
1122 | Exceptions::ThrowByType(Exceptions::kCompileTimeError, args); |
1123 | } |
1124 | |
1125 | void Exceptions::ThrowLateInitializationError(const String& name) { |
1126 | const Array& args = Array::Handle(Array::New(1)); |
1127 | args.SetAt(0, name); |
1128 | Exceptions::ThrowByType(Exceptions::kLateInitializationError, args); |
1129 | } |
1130 | |
1131 | ObjectPtr Exceptions::Create(ExceptionType type, const Array& arguments) { |
1132 | Library& library = Library::Handle(); |
1133 | const String* class_name = NULL; |
1134 | const String* constructor_name = &Symbols::Dot(); |
1135 | switch (type) { |
1136 | case kNone: |
1137 | case kStackOverflow: |
1138 | case kOutOfMemory: |
1139 | UNREACHABLE(); |
1140 | break; |
1141 | case kRange: |
1142 | library = Library::CoreLibrary(); |
1143 | class_name = &Symbols::RangeError(); |
1144 | constructor_name = &Symbols::DotRange(); |
1145 | break; |
1146 | case kRangeMsg: |
1147 | library = Library::CoreLibrary(); |
1148 | class_name = &Symbols::RangeError(); |
1149 | constructor_name = &Symbols::Dot(); |
1150 | break; |
1151 | case kArgument: |
1152 | library = Library::CoreLibrary(); |
1153 | class_name = &Symbols::ArgumentError(); |
1154 | break; |
1155 | case kArgumentValue: |
1156 | library = Library::CoreLibrary(); |
1157 | class_name = &Symbols::ArgumentError(); |
1158 | constructor_name = &Symbols::DotValue(); |
1159 | break; |
1160 | case kIntegerDivisionByZeroException: |
1161 | library = Library::CoreLibrary(); |
1162 | class_name = &Symbols::IntegerDivisionByZeroException(); |
1163 | break; |
1164 | case kNoSuchMethod: |
1165 | library = Library::CoreLibrary(); |
1166 | class_name = &Symbols::NoSuchMethodError(); |
1167 | constructor_name = &Symbols::DotWithType(); |
1168 | break; |
1169 | case kFormat: |
1170 | library = Library::CoreLibrary(); |
1171 | class_name = &Symbols::FormatException(); |
1172 | break; |
1173 | case kUnsupported: |
1174 | library = Library::CoreLibrary(); |
1175 | class_name = &Symbols::UnsupportedError(); |
1176 | break; |
1177 | case kNullThrown: |
1178 | library = Library::CoreLibrary(); |
1179 | class_name = &Symbols::NullThrownError(); |
1180 | break; |
1181 | case kIsolateSpawn: |
1182 | library = Library::IsolateLibrary(); |
1183 | class_name = &Symbols::IsolateSpawnException(); |
1184 | break; |
1185 | case kAssertion: |
1186 | library = Library::CoreLibrary(); |
1187 | class_name = &Symbols::AssertionError(); |
1188 | constructor_name = &Symbols::DotCreate(); |
1189 | break; |
1190 | case kCast: |
1191 | library = Library::CoreLibrary(); |
1192 | class_name = &Symbols::CastError(); |
1193 | constructor_name = &Symbols::DotCreate(); |
1194 | break; |
1195 | case kType: |
1196 | library = Library::CoreLibrary(); |
1197 | class_name = &Symbols::TypeError(); |
1198 | constructor_name = &Symbols::DotCreate(); |
1199 | break; |
1200 | case kFallThrough: |
1201 | library = Library::CoreLibrary(); |
1202 | class_name = &Symbols::FallThroughError(); |
1203 | constructor_name = &Symbols::DotCreate(); |
1204 | break; |
1205 | case kAbstractClassInstantiation: |
1206 | library = Library::CoreLibrary(); |
1207 | class_name = &Symbols::AbstractClassInstantiationError(); |
1208 | constructor_name = &Symbols::DotCreate(); |
1209 | break; |
1210 | case kCyclicInitializationError: |
1211 | library = Library::CoreLibrary(); |
1212 | class_name = &Symbols::CyclicInitializationError(); |
1213 | break; |
1214 | case kCompileTimeError: |
1215 | library = Library::CoreLibrary(); |
1216 | class_name = &Symbols::_CompileTimeError(); |
1217 | break; |
1218 | case kLateInitializationError: |
1219 | library = Library::CoreLibrary(); |
1220 | class_name = &Symbols::LateInitializationError(); |
1221 | break; |
1222 | } |
1223 | |
1224 | Thread* thread = Thread::Current(); |
1225 | NoReloadScope no_reload_scope(thread->isolate(), thread); |
1226 | return DartLibraryCalls::InstanceCreate(library, *class_name, |
1227 | *constructor_name, arguments); |
1228 | } |
1229 | |
1230 | UnhandledExceptionPtr Exceptions::CreateUnhandledException(Zone* zone, |
1231 | ExceptionType type, |
1232 | const char* msg) { |
1233 | const String& error_str = String::Handle(zone, String::New(msg)); |
1234 | const Array& args = Array::Handle(zone, Array::New(1)); |
1235 | args.SetAt(0, error_str); |
1236 | |
1237 | Object& result = Object::Handle(zone, Exceptions::Create(type, args)); |
1238 | const StackTrace& stacktrace = StackTrace::Handle(zone); |
1239 | return UnhandledException::New(Instance::Cast(result), stacktrace); |
1240 | } |
1241 | |
1242 | } // namespace dart |
1243 | |