| 1 | // Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file |
| 2 | // for details. All rights reserved. Use of this source code is governed by a |
| 3 | // BSD-style license that can be found in the LICENSE file. |
| 4 | |
| 5 | #ifndef RUNTIME_VM_EXCEPTIONS_H_ |
| 6 | #define RUNTIME_VM_EXCEPTIONS_H_ |
| 7 | |
| 8 | #include "vm/allocation.h" |
| 9 | #include "vm/bitfield.h" |
| 10 | #include "vm/tagged_pointer.h" |
| 11 | #include "vm/token_position.h" |
| 12 | |
| 13 | namespace dart { |
| 14 | |
| 15 | // Forward declarations. |
| 16 | class AbstractType; |
| 17 | class Array; |
| 18 | class DartFrameIterator; |
| 19 | class Error; |
| 20 | class LanguageError; |
| 21 | class Instance; |
| 22 | class Integer; |
| 23 | class ReadStream; |
| 24 | class WriteStream; |
| 25 | class String; |
| 26 | class Thread; |
| 27 | class TypedData; |
| 28 | |
// Static utilities for creating, throwing and propagating Dart exceptions
// and errors from within the VM. All throwing entry points are marked
// DART_NORETURN: they transfer control to an exception handler frame and
// never return to their caller.
class Exceptions : AllStatic {
 public:
  // Throws |exception| on |thread|.
  DART_NORETURN static void Throw(Thread* thread, const Instance& exception);
  // Rethrows |exception| together with a previously captured |stacktrace|.
  DART_NORETURN static void ReThrow(Thread* thread,
                                    const Instance& exception,
                                    const Instance& stacktrace);
  // Propagates |error| to the enclosing error handler.
  DART_NORETURN static void PropagateError(const Error& error);

  // Propagate an error to the entry frame, skipping over Dart frames.
  DART_NORETURN static void PropagateToEntry(const Error& error);

  // Helpers to create and throw errors.
  static StackTracePtr CurrentStackTrace();
  // Returns the script associated with the caller frame located via
  // |iterator|.
  static ScriptPtr GetCallerScript(DartFrameIterator* iterator);
  // Allocates a new instance of the core-library class named |class_name|.
  static InstancePtr NewInstance(const char* class_name);
  // Creates and throws a TypeError describing a failed assignment of a
  // value of |src_type| to |dst_name| of |dst_type| at |location|.
  static void CreateAndThrowTypeError(TokenPosition location,
                                      const AbstractType& src_type,
                                      const AbstractType& dst_type,
                                      const String& dst_name);

  // Identifies the kind of exception to instantiate in Create() and
  // ThrowByType() below.
  enum ExceptionType {
    kNone,
    kRange,
    kRangeMsg,
    kArgument,
    kArgumentValue,
    kIntegerDivisionByZeroException,
    kNoSuchMethod,
    kFormat,
    kUnsupported,
    kStackOverflow,
    kOutOfMemory,
    kNullThrown,
    kIsolateSpawn,
    kAssertion,
    kCast,
    kType,
    kFallThrough,
    kAbstractClassInstantiation,
    kCyclicInitializationError,
    kCompileTimeError,
    kLateInitializationError,
  };

  // Creates an exception of kind |type| with |arguments| and throws it.
  DART_NORETURN static void ThrowByType(ExceptionType type,
                                        const Array& arguments);
  // Uses the preallocated out-of-memory exception so that throwing does
  // not itself need to call into Dart code or allocate.
  DART_NORETURN static void ThrowOOM();
  DART_NORETURN static void ThrowStackOverflow();
  DART_NORETURN static void ThrowArgumentError(const Instance& arg);
  // Throws a RangeError reporting that |argument_name| with value
  // |argument_value| is outside [expected_from, expected_to].
  DART_NORETURN static void ThrowRangeError(const char* argument_name,
                                            const Integer& argument_value,
                                            intptr_t expected_from,
                                            intptr_t expected_to);
  DART_NORETURN static void ThrowUnsupportedError(const char* msg);
  DART_NORETURN static void ThrowCompileTimeError(const LanguageError& error);
  DART_NORETURN static void ThrowLateInitializationError(const String& name);

  // Returns a RawInstance if the exception is successfully created,
  // otherwise returns a RawError.
  static ObjectPtr Create(ExceptionType type, const Array& arguments);

  // Returns RawUnhandledException that wraps exception of type [type] with
  // [msg] as a single argument.
  static UnhandledExceptionPtr CreateUnhandledException(Zone* zone,
                                                        ExceptionType type,
                                                        const char* msg);

  // Transfers control to the frame described by |program_counter|,
  // |stack_pointer| and |frame_pointer| on |thread|. |clear_deopt_at_target|
  // controls whether a pending deoptimization at the target is cleared.
  DART_NORETURN static void JumpToFrame(Thread* thread,
                                        uword program_counter,
                                        uword stack_pointer,
                                        uword frame_pointer,
                                        bool clear_deopt_at_target);

 private:
  DISALLOW_COPY_AND_ASSIGN(Exceptions);
};
| 107 | |
// Metadata describing a single exception handler (try block) of a function.
// Note: field order and sizes define the table's in-memory layout, so do
// not reorder them.
// The index into the ExceptionHandlers table corresponds to
// the try_index of the handler.
struct ExceptionHandlerInfo {
  uint32_t handler_pc_offset;  // PC offset value of handler.
  int16_t outer_try_index;     // Try block index of enclosing try block.
  int8_t needs_stacktrace;     // True if a stacktrace is needed.
  int8_t has_catch_all;        // Catches all exceptions.
  int8_t is_generated;         // True if this is a generated handler.
};
| 117 | |
| 118 | // |
| 119 | // Support for try/catch in the optimized code. |
| 120 | // |
| 121 | // Optimizing compiler does not model exceptional control flow explicitly, |
| 122 | // instead we rely on the runtime system to create correct state at the |
| 123 | // entry into the catch block by reshuffling values in the frame into |
| 124 | // positions where they are expected to be at the beginning of the catch block. |
| 125 | // |
| 126 | // See runtime/docs/compiler/exceptions.md for more details. |
| 127 | // |
| 128 | |
| 129 | // A single move from a stack slot or an object pool into another stack slot. |
| 130 | // Destination slot is expecting only tagged values, however source |
| 131 | // slot can contain an unboxed value (e.g. an unboxed double) - in this case |
| 132 | // we will box the value before executing the move. |
class CatchEntryMove {
 public:
  // Constructs a redundant move (tagged slot 0 into slot 0); see
  // IsRedundant() below.
  CatchEntryMove()
      : src_(0),
        dest_and_kind_(static_cast<intptr_t>(SourceKind::kTaggedSlot)) {
    ASSERT(IsRedundant());
  }

  // Describes what the source of the move is: an object pool constant or
  // a stack slot holding a value of the given representation. Non-tagged
  // sources are boxed as part of performing the move (see class comment).
  enum class SourceKind {
    kConstant,
    kTaggedSlot,
    kDoubleSlot,
    kFloat32x4Slot,
    kFloat64x2Slot,
    kInt32x4Slot,
    kInt64PairSlot,
    kInt64Slot,
    kInt32Slot,
    kUint32Slot,
  };

  // Kind of the source, stored in the low bits of |dest_and_kind_|.
  SourceKind source_kind() const {
    return SourceKindField::decode(dest_and_kind_);
  }

  // Source slot (or pool id for kConstant). Not valid for pair sources,
  // which use the lo/hi accessors below.
  intptr_t src_slot() const {
    ASSERT(source_kind() != SourceKind::kInt64PairSlot);
    return src_;
  }

  // Low word's slot of a kInt64PairSlot source, decoded from the low half
  // of |src_|.
  intptr_t src_lo_slot() const {
    ASSERT(source_kind() == SourceKind::kInt64PairSlot);
    return index_to_pair_slot(LoSourceSlot::decode(src_));
  }

  // High word's slot of a kInt64PairSlot source, decoded from the high
  // half of |src_|.
  intptr_t src_hi_slot() const {
    ASSERT(source_kind() == SourceKind::kInt64PairSlot);
    return index_to_pair_slot(HiSourceSlot::decode(src_));
  }

  // Destination slot, stored (signed) above the kind bits; the arithmetic
  // shift on the signed |dest_and_kind_| preserves the sign.
  intptr_t dest_slot() const {
    return dest_and_kind_ >> SourceKindField::bitsize();
  }

  // Creates a move of the object pool entry |pool_id| into |dest_slot|.
  static CatchEntryMove FromConstant(intptr_t pool_id, intptr_t dest_slot) {
    return FromSlot(SourceKind::kConstant, pool_id, dest_slot);
  }

  // Creates a move of the given |kind| from |src_slot| into |dest_slot|,
  // packing the kind into the low bits and the destination above them.
  static CatchEntryMove FromSlot(SourceKind kind,
                                 intptr_t src_slot,
                                 intptr_t dest_slot) {
    return CatchEntryMove(src_slot, SourceKindField::encode(kind) |
                                        (static_cast<uintptr_t>(dest_slot)
                                         << SourceKindField::bitsize()));
  }

  // Packs the two (possibly negative) slots of an int64 pair source into
  // a single source value, one per 16-bit half.
  static intptr_t EncodePairSource(intptr_t src_lo_slot, intptr_t src_hi_slot) {
    return LoSourceSlot::encode(pair_slot_to_index(src_lo_slot)) |
           HiSourceSlot::encode(pair_slot_to_index(src_hi_slot));
  }

  // A tagged move from a slot onto itself has no effect and can be skipped.
  bool IsRedundant() const {
    return (source_kind() == SourceKind::kTaggedSlot) &&
           (dest_slot() == src_slot());
  }

  bool operator==(const CatchEntryMove& rhs) const {
    return src_ == rhs.src_ && dest_and_kind_ == rhs.dest_and_kind_;
  }

  // Deserializes one move from |stream|.
  static CatchEntryMove ReadFrom(ReadStream* stream);

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Serializes this move to |stream|.
  void WriteTo(WriteStream* stream);
#endif

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
  const char* ToCString() const;
#endif

 private:
  // Bijectively maps a (possibly negative) slot to a non-negative index
  // so it fits in an unsigned bit field: negative slots map to even
  // indices, non-negative slots to odd indices.
  static intptr_t pair_slot_to_index(intptr_t slot) {
    return (slot < 0) ? -2 * slot : 2 * slot + 1;
  }

  // Inverse of pair_slot_to_index().
  static intptr_t index_to_pair_slot(intptr_t index) {
    ASSERT(index >= 0);
    return ((index & 1) != 0) ? (index >> 1) : -(index >> 1);
  }

  CatchEntryMove(int32_t src, int32_t dest_and_kind)
      : src_(src), dest_and_kind_(dest_and_kind) {}

  // Note: BitField helper does not work with signed values of size that does
  // not match the destination size - thus we don't use BitField for declaring
  // DestinationField and instead encode and decode it manually.
  using SourceKindField = BitField<int32_t, SourceKind, 0, 4>;

  static constexpr intptr_t kHalfSourceBits = 16;
  using LoSourceSlot = BitField<int32_t, int32_t, 0, kHalfSourceBits>;
  using HiSourceSlot =
      BitField<int32_t, int32_t, kHalfSourceBits, kHalfSourceBits>;

  // Source slot, pool id, or encoded pair of slots (see EncodePairSource).
  int32_t src_;
  // Destination slot (signed, upper bits) combined with SourceKind (low
  // 4 bits).
  int32_t dest_and_kind_;
};
| 239 | |
| 240 | // A sequence of moves that needs to be executed to create a state expected |
| 241 | // at the catch entry. |
| 242 | // Note: this is a deserialized representation that is used by the runtime |
| 243 | // system as a temporary representation and for caching. That is why this |
| 244 | // object is allocated in the malloced heap and not in the Dart heap. |
| 245 | class CatchEntryMoves { |
| 246 | public: |
| 247 | static CatchEntryMoves* Allocate(intptr_t num_moves) { |
| 248 | auto result = reinterpret_cast<CatchEntryMoves*>( |
| 249 | malloc(sizeof(CatchEntryMoves) + sizeof(CatchEntryMove) * num_moves)); |
| 250 | result->count_ = num_moves; |
| 251 | return result; |
| 252 | } |
| 253 | |
| 254 | static void Free(const CatchEntryMoves* moves) { |
| 255 | free(const_cast<CatchEntryMoves*>(moves)); |
| 256 | } |
| 257 | |
| 258 | intptr_t count() const { return count_; } |
| 259 | CatchEntryMove& At(intptr_t i) { return Moves()[i]; } |
| 260 | const CatchEntryMove& At(intptr_t i) const { return Moves()[i]; } |
| 261 | |
| 262 | private: |
| 263 | CatchEntryMove* Moves() { |
| 264 | return reinterpret_cast<CatchEntryMove*>(this + 1); |
| 265 | } |
| 266 | |
| 267 | const CatchEntryMove* Moves() const { |
| 268 | return reinterpret_cast<const CatchEntryMove*>(this + 1); |
| 269 | } |
| 270 | |
| 271 | intptr_t count_; |
| 272 | // Followed by CatchEntryMove[count_] |
| 273 | }; |
| 274 | |
| 275 | // Used for reading the [CatchEntryMoves] from the compressed form. |
class CatchEntryMovesMapReader : public ValueObject {
 public:
  // |bytes| holds the compressed catch entry moves map; the reader only
  // borrows it and does not take ownership.
  explicit CatchEntryMovesMapReader(const TypedData& bytes) : bytes_(bytes) {}

  // The returned [CatchEntryMoves] must be freed by the caller via [free].
  CatchEntryMoves* ReadMovesForPcOffset(intptr_t pc_offset);

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
  // Debug helper: prints all entries of the map.
  void PrintEntries();
#endif

 private:
  // Given the [pc_offset] this function will find the [position] at which to
  // read the catch entries and the [length] of the catch entry moves array.
  void FindEntryForPc(ReadStream* stream,
                      intptr_t pc_offset,
                      intptr_t* position,
                      intptr_t* length);

  // Reads the [length] catch entry moves from [offset] in the [stream].
  CatchEntryMoves* ReadCompressedCatchEntryMovesSuffix(ReadStream* stream,
                                                       intptr_t offset,
                                                       intptr_t length);

  // Borrowed backing store of the compressed map.
  const TypedData& bytes_;
};
| 302 | |
| 303 | // A simple reference counting wrapper for CatchEntryMoves. |
| 304 | // |
| 305 | // TODO(vegorov) switch this to intrusive reference counting. |
| 306 | class CatchEntryMovesRefPtr { |
| 307 | public: |
| 308 | CatchEntryMovesRefPtr() : moves_(nullptr), ref_count_(nullptr) {} |
| 309 | explicit CatchEntryMovesRefPtr(const CatchEntryMoves* moves) |
| 310 | : moves_(moves), ref_count_(new intptr_t(1)) {} |
| 311 | |
| 312 | CatchEntryMovesRefPtr(const CatchEntryMovesRefPtr& state) { Copy(state); } |
| 313 | |
| 314 | ~CatchEntryMovesRefPtr() { Destroy(); } |
| 315 | |
| 316 | CatchEntryMovesRefPtr& operator=(const CatchEntryMovesRefPtr& state) { |
| 317 | Destroy(); |
| 318 | Copy(state); |
| 319 | return *this; |
| 320 | } |
| 321 | |
| 322 | bool IsEmpty() { return ref_count_ == nullptr; } |
| 323 | |
| 324 | const CatchEntryMoves& moves() { return *moves_; } |
| 325 | |
| 326 | private: |
| 327 | void Destroy() { |
| 328 | if (ref_count_ != nullptr) { |
| 329 | (*ref_count_)--; |
| 330 | if (*ref_count_ == 0) { |
| 331 | delete ref_count_; |
| 332 | CatchEntryMoves::Free(moves_); |
| 333 | } |
| 334 | } |
| 335 | } |
| 336 | |
| 337 | void Copy(const CatchEntryMovesRefPtr& state) { |
| 338 | moves_ = state.moves_; |
| 339 | ref_count_ = state.ref_count_; |
| 340 | if (ref_count_ != nullptr) { |
| 341 | (*ref_count_)++; |
| 342 | } |
| 343 | } |
| 344 | |
| 345 | const CatchEntryMoves* moves_; |
| 346 | intptr_t* ref_count_; |
| 347 | }; |
| 348 | |
| 349 | } // namespace dart |
| 350 | |
| 351 | #endif // RUNTIME_VM_EXCEPTIONS_H_ |
| 352 | |