// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/stack_frame.h"

#include "platform/memory_sanitizer.h"
#include "vm/code_descriptors.h"
#include "vm/compiler/runtime_api.h"
#include "vm/heap/become.h"
#include "vm/isolate.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/raw_object.h"
#include "vm/reusable_handles.h"
#include "vm/reverse_pc_lookup_cache.h"
#include "vm/scopes.h"
#include "vm/stub_code.h"
#include "vm/visitor.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/deopt_instructions.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

DECLARE_FLAG(bool, enable_interpreter);

const FrameLayout invalid_frame_layout = {
    /*.first_object_from_fp = */ -1,
    /*.last_fixed_object_from_fp = */ -1,
    /*.param_end_from_fp = */ -1,
    /*.last_param_from_entry_sp = */ -1,
    /*.first_local_from_fp = */ -1,
    /*.dart_fixed_frame_size = */ -1,
    /*.saved_caller_pp_from_fp = */ -1,
    /*.code_from_fp = */ -1,
    /*.exit_link_slot_from_entry_fp = */ -1,
};

const FrameLayout default_frame_layout = {
    /*.first_object_from_fp = */ kFirstObjectSlotFromFp,
    /*.last_fixed_object_from_fp = */ kLastFixedObjectSlotFromFp,
    /*.param_end_from_fp = */ kParamEndSlotFromFp,
    /*.last_param_from_entry_sp = */ kLastParamSlotFromEntrySp,
    /*.first_local_from_fp = */ kFirstLocalSlotFromFp,
    /*.dart_fixed_frame_size = */ kDartFrameFixedSize,
    /*.saved_caller_pp_from_fp = */ kSavedCallerPpSlotFromFp,
    /*.code_from_fp = */ kPcMarkerSlotFromFp,
    /*.exit_link_slot_from_entry_fp = */ kExitLinkSlotFromEntryFp,
};
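
// In bare instructions mode (precompiled code), Dart frames do not carry the
// CODE and PP slots: the pool pointer refers to a single global object pool
// and code objects are recovered from return addresses via ReversePc::Lookup
// (see StackFrame::GetCodeObject below), so the layout below drops both slots.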
const FrameLayout bare_instructions_frame_layout = {
    /*.first_object_from_fp = */ kFirstObjectSlotFromFp,  // No saved PP slot.
    /*.last_fixed_object_from_fp = */ kLastFixedObjectSlotFromFp +
        2,  // No saved CODE, PP slots.
    /*.param_end_from_fp = */ kParamEndSlotFromFp,
    /*.last_param_from_entry_sp = */ kLastParamSlotFromEntrySp,
    /*.first_local_from_fp = */ kFirstLocalSlotFromFp +
        2,  // No saved CODE, PP slots.
    /*.dart_fixed_frame_size = */ kDartFrameFixedSize -
        2,  // No saved CODE, PP slots.
    /*.saved_caller_pp_from_fp = */ 0,  // No saved PP slot.
    /*.code_from_fp = */ 0,  // No saved CODE slot.
    /*.exit_link_slot_from_entry_fp = */ kExitLinkSlotFromEntryFp,
};

namespace compiler {

namespace target {
FrameLayout frame_layout = invalid_frame_layout;
}

}  // namespace compiler

FrameLayout runtime_frame_layout = invalid_frame_layout;

int FrameLayout::FrameSlotForVariable(const LocalVariable* variable) const {
  ASSERT(!variable->is_captured());
  return this->FrameSlotForVariableIndex(variable->index().value());
}

int FrameLayout::FrameSlotForVariableIndex(int variable_index) const {
  // Variable indices are:
  //   [1, 2, ..., M] for the M parameters.
  //   [0, -1, -2, ..., -(N-1)] for the N [LocalVariable]s.
  // See runtime/vm/scopes.h.
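  // Worked example (the slot values are architecture- and mode-dependent):
  // with param_end_from_fp == 1 and first_local_from_fp == -2, parameter
  // index 2 maps to FP slot 3 and local index -1 maps to FP slot -3.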
  return variable_index <= 0 ? (variable_index + first_local_from_fp)
                             : (variable_index + param_end_from_fp);
}

void FrameLayout::Init() {
  // By default we use frames with CODE_REG/PP in the frame.
  compiler::target::frame_layout = default_frame_layout;
  runtime_frame_layout = default_frame_layout;

  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    compiler::target::frame_layout = bare_instructions_frame_layout;
  }
#if defined(DART_PRECOMPILED_RUNTIME)
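  // The precompiled runtime has no compiler, so the compiler-facing layout
  // is deliberately reset to the invalid layout to catch accidental use;
  // only the runtime layout is set to the bare layout.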
  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    compiler::target::frame_layout = invalid_frame_layout;
    runtime_frame_layout = bare_instructions_frame_layout;
  }
#endif
}

bool StackFrame::IsBareInstructionsDartFrame() const {
  NoSafepointScope no_safepoint;

  Code code;
  code = ReversePc::Lookup(this->isolate_group(), pc(),
                           /*is_return_address=*/true);
  if (!code.IsNull()) {
    auto const cid = code.OwnerClassId();
    ASSERT(cid == kNullCid || cid == kClassCid || cid == kFunctionCid);
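    // Code owned by a Function is Dart code; stub code is owned either by a
    // Class (allocation stubs) or by nothing at all (kNullCid).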
    return cid == kFunctionCid;
  }
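  // The PC may also fall into code owned by the shared VM isolate (e.g.
  // shared stubs), so retry the lookup against its group.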
  code = ReversePc::Lookup(Dart::vm_isolate()->group(), pc(),
                           /*is_return_address=*/true);
  if (!code.IsNull()) {
    auto const cid = code.OwnerClassId();
    ASSERT(cid == kNullCid || cid == kClassCid || cid == kFunctionCid);
    return cid == kFunctionCid;
  }

  return false;
}

bool StackFrame::IsBareInstructionsStubFrame() const {
  NoSafepointScope no_safepoint;

  Code code;
  code = ReversePc::Lookup(this->isolate_group(), pc(),
                           /*is_return_address=*/true);
  if (!code.IsNull()) {
    auto const cid = code.OwnerClassId();
    ASSERT(cid == kNullCid || cid == kClassCid || cid == kFunctionCid);
    return cid == kNullCid || cid == kClassCid;
  }
  code = ReversePc::Lookup(Dart::vm_isolate()->group(), pc(),
                           /*is_return_address=*/true);
  if (!code.IsNull()) {
    auto const cid = code.OwnerClassId();
    ASSERT(cid == kNullCid || cid == kClassCid || cid == kFunctionCid);
    return cid == kNullCid || cid == kClassCid;
  }

  return false;
}

bool StackFrame::IsStubFrame() const {
  if (is_interpreted()) {
    return false;
  }

  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    return IsBareInstructionsStubFrame();
  }

  ASSERT(!(IsEntryFrame() || IsExitFrame()));
#if !defined(HOST_OS_WINDOWS) && !defined(HOST_OS_FUCHSIA)
  // On Windows and Fuchsia, the profiler calls this from a separate thread
  // where Thread::Current() is NULL, so we cannot create a NoSafepointScope.
  NoSafepointScope no_safepoint;
#endif

  CodePtr code = GetCodeObject();
  ASSERT(code != Object::null());
  auto const cid = Code::OwnerClassIdOf(code);
  ASSERT(cid == kNullCid || cid == kClassCid || cid == kFunctionCid);
  return cid == kNullCid || cid == kClassCid;
}

const char* StackFrame::ToCString() const {
  ASSERT(thread_ == Thread::Current());
  Zone* zone = Thread::Current()->zone();
  if (IsDartFrame()) {
    if (is_interpreted()) {
      const Bytecode& bytecode = Bytecode::Handle(zone, LookupDartBytecode());
      ASSERT(!bytecode.IsNull());
      return zone->PrintToString("[%-8s : sp(%#" Px ") fp(%#" Px ") pc(%#" Px
                                 " offset:0x%" Px ") %s ]",
                                 GetName(), sp(), fp(), pc(),
                                 pc() - bytecode.PayloadStart(),
                                 bytecode.FullyQualifiedName());
    }
    const Code& code = Code::Handle(zone, LookupDartCode());
    ASSERT(!code.IsNull());
    const auto& owner = Object::Handle(
        zone, WeakSerializationReference::UnwrapIfTarget(code.owner()));
    ASSERT(!owner.IsNull());
    auto const opt = code.IsFunctionCode() && code.is_optimized() ? "*" : "";
    auto const owner_name =
        owner.IsFunction() ? Function::Cast(owner).ToFullyQualifiedCString()
                           : owner.ToCString();
    return zone->PrintToString("[%-8s : sp(%#" Px ") fp(%#" Px ") pc(%#" Px
                               ") %s%s ]",
                               GetName(), sp(), fp(), pc(), opt, owner_name);
  } else {
    return zone->PrintToString("[%-8s : sp(%#" Px ") fp(%#" Px ") pc(%#" Px
                               ")]",
                               GetName(), sp(), fp(), pc());
  }
}

void ExitFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != NULL);
  // Visit pc marker and saved pool pointer, or, for interpreted frame, code
  // object and function object.
  ObjectPtr* last_fixed =
      reinterpret_cast<ObjectPtr*>(fp()) +
      (is_interpreted() ? kKBCLastFixedObjectSlotFromFp
                        : runtime_frame_layout.first_object_from_fp);
  ObjectPtr* first_fixed =
      reinterpret_cast<ObjectPtr*>(fp()) +
      (is_interpreted() ? kKBCFirstObjectSlotFromFp
                        : runtime_frame_layout.last_fixed_object_from_fp);
  if (first_fixed <= last_fixed) {
    visitor->VisitPointers(first_fixed, last_fixed);
  } else {
    ASSERT(runtime_frame_layout.first_object_from_fp ==
           runtime_frame_layout.first_local_from_fp);
  }
}

void EntryFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != NULL);
  // Visit objects between SP and (FP - callee_save_area).
  ObjectPtr* first = is_interpreted() ? reinterpret_cast<ObjectPtr*>(fp()) +
                                            kKBCSavedArgDescSlotFromEntryFp
                                      : reinterpret_cast<ObjectPtr*>(sp());
  ObjectPtr* last = is_interpreted() ? reinterpret_cast<ObjectPtr*>(sp())
                                     : reinterpret_cast<ObjectPtr*>(fp()) +
                                           kExitLinkSlotFromEntryFp - 1;
  // There may not be any pointer to visit; in this case, first > last.
  visitor->VisitPointers(first, last);
}

void StackFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != NULL);
  // NOTE: This code runs while GC is in progress and runs within
  // a NoHandleScope block. Hence it is not ok to use regular Zone or
  // Scope handles. We use direct stack handles, the raw pointers in
  // these handles are not traversed. The use of handles is mainly to
  // be able to reuse the handle based code and avoid having to add
  // helper functions to the raw object interface.
  NoSafepointScope no_safepoint;
  Code code;

  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    code = GetCodeObject();
  } else {
    ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>(
        fp() + ((is_interpreted() ? kKBCPcMarkerSlotFromFp
                                  : runtime_frame_layout.code_from_fp) *
                kWordSize)));
    // May forward raw code. Note we don't just visit the pc marker slot first
    // because the visitor's forwarding might not be idempotent.
    visitor->VisitPointer(&pc_marker);
    if (pc_marker->IsHeapObject() && (pc_marker->GetClassId() == kCodeCid)) {
      code ^= pc_marker;
    } else {
      ASSERT(pc_marker == Object::null() ||
             (is_interpreted() && (!pc_marker->IsHeapObject() ||
                                   (pc_marker->GetClassId() == kBytecodeCid))));
    }
  }

  if (!code.IsNull()) {
    // Optimized frames have a stack map. We need to visit the frame based
    // on the stack map.
    CompressedStackMaps maps;
    maps = code.compressed_stackmaps();
    CompressedStackMaps global_table;

    // The GC does not have an active isolate, only an active isolate group,
    // yet the global compressed stack map table is only stored in the object
    // store. It has the same contents for all isolates, so we just pick the
    // one from the first isolate here.
    // TODO(dartbug.com/36097): Avoid having this per-isolate and instead store
    // it per isolate group.
    auto isolate = isolate_group()->isolates_.First();

    global_table = isolate->object_store()->canonicalized_stack_map_entries();
    CompressedStackMapsIterator it(maps, global_table);
    const uword start = code.PayloadStart();
    const uint32_t pc_offset = pc() - start;
    if (it.Find(pc_offset)) {
      if (is_interpreted()) {
        UNIMPLEMENTED();
      }
      ObjectPtr* first = reinterpret_cast<ObjectPtr*>(sp());
      ObjectPtr* last = reinterpret_cast<ObjectPtr*>(
          fp() + (runtime_frame_layout.first_local_from_fp * kWordSize));

      // A stack map is present in the code object, use the stack map to
      // visit frame slots which are marked as having objects.
      //
      // The layout of the frame is (lower addresses to the right):
      // | spill slots | outgoing arguments | saved registers | slow-path args |
      // |XXXXXXXXXXXXX|--------------------|XXXXXXXXXXXXXXXXX|XXXXXXXXXXXXXXXX|
      //
      // The spill slots and any saved registers are described in the stack
      // map. The outgoing arguments are assumed to be tagged; the number
      // of outgoing arguments is not explicitly tracked.
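      //
      // Illustration (the bit values are invented for the example): with
      // it.SpillSlotBitCount() == 2 and it.Length() == 5, bit 0 describes
      // the slot at |last| (the highest-address spill slot) and bit 1 the
      // slot below it, while bit 4 describes the slot at |first| (== sp),
      // bit 3 the slot above it, and bit 2 the one above that. Whatever
      // remains between the two cursors afterwards is visited
      // unconditionally as outgoing arguments.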

      // Spill slots are at the 'bottom' of the frame.
      intptr_t spill_slot_count = it.SpillSlotBitCount();
      for (intptr_t bit = 0; bit < spill_slot_count; ++bit) {
        if (it.IsObject(bit)) {
          visitor->VisitPointer(last);
        }
        --last;
      }

      // The live registers at the 'top' of the frame comprise the rest of
      // the stack map.
      for (intptr_t bit = it.Length() - 1; bit >= spill_slot_count; --bit) {
        if (it.IsObject(bit)) {
          visitor->VisitPointer(first);
        }
        ++first;
      }

      // After these loops, |first| can be one slot (but not more) past
      // |last| in the case that all slots were covered by the stack map.
      ASSERT((last + 1) >= first);
      visitor->VisitPointers(first, last);

      // Now visit other slots which might be part of the calling convention.
      first = reinterpret_cast<ObjectPtr*>(
          fp() + ((runtime_frame_layout.first_local_from_fp + 1) * kWordSize));
      last = reinterpret_cast<ObjectPtr*>(
          fp() + (runtime_frame_layout.first_object_from_fp * kWordSize));
      visitor->VisitPointers(first, last);
      return;
    }

    // If we are missing a stack map for a given PC offset, this must be
    // either unoptimized code, code with no stack map information at all,
    // or the entry to an OSR function. In each of these cases, all stack
    // slots contain tagged pointers, so fall through.
    ASSERT(!code.is_optimized() || maps.IsNull() ||
           (pc_offset == code.EntryPoint() - code.PayloadStart()));
  }

  // For normal unoptimized Dart frames and stub frames, every slot between
  // the first and the last (inclusive) contains a tagged object.
  if (is_interpreted()) {
    // Do not visit caller's pc or caller's fp.
    ObjectPtr* first =
        reinterpret_cast<ObjectPtr*>(fp()) + kKBCFirstObjectSlotFromFp;
    ObjectPtr* last =
        reinterpret_cast<ObjectPtr*>(fp()) + kKBCLastFixedObjectSlotFromFp;

    visitor->VisitPointers(first, last);
  }
  ObjectPtr* first =
      reinterpret_cast<ObjectPtr*>(is_interpreted() ? fp() : sp());
  ObjectPtr* last = reinterpret_cast<ObjectPtr*>(
      is_interpreted()
          ? sp()
          : fp() + (runtime_frame_layout.first_object_from_fp * kWordSize));

  visitor->VisitPointers(first, last);
}

FunctionPtr StackFrame::LookupDartFunction() const {
  if (is_interpreted()) {
    ObjectPtr result = *(reinterpret_cast<FunctionPtr*>(
        fp() + kKBCFunctionSlotFromFp * kWordSize));
    ASSERT((result == Object::null()) ||
           (result->GetClassId() == kFunctionCid));
    return static_cast<FunctionPtr>(result);
  }
  const Code& code = Code::Handle(LookupDartCode());
  if (!code.IsNull()) {
    return code.function();
  }
  return Function::null();
}

CodePtr StackFrame::LookupDartCode() const {
  // We add a no-GC scope to ensure that the code below does not trigger a
  // GC, as we are handling raw object references here. It is possible that
  // this code is called while a GC is in progress; that is ok.
#if !defined(HOST_OS_WINDOWS) && !defined(HOST_OS_FUCHSIA)
  // On Windows and Fuchsia, the profiler calls this from a separate thread
  // where Thread::Current() is NULL, so we cannot create a NoSafepointScope.
  NoSafepointScope no_safepoint;
#endif
  CodePtr code = GetCodeObject();
  if ((code != Code::null()) && Code::OwnerClassIdOf(code) == kFunctionCid) {
    return code;
  }
  return Code::null();
}

CodePtr StackFrame::GetCodeObject() const {
  ASSERT(!is_interpreted());

#if defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
    CodePtr code = ReversePc::Lookup(isolate_group(), pc(),
                                     /*is_return_address=*/true);
    if (code != Code::null()) {
      return code;
    }
    code = ReversePc::Lookup(Dart::vm_isolate()->group(), pc(),
                             /*is_return_address=*/true);
    if (code != Code::null()) {
      return code;
    }
    UNREACHABLE();
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>(
      fp() + runtime_frame_layout.code_from_fp * kWordSize));
  ASSERT((pc_marker == Object::null()) ||
         (pc_marker->GetClassId() == kCodeCid));
  return static_cast<CodePtr>(pc_marker);
}

BytecodePtr StackFrame::LookupDartBytecode() const {
  // We add a no-GC scope to ensure that the code below does not trigger a
  // GC, as we are handling raw object references here. It is possible that
  // this code is called while a GC is in progress; that is ok.
#if !defined(HOST_OS_WINDOWS) && !defined(HOST_OS_FUCHSIA)
  // On Windows and Fuchsia, the profiler calls this from a separate thread
  // where Thread::Current() is NULL, so we cannot create a NoSafepointScope.
  NoSafepointScope no_safepoint;
#endif
  return GetBytecodeObject();
}

BytecodePtr StackFrame::GetBytecodeObject() const {
  ASSERT(is_interpreted());
  ObjectPtr pc_marker = *(
      reinterpret_cast<ObjectPtr*>(fp() + kKBCPcMarkerSlotFromFp * kWordSize));
  ASSERT((pc_marker == Object::null()) ||
         (pc_marker->GetClassId() == kBytecodeCid));
  return static_cast<BytecodePtr>(pc_marker);
}

bool StackFrame::FindExceptionHandler(Thread* thread,
                                      uword* handler_pc,
                                      bool* needs_stacktrace,
                                      bool* has_catch_all,
                                      bool* is_optimized) const {
  REUSABLE_CODE_HANDLESCOPE(thread);
  Code& code = reused_code_handle.Handle();
  REUSABLE_BYTECODE_HANDLESCOPE(thread);
  Bytecode& bytecode = reused_bytecode_handle.Handle();
  REUSABLE_EXCEPTION_HANDLERS_HANDLESCOPE(thread);
  ExceptionHandlers& handlers = reused_exception_handlers_handle.Handle();
  REUSABLE_PC_DESCRIPTORS_HANDLESCOPE(thread);
  PcDescriptors& descriptors = reused_pc_descriptors_handle.Handle();
  uword start;
  if (is_interpreted()) {
    bytecode = LookupDartBytecode();
    ASSERT(!bytecode.IsNull());
    start = bytecode.PayloadStart();
    handlers = bytecode.exception_handlers();
  } else {
    code = LookupDartCode();
    if (code.IsNull()) {
      return false;  // Stub frames do not have exception handlers.
    }
    start = code.PayloadStart();
    handlers = code.exception_handlers();
    descriptors = code.pc_descriptors();
    *is_optimized = code.is_optimized();
  }
  HandlerInfoCache* cache = thread->isolate()->handler_info_cache();
  ExceptionHandlerInfo* info = cache->Lookup(pc());
  if (info != NULL) {
    *handler_pc = start + info->handler_pc_offset;
    *needs_stacktrace = (info->needs_stacktrace != 0);
    *has_catch_all = (info->has_catch_all != 0);
    return true;
  }

  if (handlers.num_entries() == 0) {
    return false;
  }

  intptr_t try_index = -1;
  if (is_interpreted()) {
    try_index = bytecode.GetTryIndexAtPc(pc());
  } else {
    uword pc_offset = pc() - code.PayloadStart();
    PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
    while (iter.MoveNext()) {
      const intptr_t current_try_index = iter.TryIndex();
      if ((iter.PcOffset() == pc_offset) && (current_try_index != -1)) {
        try_index = current_try_index;
        break;
      }
    }
  }
  if (try_index == -1) {
    return false;
  }
  ExceptionHandlerInfo handler_info;
  handlers.GetHandlerInfo(try_index, &handler_info);
  *handler_pc = start + handler_info.handler_pc_offset;
  *needs_stacktrace = (handler_info.needs_stacktrace != 0);
  *has_catch_all = (handler_info.has_catch_all != 0);
  cache->Insert(pc(), handler_info);
  return true;
}

TokenPosition StackFrame::GetTokenPos() const {
  if (is_interpreted()) {
    const Bytecode& bytecode = Bytecode::Handle(LookupDartBytecode());
    if (bytecode.IsNull()) {
      return TokenPosition::kNoSource;  // Stub frames do not have token_pos.
    }
    return bytecode.GetTokenIndexOfPC(pc());
  }
  const Code& code = Code::Handle(LookupDartCode());
  if (code.IsNull()) {
    return TokenPosition::kNoSource;  // Stub frames do not have token_pos.
  }
  uword pc_offset = pc() - code.PayloadStart();
  const PcDescriptors& descriptors =
      PcDescriptors::Handle(code.pc_descriptors());
  ASSERT(!descriptors.IsNull());
  PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kAnyKind);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return TokenPosition(iter.TokenPos());
    }
  }
  return TokenPosition::kNoSource;
}

bool StackFrame::IsValid() const {
  if (IsEntryFrame() || IsExitFrame() || IsStubFrame()) {
    return true;
  }
  if (is_interpreted()) {
    return (LookupDartBytecode() != Bytecode::null());
  }
  return (LookupDartCode() != Code::null());
}

void StackFrame::DumpCurrentTrace() {
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != nullptr) {
    OS::PrintErr("%s\n", frame->ToCString());
    frame = frames.NextFrame();
  }
}

void StackFrameIterator::SetupLastExitFrameData() {
  ASSERT(thread_ != NULL);
  uword exit_marker = thread_->top_exit_frame_info();
  frames_.fp_ = exit_marker;
  frames_.sp_ = 0;
  frames_.pc_ = 0;
  if (FLAG_enable_interpreter) {
    frames_.CheckIfInterpreted(exit_marker);
  }
  frames_.Unpoison();
}

void StackFrameIterator::SetupNextExitFrameData() {
  ASSERT(entry_.fp() != 0);
  uword exit_address =
      entry_.fp() + ((entry_.is_interpreted() ? kKBCExitLinkSlotFromEntryFp
                                              : kExitLinkSlotFromEntryFp) *
                     kWordSize);
  uword exit_marker = *reinterpret_cast<uword*>(exit_address);
  frames_.fp_ = exit_marker;
  frames_.sp_ = 0;
  frames_.pc_ = 0;
  if (FLAG_enable_interpreter) {
    frames_.CheckIfInterpreted(exit_marker);
  }
  frames_.Unpoison();
}

StackFrameIterator::StackFrameIterator(ValidationPolicy validation_policy,
                                       Thread* thread,
                                       CrossThreadPolicy cross_thread_policy)
    : validate_(validation_policy == ValidationPolicy::kValidateFrames),
      entry_(thread),
      exit_(thread),
      frames_(thread),
      current_frame_(NULL),
      thread_(thread) {
  ASSERT(cross_thread_policy == kAllowCrossThreadIteration ||
         thread_ == Thread::Current());
  SetupLastExitFrameData();  // Setup data for last exit frame.
}

StackFrameIterator::StackFrameIterator(uword last_fp,
                                       ValidationPolicy validation_policy,
                                       Thread* thread,
                                       CrossThreadPolicy cross_thread_policy)
    : validate_(validation_policy == ValidationPolicy::kValidateFrames),
      entry_(thread),
      exit_(thread),
      frames_(thread),
      current_frame_(NULL),
      thread_(thread) {
  ASSERT(cross_thread_policy == kAllowCrossThreadIteration ||
         thread_ == Thread::Current());
  frames_.fp_ = last_fp;
  frames_.sp_ = 0;
  frames_.pc_ = 0;
  if (FLAG_enable_interpreter) {
    frames_.CheckIfInterpreted(last_fp);
  }
  frames_.Unpoison();
}

StackFrameIterator::StackFrameIterator(uword fp,
                                       uword sp,
                                       uword pc,
                                       ValidationPolicy validation_policy,
                                       Thread* thread,
                                       CrossThreadPolicy cross_thread_policy)
    : validate_(validation_policy == ValidationPolicy::kValidateFrames),
      entry_(thread),
      exit_(thread),
      frames_(thread),
      current_frame_(NULL),
      thread_(thread) {
  ASSERT(cross_thread_policy == kAllowCrossThreadIteration ||
         thread_ == Thread::Current());
  frames_.fp_ = fp;
  frames_.sp_ = sp;
  frames_.pc_ = pc;
  if (FLAG_enable_interpreter) {
    frames_.CheckIfInterpreted(fp);
  }
  frames_.Unpoison();
}

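// Typical usage (cf. StackFrame::DumpCurrentTrace above): construct an
// iterator and call NextFrame() repeatedly until it returns NULL:
//
//   StackFrameIterator frames(ValidationPolicy::kDontValidateFrames,
//                             Thread::Current(),
//                             StackFrameIterator::kNoCrossThreadIteration);
//   for (StackFrame* frame = frames.NextFrame(); frame != NULL;
//        frame = frames.NextFrame()) {
//     ...  // Inspect |frame|.
//   }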
StackFrame* StackFrameIterator::NextFrame() {
  // When we are at the start of iteration after having created an
  // iterator object, current_frame_ will be NULL as we haven't seen
  // any frames yet (unless we start iterating in the simulator from a given
  // triplet of fp, sp, and pc). At this point, if NextFrame is called, it
  // tries to set up the next exit frame by reading the top_exit_frame_info
  // from the isolate. If we do not have any Dart invocations yet,
  // top_exit_frame_info will be 0 and so we would return NULL.

  // current_frame_ will also be NULL when we have iterated through all the
  // frames. If NextFrame is called at this point, we will try to set up the
  // next exit frame, but since we are at the end of the iteration, fp_ will
  // be 0 and we would return NULL.
  if (current_frame_ == NULL) {
    if (!HasNextFrame()) {
      return NULL;
    }
    if (frames_.pc_ == 0) {
      // Iteration starts from an exit frame given by its fp.
      current_frame_ = NextExitFrame();
    } else if (*(reinterpret_cast<uword*>(
                   frames_.fp_ +
                   ((frames_.is_interpreted() ? kKBCSavedCallerFpSlotFromFp
                                              : kSavedCallerFpSlotFromFp) *
                    kWordSize))) == 0) {
      // Iteration starts from an entry frame given by its fp, sp, and pc.
      current_frame_ = NextEntryFrame();
    } else {
      // Iteration starts from a Dart or stub frame given by its fp, sp, pc.
      current_frame_ = frames_.NextFrame(validate_);
    }
    return current_frame_;
  }
  ASSERT(!validate_ || current_frame_->IsValid());
  if (current_frame_->IsEntryFrame()) {
    if (HasNextFrame()) {  // We have another chained block.
      current_frame_ = NextExitFrame();
      return current_frame_;
    }
    current_frame_ = NULL;  // No more frames.
    return current_frame_;
  }
  ASSERT(!validate_ || current_frame_->IsExitFrame() ||
         current_frame_->IsDartFrame(validate_) ||
         current_frame_->IsStubFrame());

  // Consume Dart/stub frames using StackFrameIterator::FrameSetIterator
  // until we run out of them, at which point we return the corresponding
  // entry frame for that set of Dart/stub frames.
  current_frame_ =
      (frames_.HasNext()) ? frames_.NextFrame(validate_) : NextEntryFrame();
  return current_frame_;
}

void StackFrameIterator::FrameSetIterator::CheckIfInterpreted(
    uword exit_marker) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // TODO(regis): We should rely on a new thread vm_tag to identify an
  // interpreter frame and not need the HasFrame() method.
  ASSERT(FLAG_enable_interpreter);
  Interpreter* interpreter = thread_->interpreter();
  is_interpreted_ =
      (interpreter != NULL) && interpreter->HasFrame(exit_marker);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

// Tell MemorySanitizer that generated code initializes part of the stack.
void StackFrameIterator::FrameSetIterator::Unpoison() {
  // When using a simulator, all writes to the stack happened from MSAN
  // instrumented C++, so there is nothing to unpoison. Additionally,
  // fp_ will be somewhere in the simulator's stack instead of the OSThread's
  // stack.
#if !defined(USING_SIMULATOR)
  if (fp_ == 0) return;
  // Note that Thread::os_thread_ is cleared when the thread is descheduled.
  ASSERT(is_interpreted_ || (thread_->os_thread() == nullptr) ||
         ((thread_->os_thread()->stack_limit() < fp_) &&
          (thread_->os_thread()->stack_base() > fp_)));
  uword lower;
  if (sp_ == 0) {
    // Exit frame: guess sp.
    lower = fp_ - kDartFrameFixedSize * kWordSize;
  } else {
    lower = sp_;
  }
  uword upper = fp_ + kSavedCallerPcSlotFromFp * kWordSize;
  // Both lower and upper are inclusive, so we add one word when computing
  // size.
  MSAN_UNPOISON(reinterpret_cast<void*>(lower), upper - lower + kWordSize);
#endif  // !defined(USING_SIMULATOR)
}

StackFrame* StackFrameIterator::FrameSetIterator::NextFrame(bool validate) {
  StackFrame* frame;
  ASSERT(HasNext());
  frame = &stack_frame_;
  frame->sp_ = sp_;
  frame->fp_ = fp_;
  frame->pc_ = pc_;
  frame->is_interpreted_ = is_interpreted_;
  sp_ = frame->GetCallerSp();
  fp_ = frame->GetCallerFp();
  pc_ = frame->GetCallerPc();
  Unpoison();
  ASSERT(is_interpreted_ == frame->is_interpreted_);
  ASSERT(!validate || frame->IsValid());
  return frame;
}

ExitFrame* StackFrameIterator::NextExitFrame() {
  exit_.sp_ = frames_.sp_;
  exit_.fp_ = frames_.fp_;
  exit_.pc_ = frames_.pc_;
  exit_.is_interpreted_ = frames_.is_interpreted_;
  frames_.sp_ = exit_.GetCallerSp();
  frames_.fp_ = exit_.GetCallerFp();
  frames_.pc_ = exit_.GetCallerPc();
  frames_.Unpoison();
  ASSERT(frames_.is_interpreted_ == exit_.is_interpreted_);
  ASSERT(!validate_ || exit_.IsValid());
  return &exit_;
}

EntryFrame* StackFrameIterator::NextEntryFrame() {
  ASSERT(!frames_.HasNext());
  entry_.sp_ = frames_.sp_;
  entry_.fp_ = frames_.fp_;
  entry_.pc_ = frames_.pc_;
  entry_.is_interpreted_ = frames_.is_interpreted_;
  SetupNextExitFrameData();  // Setup data for next exit frame in chain.
  ASSERT(!validate_ || entry_.IsValid());
  return &entry_;
}

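// Iterates the functions inlined at |pc| inside optimized |code|. A usage
// sketch (assuming the Done(), Advance(), and function() members declared
// in stack_frame.h; exact handle management depends on the call site):
//
//   for (InlinedFunctionsIterator it(code, pc); !it.Done(); it.Advance()) {
//     function = it.function();
//     ...  // Inspect |function|.
//   }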
InlinedFunctionsIterator::InlinedFunctionsIterator(const Code& code, uword pc)
    : index_(0),
      num_materializations_(0),
      dest_frame_size_(0),
      code_(Code::Handle(code.raw())),
      deopt_info_(TypedData::Handle()),
      function_(Function::Handle()),
      pc_(pc),
      deopt_instructions_(),
      object_table_(ObjectPool::Handle()) {
  ASSERT(code_.is_optimized());
  ASSERT(pc_ != 0);
  ASSERT(code.ContainsInstructionAt(pc));
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(deopt_info_.IsNull());
  function_ = code_.function();
#else
  ICData::DeoptReasonId deopt_reason = ICData::kDeoptUnknown;
  uint32_t deopt_flags = 0;
  deopt_info_ = code_.GetDeoptInfoAtPc(pc, &deopt_reason, &deopt_flags);
  if (deopt_info_.IsNull()) {
    // This is the case when a call without deopt info in optimized code
    // throws an exception (e.g., in the parameter copying prologue); in
    // that case there won't be any inlined frames.
    function_ = code_.function();
  } else {
    // Unpack deopt info into instructions (translate away suffixes).
    const Array& deopt_table = Array::Handle(code_.deopt_info_array());
    ASSERT(!deopt_table.IsNull());
    DeoptInfo::Unpack(deopt_table, deopt_info_, &deopt_instructions_);
    num_materializations_ = DeoptInfo::NumMaterializations(deopt_instructions_);
    dest_frame_size_ = DeoptInfo::FrameSize(deopt_info_);
    object_table_ = code_.GetObjectPool();
    Advance();
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void InlinedFunctionsIterator::Advance() {
  // Iterate over the deopt instructions to determine the next inlined
  // function, if any, and position the iterator on it.
  ASSERT(!Done());

#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(deopt_info_.IsNull());
  SetDone();
  return;
#else
  if (deopt_info_.IsNull()) {
    SetDone();
    return;
  }

  ASSERT(deopt_instructions_.length() != 0);
  while (index_ < deopt_instructions_.length()) {
    DeoptInstr* deopt_instr = deopt_instructions_[index_++];
    if (deopt_instr->kind() == DeoptInstr::kRetAddress) {
      pc_ = DeoptInstr::GetRetAddress(deopt_instr, object_table_, &code_);
      function_ = code_.function();
      return;
    }
  }
  SetDone();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

#if !defined(DART_PRECOMPILED_RUNTIME)
// Finds the potential offset for the current function's FP if the
// current frame were to be deoptimized.
intptr_t InlinedFunctionsIterator::GetDeoptFpOffset() const {
  ASSERT(deopt_instructions_.length() != 0);
  for (intptr_t index = index_; index < deopt_instructions_.length(); index++) {
    DeoptInstr* deopt_instr = deopt_instructions_[index];
    if (deopt_instr->kind() == DeoptInstr::kCallerFp) {
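      // Materialization instructions do not correspond to real stack slots
      // in the deoptimized frame, so they are excluded from the offset (an
      // assumption based on how DeoptInfo::Unpack orders instructions).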
      return index - num_materializations_;
    }
  }
  UNREACHABLE();
  return 0;
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#if defined(DEBUG)
void ValidateFrames() {
  StackFrameIterator frames(ValidationPolicy::kValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != NULL) {
    frame = frames.NextFrame();
  }
}
#endif

}  // namespace dart