| 1 | // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 | // for details. All rights reserved. Use of this source code is governed by a |
| 3 | // BSD-style license that can be found in the LICENSE file. |
| 4 | |
| 5 | #if !defined(DART_PRECOMPILED_RUNTIME) |
| 6 | |
| 7 | #include "vm/deopt_instructions.h" |
| 8 | |
| 9 | #include "vm/code_patcher.h" |
| 10 | #include "vm/compiler/assembler/assembler.h" |
| 11 | #include "vm/compiler/assembler/disassembler.h" |
| 12 | #include "vm/compiler/backend/il.h" |
| 13 | #include "vm/compiler/backend/locations.h" |
| 14 | #include "vm/compiler/jit/compiler.h" |
| 15 | #include "vm/parser.h" |
| 16 | #include "vm/stack_frame.h" |
| 17 | #include "vm/thread.h" |
| 18 | #include "vm/timeline.h" |
| 19 | |
| 20 | namespace dart { |
| 21 | |
| 22 | DEFINE_FLAG(bool, |
| 23 | compress_deopt_info, |
| 24 | true, |
| 25 | "Compress the size of the deoptimization info for optimized code." ); |
| 26 | DECLARE_FLAG(bool, trace_deoptimization); |
| 27 | DECLARE_FLAG(bool, trace_deoptimization_verbose); |
| 28 | |
| 29 | DeoptContext::DeoptContext(const StackFrame* frame, |
| 30 | const Code& code, |
| 31 | DestFrameOptions dest_options, |
| 32 | fpu_register_t* fpu_registers, |
| 33 | intptr_t* cpu_registers, |
| 34 | bool is_lazy_deopt, |
| 35 | bool deoptimizing_code) |
| 36 | : code_(code.raw()), |
| 37 | object_pool_(code.GetObjectPool()), |
| 38 | deopt_info_(TypedData::null()), |
| 39 | dest_frame_is_allocated_(false), |
| 40 | dest_frame_(NULL), |
| 41 | dest_frame_size_(0), |
| 42 | source_frame_is_allocated_(false), |
| 43 | source_frame_(NULL), |
| 44 | source_frame_size_(0), |
| 45 | cpu_registers_(cpu_registers), |
| 46 | fpu_registers_(fpu_registers), |
| 47 | num_args_(0), |
| 48 | deopt_reason_(ICData::kDeoptUnknown), |
| 49 | deopt_flags_(0), |
| 50 | thread_(Thread::Current()), |
| 51 | deopt_start_micros_(0), |
| 52 | deferred_slots_(NULL), |
| 53 | deferred_objects_count_(0), |
| 54 | deferred_objects_(NULL), |
| 55 | is_lazy_deopt_(is_lazy_deopt), |
| 56 | deoptimizing_code_(deoptimizing_code) { |
| 57 | const TypedData& deopt_info = TypedData::Handle( |
| 58 | code.GetDeoptInfoAtPc(frame->pc(), &deopt_reason_, &deopt_flags_)); |
| 59 | #if defined(DEBUG) |
| 60 | if (deopt_info.IsNull()) { |
| 61 | OS::PrintErr("Missing deopt info for pc %" Px "\n" , frame->pc()); |
| 62 | DisassembleToStdout formatter; |
| 63 | code.Disassemble(&formatter); |
| 64 | } |
| 65 | #endif |
| 66 | ASSERT(!deopt_info.IsNull()); |
| 67 | deopt_info_ = deopt_info.raw(); |
| 68 | |
| 69 | const Function& function = Function::Handle(code.function()); |
| 70 | |
| 71 | // Do not include incoming arguments if there are optional arguments |
| 72 | // (they are copied into local space at method entry). |
| 73 | num_args_ = |
| 74 | function.HasOptionalParameters() ? 0 : function.num_fixed_parameters(); |
| 75 | |
| 76 | // The fixed size section of the (fake) Dart frame called via a stub by the |
| 77 | // optimized function contains FP, PP (ARM only), PC-marker and |
| 78 | // return-address. This section is copied as well, so that its contained |
| 79 | // values can be updated before returning to the deoptimized function. |
| 80 | ASSERT(frame->fp() >= frame->sp()); |
| 81 | const intptr_t frame_size = (frame->fp() - frame->sp()) / kWordSize; |
| 82 | |
| 83 | source_frame_size_ = +kDartFrameFixedSize // For saved values below sp. |
| 84 | + frame_size // For frame size incl. sp. |
| 85 | + 1 // For fp. |
| 86 | + kParamEndSlotFromFp // For saved values above fp. |
| 87 | + num_args_; // For arguments. |
| 88 | |
| 89 | source_frame_ = FrameBase(frame); |
| 90 | |
| 91 | if (dest_options == kDestIsOriginalFrame) { |
| 92 | // Work from a copy of the source frame. |
| 93 | intptr_t* original_frame = source_frame_; |
| 94 | source_frame_ = new intptr_t[source_frame_size_]; |
| 95 | ASSERT(source_frame_ != NULL); |
| 96 | for (intptr_t i = 0; i < source_frame_size_; i++) { |
| 97 | source_frame_[i] = original_frame[i]; |
| 98 | } |
| 99 | source_frame_is_allocated_ = true; |
| 100 | } |
| 101 | caller_fp_ = GetSourceFp(); |
| 102 | |
| 103 | dest_frame_size_ = DeoptInfo::FrameSize(deopt_info); |
| 104 | |
| 105 | if (dest_options == kDestIsAllocated) { |
| 106 | dest_frame_ = new intptr_t[dest_frame_size_]; |
| 107 | ASSERT(source_frame_ != NULL); |
| 108 | for (intptr_t i = 0; i < dest_frame_size_; i++) { |
| 109 | dest_frame_[i] = 0; |
| 110 | } |
| 111 | dest_frame_is_allocated_ = true; |
| 112 | } |
| 113 | |
| 114 | if (dest_options != kDestIsAllocated) { |
| 115 | // kDestIsAllocated is used by the debugger to generate a stack trace |
| 116 | // and does not signal a real deopt. |
| 117 | deopt_start_micros_ = OS::GetCurrentMonotonicMicros(); |
| 118 | } |
| 119 | |
| 120 | if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| 121 | THR_Print( |
| 122 | "Deoptimizing (reason %d '%s') at " |
| 123 | "pc=%" Pp " fp=%" Pp " '%s' (count %d)\n" , |
| 124 | deopt_reason(), DeoptReasonToCString(deopt_reason()), frame->pc(), |
| 125 | frame->fp(), function.ToFullyQualifiedCString(), |
| 126 | function.deoptimization_counter()); |
| 127 | } |
| 128 | } |
| 129 | |
| 130 | DeoptContext::~DeoptContext() { |
| 131 | // Delete memory for source frame and registers. |
| 132 | if (source_frame_is_allocated_) { |
| 133 | delete[] source_frame_; |
| 134 | } |
| 135 | source_frame_ = NULL; |
| 136 | delete[] fpu_registers_; |
| 137 | delete[] cpu_registers_; |
| 138 | fpu_registers_ = NULL; |
| 139 | cpu_registers_ = NULL; |
| 140 | if (dest_frame_is_allocated_) { |
| 141 | delete[] dest_frame_; |
| 142 | } |
| 143 | dest_frame_ = NULL; |
| 144 | |
| 145 | // Delete all deferred objects. |
| 146 | for (intptr_t i = 0; i < deferred_objects_count_; i++) { |
| 147 | delete deferred_objects_[i]; |
| 148 | } |
| 149 | delete[] deferred_objects_; |
| 150 | deferred_objects_ = NULL; |
| 151 | deferred_objects_count_ = 0; |
| 152 | |
| 153 | #if defined(SUPPORT_TIMELINE) |
| 154 | if (deopt_start_micros_ != 0) { |
| 155 | TimelineStream* compiler_stream = Timeline::GetCompilerStream(); |
| 156 | ASSERT(compiler_stream != NULL); |
| 157 | if (compiler_stream->enabled()) { |
| 158 | // Allocate all Dart objects needed before calling StartEvent, |
| 159 | // which blocks safe points until Complete is called. |
| 160 | const Code& code = Code::Handle(zone(), code_); |
| 161 | const Function& function = Function::Handle(zone(), code.function()); |
| 162 | const String& function_name = |
| 163 | String::Handle(zone(), function.QualifiedScrubbedName()); |
| 164 | const char* reason = DeoptReasonToCString(deopt_reason()); |
| 165 | const int counter = function.deoptimization_counter(); |
| 166 | TimelineEvent* timeline_event = compiler_stream->StartEvent(); |
| 167 | if (timeline_event != NULL) { |
timeline_event->Duration("Deoptimize", deopt_start_micros_,
| 169 | OS::GetCurrentMonotonicMicros()); |
| 170 | timeline_event->SetNumArguments(3); |
timeline_event->CopyArgument(0, "function", function_name.ToCString());
timeline_event->CopyArgument(1, "reason", reason);
timeline_event->FormatArgument(2, "deoptimizationCount", "%d", counter);
| 174 | timeline_event->Complete(); |
| 175 | } |
| 176 | } |
| 177 | } |
#endif  // defined(SUPPORT_TIMELINE)
| 179 | } |
| 180 | |
| 181 | void DeoptContext::VisitObjectPointers(ObjectPointerVisitor* visitor) { |
| 182 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&code_)); |
| 183 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&object_pool_)); |
| 184 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&deopt_info_)); |
| 185 | |
| 186 | // Visit any object pointers on the destination stack. |
| 187 | if (dest_frame_is_allocated_) { |
| 188 | for (intptr_t i = 0; i < dest_frame_size_; i++) { |
| 189 | if (dest_frame_[i] != 0) { |
| 190 | visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&dest_frame_[i])); |
| 191 | } |
| 192 | } |
| 193 | } |
| 194 | } |
| 195 | |
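// Returns the distance, in words, between fp and sp of the destination frame:
// the total destination frame size minus the fixed frame, the fp slot, the
// slots above fp and the incoming arguments (the inverse of the
// source_frame_size_ computation in the constructor).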
| 196 | intptr_t DeoptContext::DestStackAdjustment() const { |
| 197 | return dest_frame_size_ - kDartFrameFixedSize - num_args_ |
| 198 | - 1 // For fp. |
| 199 | - kParamEndSlotFromFp; |
| 200 | } |
| 201 | |
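// source_frame_ starts kDartFrameFixedSize words below sp and extends through
// the slots above fp and the incoming arguments, so the slot at fp (which
// holds the caller's saved fp) is num_args_ + kParamEndSlotFromFp + 1 entries
// from the end of the array; the accessors below index it accordingly.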
| 202 | intptr_t DeoptContext::GetSourceFp() const { |
| 203 | return source_frame_[source_frame_size_ - 1 - num_args_ - |
| 204 | kParamEndSlotFromFp]; |
| 205 | } |
| 206 | |
| 207 | intptr_t DeoptContext::GetSourcePp() const { |
| 208 | return source_frame_[source_frame_size_ - 1 - num_args_ - |
| 209 | kParamEndSlotFromFp + |
| 210 | StackFrame::SavedCallerPpSlotFromFp()]; |
| 211 | } |
| 212 | |
| 213 | intptr_t DeoptContext::GetSourcePc() const { |
| 214 | return source_frame_[source_frame_size_ - num_args_ + kSavedPcSlotFromSp]; |
| 215 | } |
| 216 | |
| 217 | intptr_t DeoptContext::GetCallerFp() const { |
| 218 | return caller_fp_; |
| 219 | } |
| 220 | |
| 221 | void DeoptContext::SetCallerFp(intptr_t caller_fp) { |
| 222 | caller_fp_ = caller_fp; |
| 223 | } |
| 224 | |
| 225 | static bool IsObjectInstruction(DeoptInstr::Kind kind) { |
| 226 | switch (kind) { |
| 227 | case DeoptInstr::kConstant: |
| 228 | case DeoptInstr::kPp: |
| 229 | case DeoptInstr::kCallerPp: |
| 230 | case DeoptInstr::kMaterializedObjectRef: |
| 231 | case DeoptInstr::kFloat32x4: |
| 232 | case DeoptInstr::kInt32x4: |
| 233 | case DeoptInstr::kFloat64x2: |
| 234 | case DeoptInstr::kWord: |
| 235 | case DeoptInstr::kDouble: |
| 236 | case DeoptInstr::kMint: |
| 237 | case DeoptInstr::kMintPair: |
| 238 | case DeoptInstr::kInt32: |
| 239 | case DeoptInstr::kUint32: |
| 240 | return true; |
| 241 | |
| 242 | case DeoptInstr::kRetAddress: |
| 243 | case DeoptInstr::kPcMarker: |
| 244 | case DeoptInstr::kCallerFp: |
| 245 | case DeoptInstr::kCallerPc: |
| 246 | return false; |
| 247 | |
| 248 | case DeoptInstr::kMaterializeObject: |
| 249 | default: |
| 250 | // We should not encounter these instructions when filling stack slots. |
| 251 | UNREACHABLE(); |
| 252 | return false; |
| 253 | } |
| 254 | UNREACHABLE(); |
| 255 | return false; |
| 256 | } |
| 257 | |
| 258 | void DeoptContext::FillDestFrame() { |
| 259 | const Code& code = Code::Handle(code_); |
| 260 | const TypedData& deopt_info = TypedData::Handle(deopt_info_); |
| 261 | |
| 262 | GrowableArray<DeoptInstr*> deopt_instructions; |
| 263 | const Array& deopt_table = Array::Handle(code.deopt_info_array()); |
| 264 | ASSERT(!deopt_table.IsNull()); |
| 265 | DeoptInfo::Unpack(deopt_table, deopt_info, &deopt_instructions); |
| 266 | |
| 267 | const intptr_t len = deopt_instructions.length(); |
| 268 | const intptr_t frame_size = dest_frame_size_; |
| 269 | |
| 270 | // For now, we never place non-objects in the deoptimized frame if |
| 271 | // the destination frame is a copy. This allows us to copy the |
| 272 | // deoptimized frame into an Array. |
| 273 | const bool objects_only = dest_frame_is_allocated_; |
| 274 | |
| 275 | // All kMaterializeObject instructions are emitted before the instructions |
| 276 | // that describe stack frames. Skip them and defer materialization of |
| 277 | // objects until the frame is fully reconstructed and it is safe to perform |
| 278 | // GC. |
| 279 | // Arguments (class of the instance to allocate and field-value pairs) are |
| 280 | // described as part of the expression stack for the bottom-most deoptimized |
| 281 | // frame. They will be used during materialization and removed from the stack |
| 282 | // right before control switches to the unoptimized code. |
| 283 | const intptr_t num_materializations = |
| 284 | DeoptInfo::NumMaterializations(deopt_instructions); |
| 285 | PrepareForDeferredMaterialization(num_materializations); |
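// to_index starts at kDartFrameFixedSize: the materialization arguments sit
// on the expression stack of the bottom-most frame, just past the fixed
// slots at the base of the destination frame.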
| 286 | for (intptr_t from_index = 0, to_index = kDartFrameFixedSize; |
| 287 | from_index < num_materializations; from_index++) { |
| 288 | const intptr_t field_count = |
| 289 | DeoptInstr::GetFieldCount(deopt_instructions[from_index]); |
| 290 | intptr_t* args = GetDestFrameAddressAt(to_index); |
| 291 | DeferredObject* obj = new DeferredObject(field_count, args); |
| 292 | SetDeferredObjectAt(from_index, obj); |
| 293 | to_index += obj->ArgumentCount(); |
| 294 | } |
| 295 | |
| 296 | // Populate stack frames. |
| 297 | for (intptr_t to_index = frame_size - 1, from_index = len - 1; to_index >= 0; |
| 298 | to_index--, from_index--) { |
| 299 | intptr_t* to_addr = GetDestFrameAddressAt(to_index); |
| 300 | DeoptInstr* instr = deopt_instructions[from_index]; |
| 301 | if (!objects_only || IsObjectInstruction(instr->kind())) { |
| 302 | instr->Execute(this, to_addr); |
| 303 | } else { |
| 304 | *reinterpret_cast<ObjectPtr*>(to_addr) = Object::null(); |
| 305 | } |
| 306 | } |
| 307 | |
| 308 | if (FLAG_trace_deoptimization_verbose) { |
| 309 | for (intptr_t i = 0; i < frame_size; i++) { |
| 310 | intptr_t* to_addr = GetDestFrameAddressAt(i); |
| 311 | THR_Print("*%" Pd ". [%p] 0x%" Px " [%s]\n" , i, to_addr, *to_addr, |
| 312 | deopt_instructions[i + (len - frame_size)]->ToCString()); |
| 313 | } |
| 314 | } |
| 315 | } |
| 316 | |
| 317 | const CatchEntryMoves* DeoptContext::ToCatchEntryMoves(intptr_t num_vars) { |
| 318 | const Code& code = Code::Handle(code_); |
| 319 | const TypedData& deopt_info = TypedData::Handle(deopt_info_); |
| 320 | GrowableArray<DeoptInstr*> deopt_instructions; |
| 321 | const Array& deopt_table = Array::Handle(code.deopt_info_array()); |
| 322 | ASSERT(!deopt_table.IsNull()); |
| 323 | DeoptInfo::Unpack(deopt_table, deopt_info, &deopt_instructions); |
| 324 | |
| 325 | CatchEntryMoves* moves = CatchEntryMoves::Allocate(num_vars); |
| 326 | |
| 327 | Function& function = Function::Handle(zone(), code.function()); |
| 328 | intptr_t params = |
| 329 | function.HasOptionalParameters() ? 0 : function.num_fixed_parameters(); |
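// Variable i refers to fixed parameter i when i < params; otherwise it is a
// local, so its slot index is adjusted by the distance between the parameter
// area (kParamEndSlotFromFp) and the first local slot relative to fp.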
| 330 | for (intptr_t i = 0; i < num_vars; i++) { |
| 331 | const intptr_t len = deopt_instructions.length(); |
| 332 | intptr_t slot = i < params ? i |
| 333 | : i + kParamEndSlotFromFp - |
| 334 | runtime_frame_layout.first_local_from_fp; |
| 335 | DeoptInstr* instr = deopt_instructions[len - 1 - slot]; |
| 336 | intptr_t dest_index = i - params; |
| 337 | moves->At(i) = instr->ToCatchEntryMove(this, dest_index); |
| 338 | } |
| 339 | |
| 340 | return moves; |
| 341 | } |
| 342 | |
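// Materializes every slot on the singly-linked list, deleting each node as it
// goes. The list head is cleared up front so the slots are processed exactly
// once.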
| 343 | static void FillDeferredSlots(DeoptContext* deopt_context, |
| 344 | DeferredSlot** slot_list) { |
| 345 | DeferredSlot* slot = *slot_list; |
| 346 | *slot_list = NULL; |
| 347 | |
| 348 | while (slot != NULL) { |
| 349 | DeferredSlot* current = slot; |
| 350 | slot = slot->next(); |
| 351 | |
| 352 | current->Materialize(deopt_context); |
| 353 | |
| 354 | delete current; |
| 355 | } |
| 356 | } |
| 357 | |
| 358 | // Materializes all deferred objects. Returns the total number of |
| 359 | // artificial arguments used during deoptimization. |
| 360 | intptr_t DeoptContext::MaterializeDeferredObjects() { |
| 361 | // Populate slots with references to all unboxed "primitive" values (doubles, |
| 362 | // mints, simd) and deferred objects. Deferred objects are only allocated |
| 363 | // but not filled with data. This is done later because deferred objects |
// can reference each other.
| 365 | FillDeferredSlots(this, &deferred_slots_); |
| 366 | |
| 367 | // Compute total number of artificial arguments used during deoptimization. |
| 368 | intptr_t deopt_arg_count = 0; |
| 369 | for (intptr_t i = 0; i < DeferredObjectsCount(); i++) { |
| 370 | GetDeferredObject(i)->Fill(); |
| 371 | deopt_arg_count += GetDeferredObject(i)->ArgumentCount(); |
| 372 | } |
| 373 | |
| 374 | // Since this is the only step where GC can occur during deoptimization, |
// use it to report the source line where deoptimization occurred.
| 376 | if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) { |
| 377 | DartFrameIterator iterator(Thread::Current(), |
| 378 | StackFrameIterator::kNoCrossThreadIteration); |
| 379 | StackFrame* top_frame = iterator.NextFrame(); |
| 380 | ASSERT(top_frame != NULL); |
| 381 | ASSERT(!top_frame->is_interpreted()); |
| 382 | const Code& code = Code::Handle(top_frame->LookupDartCode()); |
| 383 | const Function& top_function = Function::Handle(code.function()); |
| 384 | const Script& script = Script::Handle(top_function.script()); |
| 385 | const TokenPosition token_pos = code.GetTokenIndexOfPC(top_frame->pc()); |
| 386 | intptr_t line, column; |
| 387 | script.GetTokenLocation(token_pos, &line, &column); |
| 388 | String& line_string = String::Handle(script.GetLine(line)); |
| 389 | THR_Print(" Function: %s\n" , top_function.ToFullyQualifiedCString()); |
| 390 | char line_buffer[80]; |
Utils::SNPrint(line_buffer, sizeof(line_buffer), " Line %" Pd ": '%s'",
| 392 | line, line_string.ToCString()); |
| 393 | THR_Print("%s\n" , line_buffer); |
| 394 | THR_Print(" Deopt args: %" Pd "\n" , deopt_arg_count); |
| 395 | } |
| 396 | |
| 397 | return deopt_arg_count; |
| 398 | } |
| 399 | |
| 400 | ArrayPtr DeoptContext::DestFrameAsArray() { |
| 401 | ASSERT(dest_frame_ != NULL && dest_frame_is_allocated_); |
| 402 | const Array& dest_array = Array::Handle(zone(), Array::New(dest_frame_size_)); |
| 403 | PassiveObject& obj = PassiveObject::Handle(zone()); |
| 404 | for (intptr_t i = 0; i < dest_frame_size_; i++) { |
| 405 | obj = static_cast<ObjectPtr>(dest_frame_[i]); |
| 406 | dest_array.SetAt(i, obj); |
| 407 | } |
| 408 | return dest_array.raw(); |
| 409 | } |
| 410 | |
| 411 | // Deoptimization instruction creating return address using function and |
| 412 | // deopt-id stored at 'object_table_index'. |
| 413 | class DeoptRetAddressInstr : public DeoptInstr { |
| 414 | public: |
| 415 | DeoptRetAddressInstr(intptr_t object_table_index, intptr_t deopt_id) |
| 416 | : object_table_index_(object_table_index), deopt_id_(deopt_id) { |
| 417 | ASSERT(object_table_index >= 0); |
| 418 | ASSERT(deopt_id >= 0); |
| 419 | } |
| 420 | |
| 421 | explicit DeoptRetAddressInstr(intptr_t source_index) |
| 422 | : object_table_index_(ObjectTableIndex::decode(source_index)), |
| 423 | deopt_id_(DeoptId::decode(source_index)) {} |
| 424 | |
| 425 | virtual intptr_t source_index() const { |
| 426 | return ObjectTableIndex::encode(object_table_index_) | |
| 427 | DeoptId::encode(deopt_id_); |
| 428 | } |
| 429 | |
| 430 | virtual DeoptInstr::Kind kind() const { return kRetAddress; } |
| 431 | |
| 432 | virtual const char* ArgumentsToCString() const { |
| 433 | return Thread::Current()->zone()->PrintToString( |
| 434 | "%" Pd ", %" Pd "" , object_table_index_, deopt_id_); |
| 435 | } |
| 436 | |
| 437 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 438 | *dest_addr = Smi::RawValue(0); |
| 439 | deopt_context->DeferRetAddrMaterialization(object_table_index_, deopt_id_, |
| 440 | dest_addr); |
| 441 | } |
| 442 | |
| 443 | intptr_t object_table_index() const { return object_table_index_; } |
| 444 | intptr_t deopt_id() const { return deopt_id_; } |
| 445 | |
| 446 | private: |
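// 'source_index' packs both values into one word: the object table index in
// the low half and the deopt id in the high half.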
| 447 | static const intptr_t kFieldWidth = kBitsPerWord / 2; |
| 448 | class ObjectTableIndex : public BitField<intptr_t, intptr_t, 0, kFieldWidth> { |
| 449 | }; |
| 450 | class DeoptId |
| 451 | : public BitField<intptr_t, intptr_t, kFieldWidth, kFieldWidth> {}; |
| 452 | |
| 453 | const intptr_t object_table_index_; |
| 454 | const intptr_t deopt_id_; |
| 455 | |
| 456 | DISALLOW_COPY_AND_ASSIGN(DeoptRetAddressInstr); |
| 457 | }; |
| 458 | |
| 459 | // Deoptimization instruction moving a constant stored at 'object_table_index'. |
| 460 | class DeoptConstantInstr : public DeoptInstr { |
| 461 | public: |
| 462 | explicit DeoptConstantInstr(intptr_t object_table_index) |
| 463 | : object_table_index_(object_table_index) { |
| 464 | ASSERT(object_table_index >= 0); |
| 465 | } |
| 466 | |
| 467 | virtual intptr_t source_index() const { return object_table_index_; } |
| 468 | virtual DeoptInstr::Kind kind() const { return kConstant; } |
| 469 | |
| 470 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("%" Pd "",
| 472 | object_table_index_); |
| 473 | } |
| 474 | |
| 475 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 476 | const PassiveObject& obj = PassiveObject::Handle( |
| 477 | deopt_context->zone(), deopt_context->ObjectAt(object_table_index_)); |
| 478 | *reinterpret_cast<ObjectPtr*>(dest_addr) = obj.raw(); |
| 479 | } |
| 480 | |
| 481 | CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context, |
| 482 | intptr_t dest_slot) { |
| 483 | return CatchEntryMove::FromConstant(object_table_index_, dest_slot); |
| 484 | } |
| 485 | |
| 486 | private: |
| 487 | const intptr_t object_table_index_; |
| 488 | |
| 489 | DISALLOW_COPY_AND_ASSIGN(DeoptConstantInstr); |
| 490 | }; |
| 491 | |
// Deoptimization instruction moving a word-sized (tagged) value from the
// optimized frame to the specified slot in the unoptimized frame. The source
// is either a CPU register or a stack slot: 'source_index' is the slot index
// of the frame (0 being the first argument) and accounts for the saved
// return address, frame pointer, pool pointer and pc marker.
| 498 | class DeoptWordInstr : public DeoptInstr { |
| 499 | public: |
| 500 | explicit DeoptWordInstr(intptr_t source_index) : source_(source_index) {} |
| 501 | |
| 502 | explicit DeoptWordInstr(const CpuRegisterSource& source) : source_(source) {} |
| 503 | |
| 504 | virtual intptr_t source_index() const { return source_.source_index(); } |
| 505 | virtual DeoptInstr::Kind kind() const { return kWord; } |
| 506 | |
| 507 | virtual const char* ArgumentsToCString() const { return source_.ToCString(); } |
| 508 | |
| 509 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 510 | *dest_addr = source_.Value<intptr_t>(deopt_context); |
| 511 | } |
| 512 | |
| 513 | CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context, |
| 514 | intptr_t dest_slot) { |
| 515 | return CatchEntryMove::FromSlot(CatchEntryMove::SourceKind::kTaggedSlot, |
| 516 | source_.StackSlot(deopt_context), |
| 517 | dest_slot); |
| 518 | } |
| 519 | |
| 520 | private: |
| 521 | const CpuRegisterSource source_; |
| 522 | |
| 523 | DISALLOW_COPY_AND_ASSIGN(DeoptWordInstr); |
| 524 | }; |
| 525 | |
| 526 | class DeoptIntegerInstrBase : public DeoptInstr { |
| 527 | public: |
| 528 | DeoptIntegerInstrBase() {} |
| 529 | |
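// Writes the value directly as a Smi when it fits; otherwise stores a
// placeholder and defers allocation of a Mint box until the frame has been
// fully built and it is safe to allocate.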
| 530 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 531 | const int64_t value = GetValue(deopt_context); |
| 532 | if (Smi::IsValid(value)) { |
| 533 | *dest_addr = Smi::RawValue(static_cast<intptr_t>(value)); |
| 534 | } else { |
| 535 | *dest_addr = Smi::RawValue(0); |
| 536 | deopt_context->DeferMintMaterialization( |
| 537 | value, reinterpret_cast<MintPtr*>(dest_addr)); |
| 538 | } |
| 539 | } |
| 540 | |
| 541 | virtual int64_t GetValue(DeoptContext* deopt_context) = 0; |
| 542 | |
| 543 | private: |
| 544 | DISALLOW_COPY_AND_ASSIGN(DeoptIntegerInstrBase); |
| 545 | }; |
| 546 | |
| 547 | class DeoptMintPairInstr : public DeoptIntegerInstrBase { |
| 548 | public: |
| 549 | explicit DeoptMintPairInstr(intptr_t source_index) |
| 550 | : DeoptIntegerInstrBase(), |
| 551 | lo_(LoRegister::decode(source_index)), |
| 552 | hi_(HiRegister::decode(source_index)) {} |
| 553 | |
| 554 | DeoptMintPairInstr(const CpuRegisterSource& lo, const CpuRegisterSource& hi) |
| 555 | : DeoptIntegerInstrBase(), lo_(lo), hi_(hi) {} |
| 556 | |
| 557 | virtual intptr_t source_index() const { |
| 558 | return LoRegister::encode(lo_.source_index()) | |
| 559 | HiRegister::encode(hi_.source_index()); |
| 560 | } |
| 561 | virtual DeoptInstr::Kind kind() const { return kMintPair; } |
| 562 | |
| 563 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("%s,%s", lo_.ToCString(),
| 565 | hi_.ToCString()); |
| 566 | } |
| 567 | |
| 568 | virtual int64_t GetValue(DeoptContext* deopt_context) { |
| 569 | return Utils::LowHighTo64Bits(lo_.Value<uint32_t>(deopt_context), |
| 570 | hi_.Value<int32_t>(deopt_context)); |
| 571 | } |
| 572 | |
| 573 | CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context, |
| 574 | intptr_t dest_slot) { |
| 575 | return CatchEntryMove::FromSlot( |
| 576 | CatchEntryMove::SourceKind::kInt64PairSlot, |
| 577 | CatchEntryMove::EncodePairSource(lo_.StackSlot(deopt_context), |
| 578 | hi_.StackSlot(deopt_context)), |
| 579 | dest_slot); |
| 580 | } |
| 581 | |
| 582 | private: |
| 583 | static const intptr_t kFieldWidth = kBitsPerWord / 2; |
| 584 | class LoRegister : public BitField<intptr_t, intptr_t, 0, kFieldWidth> {}; |
| 585 | class HiRegister |
| 586 | : public BitField<intptr_t, intptr_t, kFieldWidth, kFieldWidth> {}; |
| 587 | |
| 588 | const CpuRegisterSource lo_; |
| 589 | const CpuRegisterSource hi_; |
| 590 | |
| 591 | DISALLOW_COPY_AND_ASSIGN(DeoptMintPairInstr); |
| 592 | }; |
| 593 | |
| 594 | template <DeoptInstr::Kind K, CatchEntryMove::SourceKind slot_kind, typename T> |
| 595 | class DeoptIntInstr : public DeoptIntegerInstrBase { |
| 596 | public: |
| 597 | explicit DeoptIntInstr(intptr_t source_index) |
| 598 | : DeoptIntegerInstrBase(), source_(source_index) {} |
| 599 | |
| 600 | explicit DeoptIntInstr(const CpuRegisterSource& source) |
| 601 | : DeoptIntegerInstrBase(), source_(source) {} |
| 602 | |
| 603 | virtual intptr_t source_index() const { return source_.source_index(); } |
| 604 | virtual DeoptInstr::Kind kind() const { return K; } |
| 605 | |
| 606 | virtual const char* ArgumentsToCString() const { return source_.ToCString(); } |
| 607 | |
| 608 | virtual int64_t GetValue(DeoptContext* deopt_context) { |
| 609 | return static_cast<int64_t>(source_.Value<T>(deopt_context)); |
| 610 | } |
| 611 | |
| 612 | CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context, |
| 613 | intptr_t dest_slot) { |
| 614 | return CatchEntryMove::FromSlot(slot_kind, source_.StackSlot(deopt_context), |
| 615 | dest_slot); |
| 616 | } |
| 617 | |
| 618 | private: |
| 619 | const CpuRegisterSource source_; |
| 620 | |
| 621 | DISALLOW_COPY_AND_ASSIGN(DeoptIntInstr); |
| 622 | }; |
| 623 | |
| 624 | typedef DeoptIntInstr<DeoptInstr::kUint32, |
| 625 | CatchEntryMove::SourceKind::kUint32Slot, |
| 626 | uint32_t> |
| 627 | DeoptUint32Instr; |
| 628 | typedef DeoptIntInstr<DeoptInstr::kInt32, |
| 629 | CatchEntryMove::SourceKind::kInt32Slot, |
| 630 | int32_t> |
| 631 | DeoptInt32Instr; |
| 632 | typedef DeoptIntInstr<DeoptInstr::kMint, |
| 633 | CatchEntryMove::SourceKind::kInt64Slot, |
| 634 | int64_t> |
| 635 | DeoptMintInstr; |
| 636 | |
| 637 | template <DeoptInstr::Kind K, |
| 638 | CatchEntryMove::SourceKind slot_kind, |
| 639 | typename Type, |
| 640 | typename RawObjectType> |
| 641 | class DeoptFpuInstr : public DeoptInstr { |
| 642 | public: |
| 643 | explicit DeoptFpuInstr(intptr_t source_index) : source_(source_index) {} |
| 644 | |
| 645 | explicit DeoptFpuInstr(const FpuRegisterSource& source) : source_(source) {} |
| 646 | |
| 647 | virtual intptr_t source_index() const { return source_.source_index(); } |
| 648 | virtual DeoptInstr::Kind kind() const { return K; } |
| 649 | |
| 650 | virtual const char* ArgumentsToCString() const { return source_.ToCString(); } |
| 651 | |
| 652 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 653 | *dest_addr = Smi::RawValue(0); |
| 654 | deopt_context->DeferMaterialization( |
| 655 | source_.Value<Type>(deopt_context), |
| 656 | reinterpret_cast<RawObjectType*>(dest_addr)); |
| 657 | } |
| 658 | |
| 659 | CatchEntryMove ToCatchEntryMove(DeoptContext* deopt_context, |
| 660 | intptr_t dest_slot) { |
| 661 | return CatchEntryMove::FromSlot(slot_kind, source_.StackSlot(deopt_context), |
| 662 | dest_slot); |
| 663 | } |
| 664 | |
| 665 | private: |
| 666 | const FpuRegisterSource source_; |
| 667 | |
| 668 | DISALLOW_COPY_AND_ASSIGN(DeoptFpuInstr); |
| 669 | }; |
| 670 | |
| 671 | typedef DeoptFpuInstr<DeoptInstr::kDouble, |
| 672 | CatchEntryMove::SourceKind::kDoubleSlot, |
| 673 | double, |
| 674 | DoublePtr> |
| 675 | DeoptDoubleInstr; |
| 676 | |
| 677 | // Simd128 types. |
| 678 | typedef DeoptFpuInstr<DeoptInstr::kFloat32x4, |
| 679 | CatchEntryMove::SourceKind::kFloat32x4Slot, |
| 680 | simd128_value_t, |
| 681 | Float32x4Ptr> |
| 682 | DeoptFloat32x4Instr; |
| 683 | typedef DeoptFpuInstr<DeoptInstr::kFloat64x2, |
| 684 | CatchEntryMove::SourceKind::kFloat64x2Slot, |
| 685 | simd128_value_t, |
| 686 | Float64x2Ptr> |
| 687 | DeoptFloat64x2Instr; |
| 688 | typedef DeoptFpuInstr<DeoptInstr::kInt32x4, |
| 689 | CatchEntryMove::SourceKind::kInt32x4Slot, |
| 690 | simd128_value_t, |
| 691 | Int32x4Ptr> |
| 692 | DeoptInt32x4Instr; |
| 693 | |
| 694 | // Deoptimization instruction creating a PC marker for the code of |
| 695 | // function at 'object_table_index'. |
| 696 | class DeoptPcMarkerInstr : public DeoptInstr { |
| 697 | public: |
| 698 | explicit DeoptPcMarkerInstr(intptr_t object_table_index) |
| 699 | : object_table_index_(object_table_index) { |
| 700 | ASSERT(object_table_index >= 0); |
| 701 | } |
| 702 | |
| 703 | virtual intptr_t source_index() const { return object_table_index_; } |
| 704 | virtual DeoptInstr::Kind kind() const { return kPcMarker; } |
| 705 | |
| 706 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("%" Pd "",
| 708 | object_table_index_); |
| 709 | } |
| 710 | |
| 711 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 712 | Function& function = Function::Handle(deopt_context->zone()); |
| 713 | function ^= deopt_context->ObjectAt(object_table_index_); |
| 714 | if (function.IsNull()) { |
| 715 | *reinterpret_cast<ObjectPtr*>(dest_addr) = |
| 716 | deopt_context->is_lazy_deopt() |
| 717 | ? StubCode::DeoptimizeLazyFromReturn().raw() |
| 718 | : StubCode::Deoptimize().raw(); |
| 719 | return; |
| 720 | } |
| 721 | |
| 722 | // We don't always have the Code object for the frame's corresponding |
| 723 | // unoptimized code as it may have been collected. Use a stub as the pc |
| 724 | // marker until we can recreate that Code object during deferred |
| 725 | // materialization to maintain the invariant that Dart frames always have |
| 726 | // a pc marker. |
| 727 | *reinterpret_cast<ObjectPtr*>(dest_addr) = |
| 728 | StubCode::FrameAwaitingMaterialization().raw(); |
| 729 | deopt_context->DeferPcMarkerMaterialization(object_table_index_, dest_addr); |
| 730 | } |
| 731 | |
| 732 | private: |
| 733 | intptr_t object_table_index_; |
| 734 | |
| 735 | DISALLOW_COPY_AND_ASSIGN(DeoptPcMarkerInstr); |
| 736 | }; |
| 737 | |
| 738 | // Deoptimization instruction creating a pool pointer for the code of |
| 739 | // function at 'object_table_index'. |
| 740 | class DeoptPpInstr : public DeoptInstr { |
| 741 | public: |
| 742 | explicit DeoptPpInstr(intptr_t object_table_index) |
| 743 | : object_table_index_(object_table_index) { |
| 744 | ASSERT(object_table_index >= 0); |
| 745 | } |
| 746 | |
| 747 | virtual intptr_t source_index() const { return object_table_index_; } |
| 748 | virtual DeoptInstr::Kind kind() const { return kPp; } |
| 749 | |
| 750 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("%" Pd "",
| 752 | object_table_index_); |
| 753 | } |
| 754 | |
| 755 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 756 | *dest_addr = Smi::RawValue(0); |
| 757 | deopt_context->DeferPpMaterialization( |
| 758 | object_table_index_, reinterpret_cast<ObjectPtr*>(dest_addr)); |
| 759 | } |
| 760 | |
| 761 | private: |
| 762 | intptr_t object_table_index_; |
| 763 | |
| 764 | DISALLOW_COPY_AND_ASSIGN(DeoptPpInstr); |
| 765 | }; |
| 766 | |
| 767 | // Deoptimization instruction copying the caller saved FP from optimized frame. |
| 768 | class DeoptCallerFpInstr : public DeoptInstr { |
| 769 | public: |
| 770 | DeoptCallerFpInstr() {} |
| 771 | |
| 772 | virtual intptr_t source_index() const { return 0; } |
| 773 | virtual DeoptInstr::Kind kind() const { return kCallerFp; } |
| 774 | |
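// Writes the previously recorded caller fp into the slot and then records the
// fp of the frame currently being built, so that the next (outer) frame's
// caller-fp slot links back to it.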
| 775 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 776 | *dest_addr = deopt_context->GetCallerFp(); |
| 777 | deopt_context->SetCallerFp( |
| 778 | reinterpret_cast<intptr_t>(dest_addr - kSavedCallerFpSlotFromFp)); |
| 779 | } |
| 780 | |
| 781 | private: |
| 782 | DISALLOW_COPY_AND_ASSIGN(DeoptCallerFpInstr); |
| 783 | }; |
| 784 | |
| 785 | // Deoptimization instruction copying the caller saved PP from optimized frame. |
| 786 | class DeoptCallerPpInstr : public DeoptInstr { |
| 787 | public: |
| 788 | DeoptCallerPpInstr() {} |
| 789 | |
| 790 | virtual intptr_t source_index() const { return 0; } |
| 791 | virtual DeoptInstr::Kind kind() const { return kCallerPp; } |
| 792 | |
| 793 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 794 | *dest_addr = deopt_context->GetSourcePp(); |
| 795 | } |
| 796 | |
| 797 | private: |
| 798 | DISALLOW_COPY_AND_ASSIGN(DeoptCallerPpInstr); |
| 799 | }; |
| 800 | |
| 801 | // Deoptimization instruction copying the caller return address from optimized |
| 802 | // frame. |
| 803 | class DeoptCallerPcInstr : public DeoptInstr { |
| 804 | public: |
| 805 | DeoptCallerPcInstr() {} |
| 806 | |
| 807 | virtual intptr_t source_index() const { return 0; } |
| 808 | virtual DeoptInstr::Kind kind() const { return kCallerPc; } |
| 809 | |
| 810 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 811 | *dest_addr = deopt_context->GetSourcePc(); |
| 812 | } |
| 813 | |
| 814 | private: |
| 815 | DISALLOW_COPY_AND_ASSIGN(DeoptCallerPcInstr); |
| 816 | }; |
| 817 | |
| 818 | // Write reference to a materialized object with the given index into the |
| 819 | // stack slot. |
| 820 | class DeoptMaterializedObjectRefInstr : public DeoptInstr { |
| 821 | public: |
| 822 | explicit DeoptMaterializedObjectRefInstr(intptr_t index) : index_(index) { |
| 823 | ASSERT(index >= 0); |
| 824 | } |
| 825 | |
| 826 | virtual intptr_t source_index() const { return index_; } |
| 827 | virtual DeoptInstr::Kind kind() const { return kMaterializedObjectRef; } |
| 828 | |
| 829 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("#%" Pd "", index_);
| 831 | } |
| 832 | |
| 833 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
| 834 | *reinterpret_cast<SmiPtr*>(dest_addr) = Smi::New(0); |
| 835 | deopt_context->DeferMaterializedObjectRef(index_, dest_addr); |
| 836 | } |
| 837 | |
| 838 | private: |
| 839 | intptr_t index_; |
| 840 | |
| 841 | DISALLOW_COPY_AND_ASSIGN(DeoptMaterializedObjectRefInstr); |
| 842 | }; |
| 843 | |
| 844 | // Materialize object with the given number of fields. |
| 845 | // Arguments for materialization (class and field-value pairs) are pushed |
| 846 | // to the expression stack of the bottom-most frame. |
| 847 | class DeoptMaterializeObjectInstr : public DeoptInstr { |
| 848 | public: |
| 849 | explicit DeoptMaterializeObjectInstr(intptr_t field_count) |
| 850 | : field_count_(field_count) { |
| 851 | ASSERT(field_count >= 0); |
| 852 | } |
| 853 | |
| 854 | virtual intptr_t source_index() const { return field_count_; } |
| 855 | virtual DeoptInstr::Kind kind() const { return kMaterializeObject; } |
| 856 | |
| 857 | virtual const char* ArgumentsToCString() const { |
return Thread::Current()->zone()->PrintToString("%" Pd "", field_count_);
| 859 | } |
| 860 | |
| 861 | void Execute(DeoptContext* deopt_context, intptr_t* dest_addr) { |
// These instructions are executed manually by DeoptimizeWithDeoptInfo.
| 863 | UNREACHABLE(); |
| 864 | } |
| 865 | |
| 866 | private: |
| 867 | intptr_t field_count_; |
| 868 | |
| 869 | DISALLOW_COPY_AND_ASSIGN(DeoptMaterializeObjectInstr); |
| 870 | }; |
| 871 | |
| 872 | uword DeoptInstr::GetRetAddress(DeoptInstr* instr, |
| 873 | const ObjectPool& object_table, |
| 874 | Code* code) { |
| 875 | ASSERT(instr->kind() == kRetAddress); |
| 876 | DeoptRetAddressInstr* ret_address_instr = |
| 877 | static_cast<DeoptRetAddressInstr*>(instr); |
| 878 | // The following assert may trigger when displaying a backtrace |
| 879 | // from the simulator. |
| 880 | ASSERT(DeoptId::IsDeoptAfter(ret_address_instr->deopt_id())); |
| 881 | ASSERT(!object_table.IsNull()); |
| 882 | Thread* thread = Thread::Current(); |
| 883 | Zone* zone = thread->zone(); |
| 884 | Function& function = Function::Handle(zone); |
| 885 | function ^= object_table.ObjectAt(ret_address_instr->object_table_index()); |
| 886 | ASSERT(code != NULL); |
| 887 | const Error& error = |
| 888 | Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function)); |
| 889 | if (!error.IsNull()) { |
| 890 | Exceptions::PropagateError(error); |
| 891 | } |
| 892 | *code = function.unoptimized_code(); |
| 893 | ASSERT(!code->IsNull()); |
| 894 | uword res = code->GetPcForDeoptId(ret_address_instr->deopt_id(), |
| 895 | PcDescriptorsLayout::kDeopt); |
| 896 | ASSERT(res != 0); |
| 897 | return res; |
| 898 | } |
| 899 | |
| 900 | DeoptInstr* DeoptInstr::Create(intptr_t kind_as_int, intptr_t source_index) { |
| 901 | Kind kind = static_cast<Kind>(kind_as_int); |
| 902 | switch (kind) { |
| 903 | case kWord: |
| 904 | return new DeoptWordInstr(source_index); |
| 905 | case kDouble: |
| 906 | return new DeoptDoubleInstr(source_index); |
| 907 | case kMint: |
| 908 | return new DeoptMintInstr(source_index); |
| 909 | case kMintPair: |
| 910 | return new DeoptMintPairInstr(source_index); |
| 911 | case kInt32: |
| 912 | return new DeoptInt32Instr(source_index); |
| 913 | case kUint32: |
| 914 | return new DeoptUint32Instr(source_index); |
| 915 | case kFloat32x4: |
| 916 | return new DeoptFloat32x4Instr(source_index); |
| 917 | case kFloat64x2: |
| 918 | return new DeoptFloat64x2Instr(source_index); |
| 919 | case kInt32x4: |
| 920 | return new DeoptInt32x4Instr(source_index); |
| 921 | case kRetAddress: |
| 922 | return new DeoptRetAddressInstr(source_index); |
| 923 | case kConstant: |
| 924 | return new DeoptConstantInstr(source_index); |
| 925 | case kPcMarker: |
| 926 | return new DeoptPcMarkerInstr(source_index); |
| 927 | case kPp: |
| 928 | return new DeoptPpInstr(source_index); |
| 929 | case kCallerFp: |
| 930 | return new DeoptCallerFpInstr(); |
| 931 | case kCallerPp: |
| 932 | return new DeoptCallerPpInstr(); |
| 933 | case kCallerPc: |
| 934 | return new DeoptCallerPcInstr(); |
| 935 | case kMaterializedObjectRef: |
| 936 | return new DeoptMaterializedObjectRefInstr(source_index); |
| 937 | case kMaterializeObject: |
| 938 | return new DeoptMaterializeObjectInstr(source_index); |
| 939 | } |
| 940 | UNREACHABLE(); |
| 941 | return NULL; |
| 942 | } |
| 943 | |
| 944 | const char* DeoptInstr::KindToCString(Kind kind) { |
| 945 | switch (kind) { |
case kWord:
  return "word";
case kDouble:
  return "double";
case kMint:
case kMintPair:
  return "mint";
case kInt32:
  return "int32";
case kUint32:
  return "uint32";
case kFloat32x4:
  return "float32x4";
case kFloat64x2:
  return "float64x2";
case kInt32x4:
  return "int32x4";
case kRetAddress:
  return "retaddr";
case kConstant:
  return "const";
case kPcMarker:
  return "pc";
case kPp:
  return "pp";
case kCallerFp:
  return "callerfp";
case kCallerPp:
  return "callerpp";
case kCallerPc:
  return "callerpc";
case kMaterializedObjectRef:
  return "ref";
case kMaterializeObject:
  return "mat";
| 981 | } |
| 982 | UNREACHABLE(); |
| 983 | return NULL; |
| 984 | } |
| 985 | |
| 986 | class DeoptInfoBuilder::TrieNode : public ZoneAllocated { |
| 987 | public: |
| 988 | // Construct the root node representing the implicit "shared" terminator |
| 989 | // at the end of each deopt info. |
| 990 | TrieNode() : instruction_(NULL), info_number_(-1), children_(16) {} |
| 991 | |
| 992 | // Construct a node representing a written instruction. |
| 993 | TrieNode(DeoptInstr* instruction, intptr_t info_number) |
| 994 | : instruction_(instruction), info_number_(info_number), children_(4) {} |
| 995 | |
| 996 | intptr_t info_number() const { return info_number_; } |
| 997 | |
| 998 | void AddChild(TrieNode* child) { |
| 999 | if (child != NULL) children_.Add(child); |
| 1000 | } |
| 1001 | |
| 1002 | TrieNode* FindChild(const DeoptInstr& instruction) { |
| 1003 | for (intptr_t i = 0; i < children_.length(); ++i) { |
| 1004 | TrieNode* child = children_[i]; |
| 1005 | if (child->instruction_->Equals(instruction)) return child; |
| 1006 | } |
| 1007 | return NULL; |
| 1008 | } |
| 1009 | |
| 1010 | private: |
| 1011 | const DeoptInstr* instruction_; // Instruction that was written. |
| 1012 | const intptr_t info_number_; // Index of the deopt info it was written to. |
| 1013 | |
| 1014 | GrowableArray<TrieNode*> children_; |
| 1015 | }; |
| 1016 | |
| 1017 | DeoptInfoBuilder::DeoptInfoBuilder(Zone* zone, |
| 1018 | const intptr_t num_args, |
| 1019 | compiler::Assembler* assembler) |
| 1020 | : zone_(zone), |
| 1021 | instructions_(), |
| 1022 | num_args_(num_args), |
| 1023 | assembler_(assembler), |
| 1024 | trie_root_(new (zone) TrieNode()), |
| 1025 | current_info_number_(0), |
| 1026 | frame_start_(-1), |
| 1027 | materializations_() {} |
| 1028 | |
| 1029 | intptr_t DeoptInfoBuilder::FindOrAddObjectInTable(const Object& obj) const { |
| 1030 | return assembler_->object_pool_builder().FindObject(obj); |
| 1031 | } |
| 1032 | |
| 1033 | intptr_t DeoptInfoBuilder::CalculateStackIndex( |
| 1034 | const Location& source_loc) const { |
| 1035 | intptr_t index = -compiler::target::frame_layout.VariableIndexForFrameSlot( |
| 1036 | source_loc.stack_index()); |
| 1037 | return index < 0 ? index + num_args_ |
| 1038 | : index + num_args_ + kDartFrameFixedSize; |
| 1039 | } |
| 1040 | |
| 1041 | CpuRegisterSource DeoptInfoBuilder::ToCpuRegisterSource(const Location& loc) { |
| 1042 | if (loc.IsRegister()) { |
| 1043 | return CpuRegisterSource(CpuRegisterSource::kRegister, loc.reg()); |
| 1044 | } else { |
| 1045 | ASSERT(loc.IsStackSlot()); |
| 1046 | return CpuRegisterSource(CpuRegisterSource::kStackSlot, |
| 1047 | CalculateStackIndex(loc)); |
| 1048 | } |
| 1049 | } |
| 1050 | |
| 1051 | FpuRegisterSource DeoptInfoBuilder::ToFpuRegisterSource( |
| 1052 | const Location& loc, |
| 1053 | Location::Kind stack_slot_kind) { |
| 1054 | if (loc.IsFpuRegister()) { |
| 1055 | return FpuRegisterSource(FpuRegisterSource::kRegister, loc.fpu_reg()); |
| 1056 | } else { |
| 1057 | ASSERT((stack_slot_kind == Location::kQuadStackSlot) || |
| 1058 | (stack_slot_kind == Location::kDoubleStackSlot)); |
| 1059 | ASSERT(loc.kind() == stack_slot_kind); |
| 1060 | return FpuRegisterSource(FpuRegisterSource::kStackSlot, |
| 1061 | CalculateStackIndex(loc)); |
| 1062 | } |
| 1063 | } |
| 1064 | |
| 1065 | void DeoptInfoBuilder::AddReturnAddress(const Function& function, |
| 1066 | intptr_t deopt_id, |
| 1067 | intptr_t dest_index) { |
| 1068 | const intptr_t object_table_index = FindOrAddObjectInTable(function); |
| 1069 | ASSERT(dest_index == FrameSize()); |
| 1070 | instructions_.Add(new (zone()) |
| 1071 | DeoptRetAddressInstr(object_table_index, deopt_id)); |
| 1072 | } |
| 1073 | |
| 1074 | void DeoptInfoBuilder::AddPcMarker(const Function& function, |
| 1075 | intptr_t dest_index) { |
| 1076 | intptr_t object_table_index = FindOrAddObjectInTable(function); |
| 1077 | ASSERT(dest_index == FrameSize()); |
| 1078 | instructions_.Add(new (zone()) DeoptPcMarkerInstr(object_table_index)); |
| 1079 | } |
| 1080 | |
| 1081 | void DeoptInfoBuilder::AddPp(const Function& function, intptr_t dest_index) { |
| 1082 | intptr_t object_table_index = FindOrAddObjectInTable(function); |
| 1083 | ASSERT(dest_index == FrameSize()); |
| 1084 | instructions_.Add(new (zone()) DeoptPpInstr(object_table_index)); |
| 1085 | } |
| 1086 | |
| 1087 | void DeoptInfoBuilder::AddCopy(Value* value, |
| 1088 | const Location& source_loc, |
| 1089 | const intptr_t dest_index) { |
| 1090 | DeoptInstr* deopt_instr = NULL; |
| 1091 | if (source_loc.IsConstant()) { |
| 1092 | intptr_t object_table_index = FindOrAddObjectInTable(source_loc.constant()); |
| 1093 | deopt_instr = new (zone()) DeoptConstantInstr(object_table_index); |
| 1094 | } else if (source_loc.IsInvalid() && |
| 1095 | value->definition()->IsMaterializeObject()) { |
| 1096 | const intptr_t index = |
| 1097 | FindMaterialization(value->definition()->AsMaterializeObject()); |
| 1098 | ASSERT(index >= 0); |
| 1099 | deopt_instr = new (zone()) DeoptMaterializedObjectRefInstr(index); |
| 1100 | } else { |
| 1101 | ASSERT(!source_loc.IsInvalid()); |
| 1102 | Representation rep = value->definition()->representation(); |
| 1103 | switch (rep) { |
| 1104 | case kTagged: |
| 1105 | deopt_instr = |
| 1106 | new (zone()) DeoptWordInstr(ToCpuRegisterSource(source_loc)); |
| 1107 | break; |
| 1108 | case kUnboxedInt64: { |
| 1109 | if (source_loc.IsPairLocation()) { |
| 1110 | PairLocation* pair = source_loc.AsPairLocation(); |
| 1111 | deopt_instr = |
| 1112 | new (zone()) DeoptMintPairInstr(ToCpuRegisterSource(pair->At(0)), |
| 1113 | ToCpuRegisterSource(pair->At(1))); |
| 1114 | } else { |
| 1115 | ASSERT(!source_loc.IsPairLocation()); |
| 1116 | deopt_instr = |
| 1117 | new (zone()) DeoptMintInstr(ToCpuRegisterSource(source_loc)); |
| 1118 | } |
| 1119 | break; |
| 1120 | } |
| 1121 | case kUnboxedInt32: |
| 1122 | deopt_instr = |
| 1123 | new (zone()) DeoptInt32Instr(ToCpuRegisterSource(source_loc)); |
| 1124 | break; |
| 1125 | case kUnboxedUint32: |
| 1126 | deopt_instr = |
| 1127 | new (zone()) DeoptUint32Instr(ToCpuRegisterSource(source_loc)); |
| 1128 | break; |
| 1129 | case kUnboxedFloat: |
| 1130 | case kUnboxedDouble: |
| 1131 | deopt_instr = new (zone()) DeoptDoubleInstr( |
| 1132 | ToFpuRegisterSource(source_loc, Location::kDoubleStackSlot)); |
| 1133 | break; |
| 1134 | case kUnboxedFloat32x4: |
| 1135 | deopt_instr = new (zone()) DeoptFloat32x4Instr( |
| 1136 | ToFpuRegisterSource(source_loc, Location::kQuadStackSlot)); |
| 1137 | break; |
| 1138 | case kUnboxedFloat64x2: |
| 1139 | deopt_instr = new (zone()) DeoptFloat64x2Instr( |
| 1140 | ToFpuRegisterSource(source_loc, Location::kQuadStackSlot)); |
| 1141 | break; |
| 1142 | case kUnboxedInt32x4: |
| 1143 | deopt_instr = new (zone()) DeoptInt32x4Instr( |
| 1144 | ToFpuRegisterSource(source_loc, Location::kQuadStackSlot)); |
| 1145 | break; |
| 1146 | default: |
| 1147 | UNREACHABLE(); |
| 1148 | break; |
| 1149 | } |
| 1150 | } |
| 1151 | ASSERT(dest_index == FrameSize()); |
| 1152 | ASSERT(deopt_instr != NULL); |
| 1153 | instructions_.Add(deopt_instr); |
| 1154 | } |
| 1155 | |
| 1156 | void DeoptInfoBuilder::AddCallerFp(intptr_t dest_index) { |
| 1157 | ASSERT(dest_index == FrameSize()); |
| 1158 | instructions_.Add(new (zone()) DeoptCallerFpInstr()); |
| 1159 | } |
| 1160 | |
| 1161 | void DeoptInfoBuilder::AddCallerPp(intptr_t dest_index) { |
| 1162 | ASSERT(dest_index == FrameSize()); |
| 1163 | instructions_.Add(new (zone()) DeoptCallerPpInstr()); |
| 1164 | } |
| 1165 | |
| 1166 | void DeoptInfoBuilder::AddCallerPc(intptr_t dest_index) { |
| 1167 | ASSERT(dest_index == FrameSize()); |
| 1168 | instructions_.Add(new (zone()) DeoptCallerPcInstr()); |
| 1169 | } |
| 1170 | |
| 1171 | void DeoptInfoBuilder::AddConstant(const Object& obj, intptr_t dest_index) { |
| 1172 | ASSERT(dest_index == FrameSize()); |
| 1173 | intptr_t object_table_index = FindOrAddObjectInTable(obj); |
| 1174 | instructions_.Add(new (zone()) DeoptConstantInstr(object_table_index)); |
| 1175 | } |
| 1176 | |
| 1177 | void DeoptInfoBuilder::AddMaterialization(MaterializeObjectInstr* mat) { |
| 1178 | const intptr_t index = FindMaterialization(mat); |
| 1179 | if (index >= 0) { |
| 1180 | return; // Already added. |
| 1181 | } |
| 1182 | materializations_.Add(mat); |
| 1183 | |
// Count the initialized fields and emit a kMaterializeObject instruction.
// There is no need to write nulls into fields because the object is
// null-initialized by default.
| 1187 | intptr_t non_null_fields = 0; |
| 1188 | for (intptr_t i = 0; i < mat->InputCount(); i++) { |
| 1189 | if (!mat->InputAt(i)->BindsToConstantNull()) { |
| 1190 | non_null_fields++; |
| 1191 | } |
| 1192 | } |
| 1193 | |
| 1194 | instructions_.Add(new (zone()) DeoptMaterializeObjectInstr(non_null_fields)); |
| 1195 | |
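// Also register nested materializations so that objects referenced by this
// object's fields are materialized as well.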
| 1196 | for (intptr_t i = 0; i < mat->InputCount(); i++) { |
| 1197 | MaterializeObjectInstr* nested_mat = |
| 1198 | mat->InputAt(i)->definition()->AsMaterializeObject(); |
| 1199 | if (nested_mat != NULL) { |
| 1200 | AddMaterialization(nested_mat); |
| 1201 | } |
| 1202 | } |
| 1203 | } |
| 1204 | |
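// Emits the materialization arguments at the start of the destination frame:
// for each materialized object, its class, the number of variables, and an
// (offset, value) pair for every field that is not known to be null.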
| 1205 | intptr_t DeoptInfoBuilder::EmitMaterializationArguments(intptr_t dest_index) { |
| 1206 | ASSERT(dest_index == kDartFrameFixedSize); |
| 1207 | for (intptr_t i = 0; i < materializations_.length(); i++) { |
| 1208 | MaterializeObjectInstr* mat = materializations_[i]; |
| 1209 | // Class of the instance to allocate. |
| 1210 | AddConstant(mat->cls(), dest_index++); |
| 1211 | AddConstant(Smi::ZoneHandle(Smi::New(mat->num_variables())), dest_index++); |
| 1212 | for (intptr_t i = 0; i < mat->InputCount(); i++) { |
| 1213 | if (!mat->InputAt(i)->BindsToConstantNull()) { |
| 1214 | // Emit offset-value pair. |
| 1215 | AddConstant(Smi::ZoneHandle(Smi::New(mat->FieldOffsetAt(i))), |
| 1216 | dest_index++); |
| 1217 | AddCopy(mat->InputAt(i), mat->LocationAt(i), dest_index++); |
| 1218 | } |
| 1219 | } |
| 1220 | } |
| 1221 | return dest_index; |
| 1222 | } |
| 1223 | |
| 1224 | intptr_t DeoptInfoBuilder::FindMaterialization( |
| 1225 | MaterializeObjectInstr* mat) const { |
| 1226 | for (intptr_t i = 0; i < materializations_.length(); i++) { |
| 1227 | if (materializations_[i] == mat) { |
| 1228 | return i; |
| 1229 | } |
| 1230 | } |
| 1231 | return -1; |
| 1232 | } |
| 1233 | |
| 1234 | static uint8_t* ZoneReAlloc(uint8_t* ptr, |
| 1235 | intptr_t old_size, |
| 1236 | intptr_t new_size) { |
| 1237 | return Thread::Current()->zone()->Realloc<uint8_t>(ptr, old_size, new_size); |
| 1238 | } |
| 1239 | |
| 1240 | TypedDataPtr DeoptInfoBuilder::CreateDeoptInfo(const Array& deopt_table) { |
| 1241 | intptr_t length = instructions_.length(); |
| 1242 | |
| 1243 | // Count the number of instructions that are a shared suffix of some deopt |
| 1244 | // info already written. |
| 1245 | TrieNode* suffix = trie_root_; |
| 1246 | intptr_t suffix_length = 0; |
| 1247 | if (FLAG_compress_deopt_info) { |
| 1248 | for (intptr_t i = length - 1; i >= 0; --i) { |
| 1249 | TrieNode* node = suffix->FindChild(*instructions_[i]); |
| 1250 | if (node == NULL) break; |
| 1251 | suffix = node; |
| 1252 | ++suffix_length; |
| 1253 | } |
| 1254 | } |
| 1255 | |
| 1256 | // Allocate space for the translation. If the shared suffix is longer |
| 1257 | // than one instruction, we replace it with a single suffix instruction. |
| 1258 | const bool use_suffix = suffix_length > 1; |
| 1259 | if (use_suffix) { |
| 1260 | length -= (suffix_length - 1); |
| 1261 | } |
| 1262 | |
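// Packed layout: the frame size, then the shared-suffix length (0 when no
// suffix is reused) followed by the suffix's deopt-info number when present,
// and finally (kind, source_index) pairs for the unshared instructions,
// written in reverse order.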
| 1263 | uint8_t* buffer; |
| 1264 | typedef WriteStream::Raw<sizeof(intptr_t), intptr_t> Writer; |
| 1265 | WriteStream stream(&buffer, ZoneReAlloc, 2 * length * kWordSize); |
| 1266 | |
| 1267 | Writer::Write(&stream, FrameSize()); |
| 1268 | |
| 1269 | if (use_suffix) { |
| 1270 | Writer::Write(&stream, suffix_length); |
| 1271 | Writer::Write(&stream, suffix->info_number()); |
| 1272 | } else { |
| 1273 | Writer::Write(&stream, 0); |
| 1274 | } |
| 1275 | |
| 1276 | // Write the unshared instructions and build their sub-tree. |
| 1277 | TrieNode* node = use_suffix ? suffix : trie_root_; |
| 1278 | const intptr_t write_count = use_suffix ? length - 1 : length; |
| 1279 | for (intptr_t i = write_count - 1; i >= 0; --i) { |
| 1280 | DeoptInstr* instr = instructions_[i]; |
| 1281 | Writer::Write(&stream, instr->kind()); |
| 1282 | Writer::Write(&stream, instr->source_index()); |
| 1283 | |
| 1284 | TrieNode* child = new (zone()) TrieNode(instr, current_info_number_); |
| 1285 | node->AddChild(child); |
| 1286 | node = child; |
| 1287 | } |
| 1288 | |
| 1289 | const TypedData& deopt_info = TypedData::Handle( |
| 1290 | zone(), TypedData::New(kTypedDataUint8ArrayCid, stream.bytes_written(), |
| 1291 | Heap::kOld)); |
| 1292 | { |
| 1293 | NoSafepointScope no_safepoint; |
| 1294 | memmove(deopt_info.DataAddr(0), stream.buffer(), stream.bytes_written()); |
| 1295 | } |
| 1296 | |
| 1297 | ASSERT( |
| 1298 | DeoptInfo::VerifyDecompression(instructions_, deopt_table, deopt_info)); |
| 1299 | instructions_.Clear(); |
| 1300 | materializations_.Clear(); |
| 1301 | frame_start_ = -1; |
| 1302 | |
| 1303 | ++current_info_number_; |
| 1304 | return deopt_info.raw(); |
| 1305 | } |
| 1306 | |
| 1307 | intptr_t DeoptTable::SizeFor(intptr_t length) { |
| 1308 | return length * kEntrySize; |
| 1309 | } |
| 1310 | |
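// Each table entry spans kEntrySize consecutive slots holding, in order, the
// offset, the packed deopt info, and the deopt reason and flags (packed in a
// Smi).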
| 1311 | void DeoptTable::SetEntry(const Array& table, |
| 1312 | intptr_t index, |
| 1313 | const Smi& offset, |
| 1314 | const TypedData& info, |
| 1315 | const Smi& reason) { |
| 1316 | ASSERT((table.Length() % kEntrySize) == 0); |
| 1317 | intptr_t i = index * kEntrySize; |
| 1318 | table.SetAt(i, offset); |
| 1319 | table.SetAt(i + 1, info); |
| 1320 | table.SetAt(i + 2, reason); |
| 1321 | } |
| 1322 | |
| 1323 | intptr_t DeoptTable::GetLength(const Array& table) { |
| 1324 | ASSERT((table.Length() % kEntrySize) == 0); |
| 1325 | return table.Length() / kEntrySize; |
| 1326 | } |
| 1327 | |
| 1328 | void DeoptTable::GetEntry(const Array& table, |
| 1329 | intptr_t index, |
| 1330 | Smi* offset, |
| 1331 | TypedData* info, |
| 1332 | Smi* reason) { |
| 1333 | intptr_t i = index * kEntrySize; |
| 1334 | *offset ^= table.At(i); |
| 1335 | *info ^= table.At(i + 1); |
| 1336 | *reason ^= table.At(i + 2); |
| 1337 | } |
| 1338 | |
| 1340 | intptr_t DeoptInfo::FrameSize(const TypedData& packed) { |
| 1341 | NoSafepointScope no_safepoint; |
| 1342 | typedef ReadStream::Raw<sizeof(intptr_t), intptr_t> Reader; |
| 1343 | ReadStream read_stream(reinterpret_cast<uint8_t*>(packed.DataAddr(0)), |
| 1344 | packed.LengthInBytes()); |
| 1345 | return Reader::Read(&read_stream); |
| 1346 | } |
| 1347 | |
| 1349 | intptr_t DeoptInfo::NumMaterializations( |
| 1350 | const GrowableArray<DeoptInstr*>& unpacked) { |
| 1351 | intptr_t num = 0; |
| 1352 | while (unpacked[num]->kind() == DeoptInstr::kMaterializeObject) { |
| 1353 | num++; |
| 1354 | } |
| 1355 | return num; |
| 1356 | } |
| 1357 | |
| 1359 | void DeoptInfo::UnpackInto(const Array& table, |
| 1360 | const TypedData& packed, |
| 1361 | GrowableArray<DeoptInstr*>* unpacked, |
| 1362 | intptr_t length) { |
| 1363 | NoSafepointScope no_safepoint; |
| 1364 | typedef ReadStream::Raw<sizeof(intptr_t), intptr_t> Reader; |
| 1365 | ReadStream read_stream(reinterpret_cast<uint8_t*>(packed.DataAddr(0)), |
| 1366 | packed.LengthInBytes()); |
| 1367 | const intptr_t frame_size = Reader::Read(&read_stream); // Skip frame size. |
| 1368 | USE(frame_size); |
| 1369 | |
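// A non-zero suffix length means this deopt info shares its trailing
// instructions with a previously written entry; unpack that shared suffix
// from the referenced table entry first (recursively), then read the
// unshared instructions below.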
| 1370 | const intptr_t suffix_length = Reader::Read(&read_stream); |
| 1371 | if (suffix_length != 0) { |
| 1372 | ASSERT(suffix_length > 1); |
| 1373 | const intptr_t info_number = Reader::Read(&read_stream); |
| 1374 | |
| 1375 | TypedData& suffix = TypedData::Handle(); |
| 1376 | Smi& offset = Smi::Handle(); |
| 1377 | Smi& reason_and_flags = Smi::Handle(); |
| 1378 | DeoptTable::GetEntry(table, info_number, &offset, &suffix, |
| 1379 | &reason_and_flags); |
| 1380 | UnpackInto(table, suffix, unpacked, suffix_length); |
| 1381 | } |
| 1382 | |
| 1383 | while ((read_stream.PendingBytes() > 0) && (unpacked->length() < length)) { |
| 1384 | const intptr_t instruction = Reader::Read(&read_stream); |
| 1385 | const intptr_t from_index = Reader::Read(&read_stream); |
| 1386 | unpacked->Add(DeoptInstr::Create(instruction, from_index)); |
| 1387 | } |
| 1388 | } |
| 1389 | |
| 1391 | void DeoptInfo::Unpack(const Array& table, |
| 1392 | const TypedData& packed, |
| 1393 | GrowableArray<DeoptInstr*>* unpacked) { |
| 1394 | ASSERT(unpacked->is_empty()); |
| 1395 | |
| 1396 | // Pass kMaxInt32 as the length to unpack all instructions from the |
| 1397 | // packed stream. |
| 1398 | UnpackInto(table, packed, unpacked, kMaxInt32); |
| 1399 | |
| 1400 | unpacked->Reverse(); |
| 1401 | } |
| 1402 | |
| 1404 | const char* DeoptInfo::ToCString(const Array& deopt_table, |
| 1405 | const TypedData& packed) { |
| 1406 | #define FORMAT "[%s]" |
| 1407 | GrowableArray<DeoptInstr*> deopt_instrs; |
| 1408 | Unpack(deopt_table, packed, &deopt_instrs); |
| 1409 | |
| 1410 | // Compute the buffer size required. |
| 1411 | intptr_t len = 1; // Trailing '\0'. |
| 1412 | for (intptr_t i = 0; i < deopt_instrs.length(); i++) { |
| 1413 | len += Utils::SNPrint(NULL, 0, FORMAT, deopt_instrs[i]->ToCString()); |
| 1414 | } |
| 1415 | |
| 1416 | // Allocate the buffer. |
| 1417 | char* buffer = Thread::Current()->zone()->Alloc<char>(len); |
| 1418 | |
| 1419 | // Layout the fields in the buffer. |
| 1420 | intptr_t index = 0; |
| 1421 | for (intptr_t i = 0; i < deopt_instrs.length(); i++) { |
| 1422 | index += Utils::SNPrint((buffer + index), (len - index), FORMAT, |
| 1423 | deopt_instrs[i]->ToCString()); |
| 1424 | } |
| 1425 | |
| 1426 | return buffer; |
| 1427 | #undef FORMAT |
| 1428 | } |
| 1429 | |
| 1431 | // Returns a bool so it can be asserted. |
| 1432 | bool DeoptInfo::VerifyDecompression(const GrowableArray<DeoptInstr*>& original, |
| 1433 | const Array& deopt_table, |
| 1434 | const TypedData& packed) { |
| 1435 | GrowableArray<DeoptInstr*> unpacked; |
| 1436 | Unpack(deopt_table, packed, &unpacked); |
| 1437 | ASSERT(unpacked.length() == original.length()); |
| 1438 | for (intptr_t i = 0; i < unpacked.length(); ++i) { |
| 1439 | ASSERT(unpacked[i]->Equals(*original[i])); |
| 1440 | } |
| 1441 | return true; |
| 1442 | } |
| 1443 | |
| 1444 | } // namespace dart |
| 1445 | |
| 1446 | #endif // !defined(DART_PRECOMPILED_RUNTIME) |
| 1447 | |