1// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include <setjmp.h> // NOLINT
6#include <stdlib.h>
7
8#include "vm/globals.h"
9#if !defined(DART_PRECOMPILED_RUNTIME)
10
11#include "vm/interpreter.h"
12
13#include "vm/compiler/api/type_check_mode.h"
14#include "vm/compiler/assembler/assembler.h"
15#include "vm/compiler/assembler/disassembler_kbc.h"
16#include "vm/compiler/backend/flow_graph_compiler.h"
17#include "vm/compiler/ffi/abi.h"
18#include "vm/compiler/frontend/bytecode_reader.h"
19#include "vm/compiler/jit/compiler.h"
20#include "vm/cpu.h"
21#include "vm/dart_entry.h"
22#include "vm/debugger.h"
23#include "vm/lockers.h"
24#include "vm/native_arguments.h"
25#include "vm/native_entry.h"
26#include "vm/object.h"
27#include "vm/object_store.h"
28#include "vm/os_thread.h"
29#include "vm/stack_frame_kbc.h"
30#include "vm/symbols.h"
31
32namespace dart {
33
// Instruction-count threshold after which executed bytecodes are traced
// (see Interpreter::IsTracingExecution; DEBUG builds only).
DEFINE_FLAG(uint64_t,
            trace_interpreter_after,
            ULLONG_MAX,
            "Trace interpreter execution after instruction count reached.");
// Path of the binary trace of executed KBC instructions (DEBUG builds only;
// requires an embedder file-open callback).
DEFINE_FLAG(charp,
            interpreter_trace_file,
            NULL,
            "File to write a dynamic instruction trace to.");
// Size cap for the trace file; writing stops once the cap is reached.
DEFINE_FLAG(uint64_t,
            interpreter_trace_file_max_bytes,
            100 * MB,
            "Maximum size in bytes of the interpreter trace file");
46
47// InterpreterSetjmpBuffer are linked together, and the last created one
48// is referenced by the Interpreter. When an exception is thrown, the exception
49// runtime looks at where to jump and finds the corresponding
50// InterpreterSetjmpBuffer based on the stack pointer of the exception handler.
51// The runtime then does a Longjmp on that buffer to return to the interpreter.
52class InterpreterSetjmpBuffer {
53 public:
54 void Longjmp() {
55 // "This" is now the last setjmp buffer.
56 interpreter_->set_last_setjmp_buffer(this);
57 longjmp(buffer_, 1);
58 }
59
60 explicit InterpreterSetjmpBuffer(Interpreter* interpreter) {
61 interpreter_ = interpreter;
62 link_ = interpreter->last_setjmp_buffer();
63 interpreter->set_last_setjmp_buffer(this);
64 fp_ = interpreter->fp_;
65 }
66
67 ~InterpreterSetjmpBuffer() {
68 ASSERT(interpreter_->last_setjmp_buffer() == this);
69 interpreter_->set_last_setjmp_buffer(link_);
70 }
71
72 InterpreterSetjmpBuffer* link() const { return link_; }
73
74 uword fp() const { return reinterpret_cast<uword>(fp_); }
75
76 jmp_buf buffer_;
77
78 private:
79 ObjectPtr* fp_;
80 Interpreter* interpreter_;
81 InterpreterSetjmpBuffer* link_;
82
83 friend class Interpreter;
84
85 DISALLOW_ALLOCATION();
86 DISALLOW_COPY_AND_ASSIGN(InterpreterSetjmpBuffer);
87};
88
89DART_FORCE_INLINE static ObjectPtr* SavedCallerFP(ObjectPtr* FP) {
90 return reinterpret_cast<ObjectPtr*>(
91 static_cast<uword>(FP[kKBCSavedCallerFpSlotFromFp]));
92}
93
94DART_FORCE_INLINE static ObjectPtr* FrameArguments(ObjectPtr* FP,
95 intptr_t argc) {
96 return FP - (kKBCDartFrameFixedSize + argc);
97}
98
// Casts val to the requested raw pointer type with a debug class-id check.
#define RAW_CAST(Type, val) (InterpreterHelpers::CastTo##Type(val))

// Allocation-free helpers used by the interpreter loop to read and write raw
// objects directly.
class InterpreterHelpers {
 public:
// Checked downcasts to Type##Ptr. Smi values must be immediates and Integer
// values must be Smis or Mints; everything else is checked with Is##Type().
#define DEFINE_CASTS(Type)                                                     \
  DART_FORCE_INLINE static Type##Ptr CastTo##Type(ObjectPtr obj) {             \
    ASSERT((k##Type##Cid == kSmiCid)                                           \
               ? !obj->IsHeapObject()                                          \
               : (k##Type##Cid == kIntegerCid)                                 \
                     ? (!obj->IsHeapObject() || obj->IsMint())                 \
                     : obj->Is##Type());                                       \
    return static_cast<Type##Ptr>(obj);                                        \
  }
  CLASS_LIST(DEFINE_CASTS)
#undef DEFINE_CASTS

  // Returns the class id of obj as a Smi (immediates have kSmiCid).
  DART_FORCE_INLINE static SmiPtr GetClassIdAsSmi(ObjectPtr obj) {
    return Smi::New(obj->IsHeapObject() ? obj->GetClassId()
                                        : static_cast<intptr_t>(kSmiCid));
  }

  // Returns the class id of obj (kSmiCid for immediates).
  DART_FORCE_INLINE static intptr_t GetClassId(ObjectPtr obj) {
    return obj->IsHeapObject() ? obj->GetClassId()
                               : static_cast<intptr_t>(kSmiCid);
  }

  // Loads the type-argument vector of instance from the class-specific field
  // offset, or returns null when the instance's class takes no type
  // arguments.
  DART_FORCE_INLINE static TypeArgumentsPtr GetTypeArguments(
      Thread* thread,
      InstancePtr instance) {
    ClassPtr instance_class =
        thread->isolate()->class_table()->At(GetClassId(instance));
    return instance_class->ptr()->num_type_arguments_ > 0
               ? reinterpret_cast<TypeArgumentsPtr*>(instance->ptr())
                     [instance_class->ptr()
                          ->host_type_arguments_field_offset_in_words_]
               : TypeArguments::null();
  }

  // The usage counter is actually a 'hotness' counter.
  // For an instance call, both the usage counters of the caller and of the
  // callee will get incremented, as well as the ICdata counter at the call
  // site.
  DART_FORCE_INLINE static void IncrementUsageCounter(FunctionPtr f) {
    f->ptr()->usage_counter_++;
  }

  // Increments the Smi call count stored in an ICData entries array in place
  // by adding the raw bits of Smi 1, avoiding a tag/untag round trip.
  DART_FORCE_INLINE static void IncrementICUsageCount(ObjectPtr* entries,
                                                      intptr_t offset,
                                                      intptr_t args_tested) {
    const intptr_t count_offset = ICData::CountIndexFor(args_tested);
    const intptr_t raw_smi_old =
        static_cast<intptr_t>(entries[offset + count_offset]);
    const intptr_t raw_smi_new = raw_smi_old + Smi::RawValue(1);
    *reinterpret_cast<intptr_t*>(&entries[offset + count_offset]) = raw_smi_new;
  }

  // Returns true if index is a Smi in [0, length). Comparing the raw bits is
  // valid since both values carry the same Smi tagging.
  DART_FORCE_INLINE static bool CheckIndex(SmiPtr index, SmiPtr length) {
    return !index->IsHeapObject() && (static_cast<intptr_t>(index) >= 0) &&
           (static_cast<intptr_t>(index) < static_cast<intptr_t>(length));
  }

  // Reads the type-arguments length entry of an arguments descriptor.
  DART_FORCE_INLINE static intptr_t ArgDescTypeArgsLen(ArrayPtr argdesc) {
    return Smi::Value(*reinterpret_cast<SmiPtr*>(
        reinterpret_cast<uword>(argdesc->ptr()) +
        Array::element_offset(ArgumentsDescriptor::kTypeArgsLenIndex)));
  }

  // Reads the argument count entry of an arguments descriptor.
  DART_FORCE_INLINE static intptr_t ArgDescArgCount(ArrayPtr argdesc) {
    return Smi::Value(*reinterpret_cast<SmiPtr*>(
        reinterpret_cast<uword>(argdesc->ptr()) +
        Array::element_offset(ArgumentsDescriptor::kCountIndex)));
  }

  // Reads the argument size entry of an arguments descriptor.
  DART_FORCE_INLINE static intptr_t ArgDescArgSize(ArrayPtr argdesc) {
    return Smi::Value(*reinterpret_cast<SmiPtr*>(
        reinterpret_cast<uword>(argdesc->ptr()) +
        Array::element_offset(ArgumentsDescriptor::kSizeIndex)));
  }

  // Reads the positional argument count entry of an arguments descriptor.
  DART_FORCE_INLINE static intptr_t ArgDescPosCount(ArrayPtr argdesc) {
    return Smi::Value(*reinterpret_cast<SmiPtr*>(
        reinterpret_cast<uword>(argdesc->ptr()) +
        Array::element_offset(ArgumentsDescriptor::kPositionalCountIndex)));
  }

  // Returns the Bytecode object stored in the pc-marker slot of frame FP.
  DART_FORCE_INLINE static BytecodePtr FrameBytecode(ObjectPtr* FP) {
    ASSERT(GetClassId(FP[kKBCPcMarkerSlotFromFp]) == kBytecodeCid);
    return static_cast<BytecodePtr>(FP[kKBCPcMarkerSlotFromFp]);
  }

  // Returns true if storing value into field requires the runtime to update
  // the field's guard state (class id, nullability, list length, exactness).
  DART_FORCE_INLINE static bool FieldNeedsGuardUpdate(FieldPtr field,
                                                      ObjectPtr value) {
    // The interpreter should never see a cloned field.
    ASSERT(field->ptr()->owner_->GetClassId() != kFieldCid);

    const classid_t guarded_cid = field->ptr()->guarded_cid_;

    if (guarded_cid == kDynamicCid) {
      // Field is not guarded.
      return false;
    }

    ASSERT(Isolate::Current()->use_field_guards());

    const classid_t nullability_cid = field->ptr()->is_nullable_;
    const classid_t value_cid = InterpreterHelpers::GetClassId(value);

    if (nullability_cid == value_cid) {
      // Storing null into a nullable field.
      return false;
    }

    if (guarded_cid != value_cid) {
      // First assignment (guarded_cid == kIllegalCid) or
      // field no longer monomorphic or
      // field has become nullable.
      return true;
    }

    intptr_t guarded_list_length =
        Smi::Value(field->ptr()->guarded_list_length_);

    if (UNLIKELY(guarded_list_length >= Field::kUnknownFixedLength)) {
      // Guarding length, check this in the runtime.
      return true;
    }

    if (UNLIKELY(field->ptr()->static_type_exactness_state_ >=
                 StaticTypeExactnessState::Uninitialized().Encode())) {
      // Guarding "exactness", check this in the runtime.
      return true;
    }

    // Everything matches.
    return false;
  }

  // Returns true if cls has reached the kAllocateFinalized state.
  DART_FORCE_INLINE static bool IsAllocateFinalized(ClassPtr cls) {
    return Class::ClassFinalizedBits::decode(cls->ptr()->state_bits_) ==
           ClassLayout::kAllocateFinalized;
  }
};
240
241DART_FORCE_INLINE static const KBCInstr* SavedCallerPC(ObjectPtr* FP) {
242 return reinterpret_cast<const KBCInstr*>(
243 static_cast<uword>(FP[kKBCSavedCallerPcSlotFromFp]));
244}
245
246DART_FORCE_INLINE static FunctionPtr FrameFunction(ObjectPtr* FP) {
247 FunctionPtr function = static_cast<FunctionPtr>(FP[kKBCFunctionSlotFromFp]);
248 ASSERT(InterpreterHelpers::GetClassId(function) == kFunctionCid ||
249 InterpreterHelpers::GetClassId(function) == kNullCid);
250 return function;
251}
252
// Writes a fresh object header (tags word) for a new-space object of the
// given class and size at addr, and returns the tagged object pointer.
DART_FORCE_INLINE static ObjectPtr InitializeHeader(uword addr,
                                                    intptr_t class_id,
                                                    intptr_t instance_size) {
  uint32_t tags = 0;
  tags = ObjectLayout::ClassIdTag::update(class_id, tags);
  tags = ObjectLayout::SizeTag::update(instance_size, tags);
  // New-space object: the old-generation mark/remembered bits stay clear.
  tags = ObjectLayout::OldBit::update(false, tags);
  tags = ObjectLayout::OldAndNotMarkedBit::update(false, tags);
  tags = ObjectLayout::OldAndNotRememberedBit::update(false, tags);
  tags = ObjectLayout::NewBit::update(true, tags);
  // A full word is stored, which also writes zero in the hash_ field.
  *reinterpret_cast<uword*>(addr + Object::tags_offset()) = tags;
  return ObjectLayout::FromAddr(addr);
}
267
// Attempts a bump allocation of instance_size bytes for class_id in the
// thread's allocation area. On success stores the header-initialized object
// in *result and returns true. Returns false when there is not enough space,
// or (non-PRODUCT) when allocation tracing is enabled for the class, in
// which case the caller must allocate through the runtime instead.
DART_FORCE_INLINE static bool TryAllocate(Thread* thread,
                                          intptr_t class_id,
                                          intptr_t instance_size,
                                          ObjectPtr* result) {
  ASSERT(instance_size > 0);
  ASSERT(Utils::IsAligned(instance_size, kObjectAlignment));

#ifndef PRODUCT
  // Allocations of traced classes must be observed by the runtime.
  auto table = thread->isolate_group()->shared_class_table();
  if (UNLIKELY(table->TraceAllocationFor(class_id))) {
    return false;
  }
#endif
  const uword top = thread->top();
  const intptr_t remaining = thread->end() - top;
  if (LIKELY(remaining >= instance_size)) {
    thread->set_top(top + instance_size);
    *result = InitializeHeader(top, class_id, instance_size);
    return true;
  }
  return false;
}
290
291void LookupCache::Clear() {
292 for (intptr_t i = 0; i < kNumEntries; i++) {
293 entries_[i].receiver_cid = kIllegalCid;
294 }
295}
296
297bool LookupCache::Lookup(intptr_t receiver_cid,
298 StringPtr function_name,
299 ArrayPtr arguments_descriptor,
300 FunctionPtr* target) const {
301 ASSERT(receiver_cid != kIllegalCid); // Sentinel value.
302
303 const intptr_t hash = receiver_cid ^ static_cast<intptr_t>(function_name) ^
304 static_cast<intptr_t>(arguments_descriptor);
305 const intptr_t probe1 = hash & kTableMask;
306 if (entries_[probe1].receiver_cid == receiver_cid &&
307 entries_[probe1].function_name == function_name &&
308 entries_[probe1].arguments_descriptor == arguments_descriptor) {
309 *target = entries_[probe1].target;
310 return true;
311 }
312
313 intptr_t probe2 = (hash >> 3) & kTableMask;
314 if (entries_[probe2].receiver_cid == receiver_cid &&
315 entries_[probe2].function_name == function_name &&
316 entries_[probe2].arguments_descriptor == arguments_descriptor) {
317 *target = entries_[probe2].target;
318 return true;
319 }
320
321 return false;
322}
323
324void LookupCache::Insert(intptr_t receiver_cid,
325 StringPtr function_name,
326 ArrayPtr arguments_descriptor,
327 FunctionPtr target) {
328 // Otherwise we have to clear the cache or rehash on scavenges too.
329 ASSERT(function_name->IsOldObject());
330 ASSERT(arguments_descriptor->IsOldObject());
331 ASSERT(target->IsOldObject());
332
333 const intptr_t hash = receiver_cid ^ static_cast<intptr_t>(function_name) ^
334 static_cast<intptr_t>(arguments_descriptor);
335 const intptr_t probe1 = hash & kTableMask;
336 if (entries_[probe1].receiver_cid == kIllegalCid) {
337 entries_[probe1].receiver_cid = receiver_cid;
338 entries_[probe1].function_name = function_name;
339 entries_[probe1].arguments_descriptor = arguments_descriptor;
340 entries_[probe1].target = target;
341 return;
342 }
343
344 const intptr_t probe2 = (hash >> 3) & kTableMask;
345 if (entries_[probe2].receiver_cid == kIllegalCid) {
346 entries_[probe2].receiver_cid = receiver_cid;
347 entries_[probe2].function_name = function_name;
348 entries_[probe2].arguments_descriptor = arguments_descriptor;
349 entries_[probe2].target = target;
350 return;
351 }
352
353 entries_[probe1].receiver_cid = receiver_cid;
354 entries_[probe1].function_name = function_name;
355 entries_[probe1].arguments_descriptor = arguments_descriptor;
356 entries_[probe1].target = target;
357}
358
Interpreter::Interpreter()
    : stack_(NULL),
      fp_(NULL),
      pp_(nullptr),
      argdesc_(nullptr),
      lookup_cache_() {
  // Setup interpreter support first. Some of this information is needed to
  // setup the architecture state.
  // We allocate the stack here, the size is computed as the sum of
  // the size specified by the user and the buffer space needed for
  // handling stack overflow exceptions. To be safe in potential
  // stack underflows we also add some underflow buffer space.
  stack_ = new uintptr_t[(OSThread::GetSpecifiedStackSize() +
                          OSThread::kStackSizeBufferMax +
                          kInterpreterStackUnderflowSize) /
                         sizeof(uintptr_t)];
  // Low address: usable stack starts above the underflow buffer.
  stack_base_ =
      reinterpret_cast<uword>(stack_) + kInterpreterStackUnderflowSize;
  // Limit for StackOverflowError: the overflow buffer lies above this.
  overflow_stack_limit_ = stack_base_ + OSThread::GetSpecifiedStackSize();
  // High address: hard end of the allocated stack.
  stack_limit_ = overflow_stack_limit_ + OSThread::kStackSizeBufferMax;

  last_setjmp_buffer_ = NULL;

  DEBUG_ONLY(icount_ = 1);  // So that tracing after 0 traces first bytecode.

#if defined(DEBUG)
  // Optionally open the dynamic instruction trace file and allocate the
  // in-memory instruction buffer that is flushed to it.
  trace_file_bytes_written_ = 0;
  trace_file_ = NULL;
  if (FLAG_interpreter_trace_file != NULL) {
    Dart_FileOpenCallback file_open = Dart::file_open_callback();
    if (file_open != NULL) {
      trace_file_ = file_open(FLAG_interpreter_trace_file, /* write */ true);
      trace_buffer_ = new KBCInstr[kTraceBufferInstrs];
      trace_buffer_idx_ = 0;
    }
  }
#endif
  // Make sure interpreter's unboxing view is consistent with compiler.
  supports_unboxed_doubles_ = FlowGraphCompiler::SupportsUnboxedDoubles();
  supports_unboxed_simd128_ = FlowGraphCompiler::SupportsUnboxedSimd128();
}
403
404Interpreter::~Interpreter() {
405 delete[] stack_;
406 pp_ = NULL;
407 argdesc_ = NULL;
408#if defined(DEBUG)
409 if (trace_file_ != NULL) {
410 FlushTraceBuffer();
411 // Close the file.
412 Dart_FileCloseCallback file_close = Dart::file_close_callback();
413 if (file_close != NULL) {
414 file_close(trace_file_);
415 trace_file_ = NULL;
416 delete[] trace_buffer_;
417 trace_buffer_ = NULL;
418 }
419 }
420#endif
421}
422
423// Get the active Interpreter for the current isolate.
424Interpreter* Interpreter::Current() {
425 Thread* thread = Thread::Current();
426 Interpreter* interpreter = thread->interpreter();
427 if (interpreter == nullptr) {
428 NoSafepointScope no_safepoint;
429 interpreter = new Interpreter();
430 thread->set_interpreter(interpreter);
431 }
432 return interpreter;
433}
434
435#if defined(DEBUG)
436// Returns true if tracing of executed instructions is enabled.
437// May be called on entry, when icount_ has not been incremented yet.
438DART_FORCE_INLINE bool Interpreter::IsTracingExecution() const {
439 return icount_ > FLAG_trace_interpreter_after;
440}
441
442// Prints bytecode instruction at given pc for instruction tracing.
443DART_NOINLINE void Interpreter::TraceInstruction(const KBCInstr* pc) const {
444 THR_Print("%" Pu64 " ", icount_);
445 if (FLAG_support_disassembler) {
446 KernelBytecodeDisassembler::Disassemble(
447 reinterpret_cast<uword>(pc),
448 reinterpret_cast<uword>(KernelBytecode::Next(pc)));
449 } else {
450 THR_Print("Disassembler not supported in this mode.\n");
451 }
452}
453
454DART_FORCE_INLINE bool Interpreter::IsWritingTraceFile() const {
455 return (trace_file_ != NULL) &&
456 (trace_file_bytes_written_ < FLAG_interpreter_trace_file_max_bytes);
457}
458
// Writes the buffered trace instructions to the trace file (clamped to the
// configured size cap) and resets the buffer index.
void Interpreter::FlushTraceBuffer() {
  Dart_FileWriteCallback file_write = Dart::file_write_callback();
  if (file_write == NULL) {
    return;
  }
  if (trace_file_bytes_written_ >= FLAG_interpreter_trace_file_max_bytes) {
    // Size cap already reached; the buffered instructions are dropped.
    return;
  }
  // Clamp the write so the file never exceeds the configured maximum.
  const intptr_t bytes_to_write = Utils::Minimum(
      static_cast<uint64_t>(trace_buffer_idx_ * sizeof(KBCInstr)),
      FLAG_interpreter_trace_file_max_bytes - trace_file_bytes_written_);
  if (bytes_to_write == 0) {
    return;
  }
  file_write(trace_buffer_, bytes_to_write, trace_file_);
  trace_file_bytes_written_ += bytes_to_write;
  trace_buffer_idx_ = 0;
}
477
// Appends the bytes of the instruction at pc to the trace buffer, flushing
// the buffer to the trace file when it fills up.
DART_NOINLINE void Interpreter::WriteInstructionToTrace(const KBCInstr* pc) {
  Dart_FileWriteCallback file_write = Dart::file_write_callback();
  if (file_write == NULL) {
    return;
  }
  const KBCInstr* next = KernelBytecode::Next(pc);
  // Copy the instruction byte by byte. If the buffer fills up mid-
  // instruction, the loop exits early and the remaining bytes of this
  // instruction are not written after the flush below.
  while ((trace_buffer_idx_ < kTraceBufferInstrs) && (pc != next)) {
    trace_buffer_[trace_buffer_idx_++] = *pc;
    ++pc;
  }
  if (trace_buffer_idx_ == kTraceBufferInstrs) {
    FlushTraceBuffer();
  }
}
492
493#endif // defined(DEBUG)
494
// Calls into the Dart runtime are based on this interface.
typedef void (*InterpreterRuntimeCall)(NativeArguments arguments);

// Calls to leaf Dart runtime functions are based on this interface: up to
// four word-sized arguments (r0..r3) and a word-sized result.
typedef intptr_t (*InterpreterLeafRuntimeCall)(intptr_t r0,
                                               intptr_t r1,
                                               intptr_t r2,
                                               intptr_t r3);

// Calls to leaf float Dart runtime functions are based on this interface.
typedef double (*InterpreterLeafFloatRuntimeCall)(double d0, double d1);
506
// Builds an exit frame at frame (above the live frame FP=base) and publishes
// it through the thread's top_exit_frame_info so the VM sees the interpreter
// as exited. The four slots hold: no function, no bytecode, the current pc,
// and the caller frame base.
void Interpreter::Exit(Thread* thread,
                       ObjectPtr* base,
                       ObjectPtr* frame,
                       const KBCInstr* pc) {
  frame[0] = Function::null();
  frame[1] = Bytecode::null();
  frame[2] = static_cast<ObjectPtr>(reinterpret_cast<uword>(pc));
  frame[3] = static_cast<ObjectPtr>(reinterpret_cast<uword>(base));

  ObjectPtr* exit_fp = frame + kKBCDartFrameFixedSize;
  thread->set_top_exit_frame_info(reinterpret_cast<uword>(exit_fp));
  fp_ = exit_fp;

#if defined(DEBUG)
  if (IsTracingExecution()) {
    THR_Print("%" Pu64 " ", icount_);
    THR_Print("Exiting interpreter 0x%" Px " at fp_ 0x%" Px "\n",
              reinterpret_cast<uword>(this), reinterpret_cast<uword>(exit_fp));
  }
#endif
}
528
// Reverses Exit: clears the thread's top exit frame info. In non-PRODUCT
// builds it first restores pc_/fp_ from the exit frame for the profiler.
void Interpreter::Unexit(Thread* thread) {
#if !defined(PRODUCT)
  // For the profiler.
  ObjectPtr* exit_fp =
      reinterpret_cast<ObjectPtr*>(thread->top_exit_frame_info());
  ASSERT(exit_fp != 0);
  pc_ = SavedCallerPC(exit_fp);
  fp_ = SavedCallerFP(exit_fp);
#endif
  thread->set_top_exit_frame_info(0);
}
540
// Calling into runtime may trigger garbage collection and relocate objects,
// so all ObjectPtr pointers become outdated and should not be used across
// runtime calls.
// Note: functions below are marked DART_NOINLINE to recover performance where
// inlining these functions into the interpreter loop seemed to cause some code
// quality issues. Functions with the "returns_twice" attribute, such as setjmp,
// prevent reusing spill slots and large frame sizes.

// Invokes the runtime entry drt under a setjmp guard. Returns true when the
// call returns normally (after restoring the interpreter vm tag and
// un-exiting), false when it unwinds back here via an
// InterpreterSetjmpBuffer::Longjmp (e.g. a thrown exception).
static DART_NOINLINE bool InvokeRuntime(Thread* thread,
                                        Interpreter* interpreter,
                                        RuntimeFunction drt,
                                        const NativeArguments& args) {
  InterpreterSetjmpBuffer buffer(interpreter);
  if (!setjmp(buffer.buffer_)) {
    thread->set_vm_tag(reinterpret_cast<uword>(drt));
    drt(args);
    thread->set_vm_tag(VMTag::kDartInterpretedTagId);
    interpreter->Unexit(thread);
    return true;
  } else {
    return false;
  }
}
563
// Invokes a native function through its wrapper under a setjmp guard.
// Returns true on normal completion, false when an exception unwound back
// here via an InterpreterSetjmpBuffer::Longjmp.
static DART_NOINLINE bool InvokeNative(Thread* thread,
                                       Interpreter* interpreter,
                                       NativeFunctionWrapper wrapper,
                                       Dart_NativeFunction function,
                                       Dart_NativeArguments args) {
  InterpreterSetjmpBuffer buffer(interpreter);
  if (!setjmp(buffer.buffer_)) {
    thread->set_vm_tag(reinterpret_cast<uword>(function));
    wrapper(args, function);
    thread->set_vm_tag(VMTag::kDartInterpretedTagId);
    interpreter->Unexit(thread);
    return true;
  } else {
    return false;
  }
}
580
581extern "C" {
582// Note: The invocation stub follows the C ABI, so we cannot pass C++ struct
583// values like ObjectPtr. In some calling conventions (IA32), ObjectPtr is
584// passed/returned different from a pointer.
585typedef uword /*ObjectPtr*/ (*invokestub)(uword /*CodePtr*/ code,
586 uword /*ArrayPtr*/ argdesc,
587 ObjectPtr* arg0,
588 Thread* thread);
589}
590
// Invokes a function that has compiled code via the
// InvokeDartCodeFromBytecode stub. On normal completion pops the arguments
// and pushes the result on the interpreter stack. Returns false when an
// exception or error must be propagated, after arranging the interpreter
// state accordingly.
DART_NOINLINE bool Interpreter::InvokeCompiled(Thread* thread,
                                               FunctionPtr function,
                                               ObjectPtr* call_base,
                                               ObjectPtr* call_top,
                                               const KBCInstr** pc,
                                               ObjectPtr** FP,
                                               ObjectPtr** SP) {
  ASSERT(Function::HasCode(function));
  CodePtr code = function->ptr()->code_;
  ASSERT(code != StubCode::LazyCompile().raw());
  // TODO(regis): Once we share the same stack, try to invoke directly.
#if defined(DEBUG)
  if (IsTracingExecution()) {
    THR_Print("%" Pu64 " ", icount_);
    THR_Print("invoking compiled %s\n", Function::Handle(function).ToCString());
  }
#endif
  // On success, returns a RawInstance. On failure, a RawError.
  invokestub volatile entrypoint = reinterpret_cast<invokestub>(
      StubCode::InvokeDartCodeFromBytecode().EntryPoint());
  ObjectPtr result;
  // Mark the interpreter as exited while running compiled code.
  Exit(thread, *FP, call_top + 1, *pc);
  {
    InterpreterSetjmpBuffer buffer(this);
    if (!setjmp(buffer.buffer_)) {
#if defined(USING_SIMULATOR)
      // We need to beware that bouncing between the interpreter and the
      // simulator may exhaust the C stack before exhausting either the
      // interpreter or simulator stacks.
      if (!thread->os_thread()->HasStackHeadroom()) {
        thread->SetStackLimit(-1);
      }
      result = bit_copy<ObjectPtr, int64_t>(Simulator::Current()->Call(
          reinterpret_cast<intptr_t>(entrypoint), static_cast<intptr_t>(code),
          static_cast<intptr_t>(argdesc_),
          reinterpret_cast<intptr_t>(call_base),
          reinterpret_cast<intptr_t>(thread)));
#else
      result = static_cast<ObjectPtr>(entrypoint(static_cast<uword>(code),
                                                 static_cast<uword>(argdesc_),
                                                 call_base, thread));
#endif
      ASSERT(thread->vm_tag() == VMTag::kDartInterpretedTagId);
      ASSERT(thread->execution_state() == Thread::kThreadInGenerated);
      Unexit(thread);
    } else {
      // An exception unwound through the compiled code back to this buffer.
      return false;
    }
  }
  // Pop args and push result.
  *SP = call_base;
  **SP = result;
  // Reload the pool pointer; the call may have triggered GC.
  pp_ = InterpreterHelpers::FrameBytecode(*FP)->ptr()->object_pool_;

  // If the result is an error (not a Dart instance), it must either be rethrown
  // (in the case of an unhandled exception) or it must be returned to the
  // caller of the interpreter to be propagated.
  if (result->IsHeapObject()) {
    const intptr_t result_cid = result->GetClassId();
    if (result_cid == kUnhandledExceptionCid) {
      (*SP)[0] = UnhandledException::RawCast(result)->ptr()->exception_;
      (*SP)[1] = UnhandledException::RawCast(result)->ptr()->stacktrace_;
      (*SP)[2] = 0;  // Space for result.
      Exit(thread, *FP, *SP + 3, *pc);
      NativeArguments args(thread, 2, *SP, *SP + 2);
      if (!InvokeRuntime(thread, this, DRT_ReThrow, args)) {
        return false;
      }
      UNREACHABLE();
    }
    if (IsErrorClassId(result_cid)) {
      // Unwind to entry frame.
      fp_ = *FP;
      pc_ = SavedCallerPC(fp_);
      while (!IsEntryFrameMarker(pc_)) {
        fp_ = SavedCallerFP(fp_);
        pc_ = SavedCallerPC(fp_);
      }
      // Pop entry frame.
      fp_ = SavedCallerFP(fp_);
      // Hand the error object to the caller of the interpreter.
      special_[KernelBytecode::kExceptionSpecialIndex] = result;
      return false;
    }
  }
  return true;
}
677
// Pushes an interpreted frame for function (which must have bytecode) on top
// of call_top and redirects *pc/*FP/*SP into it. Always returns true;
// execution continues at the callee's first instruction.
DART_FORCE_INLINE bool Interpreter::InvokeBytecode(Thread* thread,
                                                   FunctionPtr function,
                                                   ObjectPtr* call_base,
                                                   ObjectPtr* call_top,
                                                   const KBCInstr** pc,
                                                   ObjectPtr** FP,
                                                   ObjectPtr** SP) {
  ASSERT(Function::HasBytecode(function));
#if defined(DEBUG)
  if (IsTracingExecution()) {
    THR_Print("%" Pu64 " ", icount_);
    THR_Print("invoking %s\n",
              Function::Handle(function).ToFullyQualifiedCString());
  }
#endif
  ObjectPtr* callee_fp = call_top + kKBCDartFrameFixedSize;
  ASSERT(function == FrameFunction(callee_fp));
  BytecodePtr bytecode = function->ptr()->bytecode_;
  // Fill in the fixed frame slots: bytecode (pc marker), saved caller pc and
  // saved caller fp.
  callee_fp[kKBCPcMarkerSlotFromFp] = bytecode;
  callee_fp[kKBCSavedCallerPcSlotFromFp] =
      static_cast<ObjectPtr>(reinterpret_cast<uword>(*pc));
  callee_fp[kKBCSavedCallerFpSlotFromFp] =
      static_cast<ObjectPtr>(reinterpret_cast<uword>(*FP));
  // Switch to the callee's object pool and first instruction.
  pp_ = bytecode->ptr()->object_pool_;
  *pc = reinterpret_cast<const KBCInstr*>(bytecode->ptr()->instructions_);
  NOT_IN_PRODUCT(pc_ = *pc);  // For the profiler.
  *FP = callee_fp;
  NOT_IN_PRODUCT(fp_ = callee_fp);  // For the profiler.
  *SP = *FP - 1;
  return true;
}
709
// Invokes the function stored in the callee frame's function slot, first
// asking the runtime to compile it (or load its bytecode) if it has neither.
// Returns false when an exception must be propagated.
DART_FORCE_INLINE bool Interpreter::Invoke(Thread* thread,
                                           ObjectPtr* call_base,
                                           ObjectPtr* call_top,
                                           const KBCInstr** pc,
                                           ObjectPtr** FP,
                                           ObjectPtr** SP) {
  ObjectPtr* callee_fp = call_top + kKBCDartFrameFixedSize;
  FunctionPtr function = FrameFunction(callee_fp);

  for (;;) {
    if (Function::HasCode(function)) {
      return InvokeCompiled(thread, function, call_base, call_top, pc, FP, SP);
    }
    if (Function::HasBytecode(function)) {
      return InvokeBytecode(thread, function, call_base, call_top, pc, FP, SP);
    }

    // Compile the function to either generate code or load bytecode.
    call_top[1] = 0;  // Code result.
    call_top[2] = function;
    Exit(thread, *FP, call_top + 3, *pc);
    NativeArguments native_args(thread, 1, call_top + 2, call_top + 1);
    if (!InvokeRuntime(thread, this, DRT_CompileFunction, native_args)) {
      return false;
    }
    // Reload objects after the call which may trigger GC.
    function = Function::RawCast(call_top[2]);

    ASSERT(Function::HasCode(function) || Function::HasBytecode(function));
  }
}
741
// Performs an instance call of target_name with the arguments between
// call_base and top. Resolves the target through the lookup cache, falling
// back to the runtime's call-miss handler, and finally to noSuchMethod
// handling when lazy dispatchers are disabled. Returns false when an
// exception must be propagated.
DART_FORCE_INLINE bool Interpreter::InstanceCall(Thread* thread,
                                                 StringPtr target_name,
                                                 ObjectPtr* call_base,
                                                 ObjectPtr* top,
                                                 const KBCInstr** pc,
                                                 ObjectPtr** FP,
                                                 ObjectPtr** SP) {
  ObjectPtr null_value = Object::null();
  const intptr_t type_args_len =
      InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
  // When type arguments are passed, the receiver follows them.
  const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;

  intptr_t receiver_cid =
      InterpreterHelpers::GetClassId(call_base[receiver_idx]);

  FunctionPtr target;
  if (UNLIKELY(!lookup_cache_.Lookup(receiver_cid, target_name, argdesc_,
                                     &target))) {
    // Table lookup miss.
    top[0] = null_value;  // Clean up slot as it may be visited by GC.
    top[1] = call_base[receiver_idx];
    top[2] = target_name;
    top[3] = argdesc_;
    top[4] = null_value;  // Result slot.

    Exit(thread, *FP, top + 5, *pc);
    NativeArguments native_args(thread, 3, /* argv */ top + 1,
                                /* result */ top + 4);
    if (!InvokeRuntime(thread, this, DRT_InterpretedInstanceCallMissHandler,
                       native_args)) {
      return false;
    }

    // Reload objects after the call which may trigger GC.
    target = static_cast<FunctionPtr>(top[4]);
    target_name = static_cast<StringPtr>(top[2]);
    argdesc_ = static_cast<ArrayPtr>(top[3]);
  }

  if (target != Function::null()) {
    lookup_cache_.Insert(receiver_cid, target_name, argdesc_, target);
    top[0] = target;
    return Invoke(thread, call_base, top, pc, FP, SP);
  }

  // The miss handler should only fail to return a function if lazy dispatchers
  // are disabled, in which case we need to call DRT_InvokeNoSuchMethod, which
  // walks the receiver appropriately in this case.
  ASSERT(!FLAG_lazy_dispatchers);

  // The receiver, name, and argument descriptor are already in the appropriate
  // places on the stack from the previous call.
  ASSERT(top[4] == null_value);

  // Allocate array of arguments.
  {
    const intptr_t argc =
        InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
    ASSERT_EQUAL(top - call_base, argc);

    top[5] = Smi::New(argc);  // length
    top[6] = null_value;      // type
    Exit(thread, *FP, top + 7, *pc);
    NativeArguments native_args(thread, 2, /* argv */ top + 5,
                                /* result */ top + 4);
    if (!InvokeRuntime(thread, this, DRT_AllocateArray, native_args)) {
      return false;
    }

    // Copy arguments into the newly allocated array.
    ArrayPtr array = Array::RawCast(top[4]);
    for (intptr_t i = 0; i < argc; i++) {
      array->ptr()->data()[i] = call_base[i];
    }
  }

  // Invoke noSuchMethod with (receiver, name, descriptor, argument array).
  {
    Exit(thread, *FP, top + 5, *pc);
    NativeArguments native_args(thread, 4, /* argv */ top + 1,
                                /* result */ top);
    if (!InvokeRuntime(thread, this, DRT_InvokeNoSuchMethod, native_args)) {
      return false;
    }

    // Pop the call args and push the result.
    ObjectPtr result = top[0];
    *SP = call_base;
    **SP = result;
    pp_ = InterpreterHelpers::FrameBytecode(*FP)->ptr()->object_pool_;
  }

  return true;
}
834
// Note:
// All macro helpers are intended to be used only inside Interpreter::Call.

// Counts and prints executed bytecode instructions (in DEBUG mode), and
// appends them to the dynamic trace file while one is being written.
#if defined(DEBUG)
#define TRACE_INSTRUCTION                                                      \
  if (IsTracingExecution()) {                                                  \
    TraceInstruction(pc);                                                      \
  }                                                                            \
  if (IsWritingTraceFile()) {                                                  \
    WriteInstructionToTrace(pc);                                               \
  }                                                                            \
  icount_++;
#else
#define TRACE_INSTRUCTION
#endif  // defined(DEBUG)
851
// Decode opcode and A part of the given value and dispatch to the
// corresponding bytecode handler: via computed goto when the toolchain
// supports it, otherwise via the SwitchDispatch switch statement.
#ifdef DART_HAS_COMPUTED_GOTO
#define DISPATCH_OP(val)                                                       \
  do {                                                                         \
    op = (val);                                                                \
    TRACE_INSTRUCTION                                                          \
    goto* dispatch[op];                                                        \
  } while (0)
#else
#define DISPATCH_OP(val)                                                       \
  do {                                                                         \
    op = (val);                                                                \
    TRACE_INSTRUCTION                                                          \
    goto SwitchDispatch;                                                       \
  } while (0)
#endif

// Fetch next operation from PC and dispatch.
#define DISPATCH() DISPATCH_OP(*pc)
872
// Load target of a jump instruction into PC.
#define LOAD_JUMP_TARGET() pc = rT

// Labels of a bytecode handler: a normal entry, a wide-encoding entry, and a
// shared implementation label that both entries reach.
#define BYTECODE_ENTRY_LABEL(Name) bc##Name:
#define BYTECODE_WIDE_ENTRY_LABEL(Name) bc##Name##_Wide:
#define BYTECODE_IMPL_LABEL(Name) bc##Name##Impl:
#define GOTO_BYTECODE_IMPL(Name) goto bc##Name##Impl;

// Define entry point that handles bytecode Name with the given operand format.
#define BYTECODE(Name, Operands) BYTECODE_HEADER_##Operands(Name)
883
// Helpers to decode common instruction formats. Used in conjunction with
// BYTECODE() macro. Each decodes its operands from the bytes following the
// opcode and advances pc past the instruction; the *_Wide entries decode the
// 32-bit little-endian encodings.

// No operands.
#define BYTECODE_HEADER_0(Name)                                                \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  pc += 1;

// One 8-bit unsigned operand A.
#define BYTECODE_HEADER_A(Name)                                                \
  uint32_t rA;                                                                 \
  USE(rA);                                                                     \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rA = pc[1];                                                                  \
  pc += 2;

// One unsigned operand D: 8-bit narrow or 32-bit wide encoding.
#define BYTECODE_HEADER_D(Name)                                                \
  uint32_t rD;                                                                 \
  USE(rD);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rD = static_cast<uint32_t>(pc[1]) | (static_cast<uint32_t>(pc[2]) << 8) |    \
       (static_cast<uint32_t>(pc[3]) << 16) |                                  \
       (static_cast<uint32_t>(pc[4]) << 24);                                   \
  pc += 5;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rD = pc[1];                                                                  \
  pc += 2;                                                                     \
  BYTECODE_IMPL_LABEL(Name)

// One signed operand X: sign-extended 8-bit narrow or 32-bit wide encoding.
#define BYTECODE_HEADER_X(Name)                                                \
  int32_t rX;                                                                  \
  USE(rX);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rX = static_cast<int32_t>(static_cast<uint32_t>(pc[1]) |                     \
                            (static_cast<uint32_t>(pc[2]) << 8) |              \
                            (static_cast<uint32_t>(pc[3]) << 16) |             \
                            (static_cast<uint32_t>(pc[4]) << 24));             \
  pc += 5;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rX = static_cast<int8_t>(pc[1]);                                             \
  pc += 2;                                                                     \
  BYTECODE_IMPL_LABEL(Name)
926
// One pc-relative jump-target operand T: sign-extended 8-bit narrow or
// 24-bit wide encoding (the arithmetic >> 8 performs the sign extension).
#define BYTECODE_HEADER_T(Name)                                                \
  const KBCInstr* rT;                                                          \
  USE(rT);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rT = pc + (static_cast<int32_t>((static_cast<uint32_t>(pc[1]) << 8) |        \
                                  (static_cast<uint32_t>(pc[2]) << 16) |       \
                                  (static_cast<uint32_t>(pc[3]) << 24)) >>     \
             8);                                                               \
  pc += 4;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rT = pc + static_cast<int8_t>(pc[1]);                                        \
  pc += 2;                                                                     \
  BYTECODE_IMPL_LABEL(Name)

// 8-bit unsigned A plus unsigned E (8-bit narrow / 32-bit wide).
#define BYTECODE_HEADER_A_E(Name)                                              \
  uint32_t rA, rE;                                                             \
  USE(rA);                                                                     \
  USE(rE);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rA = pc[1];                                                                  \
  rE = static_cast<uint32_t>(pc[2]) | (static_cast<uint32_t>(pc[3]) << 8) |    \
       (static_cast<uint32_t>(pc[4]) << 16) |                                  \
       (static_cast<uint32_t>(pc[5]) << 24);                                   \
  pc += 6;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rA = pc[1];                                                                  \
  rE = pc[2];                                                                  \
  pc += 3;                                                                     \
  BYTECODE_IMPL_LABEL(Name)

// 8-bit unsigned A plus signed Y (sign-extended 8-bit narrow / 32-bit wide).
#define BYTECODE_HEADER_A_Y(Name)                                              \
  uint32_t rA;                                                                 \
  int32_t rY;                                                                  \
  USE(rA);                                                                     \
  USE(rY);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rA = pc[1];                                                                  \
  rY = static_cast<int32_t>(static_cast<uint32_t>(pc[2]) |                     \
                            (static_cast<uint32_t>(pc[3]) << 8) |              \
                            (static_cast<uint32_t>(pc[4]) << 16) |             \
                            (static_cast<uint32_t>(pc[5]) << 24));             \
  pc += 6;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rA = pc[1];                                                                  \
  rY = static_cast<int8_t>(pc[2]);                                             \
  pc += 3;                                                                     \
  BYTECODE_IMPL_LABEL(Name)

// Unsigned D (8-bit narrow / 32-bit wide) plus 8-bit unsigned F.
#define BYTECODE_HEADER_D_F(Name)                                              \
  uint32_t rD, rF;                                                             \
  USE(rD);                                                                     \
  USE(rF);                                                                     \
  BYTECODE_WIDE_ENTRY_LABEL(Name)                                              \
  rD = static_cast<uint32_t>(pc[1]) | (static_cast<uint32_t>(pc[2]) << 8) |    \
       (static_cast<uint32_t>(pc[3]) << 16) |                                  \
       (static_cast<uint32_t>(pc[4]) << 24);                                   \
  rF = pc[5];                                                                  \
  pc += 6;                                                                     \
  GOTO_BYTECODE_IMPL(Name);                                                    \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rD = pc[1];                                                                  \
  rF = pc[2];                                                                  \
  pc += 3;                                                                     \
  BYTECODE_IMPL_LABEL(Name)

// Three 8-bit unsigned operands A, B, C (no wide form).
#define BYTECODE_HEADER_A_B_C(Name)                                            \
  uint32_t rA, rB, rC;                                                         \
  USE(rA);                                                                     \
  USE(rB);                                                                     \
  USE(rC);                                                                     \
  BYTECODE_ENTRY_LABEL(Name)                                                   \
  rA = pc[1];                                                                  \
  rB = pc[2];                                                                  \
  rC = pc[3];                                                                  \
  pc += 4;
1005
// Transfer control to the shared exception-handling code at the
// HandleException label inside the dispatch loop.
#define HANDLE_EXCEPTION \
  do { \
    goto HandleException; \
  } while (0)

// After a successful runtime call, re-cache the object pool pointer from the
// current frame's bytecode. NOTE(review): presumably the runtime call can
// invalidate the cached pp_ (e.g. via GC moving the pool) — confirm against
// the pp_ handling in Exit()/InvokeRuntime.
#define HANDLE_RETURN \
  do { \
    pp_ = InterpreterHelpers::FrameBytecode(FP)->ptr()->object_pool_; \
  } while (0)

// Runtime call helpers: handle invocation and potential exception after return.
#define INVOKE_RUNTIME(Func, Args) \
  if (!InvokeRuntime(thread, this, Func, Args)) { \
    HANDLE_EXCEPTION; \
  } else { \
    HANDLE_RETURN; \
  }

#define INVOKE_NATIVE(Wrapper, Func, Args) \
  if (!InvokeNative(thread, this, Wrapper, Func, Args)) { \
    HANDLE_EXCEPTION; \
  } else { \
    HANDLE_RETURN; \
  }

// Read entry |index| from the currently cached object pool (pp_).
#define LOAD_CONSTANT(index) (pp_->ptr()->data()[(index)].raw_obj_)

// Unbox an integer |obj| into a fresh int64_t local named |value|.
// Fast path: Smi (tag check + arithmetic shift). Otherwise a null receiver
// throws a null error reporting |selector| as the attempted member, and any
// other integer is read via Integer::GetInt64Value.
#define UNBOX_INT64(value, obj, selector) \
  int64_t value; \
  { \
    word raw_value = static_cast<word>(obj); \
    if (LIKELY((raw_value & kSmiTagMask) == kSmiTag)) { \
      value = raw_value >> kSmiTagShift; \
    } else { \
      if (UNLIKELY(obj == null_value)) { \
        SP[0] = selector.raw(); \
        goto ThrowNullError; \
      } \
      value = Integer::GetInt64Value(RAW_CAST(Integer, obj)); \
    } \
  }

// Box an int64_t |result| into SP[0]: a Smi when it fits, otherwise a newly
// allocated Mint (allocation may fail with a pending exception).
#define BOX_INT64_RESULT(result) \
  if (LIKELY(Smi::IsValid(result))) { \
    SP[0] = Smi::New(static_cast<intptr_t>(result)); \
  } else if (!AllocateMint(thread, result, pc, FP, SP)) { \
    HANDLE_EXCEPTION; \
  } \
  ASSERT(Integer::GetInt64Value(RAW_CAST(Integer, SP[0])) == result);

// Unbox a double |obj| into a fresh double local named |value|; a null
// receiver throws a null error reporting |selector|.
#define UNBOX_DOUBLE(value, obj, selector) \
  double value; \
  { \
    if (UNLIKELY(obj == null_value)) { \
      SP[0] = selector.raw(); \
      goto ThrowNullError; \
    } \
    value = Double::RawCast(obj)->ptr()->value_; \
  }

// Box a double |result| into SP[0] as a newly allocated Double (allocation
// may fail with a pending exception).
#define BOX_DOUBLE_RESULT(result) \
  if (!AllocateDouble(thread, result, pc, FP, SP)) { \
    HANDLE_EXCEPTION; \
  } \
  ASSERT(Utils::DoublesBitEqual(Double::RawCast(SP[0])->ptr()->value_, result));
1071
// Increment |function|'s usage counter and, once it reaches
// FLAG_compilation_counter_threshold while the function still has no compiled
// code, call the runtime to compile it. |function| is reloaded from the frame
// afterwards because the runtime call may move it (GC).
#define BUMP_USAGE_COUNTER_ON_ENTRY(function) \
  { \
    int32_t counter = ++(function->ptr()->usage_counter_); \
    if (UNLIKELY(FLAG_compilation_counter_threshold >= 0 && \
                 counter >= FLAG_compilation_counter_threshold && \
                 !Function::HasCode(function))) { \
      SP[1] = 0; /* Unused result. */ \
      SP[2] = function; \
      Exit(thread, FP, SP + 3, pc); \
      INVOKE_RUNTIME(DRT_CompileInterpretedFunction, \
                     NativeArguments(thread, 1, SP + 2, SP + 1)); \
      function = FrameFunction(FP); \
    } \
  }

#ifdef PRODUCT
// No debugger support in product mode: the check compiles away entirely.
#define DEBUG_CHECK
#else
// The DEBUG_CHECK macro must only be called from bytecodes listed in
// KernelBytecode::IsDebugCheckedOpcode.
#define DEBUG_CHECK \
  if (is_debugging()) { \
    /* Check for debug breakpoint or if single stepping. */ \
    if (thread->isolate()->debugger()->HasBytecodeBreakpointAt(pc)) { \
      SP[1] = null_value; \
      Exit(thread, FP, SP + 2, pc); \
      INVOKE_RUNTIME(DRT_BreakpointRuntimeHandler, \
                     NativeArguments(thread, 0, nullptr, SP + 1)) \
    } \
    /* The debugger expects to see the same pc again when single-stepping */ \
    if (thread->isolate()->single_step()) { \
      Exit(thread, FP, SP + 1, pc); \
      INVOKE_RUNTIME(DRT_SingleStepHandler, \
                     NativeArguments(thread, 0, nullptr, nullptr)); \
    } \
  }
#endif  // PRODUCT
1109
// Prologue helper for functions with optional (positional or named)
// parameters: copies the passed positional arguments into the frame, matches
// passed named arguments against the formal parameter list, and initializes
// unpassed optional parameters from their default values.
//
// On entry, *pc points at the sequence of LoadConstant instructions that
// follows the entry bytecode and encodes parameter names / default values;
// *pc is advanced past the instructions consumed here. On success, *SP is
// left pointing at the last copied parameter.
//
// Returns false on an argument mismatch; the caller then raises
// NoSuchMethod.
bool Interpreter::CopyParameters(Thread* thread,
                                 const KBCInstr** pc,
                                 ObjectPtr** FP,
                                 ObjectPtr** SP,
                                 const intptr_t num_fixed_params,
                                 const intptr_t num_opt_pos_params,
                                 const intptr_t num_opt_named_params) {
  const intptr_t min_num_pos_args = num_fixed_params;
  const intptr_t max_num_pos_args = num_fixed_params + num_opt_pos_params;

  // Decode arguments descriptor.
  const intptr_t arg_count = InterpreterHelpers::ArgDescArgCount(argdesc_);
  const intptr_t pos_count = InterpreterHelpers::ArgDescPosCount(argdesc_);
  const intptr_t named_count = (arg_count - pos_count);

  // Check that we got the right number of positional parameters.
  if ((min_num_pos_args > pos_count) || (pos_count > max_num_pos_args)) {
    return false;
  }

  // Copy all passed position arguments.
  ObjectPtr* first_arg = FrameArguments(*FP, arg_count);
  memmove(*FP, first_arg, pos_count * kWordSize);

  if (num_opt_named_params != 0) {
    // This is a function with named parameters.
    // Walk the list of named parameters and their
    // default values encoded as pairs of LoadConstant instructions that
    // follows the entry point and find matching values via arguments
    // descriptor.
    ObjectPtr* argdesc_data = argdesc_->ptr()->data();

    // NOTE(review): this single-pass merge assumes the named arguments in the
    // descriptor and the formal parameters appear in a consistent order —
    // confirm against the bytecode generator.
    intptr_t i = 0;  // argument position
    intptr_t j = 0;  // parameter position
    while ((j < num_opt_named_params) && (i < named_count)) {
      // Fetch formal parameter information: name, default value, target slot.
      const KBCInstr* load_name = *pc;
      const KBCInstr* load_value = KernelBytecode::Next(load_name);
      *pc = KernelBytecode::Next(load_value);
      ASSERT(KernelBytecode::IsLoadConstantOpcode(load_name));
      ASSERT(KernelBytecode::IsLoadConstantOpcode(load_value));
      // Both instructions of the pair target the same frame slot.
      const uint8_t reg = KernelBytecode::DecodeA(load_name);
      ASSERT(reg == KernelBytecode::DecodeA(load_value));

      StringPtr name = static_cast<StringPtr>(
          LOAD_CONSTANT(KernelBytecode::DecodeE(load_name)));
      if (name == argdesc_data[ArgumentsDescriptor::name_index(i)]) {
        // Parameter was passed. Fetch passed value.
        const intptr_t arg_index = Smi::Value(static_cast<SmiPtr>(
            argdesc_data[ArgumentsDescriptor::position_index(i)]));
        (*FP)[reg] = first_arg[arg_index];
        ++i;  // Consume passed argument.
      } else {
        // Parameter was not passed. Fetch default value.
        (*FP)[reg] = LOAD_CONSTANT(KernelBytecode::DecodeE(load_value));
      }
      ++j;  // Next formal parameter.
    }

    // If we have unprocessed formal parameters then initialize them all
    // using default values.
    while (j < num_opt_named_params) {
      const KBCInstr* load_name = *pc;
      const KBCInstr* load_value = KernelBytecode::Next(load_name);
      *pc = KernelBytecode::Next(load_value);
      ASSERT(KernelBytecode::IsLoadConstantOpcode(load_name));
      ASSERT(KernelBytecode::IsLoadConstantOpcode(load_value));
      const uint8_t reg = KernelBytecode::DecodeA(load_name);
      ASSERT(reg == KernelBytecode::DecodeA(load_value));

      (*FP)[reg] = LOAD_CONSTANT(KernelBytecode::DecodeE(load_value));
      ++j;
    }

    // If we have unprocessed passed arguments that means we have mismatch
    // between formal parameters and concrete arguments. This can only
    // occur if the current function is a closure.
    if (i < named_count) {
      return false;
    }

    // SP points past copied arguments.
    *SP = *FP + num_fixed_params + num_opt_named_params - 1;
  } else {
    ASSERT(num_opt_pos_params != 0);
    if (named_count != 0) {
      // Function can't have both named and optional positional parameters.
      // This kind of mismatch can only occur if the current function
      // is a closure.
      return false;
    }

    // Process the list of default values encoded as a sequence of
    // LoadConstant instructions after EntryOpt bytecode.
    // Execute only those that correspond to parameters that were not passed.
    // First skip the defaults of the optional parameters that were passed.
    for (intptr_t i = num_fixed_params; i < pos_count; ++i) {
      ASSERT(KernelBytecode::IsLoadConstantOpcode(*pc));
      *pc = KernelBytecode::Next(*pc);
    }
    // Then materialize the defaults of the unpassed optional parameters.
    for (intptr_t i = pos_count; i < max_num_pos_args; ++i) {
      const KBCInstr* load_value = *pc;
      *pc = KernelBytecode::Next(load_value);
      ASSERT(KernelBytecode::IsLoadConstantOpcode(load_value));
      ASSERT(KernelBytecode::DecodeA(load_value) == i);
      (*FP)[i] = LOAD_CONSTANT(KernelBytecode::DecodeE(load_value));
    }

    // SP points past the last copied parameter.
    *SP = *FP + max_num_pos_args - 1;
  }

  return true;
}
1223
// Type-checks the instance in args[0] against the type in args[1].
// First consults the SubtypeTestCache |cache| (when non-null): the cache key
// consists of the instance's class id (or function, for closures) and the
// relevant type-argument vectors. On a cached positive result, returns true
// without calling the runtime; on a miss or a cached negative result, falls
// through to the DRT_TypeCheck runtime entry (which performs the full check
// and throws on failure). Returns false if an exception is pending.
bool Interpreter::AssertAssignable(Thread* thread,
                                   const KBCInstr* pc,
                                   ObjectPtr* FP,
                                   ObjectPtr* call_top,
                                   ObjectPtr* args,
                                   SubtypeTestCachePtr cache) {
  ObjectPtr null_value = Object::null();
  if (cache != null_value) {
    InstancePtr instance = static_cast<InstancePtr>(args[0]);
    TypeArgumentsPtr instantiator_type_arguments =
        static_cast<TypeArgumentsPtr>(args[2]);
    TypeArgumentsPtr function_type_arguments =
        static_cast<TypeArgumentsPtr>(args[3]);

    const intptr_t cid = InterpreterHelpers::GetClassId(instance);

    TypeArgumentsPtr instance_type_arguments =
        static_cast<TypeArgumentsPtr>(null_value);
    ObjectPtr instance_cid_or_function;

    TypeArgumentsPtr parent_function_type_arguments;
    TypeArgumentsPtr delayed_function_type_arguments;
    if (cid == kClosureCid) {
      // Closures are keyed by their function plus the type arguments they
      // captured, rather than by class id.
      ClosurePtr closure = static_cast<ClosurePtr>(instance);
      instance_type_arguments = closure->ptr()->instantiator_type_arguments_;
      parent_function_type_arguments = closure->ptr()->function_type_arguments_;
      delayed_function_type_arguments = closure->ptr()->delayed_type_arguments_;
      instance_cid_or_function = closure->ptr()->function_;
    } else {
      instance_cid_or_function = Smi::New(cid);

      ClassPtr instance_class = thread->isolate()->class_table()->At(cid);
      if (instance_class->ptr()->num_type_arguments_ < 0) {
        // Negative num_type_arguments_: cannot read the instance's type
        // arguments here (presumably not yet computed — TODO confirm);
        // defer to the runtime.
        goto AssertAssignableCallRuntime;
      } else if (instance_class->ptr()->num_type_arguments_ > 0) {
        // Load the type arguments stored in the instance at the class's
        // declared field offset.
        instance_type_arguments = reinterpret_cast<TypeArgumentsPtr*>(
            instance->ptr())[instance_class->ptr()
                                 ->host_type_arguments_field_offset_in_words_];
      }
      parent_function_type_arguments =
          static_cast<TypeArgumentsPtr>(null_value);
      delayed_function_type_arguments =
          static_cast<TypeArgumentsPtr>(null_value);
    }

    // Linear scan of the cache entries; the entry list is terminated by a
    // null in the first (class-id-or-function) slot.
    for (ObjectPtr* entries = cache->ptr()->cache_->ptr()->data();
         entries[0] != null_value;
         entries += SubtypeTestCache::kTestEntryLength) {
      if ((entries[SubtypeTestCache::kInstanceClassIdOrFunction] ==
           instance_cid_or_function) &&
          (entries[SubtypeTestCache::kInstanceTypeArguments] ==
           instance_type_arguments) &&
          (entries[SubtypeTestCache::kInstantiatorTypeArguments] ==
           instantiator_type_arguments) &&
          (entries[SubtypeTestCache::kFunctionTypeArguments] ==
           function_type_arguments) &&
          (entries[SubtypeTestCache::kInstanceParentFunctionTypeArguments] ==
           parent_function_type_arguments) &&
          (entries[SubtypeTestCache::kInstanceDelayedFunctionTypeArguments] ==
           delayed_function_type_arguments)) {
        if (Bool::True().raw() == entries[SubtypeTestCache::kTestResult]) {
          return true;
        } else {
          // Cached negative result: let the runtime produce the error.
          break;
        }
      }
    }
  }

AssertAssignableCallRuntime:
  // args[0]: Instance.
  // args[1]: Type.
  // args[2]: Instantiator type args.
  // args[3]: Function type args.
  // args[4]: Name.
  args[5] = cache;
  args[6] = Smi::New(kTypeCheckFromInline);
  args[7] = 0;  // Unused result.
  Exit(thread, FP, args + 8, pc);
  NativeArguments native_args(thread, 7, args, args + 7);
  return InvokeRuntime(thread, this, DRT_TypeCheck, native_args);
}
1306
// Type-checks |value| against the declared type of |field|, allocating the
// field's SubtypeTestCache on first use. Trivially passes when the field type
// is dynamic, Object (kInstanceCid), or void, or when |value| is null (see
// the NNBD TODOs below). |is_getter| only selects the name reported on
// failure: the synthetic "function result" symbol for getters, otherwise the
// field's own name. Returns false if an exception is pending.
template <bool is_getter>
bool Interpreter::AssertAssignableField(Thread* thread,
                                        const KBCInstr* pc,
                                        ObjectPtr* FP,
                                        ObjectPtr* SP,
                                        InstancePtr instance,
                                        FieldPtr field,
                                        InstancePtr value) {
  AbstractTypePtr field_type = field->ptr()->type_;
  // Perform type test of value if field type is not one of dynamic, object,
  // or void, and if the value is not null.
  if (field_type->GetClassId() == kTypeCid) {
    classid_t cid = Smi::Value(
        static_cast<SmiPtr>(Type::RawCast(field_type)->ptr()->type_class_id_));
    // TODO(regis): Revisit shortcut for NNBD.
    if (cid == kDynamicCid || cid == kInstanceCid || cid == kVoidCid) {
      return true;
    }
  }
  ObjectPtr null_value = Object::null();
  if (value == null_value) {
    // TODO(regis): Revisit null shortcut for NNBD.
    return true;
  }

  SubtypeTestCachePtr cache = field->ptr()->type_test_cache_;
  if (UNLIKELY(cache == null_value)) {
    // Allocate new cache.
    SP[1] = instance;    // Preserve.
    SP[2] = field;       // Preserve.
    SP[3] = value;       // Preserve.
    SP[4] = null_value;  // Result slot.

    Exit(thread, FP, SP + 5, pc);
    if (!InvokeRuntime(thread, this, DRT_AllocateSubtypeTestCache,
                       NativeArguments(thread, 0, /* argv */ SP + 4,
                                       /* retval */ SP + 4))) {
      return false;
    }

    // Reload objects after the call which may trigger GC.
    instance = static_cast<InstancePtr>(SP[1]);
    field = static_cast<FieldPtr>(SP[2]);
    value = static_cast<InstancePtr>(SP[3]);
    cache = static_cast<SubtypeTestCachePtr>(SP[4]);
    field_type = field->ptr()->type_;
    // Install the freshly allocated cache on the field for future checks.
    field->ptr()->type_test_cache_ = cache;
  }

  // Push arguments of type test.
  SP[1] = value;
  SP[2] = field_type;
  // Provide type arguments of instance as instantiator.
  SP[3] = InterpreterHelpers::GetTypeArguments(thread, instance);
  SP[4] = null_value;  // Implicit setters cannot be generic.
  SP[5] = is_getter ? Symbols::FunctionResult().raw() : field->ptr()->name_;
  return AssertAssignable(thread, pc, FP, /* call_top */ SP + 5,
                          /* args */ SP + 1, cache);
}
1366
1367ObjectPtr Interpreter::Call(const Function& function,
1368 const Array& arguments_descriptor,
1369 const Array& arguments,
1370 Thread* thread) {
1371 return Call(function.raw(), arguments_descriptor.raw(), arguments.Length(),
1372 arguments.raw_ptr()->data(), thread);
1373}
1374
1375// Allocate a _Mint for the given int64_t value and puts it into SP[0].
1376// Returns false on exception.
1377DART_NOINLINE bool Interpreter::AllocateMint(Thread* thread,
1378 int64_t value,
1379 const KBCInstr* pc,
1380 ObjectPtr* FP,
1381 ObjectPtr* SP) {
1382 ASSERT(!Smi::IsValid(value));
1383 MintPtr result;
1384 if (TryAllocate(thread, kMintCid, Mint::InstanceSize(),
1385 reinterpret_cast<ObjectPtr*>(&result))) {
1386 result->ptr()->value_ = value;
1387 SP[0] = result;
1388 return true;
1389 } else {
1390 SP[0] = 0; // Space for the result.
1391 SP[1] = thread->isolate()->object_store()->mint_class(); // Class object.
1392 SP[2] = Object::null(); // Type arguments.
1393 Exit(thread, FP, SP + 3, pc);
1394 NativeArguments args(thread, 2, SP + 1, SP);
1395 if (!InvokeRuntime(thread, this, DRT_AllocateObject, args)) {
1396 return false;
1397 }
1398 static_cast<MintPtr>(SP[0])->ptr()->value_ = value;
1399 return true;
1400 }
1401}
1402
1403// Allocate a _Double for the given double value and put it into SP[0].
1404// Returns false on exception.
1405DART_NOINLINE bool Interpreter::AllocateDouble(Thread* thread,
1406 double value,
1407 const KBCInstr* pc,
1408 ObjectPtr* FP,
1409 ObjectPtr* SP) {
1410 DoublePtr result;
1411 if (TryAllocate(thread, kDoubleCid, Double::InstanceSize(),
1412 reinterpret_cast<ObjectPtr*>(&result))) {
1413 result->ptr()->value_ = value;
1414 SP[0] = result;
1415 return true;
1416 } else {
1417 SP[0] = 0; // Space for the result.
1418 SP[1] = thread->isolate()->object_store()->double_class();
1419 SP[2] = Object::null(); // Type arguments.
1420 Exit(thread, FP, SP + 3, pc);
1421 NativeArguments args(thread, 2, SP + 1, SP);
1422 if (!InvokeRuntime(thread, this, DRT_AllocateObject, args)) {
1423 return false;
1424 }
1425 Double::RawCast(SP[0])->ptr()->value_ = value;
1426 return true;
1427 }
1428}
1429
1430// Allocate a _Float32x4 for the given simd value and put it into SP[0].
1431// Returns false on exception.
1432DART_NOINLINE bool Interpreter::AllocateFloat32x4(Thread* thread,
1433 simd128_value_t value,
1434 const KBCInstr* pc,
1435 ObjectPtr* FP,
1436 ObjectPtr* SP) {
1437 Float32x4Ptr result;
1438 if (TryAllocate(thread, kFloat32x4Cid, Float32x4::InstanceSize(),
1439 reinterpret_cast<ObjectPtr*>(&result))) {
1440 value.writeTo(result->ptr()->value_);
1441 SP[0] = result;
1442 return true;
1443 } else {
1444 SP[0] = 0; // Space for the result.
1445 SP[1] = thread->isolate()->object_store()->float32x4_class();
1446 SP[2] = Object::null(); // Type arguments.
1447 Exit(thread, FP, SP + 3, pc);
1448 NativeArguments args(thread, 2, SP + 1, SP);
1449 if (!InvokeRuntime(thread, this, DRT_AllocateObject, args)) {
1450 return false;
1451 }
1452 value.writeTo(Float32x4::RawCast(SP[0])->ptr()->value_);
1453 return true;
1454 }
1455}
1456
1457// Allocate _Float64x2 box for the given simd value and put it into SP[0].
1458// Returns false on exception.
1459DART_NOINLINE bool Interpreter::AllocateFloat64x2(Thread* thread,
1460 simd128_value_t value,
1461 const KBCInstr* pc,
1462 ObjectPtr* FP,
1463 ObjectPtr* SP) {
1464 Float64x2Ptr result;
1465 if (TryAllocate(thread, kFloat64x2Cid, Float64x2::InstanceSize(),
1466 reinterpret_cast<ObjectPtr*>(&result))) {
1467 value.writeTo(result->ptr()->value_);
1468 SP[0] = result;
1469 return true;
1470 } else {
1471 SP[0] = 0; // Space for the result.
1472 SP[1] = thread->isolate()->object_store()->float64x2_class();
1473 SP[2] = Object::null(); // Type arguments.
1474 Exit(thread, FP, SP + 3, pc);
1475 NativeArguments args(thread, 2, SP + 1, SP);
1476 if (!InvokeRuntime(thread, this, DRT_AllocateObject, args)) {
1477 return false;
1478 }
1479 value.writeTo(Float64x2::RawCast(SP[0])->ptr()->value_);
1480 return true;
1481 }
1482}
1483
1484// Allocate a _List with the given type arguments and length and put it into
1485// SP[0]. Returns false on exception.
1486bool Interpreter::AllocateArray(Thread* thread,
1487 TypeArgumentsPtr type_args,
1488 ObjectPtr length_object,
1489 const KBCInstr* pc,
1490 ObjectPtr* FP,
1491 ObjectPtr* SP) {
1492 if (LIKELY(!length_object->IsHeapObject())) {
1493 const intptr_t length = Smi::Value(Smi::RawCast(length_object));
1494 if (LIKELY(Array::IsValidLength(length))) {
1495 ArrayPtr result;
1496 if (TryAllocate(thread, kArrayCid, Array::InstanceSize(length),
1497 reinterpret_cast<ObjectPtr*>(&result))) {
1498 result->ptr()->type_arguments_ = type_args;
1499 result->ptr()->length_ = Smi::New(length);
1500 for (intptr_t i = 0; i < length; i++) {
1501 result->ptr()->data()[i] = Object::null();
1502 }
1503 SP[0] = result;
1504 return true;
1505 }
1506 }
1507 }
1508
1509 SP[0] = 0; // Space for the result;
1510 SP[1] = length_object;
1511 SP[2] = type_args;
1512 Exit(thread, FP, SP + 3, pc);
1513 NativeArguments args(thread, 2, SP + 1, SP);
1514 return InvokeRuntime(thread, this, DRT_AllocateArray, args);
1515}
1516
1517// Allocate a _Context with the given length and put it into SP[0].
1518// Returns false on exception.
1519bool Interpreter::AllocateContext(Thread* thread,
1520 intptr_t num_context_variables,
1521 const KBCInstr* pc,
1522 ObjectPtr* FP,
1523 ObjectPtr* SP) {
1524 ContextPtr result;
1525 if (TryAllocate(thread, kContextCid,
1526 Context::InstanceSize(num_context_variables),
1527 reinterpret_cast<ObjectPtr*>(&result))) {
1528 result->ptr()->num_variables_ = num_context_variables;
1529 ObjectPtr null_value = Object::null();
1530 result->ptr()->parent_ = static_cast<ContextPtr>(null_value);
1531 for (intptr_t i = 0; i < num_context_variables; i++) {
1532 result->ptr()->data()[i] = null_value;
1533 }
1534 SP[0] = result;
1535 return true;
1536 } else {
1537 SP[0] = 0; // Space for the result.
1538 SP[1] = Smi::New(num_context_variables);
1539 Exit(thread, FP, SP + 2, pc);
1540 NativeArguments args(thread, 1, SP + 1, SP);
1541 return InvokeRuntime(thread, this, DRT_AllocateContext, args);
1542 }
1543}
1544
1545// Allocate a _Closure and put it into SP[0].
1546// Returns false on exception.
1547bool Interpreter::AllocateClosure(Thread* thread,
1548 const KBCInstr* pc,
1549 ObjectPtr* FP,
1550 ObjectPtr* SP) {
1551 const intptr_t instance_size = Closure::InstanceSize();
1552 ClosurePtr result;
1553 if (TryAllocate(thread, kClosureCid, instance_size,
1554 reinterpret_cast<ObjectPtr*>(&result))) {
1555 uword start = ObjectLayout::ToAddr(result);
1556 ObjectPtr null_value = Object::null();
1557 for (intptr_t offset = sizeof(InstanceLayout); offset < instance_size;
1558 offset += kWordSize) {
1559 *reinterpret_cast<ObjectPtr*>(start + offset) = null_value;
1560 }
1561 SP[0] = result;
1562 return true;
1563 } else {
1564 SP[0] = 0; // Space for the result.
1565 SP[1] = thread->isolate()->object_store()->closure_class();
1566 SP[2] = Object::null(); // Type arguments.
1567 Exit(thread, FP, SP + 3, pc);
1568 NativeArguments args(thread, 2, SP + 1, SP);
1569 return InvokeRuntime(thread, this, DRT_AllocateObject, args);
1570 }
1571}
1572
1573ObjectPtr Interpreter::Call(FunctionPtr function,
1574 ArrayPtr argdesc,
1575 intptr_t argc,
1576 ObjectPtr const* argv,
1577 Thread* thread) {
1578 // Interpreter state (see constants_kbc.h for high-level overview).
1579 const KBCInstr* pc; // Program Counter: points to the next op to execute.
1580 ObjectPtr* FP; // Frame Pointer.
1581 ObjectPtr* SP; // Stack Pointer.
1582
1583 uint32_t op; // Currently executing op.
1584
1585 bool reentering = fp_ != NULL;
1586 if (!reentering) {
1587 fp_ = reinterpret_cast<ObjectPtr*>(stack_base_);
1588 }
1589#if defined(DEBUG)
1590 if (IsTracingExecution()) {
1591 THR_Print("%" Pu64 " ", icount_);
1592 THR_Print("%s interpreter 0x%" Px " at fp_ 0x%" Px " exit 0x%" Px " %s\n",
1593 reentering ? "Re-entering" : "Entering",
1594 reinterpret_cast<uword>(this), reinterpret_cast<uword>(fp_),
1595 thread->top_exit_frame_info(),
1596 Function::Handle(function).ToFullyQualifiedCString());
1597 }
1598#endif
1599
1600 // Setup entry frame:
1601 //
1602 // ^
1603 // | previous Dart frames
1604 // |
1605 // | ........... | -+
1606 // fp_ > | exit fp_ | saved top_exit_frame_info
1607 // | argdesc_ | saved argdesc_ (for reentering interpreter)
1608 // | pp_ | saved pp_ (for reentering interpreter)
1609 // | arg 0 | -+
1610 // | arg 1 | |
1611 // ... |
1612 // > incoming arguments
1613 // |
1614 // | arg argc-1 | -+
1615 // | function | -+
1616 // | code | |
1617 // | caller PC | ---> special fake PC marking an entry frame
1618 // SP > | fp_ | |
1619 // FP > | ........... | > normal Dart frame (see stack_frame_kbc.h)
1620 // |
1621 // v
1622 //
1623 // A negative argc indicates reverse memory order of arguments.
1624 const intptr_t arg_count = argc < 0 ? -argc : argc;
1625 FP = fp_ + kKBCEntrySavedSlots + arg_count + kKBCDartFrameFixedSize;
1626 SP = FP - 1;
1627
1628 // Save outer top_exit_frame_info, current argdesc, and current pp.
1629 fp_[kKBCExitLinkSlotFromEntryFp] =
1630 static_cast<ObjectPtr>(thread->top_exit_frame_info());
1631 thread->set_top_exit_frame_info(0);
1632 fp_[kKBCSavedArgDescSlotFromEntryFp] = static_cast<ObjectPtr>(argdesc_);
1633 fp_[kKBCSavedPpSlotFromEntryFp] = static_cast<ObjectPtr>(pp_);
1634
1635 // Copy arguments and setup the Dart frame.
1636 for (intptr_t i = 0; i < arg_count; i++) {
1637 fp_[kKBCEntrySavedSlots + i] = argv[argc < 0 ? -i : i];
1638 }
1639
1640 BytecodePtr bytecode = function->ptr()->bytecode_;
1641 FP[kKBCFunctionSlotFromFp] = function;
1642 FP[kKBCPcMarkerSlotFromFp] = bytecode;
1643 FP[kKBCSavedCallerPcSlotFromFp] = static_cast<ObjectPtr>(kEntryFramePcMarker);
1644 FP[kKBCSavedCallerFpSlotFromFp] =
1645 static_cast<ObjectPtr>(reinterpret_cast<uword>(fp_));
1646
1647 // Load argument descriptor.
1648 argdesc_ = argdesc;
1649
1650 // Ready to start executing bytecode. Load entry point and corresponding
1651 // object pool.
1652 pc = reinterpret_cast<const KBCInstr*>(bytecode->ptr()->instructions_);
1653 NOT_IN_PRODUCT(pc_ = pc); // For the profiler.
1654 NOT_IN_PRODUCT(fp_ = FP); // For the profiler.
1655 pp_ = bytecode->ptr()->object_pool_;
1656
1657 // Save current VM tag and mark thread as executing Dart code. For the
1658 // profiler, do this *after* setting up the entry frame (compare the machine
1659 // code entry stubs).
1660 const uword vm_tag = thread->vm_tag();
1661 thread->set_vm_tag(VMTag::kDartInterpretedTagId);
1662
1663 // Save current top stack resource and reset the list.
1664 StackResource* top_resource = thread->top_resource();
1665 thread->set_top_resource(NULL);
1666
1667 // Cache some frequently used values in the frame.
1668 BoolPtr true_value = Bool::True().raw();
1669 BoolPtr false_value = Bool::False().raw();
1670 ObjectPtr null_value = Object::null();
1671
1672#ifdef DART_HAS_COMPUTED_GOTO
1673 static const void* dispatch[] = {
1674#define TARGET(name, fmt, kind, fmta, fmtb, fmtc) &&bc##name,
1675 KERNEL_BYTECODES_LIST(TARGET)
1676#undef TARGET
1677 };
1678 DISPATCH(); // Enter the dispatch loop.
1679#else
1680 DISPATCH(); // Enter the dispatch loop.
1681SwitchDispatch:
1682 switch (op & 0xFF) {
1683#define TARGET(name, fmt, kind, fmta, fmtb, fmtc) \
1684 case KernelBytecode::k##name: \
1685 goto bc##name;
1686 KERNEL_BYTECODES_LIST(TARGET)
1687#undef TARGET
1688 default:
1689 FATAL1("Undefined opcode: %d\n", op);
1690 }
1691#endif
1692
1693 // KernelBytecode handlers (see constants_kbc.h for bytecode descriptions).
1694 {
1695 BYTECODE(Entry, D);
1696 const intptr_t num_locals = rD;
1697
1698 // Initialize locals with null & set SP.
1699 for (intptr_t i = 0; i < num_locals; i++) {
1700 FP[i] = null_value;
1701 }
1702 SP = FP + num_locals - 1;
1703
1704 DISPATCH();
1705 }
1706
1707 {
1708 BYTECODE(EntryFixed, A_E);
1709 const intptr_t num_fixed_params = rA;
1710 const intptr_t num_locals = rE;
1711
1712 const intptr_t arg_count = InterpreterHelpers::ArgDescArgCount(argdesc_);
1713 const intptr_t pos_count = InterpreterHelpers::ArgDescPosCount(argdesc_);
1714 if ((arg_count != num_fixed_params) || (pos_count != num_fixed_params)) {
1715 goto NoSuchMethodFromPrologue;
1716 }
1717
1718 // Initialize locals with null & set SP.
1719 for (intptr_t i = 0; i < num_locals; i++) {
1720 FP[i] = null_value;
1721 }
1722 SP = FP + num_locals - 1;
1723
1724 DISPATCH();
1725 }
1726
1727 {
1728 BYTECODE(EntryOptional, A_B_C);
1729 if (CopyParameters(thread, &pc, &FP, &SP, rA, rB, rC)) {
1730 DISPATCH();
1731 } else {
1732 goto NoSuchMethodFromPrologue;
1733 }
1734 }
1735
1736 {
1737 BYTECODE(Frame, D);
1738 // Initialize locals with null and increment SP.
1739 const intptr_t num_locals = rD;
1740 for (intptr_t i = 1; i <= num_locals; i++) {
1741 SP[i] = null_value;
1742 }
1743 SP += num_locals;
1744
1745 DISPATCH();
1746 }
1747
1748 {
1749 BYTECODE(SetFrame, A);
1750 SP = FP + rA - 1;
1751 DISPATCH();
1752 }
1753
1754 {
1755 BYTECODE(CheckStack, A);
1756 {
1757 // Check the interpreter's own stack limit for actual interpreter's stack
1758 // overflows, and also the thread's stack limit for scheduled interrupts.
1759 if (reinterpret_cast<uword>(SP) >= overflow_stack_limit() ||
1760 thread->HasScheduledInterrupts()) {
1761 Exit(thread, FP, SP + 1, pc);
1762 INVOKE_RUNTIME(DRT_StackOverflow,
1763 NativeArguments(thread, 0, nullptr, nullptr));
1764 }
1765 }
1766 FunctionPtr function = FrameFunction(FP);
1767 int32_t counter = ++(function->ptr()->usage_counter_);
1768 if (UNLIKELY(FLAG_compilation_counter_threshold >= 0 &&
1769 counter >= FLAG_compilation_counter_threshold &&
1770 !Function::HasCode(function))) {
1771 SP[1] = 0; // Unused result.
1772 SP[2] = function;
1773 Exit(thread, FP, SP + 3, pc);
1774 INVOKE_RUNTIME(DRT_CompileInterpretedFunction,
1775 NativeArguments(thread, 1, SP + 2, SP + 1));
1776 }
1777 DISPATCH();
1778 }
1779
1780 {
1781 BYTECODE(DebugCheck, 0);
1782 DEBUG_CHECK;
1783 DISPATCH();
1784 }
1785
1786 {
1787 BYTECODE(CheckFunctionTypeArgs, A_E);
1788 const intptr_t declared_type_args_len = rA;
1789 const intptr_t first_stack_local_index = rE;
1790
1791 // Decode arguments descriptor's type args len.
1792 const intptr_t type_args_len =
1793 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
1794 if ((type_args_len != declared_type_args_len) && (type_args_len != 0)) {
1795 goto NoSuchMethodFromPrologue;
1796 }
1797 if (type_args_len > 0) {
1798 // Decode arguments descriptor's argument count (excluding type args).
1799 const intptr_t arg_count = InterpreterHelpers::ArgDescArgCount(argdesc_);
1800 // Copy passed-in type args to first local slot.
1801 FP[first_stack_local_index] = *FrameArguments(FP, arg_count + 1);
1802 } else if (declared_type_args_len > 0) {
1803 FP[first_stack_local_index] = Object::null();
1804 }
1805 DISPATCH();
1806 }
1807
1808 {
1809 BYTECODE(InstantiateType, D);
1810 // Stack: instantiator type args, function type args
1811 ObjectPtr type = LOAD_CONSTANT(rD);
1812 SP[1] = type;
1813 SP[2] = SP[-1];
1814 SP[3] = SP[0];
1815 Exit(thread, FP, SP + 4, pc);
1816 {
1817 INVOKE_RUNTIME(DRT_InstantiateType,
1818 NativeArguments(thread, 3, SP + 1, SP - 1));
1819 }
1820 SP -= 1;
1821 DISPATCH();
1822 }
1823
  {
    BYTECODE(InstantiateTypeArgumentsTOS, A_E);
    // Stack: instantiator type args, function type args
    // rE: constant pool index of the uninstantiated TypeArguments.
    // rA: non-zero when instantiating this vector from null yields a vector
    // of dynamic, enabling the null fast path below.
    TypeArgumentsPtr type_arguments =
        static_cast<TypeArgumentsPtr>(LOAD_CONSTANT(rE));

    ObjectPtr instantiator_type_args = SP[-1];
    ObjectPtr function_type_args = SP[0];
    // If both instantiators are null and if the type argument vector
    // instantiated from null becomes a vector of dynamic, then use null as
    // the type arguments (i.e. leave SP[-1] as-is and just pop below).
    if ((rA == 0) || (null_value != instantiator_type_args) ||
        (null_value != function_type_args)) {
      // First lookup in the cache.
      // The instantiations_ array stores (instantiator, function, result)
      // triples, terminated by kNoInstantiator.
      ArrayPtr instantiations = type_arguments->ptr()->instantiations_;
      for (intptr_t i = 0;
           instantiations->ptr()->data()[i] !=
           static_cast<ObjectPtr>(TypeArguments::kNoInstantiator);
           i += TypeArguments::Instantiation::kSizeInWords) {
        if ((instantiations->ptr()->data()
                 [i +
                  TypeArguments::Instantiation::kInstantiatorTypeArgsIndex] ==
             instantiator_type_args) &&
            (instantiations->ptr()->data()
                 [i + TypeArguments::Instantiation::kFunctionTypeArgsIndex] ==
             function_type_args)) {
          // Found in the cache.
          SP[-1] =
              instantiations->ptr()->data()[i + TypeArguments::Instantiation::
                                                    kInstantiatedTypeArgsIndex];
          goto InstantiateTypeArgumentsTOSDone;
        }
      }

      // Cache lookup failed, call runtime.
      SP[1] = type_arguments;
      SP[2] = instantiator_type_args;
      SP[3] = function_type_args;

      // Result is written into SP[-1], becoming TOS after the pop below.
      Exit(thread, FP, SP + 4, pc);
      INVOKE_RUNTIME(DRT_InstantiateTypeArguments,
                     NativeArguments(thread, 3, SP + 1, SP - 1));
    }

  InstantiateTypeArgumentsTOSDone:
    SP -= 1;
    DISPATCH();
  }
1872
  {
    BYTECODE(Throw, A);
    // rA == 0: throw TOS as a new exception.
    // rA != 0: rethrow; stack holds exception (SP[-1]) and stacktrace (SP[0]).
    // Neither runtime entry returns normally; control resumes at a handler
    // via the interpreter's setjmp buffer.
    {
      SP[1] = 0;  // Space for result.
      Exit(thread, FP, SP + 2, pc);
      if (rA == 0) {  // Throw
        INVOKE_RUNTIME(DRT_Throw, NativeArguments(thread, 1, SP, SP + 1));
      } else {  // ReThrow
        INVOKE_RUNTIME(DRT_ReThrow, NativeArguments(thread, 2, SP - 1, SP + 1));
      }
    }
    DISPATCH();
  }
1886
1887 {
1888 BYTECODE(Drop1, 0);
1889 SP--;
1890 DISPATCH();
1891 }
1892
1893 {
1894 BYTECODE(LoadConstant, A_E);
1895 FP[rA] = LOAD_CONSTANT(rE);
1896 DISPATCH();
1897 }
1898
1899 {
1900 BYTECODE(PushConstant, D);
1901 *++SP = LOAD_CONSTANT(rD);
1902 DISPATCH();
1903 }
1904
1905 {
1906 BYTECODE(PushNull, 0);
1907 *++SP = null_value;
1908 DISPATCH();
1909 }
1910
1911 {
1912 BYTECODE(PushTrue, 0);
1913 *++SP = true_value;
1914 DISPATCH();
1915 }
1916
1917 {
1918 BYTECODE(PushFalse, 0);
1919 *++SP = false_value;
1920 DISPATCH();
1921 }
1922
1923 {
1924 BYTECODE(PushInt, X);
1925 *++SP = Smi::New(rX);
1926 DISPATCH();
1927 }
1928
1929 {
1930 BYTECODE(Push, X);
1931 *++SP = FP[rX];
1932 DISPATCH();
1933 }
1934
1935 {
1936 BYTECODE(StoreLocal, X);
1937 FP[rX] = *SP;
1938 DISPATCH();
1939 }
1940
1941 {
1942 BYTECODE(PopLocal, X);
1943 FP[rX] = *SP--;
1944 DISPATCH();
1945 }
1946
1947 {
1948 BYTECODE(MoveSpecial, A_Y);
1949 ASSERT(rA < KernelBytecode::kSpecialIndexCount);
1950 FP[rY] = special_[rA];
1951 DISPATCH();
1952 }
1953
1954 {
1955 BYTECODE(BooleanNegateTOS, 0);
1956 SP[0] = (SP[0] == true_value) ? false_value : true_value;
1957 DISPATCH();
1958 }
1959
  {
    BYTECODE(DirectCall, D_F);
    DEBUG_CHECK;
    // Statically-resolved call. rD: pool index of the target function (the
    // arguments descriptor is at rD + 1); rF: argument count.
    // Invoke target function.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      *++SP = LOAD_CONSTANT(kidx);  // Push callee function above the args.
      ObjectPtr* call_base = SP - argc;
      ObjectPtr* call_top = SP;
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx + 1));
      if (!Invoke(thread, call_base, call_top, &pc, &FP, &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }

  {
    BYTECODE(UncheckedDirectCall, D_F);
    DEBUG_CHECK;
    // Same as DirectCall; the "unchecked" distinction (skipping parameter
    // type checks) is handled by the callee's unchecked entry, not here.
    // Invoke target function.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      *++SP = LOAD_CONSTANT(kidx);  // Push callee function above the args.
      ObjectPtr* call_base = SP - argc;
      ObjectPtr* call_top = SP;
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx + 1));
      if (!Invoke(thread, call_base, call_top, &pc, &FP, &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }
2001
  {
    BYTECODE(InterfaceCall, D_F);
    DEBUG_CHECK;
    // Instance call resolved by selector. rD: pool index of an interface
    // target function whose name is used as the lookup selector (the
    // arguments descriptor is at rD + 1); rF: argument count incl. receiver.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      ObjectPtr* call_base = SP - argc + 1;
      ObjectPtr* call_top = SP + 1;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      StringPtr target_name =
          static_cast<FunctionPtr>(LOAD_CONSTANT(kidx))->ptr()->name_;
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx + 1));
      if (!InstanceCall(thread, target_name, call_base, call_top, &pc, &FP,
                        &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }
  {
    BYTECODE(InstantiatedInterfaceCall, D_F);
    DEBUG_CHECK;
    // Same dispatch as InterfaceCall; the distinction between the two
    // opcodes is made by the bytecode generator, not by this handler.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      ObjectPtr* call_base = SP - argc + 1;
      ObjectPtr* call_top = SP + 1;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      StringPtr target_name =
          static_cast<FunctionPtr>(LOAD_CONSTANT(kidx))->ptr()->name_;
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx + 1));
      if (!InstanceCall(thread, target_name, call_base, call_top, &pc, &FP,
                        &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }
2046
  {
    BYTECODE(UncheckedClosureCall, D_F);
    DEBUG_CHECK;
    // Call a closure sitting on TOS. rD: pool index of the arguments
    // descriptor; rF: argument count. Throws NoSuchMethod-style null error
    // if the closure receiver is null.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      ClosurePtr receiver = Closure::RawCast(*SP--);  // Pop the closure.
      ObjectPtr* call_base = SP - argc + 1;
      ObjectPtr* call_top = SP + 1;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      if (UNLIKELY(receiver == null_value)) {
        SP[0] = Symbols::Call().raw();  // Selector for the null error.
        goto ThrowNullError;
      }
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx));
      // Push the closure's function so Invoke sees it above the args.
      call_top[0] = receiver->ptr()->function_;

      if (!Invoke(thread, call_base, call_top, &pc, &FP, &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }
2073
  {
    BYTECODE(UncheckedInterfaceCall, D_F);
    DEBUG_CHECK;
    // Selector-based instance call; identical dispatch to InterfaceCall
    // (the "unchecked" property is honored by the callee's entry point).
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      ObjectPtr* call_base = SP - argc + 1;
      ObjectPtr* call_top = SP + 1;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      StringPtr target_name =
          static_cast<FunctionPtr>(LOAD_CONSTANT(kidx))->ptr()->name_;
      argdesc_ = static_cast<ArrayPtr>(LOAD_CONSTANT(kidx + 1));
      if (!InstanceCall(thread, target_name, call_base, call_top, &pc, &FP,
                        &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }

  {
    BYTECODE(DynamicCall, D_F);
    DEBUG_CHECK;
    // Fully dynamic instance call: rD holds the selector String itself
    // (argdesc at rD + 1), rather than an interface target function.
    {
      const uint32_t argc = rF;
      const uint32_t kidx = rD;

      ObjectPtr* call_base = SP - argc + 1;
      ObjectPtr* call_top = SP + 1;

      InterpreterHelpers::IncrementUsageCounter(FrameFunction(FP));
      StringPtr target_name = String::RawCast(LOAD_CONSTANT(kidx));
      argdesc_ = Array::RawCast(LOAD_CONSTANT(kidx + 1));
      if (!InstanceCall(thread, target_name, call_base, call_top, &pc, &FP,
                        &SP)) {
        HANDLE_EXCEPTION;
      }
    }

    DISPATCH();
  }
2118
  {
    BYTECODE(NativeCall, D);
    // rD: pool index of a NativeEntryData typed-data blob. Recognized
    // methods are inlined as intrinsics below (reading fields directly from
    // raw objects); everything else goes through the generic native wrapper
    // in the default case.
    TypedDataPtr data = static_cast<TypedDataPtr>(LOAD_CONSTANT(rD));
    MethodRecognizer::Kind kind = NativeEntryData::GetKind(data);
    switch (kind) {
      case MethodRecognizer::kObjectEquals: {
        // identical(): pointer comparison; pops one arg, result replaces
        // the receiver slot.
        SP[-1] = SP[-1] == SP[0] ? Bool::True().raw() : Bool::False().raw();
        SP--;
      } break;
      case MethodRecognizer::kStringBaseLength:
      case MethodRecognizer::kStringBaseIsEmpty: {
        // Raw field read of String::length_ at its word offset.
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[String::length_offset() / kWordSize];
        if (kind == MethodRecognizer::kStringBaseIsEmpty) {
          SP[0] =
              SP[0] == Smi::New(0) ? Bool::True().raw() : Bool::False().raw();
        }
      } break;
      case MethodRecognizer::kGrowableArrayLength: {
        GrowableObjectArrayPtr instance =
            static_cast<GrowableObjectArrayPtr>(SP[0]);
        SP[0] = instance->ptr()->length_;
      } break;
      case MethodRecognizer::kObjectArrayLength:
      case MethodRecognizer::kImmutableArrayLength: {
        ArrayPtr instance = static_cast<ArrayPtr>(SP[0]);
        SP[0] = instance->ptr()->length_;
      } break;
      case MethodRecognizer::kTypedListLength:
      case MethodRecognizer::kTypedListViewLength:
      case MethodRecognizer::kByteDataViewLength: {
        TypedDataBasePtr instance = static_cast<TypedDataBasePtr>(SP[0]);
        SP[0] = instance->ptr()->length_;
      } break;
      case MethodRecognizer::kByteDataViewOffsetInBytes:
      case MethodRecognizer::kTypedDataViewOffsetInBytes: {
        TypedDataViewPtr instance = static_cast<TypedDataViewPtr>(SP[0]);
        SP[0] = instance->ptr()->offset_in_bytes_;
      } break;
      case MethodRecognizer::kByteDataViewTypedData:
      case MethodRecognizer::kTypedDataViewTypedData: {
        TypedDataViewPtr instance = static_cast<TypedDataViewPtr>(SP[0]);
        SP[0] = instance->ptr()->typed_data_;
      } break;
      case MethodRecognizer::kClassIDgetID: {
        SP[0] = InterpreterHelpers::GetClassIdAsSmi(SP[0]);
      } break;
      case MethodRecognizer::kAsyncStackTraceHelper: {
        SP[0] = Object::null();
      } break;
      case MethodRecognizer::kGrowableArrayCapacity: {
        // Capacity is the length of the backing store array.
        GrowableObjectArrayPtr instance =
            static_cast<GrowableObjectArrayPtr>(SP[0]);
        SP[0] = instance->ptr()->data_->ptr()->length_;
      } break;
      case MethodRecognizer::kListFactory: {
        // factory List<E>([int length]) {
        //   return (:arg_desc.positional_count == 2) ? new _List<E>(length)
        //                                            : new _GrowableList<E>(0);
        // }
        if (InterpreterHelpers::ArgDescPosCount(argdesc_) == 2) {
          TypeArgumentsPtr type_args = TypeArguments::RawCast(SP[-1]);
          ObjectPtr length = SP[0];
          SP--;
          if (!AllocateArray(thread, type_args, length, pc, FP, SP)) {
            HANDLE_EXCEPTION;
          }
        } else {
          ASSERT(InterpreterHelpers::ArgDescPosCount(argdesc_) == 1);
          // SP[-1] is type.
          // The native wrapper pushed null as the optional length argument.
          ASSERT(SP[0] == null_value);
          SP[0] = Smi::New(0);  // Patch null length with zero length.
          SP[1] = thread->isolate()->object_store()->growable_list_factory();
          // Change the ArgumentsDescriptor of the call with a new cached one.
          argdesc_ = ArgumentsDescriptor::NewBoxed(
              0, KernelBytecode::kNativeCallToGrowableListArgc);
          // Replace PC to the return trampoline so ReturnTOS would see
          // a call bytecode at return address and will be able to get argc
          // via DecodeArgc.
          pc = KernelBytecode::GetNativeCallToGrowableListReturnTrampoline();
          if (!Invoke(thread, SP - 1, SP + 1, &pc, &FP, &SP)) {
            HANDLE_EXCEPTION;
          }
        }
      } break;
      case MethodRecognizer::kObjectArrayAllocate: {
        TypeArgumentsPtr type_args = TypeArguments::RawCast(SP[-1]);
        ObjectPtr length = SP[0];
        SP--;
        if (!AllocateArray(thread, type_args, length, pc, FP, SP)) {
          HANDLE_EXCEPTION;
        }
      } break;
      case MethodRecognizer::kLinkedHashMap_getIndex: {
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::index_offset() / kWordSize];
      } break;
      case MethodRecognizer::kLinkedHashMap_setIndex: {
        // Setters pop (instance, value) and push null as the result;
        // StorePointer applies the GC write barrier.
        InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
        instance->ptr()->StorePointer(
            reinterpret_cast<ObjectPtr*>(instance->ptr()) +
                LinkedHashMap::index_offset() / kWordSize,
            SP[0]);
        *--SP = null_value;
      } break;
      case MethodRecognizer::kLinkedHashMap_getData: {
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::data_offset() / kWordSize];
      } break;
      case MethodRecognizer::kLinkedHashMap_setData: {
        InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
        instance->ptr()->StorePointer(
            reinterpret_cast<ObjectPtr*>(instance->ptr()) +
                LinkedHashMap::data_offset() / kWordSize,
            SP[0]);
        *--SP = null_value;
      } break;
      case MethodRecognizer::kLinkedHashMap_getHashMask: {
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::hash_mask_offset() / kWordSize];
      } break;
      case MethodRecognizer::kLinkedHashMap_setHashMask: {
        // Smi store (asserted below), so no write barrier is needed.
        InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
        ASSERT(!SP[0]->IsHeapObject());
        reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::hash_mask_offset() / kWordSize] =
            SP[0];
        *--SP = null_value;
      } break;
      case MethodRecognizer::kLinkedHashMap_getUsedData: {
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::used_data_offset() / kWordSize];
      } break;
      case MethodRecognizer::kLinkedHashMap_setUsedData: {
        InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
        ASSERT(!SP[0]->IsHeapObject());
        reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::used_data_offset() / kWordSize] =
            SP[0];
        *--SP = null_value;
      } break;
      case MethodRecognizer::kLinkedHashMap_getDeletedKeys: {
        InstancePtr instance = static_cast<InstancePtr>(SP[0]);
        SP[0] = reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::deleted_keys_offset() / kWordSize];
      } break;
      case MethodRecognizer::kLinkedHashMap_setDeletedKeys: {
        InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
        ASSERT(!SP[0]->IsHeapObject());
        reinterpret_cast<ObjectPtr*>(
            instance->ptr())[LinkedHashMap::deleted_keys_offset() / kWordSize] =
            SP[0];
        *--SP = null_value;
      } break;
      case MethodRecognizer::kFfiAbi: {
        *++SP = Smi::New(static_cast<int64_t>(compiler::ffi::TargetAbi()));
      } break;
      default: {
        // Generic path: invoke the native function through its trampoline.
        NativeEntryData::Payload* payload =
            NativeEntryData::FromTypedArray(data);
        intptr_t argc_tag = NativeEntryData::GetArgcTag(data);
        const intptr_t num_arguments =
            NativeArguments::ArgcBits::decode(argc_tag);

        if (payload->trampoline == NULL) {
          // First call: lazily link by routing through LinkNativeCall.
          ASSERT(payload->native_function == NULL);
          payload->trampoline = &NativeEntry::BootstrapNativeCallWrapper;
          payload->native_function =
              reinterpret_cast<NativeFunction>(&NativeEntry::LinkNativeCall);
        }

        *++SP = null_value;  // Result slot.

        ObjectPtr* incoming_args = SP - num_arguments;
        ObjectPtr* return_slot = SP;
        Exit(thread, FP, SP + 1, pc);
        NativeArguments native_args(thread, argc_tag, incoming_args,
                                    return_slot);
        INVOKE_NATIVE(
            payload->trampoline,
            reinterpret_cast<Dart_NativeFunction>(payload->native_function),
            reinterpret_cast<Dart_NativeArguments>(&native_args));

        // Pop the arguments, leaving the result at the new TOS.
        *(SP - num_arguments) = *return_slot;
        SP -= num_arguments;
      }
    }
    DISPATCH();
  }
2314
  {
    BYTECODE(ReturnTOS, 0);
    DEBUG_CHECK;
    // Return TOS to the caller. Either unwinds all the way out of the
    // interpreter (entry-frame marker) or pops one interpreter frame.
    ObjectPtr result;  // result to return to the caller.
    result = *SP;
    // Restore caller PC.
    pc = SavedCallerPC(FP);

    // Check if it is a fake PC marking the entry frame.
    if (IsEntryFrameMarker(pc)) {
      // Pop entry frame.
      ObjectPtr* entry_fp = SavedCallerFP(FP);
      // Restore exit frame info saved in entry frame.
      pp_ = static_cast<ObjectPoolPtr>(entry_fp[kKBCSavedPpSlotFromEntryFp]);
      argdesc_ =
          static_cast<ArrayPtr>(entry_fp[kKBCSavedArgDescSlotFromEntryFp]);
      uword exit_fp = static_cast<uword>(entry_fp[kKBCExitLinkSlotFromEntryFp]);
      thread->set_top_exit_frame_info(exit_fp);
      thread->set_top_resource(top_resource);
      thread->set_vm_tag(vm_tag);
      fp_ = entry_fp;
      NOT_IN_PRODUCT(pc_ = pc);  // For the profiler.
#if defined(DEBUG)
      if (IsTracingExecution()) {
        THR_Print("%" Pu64 " ", icount_);
        THR_Print("Returning from interpreter 0x%" Px " at fp_ 0x%" Px
                  " exit 0x%" Px "\n",
                  reinterpret_cast<uword>(this), reinterpret_cast<uword>(fp_),
                  exit_fp);
      }
      ASSERT(HasFrame(reinterpret_cast<uword>(fp_)));
      // Exception propagation should have been done.
      ASSERT(!result->IsHeapObject() ||
             result->GetClassId() != kUnhandledExceptionCid);
#endif
      return result;
    }

    // Look at the caller to determine how many arguments to pop.
    const uint8_t argc = KernelBytecode::DecodeArgc(pc);

    // Restore SP, FP and PP. Push result and dispatch.
    SP = FrameArguments(FP, argc);
    FP = SavedCallerFP(FP);
    NOT_IN_PRODUCT(fp_ = FP);  // For the profiler.
    NOT_IN_PRODUCT(pc_ = pc);  // For the profiler.
    pp_ = InterpreterHelpers::FrameBytecode(FP)->ptr()->object_pool_;
    *SP = result;
#if defined(DEBUG)
    if (IsTracingExecution()) {
      THR_Print("%" Pu64 " ", icount_);
      THR_Print("Returning to %s (argc %d)\n",
                Function::Handle(FrameFunction(FP)).ToFullyQualifiedCString(),
                static_cast<int>(argc));
    }
#endif
    DISPATCH();
  }
2373
  {
    BYTECODE(InitLateField, D);
    // Initialize the instance field described by the Field at pool index
    // rD + 1 (its word offset is stored in host_offset_or_field_id_) to the
    // "uninitialized" sentinel. Instance is at TOS and is popped.
    FieldPtr field = RAW_CAST(Field, LOAD_CONSTANT(rD + 1));
    InstancePtr instance = static_cast<InstancePtr>(SP[0]);
    intptr_t offset_in_words =
        Smi::Value(field->ptr()->host_offset_or_field_id_);

    instance->ptr()->StorePointer(
        reinterpret_cast<ObjectPtr*>(instance->ptr()) + offset_in_words,
        Object::RawCast(Object::sentinel().raw()), thread);

    SP -= 1;  // Drop instance.
    DISPATCH();
  }

  {
    BYTECODE(PushUninitializedSentinel, 0);
    // Push the sentinel that marks an uninitialized (late) variable.
    *++SP = Object::sentinel().raw();
    DISPATCH();
  }

  {
    BYTECODE(JumpIfInitialized, T);
    // Pop TOS; branch if it is not the "uninitialized" sentinel.
    SP -= 1;
    if (SP[1] != Object::sentinel().raw()) {
      LOAD_JUMP_TARGET();
    }
    DISPATCH();
  }

  {
    BYTECODE(StoreStaticTOS, D);
    // Pop TOS into the static field with pool index rD. Static field values
    // live in the thread's field table (not on the heap object), hence the
    // plain store.
    FieldPtr field = static_cast<FieldPtr>(LOAD_CONSTANT(rD));
    InstancePtr value = static_cast<InstancePtr>(*SP--);
    intptr_t field_id = Smi::Value(field->ptr()->host_offset_or_field_id_);
    thread->field_table_values()[field_id] = value;
    DISPATCH();
  }

  {
    BYTECODE(LoadStatic, D);
    // Push the value of the static field with pool index rD. The field must
    // already be initialized (no sentinel values expected here).
    FieldPtr field = static_cast<FieldPtr>(LOAD_CONSTANT(rD));
    intptr_t field_id = Smi::Value(field->ptr()->host_offset_or_field_id_);
    InstancePtr value = thread->field_table_values()[field_id];
    ASSERT((value != Object::sentinel().raw()) &&
           (value != Object::transition_sentinel().raw()));
    *++SP = value;
    DISPATCH();
  }
2423
  {
    BYTECODE(StoreFieldTOS, D);
    // Store SP[0] (value) into a field of SP[-1] (instance). rD: pool index
    // of the field's offset Smi; the Field object itself is at rD + 1.
    // Handles field-guard updates and unboxed double/simd fields.
    FieldPtr field = RAW_CAST(Field, LOAD_CONSTANT(rD + 1));
    InstancePtr instance = static_cast<InstancePtr>(SP[-1]);
    ObjectPtr value = static_cast<ObjectPtr>(SP[0]);
    intptr_t offset_in_words =
        Smi::Value(field->ptr()->host_offset_or_field_id_);

    if (InterpreterHelpers::FieldNeedsGuardUpdate(field, value)) {
      SP[1] = 0;  // Unused result of runtime call.
      SP[2] = field;
      SP[3] = value;
      Exit(thread, FP, SP + 4, pc);
      if (!InvokeRuntime(thread, this, DRT_UpdateFieldCid,
                         NativeArguments(thread, 2, /* argv */ SP + 2,
                                         /* retval */ SP + 1))) {
        HANDLE_EXCEPTION;
      }

      // Reload objects after the call which may trigger GC.
      field = RAW_CAST(Field, LOAD_CONSTANT(rD + 1));
      instance = static_cast<InstancePtr>(SP[-1]);
      value = SP[0];
    }

    const bool unboxing =
        (field->ptr()->is_nullable_ != kNullCid) &&
        Field::UnboxingCandidateBit::decode(field->ptr()->kind_bits_);
    classid_t guarded_cid = field->ptr()->guarded_cid_;
    if (unboxing && (guarded_cid == kDoubleCid) && supports_unboxed_doubles_) {
      // Field is unboxed in compiled code, but the interpreter stores a
      // freshly allocated mutable box. NOTE(review): the ASSERT implies this
      // path is only reached for the initializing store — confirm.
      double raw_value = Double::RawCast(value)->ptr()->value_;
      ASSERT(*(reinterpret_cast<DoublePtr*>(instance->ptr()) +
               offset_in_words) == null_value);  // Initializing store.
      if (!AllocateDouble(thread, raw_value, pc, FP, SP)) {
        HANDLE_EXCEPTION;
      }
      DoublePtr box = Double::RawCast(SP[0]);
      instance = static_cast<InstancePtr>(SP[-1]);  // Reload after GC.
      instance->ptr()->StorePointer(
          reinterpret_cast<DoublePtr*>(instance->ptr()) + offset_in_words, box,
          thread);
    } else if (unboxing && (guarded_cid == kFloat32x4Cid) &&
               supports_unboxed_simd128_) {
      simd128_value_t raw_value;
      raw_value.readFrom(Float32x4::RawCast(value)->ptr()->value_);
      ASSERT(*(reinterpret_cast<Float32x4Ptr*>(instance->ptr()) +
               offset_in_words) == null_value);  // Initializing store.
      if (!AllocateFloat32x4(thread, raw_value, pc, FP, SP)) {
        HANDLE_EXCEPTION;
      }
      Float32x4Ptr box = Float32x4::RawCast(SP[0]);
      instance = static_cast<InstancePtr>(SP[-1]);  // Reload after GC.
      instance->ptr()->StorePointer(
          reinterpret_cast<Float32x4Ptr*>(instance->ptr()) + offset_in_words,
          box, thread);
    } else if (unboxing && (guarded_cid == kFloat64x2Cid) &&
               supports_unboxed_simd128_) {
      simd128_value_t raw_value;
      raw_value.readFrom(Float64x2::RawCast(value)->ptr()->value_);
      ASSERT(*(reinterpret_cast<Float64x2Ptr*>(instance->ptr()) +
               offset_in_words) == null_value);  // Initializing store.
      if (!AllocateFloat64x2(thread, raw_value, pc, FP, SP)) {
        HANDLE_EXCEPTION;
      }
      Float64x2Ptr box = Float64x2::RawCast(SP[0]);
      instance = static_cast<InstancePtr>(SP[-1]);  // Reload after GC.
      instance->ptr()->StorePointer(
          reinterpret_cast<Float64x2Ptr*>(instance->ptr()) + offset_in_words,
          box, thread);
    } else {
      // Plain pointer store with GC write barrier.
      instance->ptr()->StorePointer(
          reinterpret_cast<ObjectPtr*>(instance->ptr()) + offset_in_words,
          value, thread);
    }

    SP -= 2;  // Drop instance and value.
    DISPATCH();
  }
2502
  {
    BYTECODE(StoreContextParent, 0);
    // Pop (context, parent-context) and store parent into context's parent
    // slot, with GC write barrier.
    const uword offset_in_words =
        static_cast<uword>(Context::parent_offset() / kWordSize);
    ContextPtr instance = static_cast<ContextPtr>(SP[-1]);
    ContextPtr value = static_cast<ContextPtr>(SP[0]);
    SP -= 2;  // Drop instance and value.

    instance->ptr()->StorePointer(
        reinterpret_cast<ContextPtr*>(instance->ptr()) + offset_in_words, value,
        thread);

    DISPATCH();
  }

  {
    BYTECODE(StoreContextVar, A_E);
    // Pop (context, value) and store value into context variable rE.
    const uword offset_in_words =
        static_cast<uword>(Context::variable_offset(rE) / kWordSize);
    ContextPtr instance = static_cast<ContextPtr>(SP[-1]);
    ObjectPtr value = static_cast<ContextPtr>(SP[0]);
    SP -= 2;  // Drop instance and value.
    ASSERT(rE < static_cast<uint32_t>(instance->ptr()->num_variables_));
    instance->ptr()->StorePointer(
        reinterpret_cast<ObjectPtr*>(instance->ptr()) + offset_in_words, value,
        thread);

    DISPATCH();
  }

  {
    BYTECODE(LoadFieldTOS, D);
    // Replace TOS instance with the value of its field at the word offset
    // stored as a Smi at pool index rD.
#if defined(DEBUG)
    // Currently only used to load closure fields, which are not unboxed.
    // If used for general field, code for copying the mutable box must be
    // added.
    FieldPtr field = RAW_CAST(Field, LOAD_CONSTANT(rD + 1));
    const bool unboxing =
        (field->ptr()->is_nullable_ != kNullCid) &&
        Field::UnboxingCandidateBit::decode(field->ptr()->kind_bits_);
    ASSERT(!unboxing);
#endif
    const uword offset_in_words =
        static_cast<uword>(Smi::Value(RAW_CAST(Smi, LOAD_CONSTANT(rD))));
    InstancePtr instance = static_cast<InstancePtr>(SP[0]);
    SP[0] = reinterpret_cast<ObjectPtr*>(instance->ptr())[offset_in_words];
    DISPATCH();
  }

  {
    BYTECODE(LoadTypeArgumentsField, D);
    // Replace TOS instance with its type-arguments vector, read from the
    // word offset stored as a Smi at pool index rD.
    const uword offset_in_words =
        static_cast<uword>(Smi::Value(RAW_CAST(Smi, LOAD_CONSTANT(rD))));
    InstancePtr instance = static_cast<InstancePtr>(SP[0]);
    SP[0] = reinterpret_cast<ObjectPtr*>(instance->ptr())[offset_in_words];
    DISPATCH();
  }

  {
    BYTECODE(LoadContextParent, 0);
    // Replace TOS context with its parent context.
    const uword offset_in_words =
        static_cast<uword>(Context::parent_offset() / kWordSize);
    ContextPtr instance = static_cast<ContextPtr>(SP[0]);
    SP[0] = reinterpret_cast<ObjectPtr*>(instance->ptr())[offset_in_words];
    DISPATCH();
  }

  {
    BYTECODE(LoadContextVar, A_E);
    // Replace TOS context with the value of its variable rE.
    const uword offset_in_words =
        static_cast<uword>(Context::variable_offset(rE) / kWordSize);
    ContextPtr instance = static_cast<ContextPtr>(SP[0]);
    ASSERT(rE < static_cast<uint32_t>(instance->ptr()->num_variables_));
    SP[0] = reinterpret_cast<ObjectPtr*>(instance->ptr())[offset_in_words];
    DISPATCH();
  }
2579
  {
    BYTECODE(AllocateContext, A_E);
    // Allocate a Context with rE variables; result is pushed by the helper
    // into the pre-incremented TOS slot.
    ++SP;
    const uint32_t num_context_variables = rE;
    if (!AllocateContext(thread, num_context_variables, pc, FP, SP)) {
      HANDLE_EXCEPTION;
    }
    DISPATCH();
  }

  {
    BYTECODE(CloneContext, A_E);
    // Clone the context at TOS via a runtime call; the clone replaces it.
    {
      SP[1] = SP[0];  // Context to clone.
      Exit(thread, FP, SP + 2, pc);
      INVOKE_RUNTIME(DRT_CloneContext, NativeArguments(thread, 1, SP + 1, SP));
    }
    DISPATCH();
  }

  {
    BYTECODE(Allocate, D);
    // Allocate an instance of the class at pool index rD. Fast path: bump
    // allocation of a finalized class with all fields null-initialized;
    // slow path: DRT_AllocateObject.
    ClassPtr cls = Class::RawCast(LOAD_CONSTANT(rD));
    if (LIKELY(InterpreterHelpers::IsAllocateFinalized(cls))) {
      const intptr_t class_id = cls->ptr()->id_;
      const intptr_t instance_size = cls->ptr()->host_instance_size_in_words_
                                     << kWordSizeLog2;
      ObjectPtr result;
      if (TryAllocate(thread, class_id, instance_size, &result)) {
        uword start = ObjectLayout::ToAddr(result);
        // Null out all fields (the header was written by TryAllocate).
        for (intptr_t offset = sizeof(InstanceLayout); offset < instance_size;
             offset += kWordSize) {
          *reinterpret_cast<ObjectPtr*>(start + offset) = null_value;
        }
        *++SP = result;
        DISPATCH();
      }
    }

    SP[1] = 0;           // Space for the result.
    SP[2] = cls;         // Class object.
    SP[3] = null_value;  // Type arguments.
    Exit(thread, FP, SP + 4, pc);
    INVOKE_RUNTIME(DRT_AllocateObject,
                   NativeArguments(thread, 2, SP + 2, SP + 1));
    SP++;  // Result is in SP[1].
    DISPATCH();
  }

  {
    BYTECODE(AllocateT, 0);
    // Allocate a generic instance. Stack: type args (SP[-1]), class (SP[0]).
    // Fast path additionally stores the type arguments at the class's
    // type-args field offset; result replaces the pair on the stack.
    ClassPtr cls = Class::RawCast(SP[0]);
    TypeArgumentsPtr type_args = TypeArguments::RawCast(SP[-1]);
    if (LIKELY(InterpreterHelpers::IsAllocateFinalized(cls))) {
      const intptr_t class_id = cls->ptr()->id_;
      const intptr_t instance_size = cls->ptr()->host_instance_size_in_words_
                                     << kWordSizeLog2;
      ObjectPtr result;
      if (TryAllocate(thread, class_id, instance_size, &result)) {
        uword start = ObjectLayout::ToAddr(result);
        for (intptr_t offset = sizeof(InstanceLayout); offset < instance_size;
             offset += kWordSize) {
          *reinterpret_cast<ObjectPtr*>(start + offset) = null_value;
        }
        const intptr_t type_args_offset =
            cls->ptr()->host_type_arguments_field_offset_in_words_
            << kWordSizeLog2;
        *reinterpret_cast<ObjectPtr*>(start + type_args_offset) = type_args;
        *--SP = result;
        DISPATCH();
      }
    }

    SP[1] = cls;
    SP[2] = type_args;
    Exit(thread, FP, SP + 3, pc);
    INVOKE_RUNTIME(DRT_AllocateObject,
                   NativeArguments(thread, 2, SP + 1, SP - 1));
    SP -= 1;  // Result is in SP - 1.
    DISPATCH();
  }

  {
    BYTECODE(CreateArrayTOS, 0);
    // Allocate an Array. Stack: type args (SP[-1]), length (SP[0]); the
    // helper leaves the array at the new TOS.
    TypeArgumentsPtr type_args = TypeArguments::RawCast(SP[-1]);
    ObjectPtr length = SP[0];
    SP--;
    if (!AllocateArray(thread, type_args, length, pc, FP, SP)) {
      HANDLE_EXCEPTION;
    }
    DISPATCH();
  }
2672
  {
    BYTECODE(AssertAssignable, A_E);
    // Stack: instance, type, instantiator type args, function type args, name
    // rA == 1 means a Smi instance trivially passes the check; rE is the
    // pool index of the SubtypeTestCache. Null always passes.
    ObjectPtr* args = SP - 4;
    const bool may_be_smi = (rA == 1);
    const bool is_smi =
        ((static_cast<intptr_t>(args[0]) & kSmiTagMask) == kSmiTag);
    const bool smi_ok = is_smi && may_be_smi;
    if (!smi_ok && (args[0] != null_value)) {
      SubtypeTestCachePtr cache =
          static_cast<SubtypeTestCachePtr>(LOAD_CONSTANT(rE));

      // Throws (does not return) on failure.
      if (!AssertAssignable(thread, pc, FP, SP, args, cache)) {
        HANDLE_EXCEPTION;
      }
    }

    SP -= 4;  // Instance remains on stack.
    DISPATCH();
  }

  {
    BYTECODE(AssertSubtype, 0);
    ObjectPtr* args = SP - 4;

    // TODO(kustermann): Implement fast case for common arguments.

    // The arguments on the stack look like:
    //     args[0]  instantiator type args
    //     args[1]  function type args
    //     args[2]  sub_type
    //     args[3]  super_type
    //     args[4]  name

    // This is unused, since the negative case throws an exception.
    SP++;
    ObjectPtr* result_slot = SP;

    Exit(thread, FP, SP + 1, pc);
    INVOKE_RUNTIME(DRT_SubtypeCheck,
                   NativeArguments(thread, 5, args, result_slot));

    // Drop result slot and all arguments.
    SP -= 6;

    DISPATCH();
  }

  {
    BYTECODE(AssertBoolean, A);
    // Verify TOS is a valid condition value. rA != 0: must be exactly true
    // or false; rA == 0: must merely be non-null. On failure, the runtime
    // throws a type error (the call does not return normally).
    ObjectPtr value = SP[0];
    if (rA != 0u) {  // Should we perform type check?
      if ((value == true_value) || (value == false_value)) {
        goto AssertBooleanOk;
      }
    } else if (value != null_value) {
      goto AssertBooleanOk;
    }

    // Assertion failed.
    {
      SP[1] = SP[0];  // instance
      Exit(thread, FP, SP + 2, pc);
      INVOKE_RUNTIME(DRT_NonBoolTypeError,
                     NativeArguments(thread, 1, SP + 1, SP));
    }

  AssertBooleanOk:
    DISPATCH();
  }
2743
2744 {
2745 BYTECODE(Jump, T);
2746 LOAD_JUMP_TARGET();
2747 DISPATCH();
2748 }
2749
2750 {
2751 BYTECODE(JumpIfNoAsserts, T);
2752 if (!thread->isolate()->asserts()) {
2753 LOAD_JUMP_TARGET();
2754 }
2755 DISPATCH();
2756 }
2757
2758 {
2759 BYTECODE(JumpIfNotZeroTypeArgs, T);
2760 if (InterpreterHelpers::ArgDescTypeArgsLen(argdesc_) != 0) {
2761 LOAD_JUMP_TARGET();
2762 }
2763 DISPATCH();
2764 }
2765
2766 {
2767 BYTECODE(JumpIfEqStrict, T);
2768 SP -= 2;
2769 if (SP[1] == SP[2]) {
2770 LOAD_JUMP_TARGET();
2771 }
2772 DISPATCH();
2773 }
2774
2775 {
2776 BYTECODE(JumpIfNeStrict, T);
2777 SP -= 2;
2778 if (SP[1] != SP[2]) {
2779 LOAD_JUMP_TARGET();
2780 }
2781 DISPATCH();
2782 }
2783
2784 {
2785 BYTECODE(JumpIfTrue, T);
2786 SP -= 1;
2787 if (SP[1] == true_value) {
2788 LOAD_JUMP_TARGET();
2789 }
2790 DISPATCH();
2791 }
2792
2793 {
2794 BYTECODE(JumpIfFalse, T);
2795 SP -= 1;
2796 if (SP[1] == false_value) {
2797 LOAD_JUMP_TARGET();
2798 }
2799 DISPATCH();
2800 }
2801
2802 {
2803 BYTECODE(JumpIfNull, T);
2804 SP -= 1;
2805 if (SP[1] == null_value) {
2806 LOAD_JUMP_TARGET();
2807 }
2808 DISPATCH();
2809 }
2810
2811 {
2812 BYTECODE(JumpIfNotNull, T);
2813 SP -= 1;
2814 if (SP[1] != null_value) {
2815 LOAD_JUMP_TARGET();
2816 }
2817 DISPATCH();
2818 }
2819
2820 {
2821 BYTECODE(JumpIfUnchecked, T);
2822 // Interpreter is not tracking unchecked calls, so fall through to
2823 // parameter type checks.
2824 DISPATCH();
2825 }
2826
  {
    BYTECODE(StoreIndexedTOS, 0);
    // Pop (array, index, value) and store value into array[index] with GC
    // write barrier. Index is assumed Smi and in range (debug-checked).
    SP -= 3;
    ArrayPtr array = RAW_CAST(Array, SP[1]);
    SmiPtr index = RAW_CAST(Smi, SP[2]);
    ObjectPtr value = SP[3];
    ASSERT(InterpreterHelpers::CheckIndex(index, array->ptr()->length_));
    array->ptr()->StorePointer(array->ptr()->data() + Smi::Value(index), value,
                               thread);
    DISPATCH();
  }

  {
    BYTECODE(EqualsNull, 0);
    DEBUG_CHECK;
    // Replace TOS with (TOS identical to null).
    SP[0] = (SP[0] == null_value) ? true_value : false_value;
    DISPATCH();
  }

  {
    BYTECODE(NullCheck, D);
    // Throw a null error (selector at pool index rD) if TOS is null;
    // otherwise just pop it.
    if (UNLIKELY(SP[0] == null_value)) {
      // Load selector.
      SP[0] = LOAD_CONSTANT(rD);
      goto ThrowNullError;
    }
    SP -= 1;

    DISPATCH();
  }
2858
2859 {
2860 BYTECODE(NegateInt, 0);
2861 DEBUG_CHECK;
2862 UNBOX_INT64(value, SP[0], Symbols::UnaryMinus());
2863 int64_t result = Utils::SubWithWrapAround(0, value);
2864 BOX_INT64_RESULT(result);
2865 DISPATCH();
2866 }
2867
2868 {
2869 BYTECODE(AddInt, 0);
2870 DEBUG_CHECK;
2871 SP -= 1;
2872 UNBOX_INT64(a, SP[0], Symbols::Plus());
2873 UNBOX_INT64(b, SP[1], Symbols::Plus());
2874 int64_t result = Utils::AddWithWrapAround(a, b);
2875 BOX_INT64_RESULT(result);
2876 DISPATCH();
2877 }
2878
2879 {
2880 BYTECODE(SubInt, 0);
2881 DEBUG_CHECK;
2882 SP -= 1;
2883 UNBOX_INT64(a, SP[0], Symbols::Minus());
2884 UNBOX_INT64(b, SP[1], Symbols::Minus());
2885 int64_t result = Utils::SubWithWrapAround(a, b);
2886 BOX_INT64_RESULT(result);
2887 DISPATCH();
2888 }
2889
2890 {
2891 BYTECODE(MulInt, 0);
2892 DEBUG_CHECK;
2893 SP -= 1;
2894 UNBOX_INT64(a, SP[0], Symbols::Star());
2895 UNBOX_INT64(b, SP[1], Symbols::Star());
2896 int64_t result = Utils::MulWithWrapAround(a, b);
2897 BOX_INT64_RESULT(result);
2898 DISPATCH();
2899 }
2900
2901 {
2902 BYTECODE(TruncDivInt, 0);
2903 DEBUG_CHECK;
2904 SP -= 1;
2905 UNBOX_INT64(a, SP[0], Symbols::TruncDivOperator());
2906 UNBOX_INT64(b, SP[1], Symbols::TruncDivOperator());
2907 if (UNLIKELY(b == 0)) {
2908 goto ThrowIntegerDivisionByZeroException;
2909 }
2910 int64_t result;
2911 if (UNLIKELY((a == Mint::kMinValue) && (b == -1))) {
2912 result = Mint::kMinValue;
2913 } else {
2914 result = a / b;
2915 }
2916 BOX_INT64_RESULT(result);
2917 DISPATCH();
2918 }
2919
2920 {
2921 BYTECODE(ModInt, 0);
2922 DEBUG_CHECK;
2923 SP -= 1;
2924 UNBOX_INT64(a, SP[0], Symbols::Percent());
2925 UNBOX_INT64(b, SP[1], Symbols::Percent());
2926 if (UNLIKELY(b == 0)) {
2927 goto ThrowIntegerDivisionByZeroException;
2928 }
2929 int64_t result;
2930 if (UNLIKELY((a == Mint::kMinValue) && (b == -1))) {
2931 result = 0;
2932 } else {
2933 result = a % b;
2934 if (result < 0) {
2935 if (b < 0) {
2936 result -= b;
2937 } else {
2938 result += b;
2939 }
2940 }
2941 }
2942 BOX_INT64_RESULT(result);
2943 DISPATCH();
2944 }
2945
2946 {
2947 BYTECODE(BitAndInt, 0);
2948 DEBUG_CHECK;
2949 SP -= 1;
2950 UNBOX_INT64(a, SP[0], Symbols::Ampersand());
2951 UNBOX_INT64(b, SP[1], Symbols::Ampersand());
2952 int64_t result = a & b;
2953 BOX_INT64_RESULT(result);
2954 DISPATCH();
2955 }
2956
2957 {
2958 BYTECODE(BitOrInt, 0);
2959 DEBUG_CHECK;
2960 SP -= 1;
2961 UNBOX_INT64(a, SP[0], Symbols::BitOr());
2962 UNBOX_INT64(b, SP[1], Symbols::BitOr());
2963 int64_t result = a | b;
2964 BOX_INT64_RESULT(result);
2965 DISPATCH();
2966 }
2967
2968 {
2969 BYTECODE(BitXorInt, 0);
2970 DEBUG_CHECK;
2971 SP -= 1;
2972 UNBOX_INT64(a, SP[0], Symbols::Caret());
2973 UNBOX_INT64(b, SP[1], Symbols::Caret());
2974 int64_t result = a ^ b;
2975 BOX_INT64_RESULT(result);
2976 DISPATCH();
2977 }
2978
2979 {
2980 BYTECODE(ShlInt, 0);
2981 DEBUG_CHECK;
2982 SP -= 1;
2983 UNBOX_INT64(a, SP[0], Symbols::LeftShiftOperator());
2984 UNBOX_INT64(b, SP[1], Symbols::LeftShiftOperator());
2985 if (b < 0) {
2986 SP[0] = SP[1];
2987 goto ThrowArgumentError;
2988 }
2989 int64_t result = Utils::ShiftLeftWithTruncation(a, b);
2990 BOX_INT64_RESULT(result);
2991 DISPATCH();
2992 }
2993
2994 {
2995 BYTECODE(ShrInt, 0);
2996 DEBUG_CHECK;
2997 SP -= 1;
2998 UNBOX_INT64(a, SP[0], Symbols::RightShiftOperator());
2999 UNBOX_INT64(b, SP[1], Symbols::RightShiftOperator());
3000 if (b < 0) {
3001 SP[0] = SP[1];
3002 goto ThrowArgumentError;
3003 }
3004 int64_t result = a >> Utils::Minimum<int64_t>(b, Mint::kBits);
3005 BOX_INT64_RESULT(result);
3006 DISPATCH();
3007 }
3008
3009 {
3010 BYTECODE(CompareIntEq, 0);
3011 DEBUG_CHECK;
3012 SP -= 1;
3013 if (SP[0] == SP[1]) {
3014 SP[0] = true_value;
3015 } else if (!SP[0]->IsHeapObject() || !SP[1]->IsHeapObject() ||
3016 (SP[0] == null_value) || (SP[1] == null_value)) {
3017 SP[0] = false_value;
3018 } else {
3019 int64_t a = Integer::GetInt64Value(RAW_CAST(Integer, SP[0]));
3020 int64_t b = Integer::GetInt64Value(RAW_CAST(Integer, SP[1]));
3021 SP[0] = (a == b) ? true_value : false_value;
3022 }
3023 DISPATCH();
3024 }
3025
3026 {
3027 BYTECODE(CompareIntGt, 0);
3028 DEBUG_CHECK;
3029 SP -= 1;
3030 UNBOX_INT64(a, SP[0], Symbols::RAngleBracket());
3031 UNBOX_INT64(b, SP[1], Symbols::RAngleBracket());
3032 SP[0] = (a > b) ? true_value : false_value;
3033 DISPATCH();
3034 }
3035
3036 {
3037 BYTECODE(CompareIntLt, 0);
3038 DEBUG_CHECK;
3039 SP -= 1;
3040 UNBOX_INT64(a, SP[0], Symbols::LAngleBracket());
3041 UNBOX_INT64(b, SP[1], Symbols::LAngleBracket());
3042 SP[0] = (a < b) ? true_value : false_value;
3043 DISPATCH();
3044 }
3045
3046 {
3047 BYTECODE(CompareIntGe, 0);
3048 DEBUG_CHECK;
3049 SP -= 1;
3050 UNBOX_INT64(a, SP[0], Symbols::GreaterEqualOperator());
3051 UNBOX_INT64(b, SP[1], Symbols::GreaterEqualOperator());
3052 SP[0] = (a >= b) ? true_value : false_value;
3053 DISPATCH();
3054 }
3055
3056 {
3057 BYTECODE(CompareIntLe, 0);
3058 DEBUG_CHECK;
3059 SP -= 1;
3060 UNBOX_INT64(a, SP[0], Symbols::LessEqualOperator());
3061 UNBOX_INT64(b, SP[1], Symbols::LessEqualOperator());
3062 SP[0] = (a <= b) ? true_value : false_value;
3063 DISPATCH();
3064 }
3065
3066 {
3067 BYTECODE(NegateDouble, 0);
3068 DEBUG_CHECK;
3069 UNBOX_DOUBLE(value, SP[0], Symbols::UnaryMinus());
3070 double result = -value;
3071 BOX_DOUBLE_RESULT(result);
3072 DISPATCH();
3073 }
3074
3075 {
3076 BYTECODE(AddDouble, 0);
3077 DEBUG_CHECK;
3078 SP -= 1;
3079 UNBOX_DOUBLE(a, SP[0], Symbols::Plus());
3080 UNBOX_DOUBLE(b, SP[1], Symbols::Plus());
3081 double result = a + b;
3082 BOX_DOUBLE_RESULT(result);
3083 DISPATCH();
3084 }
3085
3086 {
3087 BYTECODE(SubDouble, 0);
3088 DEBUG_CHECK;
3089 SP -= 1;
3090 UNBOX_DOUBLE(a, SP[0], Symbols::Minus());
3091 UNBOX_DOUBLE(b, SP[1], Symbols::Minus());
3092 double result = a - b;
3093 BOX_DOUBLE_RESULT(result);
3094 DISPATCH();
3095 }
3096
3097 {
3098 BYTECODE(MulDouble, 0);
3099 DEBUG_CHECK;
3100 SP -= 1;
3101 UNBOX_DOUBLE(a, SP[0], Symbols::Star());
3102 UNBOX_DOUBLE(b, SP[1], Symbols::Star());
3103 double result = a * b;
3104 BOX_DOUBLE_RESULT(result);
3105 DISPATCH();
3106 }
3107
3108 {
3109 BYTECODE(DivDouble, 0);
3110 DEBUG_CHECK;
3111 SP -= 1;
3112 UNBOX_DOUBLE(a, SP[0], Symbols::Slash());
3113 UNBOX_DOUBLE(b, SP[1], Symbols::Slash());
3114 double result = a / b;
3115 BOX_DOUBLE_RESULT(result);
3116 DISPATCH();
3117 }
3118
3119 {
3120 BYTECODE(CompareDoubleEq, 0);
3121 DEBUG_CHECK;
3122 SP -= 1;
3123 if ((SP[0] == null_value) || (SP[1] == null_value)) {
3124 SP[0] = (SP[0] == SP[1]) ? true_value : false_value;
3125 } else {
3126 double a = Double::RawCast(SP[0])->ptr()->value_;
3127 double b = Double::RawCast(SP[1])->ptr()->value_;
3128 SP[0] = (a == b) ? true_value : false_value;
3129 }
3130 DISPATCH();
3131 }
3132
3133 {
3134 BYTECODE(CompareDoubleGt, 0);
3135 DEBUG_CHECK;
3136 SP -= 1;
3137 UNBOX_DOUBLE(a, SP[0], Symbols::RAngleBracket());
3138 UNBOX_DOUBLE(b, SP[1], Symbols::RAngleBracket());
3139 SP[0] = (a > b) ? true_value : false_value;
3140 DISPATCH();
3141 }
3142
3143 {
3144 BYTECODE(CompareDoubleLt, 0);
3145 DEBUG_CHECK;
3146 SP -= 1;
3147 UNBOX_DOUBLE(a, SP[0], Symbols::LAngleBracket());
3148 UNBOX_DOUBLE(b, SP[1], Symbols::LAngleBracket());
3149 SP[0] = (a < b) ? true_value : false_value;
3150 DISPATCH();
3151 }
3152
3153 {
3154 BYTECODE(CompareDoubleGe, 0);
3155 DEBUG_CHECK;
3156 SP -= 1;
3157 UNBOX_DOUBLE(a, SP[0], Symbols::GreaterEqualOperator());
3158 UNBOX_DOUBLE(b, SP[1], Symbols::GreaterEqualOperator());
3159 SP[0] = (a >= b) ? true_value : false_value;
3160 DISPATCH();
3161 }
3162
3163 {
3164 BYTECODE(CompareDoubleLe, 0);
3165 DEBUG_CHECK;
3166 SP -= 1;
3167 UNBOX_DOUBLE(a, SP[0], Symbols::LessEqualOperator());
3168 UNBOX_DOUBLE(b, SP[1], Symbols::LessEqualOperator());
3169 SP[0] = (a <= b) ? true_value : false_value;
3170 DISPATCH();
3171 }
3172
3173 {
3174 BYTECODE(AllocateClosure, D);
3175 ++SP;
3176 if (!AllocateClosure(thread, pc, FP, SP)) {
3177 HANDLE_EXCEPTION;
3178 }
3179 DISPATCH();
3180 }
3181
3182 {
3183 BYTECODE_ENTRY_LABEL(Trap);
3184
3185#define UNIMPLEMENTED_LABEL_ORDN(Name)
3186#define UNIMPLEMENTED_LABEL_WIDE(Name)
3187#define UNIMPLEMENTED_LABEL_RESV(Name) BYTECODE_ENTRY_LABEL(Name)
3188#define UNIMPLEMENTED_LABEL(name, encoding, kind, op1, op2, op3) \
3189 UNIMPLEMENTED_LABEL_##kind(name)
3190
3191 KERNEL_BYTECODES_LIST(UNIMPLEMENTED_LABEL)
3192
3193#undef UNIMPLEMENTED_LABEL_ORDN
3194#undef UNIMPLEMENTED_LABEL_WIDE
3195#undef UNIMPLEMENTED_LABEL_RESV
3196#undef UNIMPLEMENTED_LABEL
3197
3198 UNIMPLEMENTED();
3199 DISPATCH();
3200 }
3201
3202 {
3203 BYTECODE(VMInternal_ImplicitGetter, 0);
3204
3205 FunctionPtr function = FrameFunction(FP);
3206 ASSERT(Function::kind(function) == FunctionLayout::kImplicitGetter);
3207
3208 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3209
3210 // Field object is cached in function's data_.
3211 FieldPtr field = static_cast<FieldPtr>(function->ptr()->data_);
3212 intptr_t offset_in_words =
3213 Smi::Value(field->ptr()->host_offset_or_field_id_);
3214
3215 const intptr_t kArgc = 1;
3216 InstancePtr instance =
3217 static_cast<InstancePtr>(FrameArguments(FP, kArgc)[0]);
3218 InstancePtr value =
3219 reinterpret_cast<InstancePtr*>(instance->ptr())[offset_in_words];
3220
3221 if (UNLIKELY(value == Object::sentinel().raw())) {
3222 SP[1] = 0; // Result slot.
3223 SP[2] = instance;
3224 SP[3] = field;
3225 Exit(thread, FP, SP + 4, pc);
3226 INVOKE_RUNTIME(
3227 DRT_InitInstanceField,
3228 NativeArguments(thread, 2, /* argv */ SP + 2, /* ret val */ SP + 1));
3229
3230 function = FrameFunction(FP);
3231 instance = static_cast<InstancePtr>(SP[2]);
3232 field = static_cast<FieldPtr>(SP[3]);
3233 offset_in_words = Smi::Value(field->ptr()->host_offset_or_field_id_);
3234 value = reinterpret_cast<InstancePtr*>(instance->ptr())[offset_in_words];
3235 }
3236
3237 *++SP = value;
3238
3239#if !defined(PRODUCT)
3240 if (UNLIKELY(Field::NeedsLoadGuardBit::decode(field->ptr()->kind_bits_))) {
3241 if (!AssertAssignableField<true>(thread, pc, FP, SP, instance, field,
3242 value)) {
3243 HANDLE_EXCEPTION;
3244 }
3245 // Reload objects after the call which may trigger GC.
3246 field = static_cast<FieldPtr>(FrameFunction(FP)->ptr()->data_);
3247 instance = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[0]);
3248 value = reinterpret_cast<InstancePtr*>(instance->ptr())[offset_in_words];
3249 }
3250#endif
3251
3252 const bool unboxing =
3253 (field->ptr()->is_nullable_ != kNullCid) &&
3254 Field::UnboxingCandidateBit::decode(field->ptr()->kind_bits_);
3255 classid_t guarded_cid = field->ptr()->guarded_cid_;
3256 if (unboxing && (guarded_cid == kDoubleCid) && supports_unboxed_doubles_) {
3257 ASSERT(FlowGraphCompiler::SupportsUnboxedDoubles());
3258 double raw_value = Double::RawCast(value)->ptr()->value_;
3259 // AllocateDouble places result at SP[0]
3260 if (!AllocateDouble(thread, raw_value, pc, FP, SP)) {
3261 HANDLE_EXCEPTION;
3262 }
3263 } else if (unboxing && (guarded_cid == kFloat32x4Cid) &&
3264 supports_unboxed_simd128_) {
3265 simd128_value_t raw_value;
3266 raw_value.readFrom(Float32x4::RawCast(value)->ptr()->value_);
3267 // AllocateFloat32x4 places result at SP[0]
3268 if (!AllocateFloat32x4(thread, raw_value, pc, FP, SP)) {
3269 HANDLE_EXCEPTION;
3270 }
3271 } else if (unboxing && (guarded_cid == kFloat64x2Cid) &&
3272 supports_unboxed_simd128_) {
3273 simd128_value_t raw_value;
3274 raw_value.readFrom(Float64x2::RawCast(value)->ptr()->value_);
3275 // AllocateFloat64x2 places result at SP[0]
3276 if (!AllocateFloat64x2(thread, raw_value, pc, FP, SP)) {
3277 HANDLE_EXCEPTION;
3278 }
3279 }
3280
3281 DISPATCH();
3282 }
3283
3284 {
3285 BYTECODE(VMInternal_ImplicitSetter, 0);
3286
3287 FunctionPtr function = FrameFunction(FP);
3288 ASSERT(Function::kind(function) == FunctionLayout::kImplicitSetter);
3289
3290 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3291
3292 // Field object is cached in function's data_.
3293 FieldPtr field = static_cast<FieldPtr>(function->ptr()->data_);
3294 intptr_t offset_in_words =
3295 Smi::Value(field->ptr()->host_offset_or_field_id_);
3296 const intptr_t kArgc = 2;
3297 InstancePtr instance =
3298 static_cast<InstancePtr>(FrameArguments(FP, kArgc)[0]);
3299 InstancePtr value = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[1]);
3300
3301 if (!AssertAssignableField<false>(thread, pc, FP, SP, instance, field,
3302 value)) {
3303 HANDLE_EXCEPTION;
3304 }
3305 // Reload objects after the call which may trigger GC.
3306 field = static_cast<FieldPtr>(FrameFunction(FP)->ptr()->data_);
3307 instance = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[0]);
3308 value = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[1]);
3309
3310 if (InterpreterHelpers::FieldNeedsGuardUpdate(field, value)) {
3311 SP[1] = 0; // Unused result of runtime call.
3312 SP[2] = field;
3313 SP[3] = value;
3314 Exit(thread, FP, SP + 4, pc);
3315 if (!InvokeRuntime(thread, this, DRT_UpdateFieldCid,
3316 NativeArguments(thread, 2, /* argv */ SP + 2,
3317 /* retval */ SP + 1))) {
3318 HANDLE_EXCEPTION;
3319 }
3320
3321 // Reload objects after the call which may trigger GC.
3322 field = static_cast<FieldPtr>(FrameFunction(FP)->ptr()->data_);
3323 instance = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[0]);
3324 value = static_cast<InstancePtr>(FrameArguments(FP, kArgc)[1]);
3325 }
3326
3327 const bool unboxing =
3328 (field->ptr()->is_nullable_ != kNullCid) &&
3329 Field::UnboxingCandidateBit::decode(field->ptr()->kind_bits_);
3330 classid_t guarded_cid = field->ptr()->guarded_cid_;
3331 if (unboxing && (guarded_cid == kDoubleCid) && supports_unboxed_doubles_) {
3332 double raw_value = Double::RawCast(value)->ptr()->value_;
3333 DoublePtr box =
3334 *(reinterpret_cast<DoublePtr*>(instance->ptr()) + offset_in_words);
3335 ASSERT(box != null_value); // Non-initializing store.
3336 box->ptr()->value_ = raw_value;
3337 } else if (unboxing && (guarded_cid == kFloat32x4Cid) &&
3338 supports_unboxed_simd128_) {
3339 simd128_value_t raw_value;
3340 raw_value.readFrom(Float32x4::RawCast(value)->ptr()->value_);
3341 Float32x4Ptr box =
3342 *(reinterpret_cast<Float32x4Ptr*>(instance->ptr()) + offset_in_words);
3343 ASSERT(box != null_value); // Non-initializing store.
3344 raw_value.writeTo(box->ptr()->value_);
3345 } else if (unboxing && (guarded_cid == kFloat64x2Cid) &&
3346 supports_unboxed_simd128_) {
3347 simd128_value_t raw_value;
3348 raw_value.readFrom(Float64x2::RawCast(value)->ptr()->value_);
3349 Float64x2Ptr box =
3350 *(reinterpret_cast<Float64x2Ptr*>(instance->ptr()) + offset_in_words);
3351 ASSERT(box != null_value); // Non-initializing store.
3352 raw_value.writeTo(box->ptr()->value_);
3353 } else {
3354 instance->ptr()->StorePointer(
3355 reinterpret_cast<InstancePtr*>(instance->ptr()) + offset_in_words,
3356 value, thread);
3357 }
3358
3359 *++SP = null_value;
3360
3361 DISPATCH();
3362 }
3363
3364 {
3365 BYTECODE(VMInternal_ImplicitStaticGetter, 0);
3366
3367 FunctionPtr function = FrameFunction(FP);
3368 ASSERT(Function::kind(function) == FunctionLayout::kImplicitStaticGetter);
3369
3370 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3371
3372 // Field object is cached in function's data_.
3373 FieldPtr field = static_cast<FieldPtr>(function->ptr()->data_);
3374 intptr_t field_id = Smi::Value(field->ptr()->host_offset_or_field_id_);
3375 InstancePtr value = thread->field_table_values()[field_id];
3376 if (value == Object::sentinel().raw() ||
3377 value == Object::transition_sentinel().raw()) {
3378 SP[1] = 0; // Unused result of invoking the initializer.
3379 SP[2] = field;
3380 Exit(thread, FP, SP + 3, pc);
3381 INVOKE_RUNTIME(DRT_InitStaticField,
3382 NativeArguments(thread, 1, SP + 2, SP + 1));
3383
3384 // Reload objects after the call which may trigger GC.
3385 function = FrameFunction(FP);
3386 field = static_cast<FieldPtr>(function->ptr()->data_);
3387 // The field is initialized by the runtime call, but not returned.
3388 intptr_t field_id = Smi::Value(field->ptr()->host_offset_or_field_id_);
3389 value = thread->field_table_values()[field_id];
3390 }
3391
3392 // Field was initialized. Return its value.
3393 *++SP = value;
3394
3395#if !defined(PRODUCT)
3396 if (UNLIKELY(Field::NeedsLoadGuardBit::decode(field->ptr()->kind_bits_))) {
3397 if (!AssertAssignableField<true>(thread, pc, FP, SP,
3398 static_cast<InstancePtr>(null_value),
3399 field, value)) {
3400 HANDLE_EXCEPTION;
3401 }
3402 }
3403#endif
3404
3405 DISPATCH();
3406 }
3407
3408 {
3409 BYTECODE(VMInternal_MethodExtractor, 0);
3410
3411 FunctionPtr function = FrameFunction(FP);
3412 ASSERT(Function::kind(function) == FunctionLayout::kMethodExtractor);
3413
3414 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3415
3416 ASSERT(InterpreterHelpers::ArgDescTypeArgsLen(argdesc_) == 0);
3417
3418 ++SP;
3419 if (!AllocateClosure(thread, pc, FP, SP)) {
3420 HANDLE_EXCEPTION;
3421 }
3422
3423 ++SP;
3424 if (!AllocateContext(thread, 1, pc, FP, SP)) {
3425 HANDLE_EXCEPTION;
3426 }
3427
3428 ContextPtr context = Context::RawCast(*SP--);
3429 InstancePtr instance = Instance::RawCast(FrameArguments(FP, 1)[0]);
3430 context->ptr()->StorePointer(
3431 reinterpret_cast<InstancePtr*>(&context->ptr()->data()[0]), instance);
3432
3433 ClosurePtr closure = Closure::RawCast(*SP);
3434 closure->ptr()->StorePointer(
3435 &closure->ptr()->instantiator_type_arguments_,
3436 InterpreterHelpers::GetTypeArguments(thread, instance));
3437 // function_type_arguments_ is already null
3438 closure->ptr()->delayed_type_arguments_ =
3439 Object::empty_type_arguments().raw();
3440 closure->ptr()->StorePointer(
3441 &closure->ptr()->function_,
3442 Function::RawCast(FrameFunction(FP)->ptr()->data_));
3443 closure->ptr()->StorePointer(&closure->ptr()->context_, context);
3444 // hash_ is already null
3445
3446 DISPATCH();
3447 }
3448
3449 {
3450 BYTECODE(VMInternal_InvokeClosure, 0);
3451
3452 FunctionPtr function = FrameFunction(FP);
3453 ASSERT(Function::kind(function) == FunctionLayout::kInvokeFieldDispatcher);
3454
3455 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3456
3457 const intptr_t type_args_len =
3458 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
3459 const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;
3460 const intptr_t argc =
3461 InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
3462
3463 ClosurePtr receiver =
3464 Closure::RawCast(FrameArguments(FP, argc)[receiver_idx]);
3465 function = receiver->ptr()->function_;
3466
3467 SP[1] = function;
3468 goto TailCallSP1;
3469 }
3470
3471 {
3472 BYTECODE(VMInternal_InvokeField, 0);
3473
3474 FunctionPtr function = FrameFunction(FP);
3475 ASSERT(Function::kind(function) == FunctionLayout::kInvokeFieldDispatcher);
3476
3477 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3478
3479 const intptr_t type_args_len =
3480 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
3481 const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;
3482 const intptr_t argc =
3483 InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
3484 ObjectPtr receiver = FrameArguments(FP, argc)[receiver_idx];
3485
3486 // Possibly demangle field name and invoke field getter on receiver.
3487 {
3488 SP[1] = argdesc_; // Save argdesc_.
3489 SP[2] = 0; // Result of runtime call.
3490 SP[3] = receiver; // Receiver.
3491 SP[4] = function->ptr()->name_; // Field name (may change during call).
3492 Exit(thread, FP, SP + 5, pc);
3493 if (!InvokeRuntime(thread, this, DRT_GetFieldForDispatch,
3494 NativeArguments(thread, 2, SP + 3, SP + 2))) {
3495 HANDLE_EXCEPTION;
3496 }
3497 function = FrameFunction(FP);
3498 argdesc_ = Array::RawCast(SP[1]);
3499 }
3500
3501 // If the field name in the arguments is different after the call, then
3502 // this was a dynamic call.
3503 StringPtr field_name = String::RawCast(SP[4]);
3504 const bool is_dynamic_call = function->ptr()->name_ != field_name;
3505
3506 // Replace receiver with field value, keep all other arguments, and
3507 // invoke 'call' function, or if not found, invoke noSuchMethod.
3508 FrameArguments(FP, argc)[receiver_idx] = receiver = SP[2];
3509
3510 // If the field value is a closure, no need to resolve 'call' function.
3511 if (InterpreterHelpers::GetClassId(receiver) == kClosureCid) {
3512 if (is_dynamic_call) {
3513 // TODO(dartbug.com/40813): Move checks that are currently compiled
3514 // in the closure body to here as they are also moved to
3515 // FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher.
3516 }
3517 SP[1] = Closure::RawCast(receiver)->ptr()->function_;
3518 goto TailCallSP1;
3519 }
3520
3521 // Otherwise, call runtime to resolve 'call' function.
3522 {
3523 SP[1] = 0; // Result slot.
3524 SP[2] = receiver;
3525 SP[3] = argdesc_;
3526 Exit(thread, FP, SP + 4, pc);
3527 if (!InvokeRuntime(thread, this, DRT_ResolveCallFunction,
3528 NativeArguments(thread, 2, SP + 2, SP + 1))) {
3529 HANDLE_EXCEPTION;
3530 }
3531 argdesc_ = Array::RawCast(SP[3]);
3532 function = Function::RawCast(SP[1]);
3533 receiver = SP[2];
3534 }
3535
3536 if (function != Function::null()) {
3537 SP[1] = function;
3538 goto TailCallSP1;
3539 }
3540
3541 // Function 'call' could not be resolved for argdesc_.
3542 // Invoke noSuchMethod.
3543 SP[1] = null_value;
3544 SP[2] = receiver;
3545 SP[3] = Symbols::Call().raw(); // We failed to resolve the 'call' function.
3546 SP[4] = argdesc_;
3547 SP[5] = null_value; // Array of arguments (will be filled).
3548
3549 // Allocate array of arguments.
3550 {
3551 SP[6] = Smi::New(argc); // length
3552 SP[7] = null_value; // type
3553 Exit(thread, FP, SP + 8, pc);
3554 if (!InvokeRuntime(thread, this, DRT_AllocateArray,
3555 NativeArguments(thread, 2, SP + 6, SP + 5))) {
3556 HANDLE_EXCEPTION;
3557 }
3558 }
3559
3560 // Copy arguments into the newly allocated array.
3561 ObjectPtr* argv = FrameArguments(FP, argc);
3562 ArrayPtr array = static_cast<ArrayPtr>(SP[5]);
3563 ASSERT(array->GetClassId() == kArrayCid);
3564 for (intptr_t i = 0; i < argc; i++) {
3565 array->ptr()->data()[i] = argv[i];
3566 }
3567
3568 // Invoke noSuchMethod passing down receiver, target name, argument
3569 // descriptor, and array of arguments.
3570 {
3571 Exit(thread, FP, SP + 6, pc);
3572 if (!InvokeRuntime(thread, this, DRT_InvokeNoSuchMethod,
3573 NativeArguments(thread, 4, SP + 2, SP + 1))) {
3574 HANDLE_EXCEPTION;
3575 }
3576
3577 ++SP; // Result at SP[0]
3578 }
3579 DISPATCH();
3580 }
3581
3582 {
3583 BYTECODE(VMInternal_ForwardDynamicInvocation, 0);
3584 FunctionPtr function = FrameFunction(FP);
3585 ASSERT(Function::kind(function) ==
3586 FunctionLayout::kDynamicInvocationForwarder);
3587
3588 BUMP_USAGE_COUNTER_ON_ENTRY(function);
3589
3590 ArrayPtr checks = Array::RawCast(function->ptr()->data_);
3591 FunctionPtr target = Function::RawCast(checks->ptr()->data()[0]);
3592 ASSERT(Function::kind(target) !=
3593 FunctionLayout::kDynamicInvocationForwarder);
3594 BytecodePtr target_bytecode = target->ptr()->bytecode_;
3595 ASSERT(target_bytecode != Bytecode::null());
3596 ASSERT(target_bytecode->IsBytecode());
3597
3598 const KBCInstr* pc2 = reinterpret_cast<const KBCInstr*>(
3599 target_bytecode->ptr()->instructions_);
3600 if (KernelBytecode::IsEntryOptionalOpcode(pc2)) {
3601 pp_ = target_bytecode->ptr()->object_pool_;
3602 uint32_t rA, rB, rC;
3603 rA = KernelBytecode::DecodeA(pc2);
3604 rB = KernelBytecode::DecodeB(pc2);
3605 rC = KernelBytecode::DecodeC(pc2);
3606 pc2 = KernelBytecode::Next(pc2);
3607 if (!CopyParameters(thread, &pc2, &FP, &SP, rA, rB, rC)) {
3608 goto NoSuchMethodFromPrologue;
3609 }
3610 }
3611
3612 intptr_t len = Smi::Value(checks->ptr()->length_);
3613 SP[1] = checks;
3614 SP[2] = argdesc_;
3615
3616 const intptr_t type_args_len =
3617 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
3618 const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;
3619 const intptr_t argc =
3620 InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
3621
3622 InstancePtr receiver =
3623 Instance::RawCast(FrameArguments(FP, argc)[receiver_idx]);
3624 SP[5] = InterpreterHelpers::GetTypeArguments(thread, receiver);
3625
3626 if (type_args_len > 0) {
3627 SP[6] = FrameArguments(FP, argc)[0];
3628 } else {
3629 SP[6] = TypeArguments::RawCast(checks->ptr()->data()[1]);
3630 // TODO(regis): Verify this condition; why test SP[6]?
3631 if (SP[5] != null_value && SP[6] != null_value) {
3632 SP[7] = SP[6]; // type_arguments
3633 SP[8] = SP[5]; // instantiator_type_args
3634 SP[9] = null_value; // function_type_args
3635 Exit(thread, FP, SP + 10, pc);
3636 INVOKE_RUNTIME(DRT_InstantiateTypeArguments,
3637 NativeArguments(thread, 3, SP + 7, SP + 7));
3638 SP[6] = SP[7];
3639 }
3640 }
3641
3642 for (intptr_t i = 2; i < len; i++) {
3643 ParameterTypeCheckPtr check =
3644 ParameterTypeCheck::RawCast(checks->ptr()->data()[i]);
3645
3646 if (LIKELY(check->ptr()->index_ != 0)) {
3647 ASSERT(&FP[check->ptr()->index_] <= SP);
3648 SP[3] = Instance::RawCast(FP[check->ptr()->index_]);
3649 // TODO(regis): Revisit null handling once interpreter supports NNBD.
3650 if (SP[3] == null_value) {
3651 continue; // Not handled by AssertAssignable for some reason...
3652 }
3653 SP[4] = check->ptr()->type_or_bound_;
3654 // SP[5]: Instantiator type args.
3655 // SP[6]: Function type args.
3656 SP[7] = check->ptr()->name_;
3657 if (!AssertAssignable(thread, pc, FP, SP + 7, SP + 3,
3658 check->ptr()->cache_)) {
3659 HANDLE_EXCEPTION;
3660 }
3661 } else {
3662 SP[3] = 0;
3663 SP[4] = 0;
3664 // SP[5]: Instantiator type args.
3665 // SP[6]: Function type args.
3666 SP[7] = check->ptr()->param_;
3667 SP[8] = check->ptr()->type_or_bound_;
3668 SP[9] = check->ptr()->name_;
3669 SP[10] = 0;
3670 Exit(thread, FP, SP + 11, pc);
3671 INVOKE_RUNTIME(DRT_SubtypeCheck,
3672 NativeArguments(thread, 5, SP + 5, SP + 10));
3673 }
3674
3675 checks = Array::RawCast(SP[1]); // Reload after runtime call.
3676 }
3677
3678 target = Function::RawCast(checks->ptr()->data()[0]);
3679 argdesc_ = Array::RawCast(SP[2]);
3680
3681 SP = FP - 1; // Unmarshall optional parameters.
3682
3683 SP[1] = target;
3684 goto TailCallSP1;
3685 }
3686
3687 {
3688 BYTECODE(VMInternal_NoSuchMethodDispatcher, 0);
3689 FunctionPtr function = FrameFunction(FP);
3690 ASSERT(Function::kind(function) == FunctionLayout::kNoSuchMethodDispatcher);
3691 goto NoSuchMethodFromPrologue;
3692 }
3693
3694 {
3695 BYTECODE(VMInternal_ImplicitStaticClosure, 0);
3696 FunctionPtr function = FrameFunction(FP);
3697 ASSERT(Function::kind(function) ==
3698 FunctionLayout::kImplicitClosureFunction);
3699 UNIMPLEMENTED();
3700 DISPATCH();
3701 }
3702
3703 {
3704 BYTECODE(VMInternal_ImplicitInstanceClosure, 0);
3705 FunctionPtr function = FrameFunction(FP);
3706 ASSERT(Function::kind(function) ==
3707 FunctionLayout::kImplicitClosureFunction);
3708 UNIMPLEMENTED();
3709 DISPATCH();
3710 }
3711
3712 {
3713 TailCallSP1:
3714 FunctionPtr function = Function::RawCast(SP[1]);
3715
3716 for (;;) {
3717 if (Function::HasBytecode(function)) {
3718 ASSERT(function->IsFunction());
3719 BytecodePtr bytecode = function->ptr()->bytecode_;
3720 ASSERT(bytecode->IsBytecode());
3721 FP[kKBCFunctionSlotFromFp] = function;
3722 FP[kKBCPcMarkerSlotFromFp] = bytecode;
3723 pp_ = bytecode->ptr()->object_pool_;
3724 pc = reinterpret_cast<const KBCInstr*>(bytecode->ptr()->instructions_);
3725 NOT_IN_PRODUCT(pc_ = pc); // For the profiler.
3726 DISPATCH();
3727 }
3728
3729 if (Function::HasCode(function)) {
3730 const intptr_t type_args_len =
3731 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
3732 const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;
3733 const intptr_t argc =
3734 InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
3735 ObjectPtr* argv = FrameArguments(FP, argc);
3736 for (intptr_t i = 0; i < argc; i++) {
3737 *++SP = argv[i];
3738 }
3739
3740 ObjectPtr* call_base = SP - argc + 1;
3741 ObjectPtr* call_top = SP + 1;
3742 call_top[0] = function;
3743 if (!InvokeCompiled(thread, function, call_base, call_top, &pc, &FP,
3744 &SP)) {
3745 HANDLE_EXCEPTION;
3746 } else {
3747 HANDLE_RETURN;
3748 }
3749 DISPATCH();
3750 }
3751
3752 // Compile the function to either generate code or load bytecode.
3753 SP[1] = argdesc_;
3754 SP[2] = 0; // Code result.
3755 SP[3] = function;
3756 Exit(thread, FP, SP + 4, pc);
3757 if (!InvokeRuntime(thread, this, DRT_CompileFunction,
3758 NativeArguments(thread, 1, /* argv */ SP + 3,
3759 /* retval */ SP + 2))) {
3760 HANDLE_EXCEPTION;
3761 }
3762 function = Function::RawCast(SP[3]);
3763 argdesc_ = Array::RawCast(SP[1]);
3764
3765 ASSERT(Function::HasCode(function) || Function::HasBytecode(function));
3766 }
3767 }
3768
3769 // Helper used to handle noSuchMethod on closures.
3770 {
3771 NoSuchMethodFromPrologue:
3772 FunctionPtr function = FrameFunction(FP);
3773
3774 const intptr_t type_args_len =
3775 InterpreterHelpers::ArgDescTypeArgsLen(argdesc_);
3776 const intptr_t receiver_idx = type_args_len > 0 ? 1 : 0;
3777 const intptr_t argc =
3778 InterpreterHelpers::ArgDescArgCount(argdesc_) + receiver_idx;
3779 ObjectPtr* args = FrameArguments(FP, argc);
3780
3781 SP[1] = null_value;
3782 SP[2] = args[receiver_idx];
3783 SP[3] = function;
3784 SP[4] = argdesc_;
3785 SP[5] = null_value; // Array of arguments (will be filled).
3786
3787 // Allocate array of arguments.
3788 {
3789 SP[6] = Smi::New(argc); // length
3790 SP[7] = null_value; // type
3791 Exit(thread, FP, SP + 8, pc);
3792 if (!InvokeRuntime(thread, this, DRT_AllocateArray,
3793 NativeArguments(thread, 2, SP + 6, SP + 5))) {
3794 HANDLE_EXCEPTION;
3795 }
3796
3797 // Copy arguments into the newly allocated array.
3798 ArrayPtr array = static_cast<ArrayPtr>(SP[5]);
3799 ASSERT(array->GetClassId() == kArrayCid);
3800 for (intptr_t i = 0; i < argc; i++) {
3801 array->ptr()->data()[i] = args[i];
3802 }
3803 }
3804
3805 // Invoke noSuchMethod passing down receiver, function, argument descriptor
3806 // and array of arguments.
3807 {
3808 Exit(thread, FP, SP + 6, pc);
3809 INVOKE_RUNTIME(DRT_NoSuchMethodFromPrologue,
3810 NativeArguments(thread, 4, SP + 2, SP + 1));
3811 ++SP; // Result at SP[0]
3812 }
3813
3814 DISPATCH();
3815 }
3816
3817 {
3818 ThrowNullError:
3819 // SP[0] contains selector.
3820 SP[1] = 0; // Unused space for result.
3821 Exit(thread, FP, SP + 2, pc);
3822 INVOKE_RUNTIME(DRT_NullErrorWithSelector,
3823 NativeArguments(thread, 1, SP, SP + 1));
3824 UNREACHABLE();
3825 }
3826
3827 {
3828 ThrowIntegerDivisionByZeroException:
3829 SP[0] = 0; // Unused space for result.
3830 Exit(thread, FP, SP + 1, pc);
3831 INVOKE_RUNTIME(DRT_IntegerDivisionByZeroException,
3832 NativeArguments(thread, 0, SP, SP));
3833 UNREACHABLE();
3834 }
3835
3836 {
3837 ThrowArgumentError:
3838 // SP[0] contains value.
3839 SP[1] = 0; // Unused space for result.
3840 Exit(thread, FP, SP + 2, pc);
3841 INVOKE_RUNTIME(DRT_ArgumentError, NativeArguments(thread, 1, SP, SP + 1));
3842 UNREACHABLE();
3843 }
3844
3845 // Exception handling helper. Gets handler FP and PC from the Interpreter
3846 // where they were stored by Interpreter::Longjmp and proceeds to execute the
3847 // handler. Corner case: handler PC can be a fake marker that marks entry
3848 // frame, which means exception was not handled in the interpreter. In this
3849 // case we return the caught exception from Interpreter::Call.
3850 {
3851 HandleException:
3852 FP = fp_;
3853 pc = pc_;
3854 if (IsEntryFrameMarker(pc)) {
3855 pp_ = static_cast<ObjectPoolPtr>(fp_[kKBCSavedPpSlotFromEntryFp]);
3856 argdesc_ = static_cast<ArrayPtr>(fp_[kKBCSavedArgDescSlotFromEntryFp]);
3857 uword exit_fp = static_cast<uword>(fp_[kKBCExitLinkSlotFromEntryFp]);
3858 thread->set_top_exit_frame_info(exit_fp);
3859 thread->set_top_resource(top_resource);
3860 thread->set_vm_tag(vm_tag);
3861#if defined(DEBUG)
3862 if (IsTracingExecution()) {
3863 THR_Print("%" Pu64 " ", icount_);
3864 THR_Print("Returning exception from interpreter 0x%" Px " at fp_ 0x%" Px
3865 " exit 0x%" Px "\n",
3866 reinterpret_cast<uword>(this), reinterpret_cast<uword>(fp_),
3867 exit_fp);
3868 }
3869#endif
3870 ASSERT(HasFrame(reinterpret_cast<uword>(fp_)));
3871 return special_[KernelBytecode::kExceptionSpecialIndex];
3872 }
3873
3874 pp_ = InterpreterHelpers::FrameBytecode(FP)->ptr()->object_pool_;
3875 DISPATCH();
3876 }
3877
3878 UNREACHABLE();
3879 return 0;
3880}
3881
// Transfers control to the interpreted frame identified by |fp| (used when
// the exception runtime unwinds to a handler inside the interpreter).
// Records the resume state in fp_/pc_ and longjmps to the matching
// InterpreterSetjmpBuffer; execution continues at the HandleException label
// in Interpreter::Call. Does not return.
void Interpreter::JumpToFrame(uword pc, uword sp, uword fp, Thread* thread) {
  // Walk over all setjmp buffers (simulated --> C++ transitions)
  // and try to find the setjmp associated with the simulated frame pointer.
  InterpreterSetjmpBuffer* buf = last_setjmp_buffer();
  while ((buf->link() != NULL) && (buf->link()->fp() > fp)) {
    buf = buf->link();
  }
  ASSERT(buf != NULL);
  ASSERT(last_setjmp_buffer() == buf);

  // The C++ caller has not cleaned up the stack memory of C++ frames.
  // Prepare for unwinding frames by destroying all the stack resources
  // in the previous C++ frames.
  StackResource::Unwind(thread);

  // Target frame for the HandleException code in Interpreter::Call.
  fp_ = reinterpret_cast<ObjectPtr*>(fp);

  if (pc == StubCode::RunExceptionHandler().EntryPoint()) {
    // The RunExceptionHandler stub is a placeholder. We implement
    // its behavior here.
    ObjectPtr raw_exception = thread->active_exception();
    ObjectPtr raw_stacktrace = thread->active_stacktrace();
    ASSERT(raw_exception != Object::null());
    thread->set_active_exception(Object::null_object());
    thread->set_active_stacktrace(Object::null_object());
    // Stash the exception and stack trace where the handler's bytecode
    // (MoveSpecial) can find them, then resume at the real handler PC.
    special_[KernelBytecode::kExceptionSpecialIndex] = raw_exception;
    special_[KernelBytecode::kStackTraceSpecialIndex] = raw_stacktrace;
    pc_ = reinterpret_cast<const KBCInstr*>(thread->resume_pc());
  } else {
    pc_ = reinterpret_cast<const KBCInstr*>(pc);
  }

  // Set the tag.
  thread->set_vm_tag(VMTag::kDartInterpretedTagId);
  // Clear top exit frame.
  thread->set_top_exit_frame_info(0);

  // Never returns: lands at the setjmp in Interpreter::Call.
  buf->Longjmp();
  UNREACHABLE();
}
3922
3923void Interpreter::VisitObjectPointers(ObjectPointerVisitor* visitor) {
3924 visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&pp_));
3925 visitor->VisitPointer(reinterpret_cast<ObjectPtr*>(&argdesc_));
3926}
3927
3928} // namespace dart
3929
3930#endif // !defined(DART_PRECOMPILED_RUNTIME)
3931