// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/runtime_entry.h"

#include "vm/code_descriptors.h"
#include "vm/code_patcher.h"
#include "vm/compiler/api/deopt_id.h"
#include "vm/compiler/api/type_check_mode.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/dart_api_impl.h"
#include "vm/dart_api_state.h"
#include "vm/dart_entry.h"
#include "vm/debugger.h"
#include "vm/exceptions.h"
#include "vm/flags.h"
#include "vm/heap/verifier.h"
#include "vm/instructions.h"
#include "vm/interpreter.h"
#include "vm/kernel_isolate.h"
#include "vm/message.h"
#include "vm/message_handler.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/resolver.h"
#include "vm/service_isolate.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"
#include "vm/thread.h"
#include "vm/thread_registry.h"
#include "vm/type_testing_stubs.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/deopt_instructions.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

DEFINE_FLAG(
    int,
    max_subtype_cache_entries,
    100,
    "Maximum number of subtype cache entries (number of checks cached).");
DEFINE_FLAG(
    int,
    regexp_optimization_counter_threshold,
    1000,
    "RegExp's usage-counter value before it is optimized, -1 means never");
DEFINE_FLAG(int,
            reoptimization_counter_threshold,
            4000,
            "Counter threshold before a function gets reoptimized.");
DEFINE_FLAG(bool,
            stress_write_barrier_elimination,
            false,
            "Stress test write barrier elimination.");
DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
DEFINE_FLAG(bool,
            trace_deoptimization_verbose,
            false,
            "Trace deoptimization verbose");

DECLARE_FLAG(bool, enable_interpreter);
DECLARE_FLAG(int, max_deoptimization_counter_threshold);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(int, max_polymorphic_checks);

DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");

DEFINE_FLAG(int, gc_every, 0, "Run major GC on every N stack overflow checks");
DEFINE_FLAG(int,
            stacktrace_every,
            0,
            "Compute debugger stacktrace on every N stack overflow checks");
DEFINE_FLAG(charp,
            stacktrace_filter,
            NULL,
            "Compute stacktrace in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_filter,
            NULL,
            "Deoptimize in named function on stack overflow checks");

DEFINE_FLAG(bool,
            unopt_monomorphic_calls,
            true,
            "Enable specializing monomorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            unopt_megamorphic_calls,
            true,
            "Enable specializing megamorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            verbose_stack_overflow,
            false,
            "Print additional details about stack overflow.");

DECLARE_FLAG(int, reload_every);
DECLARE_FLAG(bool, reload_every_optimized);
DECLARE_FLAG(bool, reload_every_back_off);

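// Heap verification helper for TESTING/DEBUG configurations, invoked when
// transitioning between generated code and the VM.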
#if defined(TESTING) || defined(DEBUG)
void VerifyOnTransition() {
  Thread* thread = Thread::Current();
  TransitionGeneratedToVM transition(thread);
  VerifyPointersVisitor::VerifyPointers();
  thread->isolate()->heap()->Verify();
}
#endif

// Add function to a class and that class to the class dictionary so that
// frame walking can be used.
const Function& RegisterFakeFunction(const char* name, const Code& code) {
  Thread* thread = Thread::Current();
  const String& class_name = String::Handle(Symbols::New(thread, "ownerClass"));
  const Script& script = Script::Handle();
  const Library& lib = Library::Handle(Library::CoreLibrary());
  const Class& owner_class = Class::Handle(
      Class::New(lib, class_name, script, TokenPosition::kNoSource));
  const String& function_name = String::ZoneHandle(Symbols::New(thread, name));
  const Function& function = Function::ZoneHandle(Function::New(
      function_name, FunctionLayout::kRegularFunction, true, false, false,
      false, false, owner_class, TokenPosition::kMinSource));
  const Array& functions = Array::Handle(Array::New(1));
  functions.SetAt(0, function);
  owner_class.SetFunctions(functions);
  lib.AddClass(owner_class);
  function.AttachCode(code);
  return function;
}

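// Throws a RangeError for a failed bounds check (or an ArgumentError when an
// operand is not an integer).
// Arg0: length of the indexable object.
// Arg1: index that was out of range.
//
// Illustrative Dart code that can end up here once the inlined bounds check
// fails:
//
//   final list = <int>[1, 2, 3];
//   list[5];  // RangeError: index out of range.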
DEFINE_RUNTIME_ENTRY(RangeError, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  if (!index.IsInteger()) {
    // Throw: new ArgumentError.value(index, "index", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, index);
    args.SetAt(1, Symbols::Index());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  // Throw: new RangeError.range(index, 0, length - 1, "length");
  const Array& args = Array::Handle(zone, Array::New(4));
  args.SetAt(0, index);
  args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
  args.SetAt(
      2, Integer::Handle(
             zone, Integer::Cast(length).ArithmeticOp(
                       Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
  args.SetAt(3, Symbols::Length());
  Exceptions::ThrowByType(Exceptions::kRange, args);
}

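// Shared helper for the null error entries below. Throws a CastError ("Null
// check operator used on a null value") when no selector is available, and a
// NoSuchMethodError naming the attempted member access otherwise.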
static void NullErrorHelper(Zone* zone, const String& selector) {
  // If the selector is null, this must be a null check that wasn't due to a
  // method invocation, so it was due to the null check operator.
  if (selector.IsNull()) {
    const Array& args = Array::Handle(zone, Array::New(4));
    args.SetAt(
        3, String::Handle(
               zone, String::New("Null check operator used on a null value")));
    Exceptions::ThrowByType(Exceptions::kCast, args);
    return;
  }

  InvocationMirror::Kind kind = InvocationMirror::kMethod;
  if (Field::IsGetterName(selector)) {
    kind = InvocationMirror::kGetter;
  } else if (Field::IsSetterName(selector)) {
    kind = InvocationMirror::kSetter;
  }

  const Smi& invocation_type = Smi::Handle(
      zone,
      Smi::New(InvocationMirror::EncodeType(InvocationMirror::kDynamic, kind)));

  const Array& args = Array::Handle(zone, Array::New(7));
  args.SetAt(0, /* instance */ Object::null_object());
  args.SetAt(1, selector);
  args.SetAt(2, invocation_type);
  args.SetAt(3, /* func_type_args_length */ Object::smi_zero());
  args.SetAt(4, /* func_type_args */ Object::null_object());
  args.SetAt(5, /* func_args */ Object::null_object());
  args.SetAt(6, /* func_arg_names */ Object::null_object());
  Exceptions::ThrowByType(Exceptions::kNoSuchMethod, args);
}

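// Invoked when a member is accessed on a null receiver and the selector is
// not passed explicitly: recovers the member name from the caller's code
// source map, or reports it as optimized out when no map is available.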
DEFINE_RUNTIME_ENTRY(NullError, 0) {
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  const StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  ASSERT(!caller_frame->is_interpreted());
  const Code& code = Code::Handle(zone, caller_frame->LookupDartCode());
  const uword pc_offset = caller_frame->pc() - code.PayloadStart();

  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->heap()->CollectAllGarbage();
  }

  const CodeSourceMap& map =
      CodeSourceMap::Handle(zone, code.code_source_map());
  String& member_name = String::Handle(zone);
  if (!map.IsNull()) {
    CodeSourceMapReader reader(map, Array::null_array(),
                               Function::null_function());
    const intptr_t name_index = reader.GetNullCheckNameIndexAt(pc_offset);
    RELEASE_ASSERT(name_index >= 0);

    const ObjectPool& pool = ObjectPool::Handle(zone, code.GetObjectPool());
    member_name ^= pool.ObjectAt(name_index);
  } else {
    member_name = Symbols::OptimizedOut().raw();
  }

  NullErrorHelper(zone, member_name);
}

DEFINE_RUNTIME_ENTRY(NullErrorWithSelector, 1) {
  const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
  NullErrorHelper(zone, selector);
}

DEFINE_RUNTIME_ENTRY(NullCastError, 0) {
  NullErrorHelper(zone, String::null_string());
}

DEFINE_RUNTIME_ENTRY(ArgumentNullError, 0) {
  const String& error = String::Handle(String::New("argument value is null"));
  Exceptions::ThrowArgumentError(error);
}

DEFINE_RUNTIME_ENTRY(ArgumentError, 1) {
  const Instance& value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::ThrowArgumentError(value);
}

DEFINE_RUNTIME_ENTRY(ArgumentErrorUnboxedInt64, 0) {
  // Unboxed value is passed through a dedicated slot in Thread.
  int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
  const Integer& value = Integer::Handle(zone, Integer::New(unboxed_value));
  Exceptions::ThrowArgumentError(value);
}

DEFINE_RUNTIME_ENTRY(IntegerDivisionByZeroException, 0) {
  const Array& args = Array::Handle(zone, Array::New(0));
  Exceptions::ThrowByType(Exceptions::kIntegerDivisionByZeroException, args);
}

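// With FLAG_stress_write_barrier_elimination, runtime allocations go to old
// space so that eliminated write barriers are exercised (see
// EnsureRememberedAndMarkingDeferred below).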
static Heap::Space SpaceForRuntimeAllocation() {
  return FLAG_stress_write_barrier_elimination ? Heap::kOld : Heap::kNew;
}

// Allocation of a fixed length array of given element type.
// This runtime entry is never called for allocating a List of a generic type,
// because a prior run time call instantiates the element type if necessary.
// Arg0: array length.
// Arg1: array type arguments, i.e. vector of 1 type, the element type.
// Return value: newly allocated array of length arg0.
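// For example (illustrative), allocating the backing store of a list literal
// such as `<int>[1, 2, 3]` reaches this entry when the allocation stub's
// inline new-space allocation fails.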
DEFINE_RUNTIME_ENTRY(AllocateArray, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  const int64_t len = Integer::Cast(length).AsInt64Value();
  if (len < 0) {
    // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length");
    Exceptions::ThrowRangeError("length", Integer::Cast(length), 0,
                                Array::kMaxElements);
  }
  if (len > Array::kMaxElements) {
    const Instance& exception = Instance::Handle(
        zone, thread->isolate()->object_store()->out_of_memory());
    Exceptions::Throw(thread, exception);
  }

  const Array& array = Array::Handle(
      zone,
      Array::New(static_cast<intptr_t>(len), SpaceForRuntimeAllocation()));
  arguments.SetReturn(array);
  TypeArguments& element_type =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  // An Array is raw or takes one type argument. However, its type argument
  // vector may be longer than 1 due to a type optimization reusing the type
  // argument vector of the instantiator.
  ASSERT(element_type.IsNull() ||
         (element_type.Length() >= 1 && element_type.IsInstantiated()));
  array.SetTypeArguments(element_type);  // May be null.
}

// Helper returning the token position of the Dart caller.
static TokenPosition GetCallerLocation() {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  return caller_frame->GetTokenPos();
}

// Result of an invoke may be an unhandled exception, in which case we
// rethrow it.
static void ThrowIfError(const Object& result) {
  if (!result.IsNull() && result.IsError()) {
    Exceptions::PropagateError(Error::Cast(result));
  }
}

// Allocate a new object.
// Arg0: class of the object that needs to be allocated.
// Arg1: type arguments of the object that needs to be allocated.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(AllocateObject, 2) {
  const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
  const Error& error =
      Error::Handle(zone, cls.EnsureIsAllocateFinalized(thread));
  ThrowIfError(error);
  const Instance& instance =
      Instance::Handle(zone, Instance::New(cls, SpaceForRuntimeAllocation()));

  arguments.SetReturn(instance);
  if (cls.NumTypeArguments() == 0) {
    // No type arguments required for a non-parameterized type.
    ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
  } else {
    const auto& type_arguments =
        TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
    // Unless null (for a raw type), the type argument vector may be longer
    // than necessary due to a type optimization reusing the type argument
    // vector of the instantiator.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.IsInstantiated() &&
            (type_arguments.Length() >= cls.NumTypeArguments())));
    instance.SetTypeArguments(type_arguments);
  }
}

DEFINE_LEAF_RUNTIME_ENTRY(uword /*ObjectPtr*/,
                          EnsureRememberedAndMarkingDeferred,
                          2,
                          uword /*ObjectPtr*/ object_in,
                          Thread* thread) {
  ObjectPtr object = static_cast<ObjectPtr>(object_in);
  // The allocation stubs will call this leaf method for newly allocated
  // old space objects.
  RELEASE_ASSERT(object->IsOldObject());

  // If we eliminate the generational write barrier on allocations of an
  // object, we need to ensure it's either a new-space object or it has been
  // added to the remembered set.
  //
  // NOTE: We use reinterpret_cast<>() instead of ::RawCast() to avoid handle
  // allocations in debug mode. Handle allocations in leaf runtimes can cause
  // memory leaks because they will allocate into a handle scope from the next
  // outermost runtime code (to which the generated Dart code might not return
  // in a long time).
  bool add_to_remembered_set = true;
  if (object->ptr()->IsRemembered()) {
    // Objects must not be added to the remembered set twice because the
    // scavenger's visitor is not idempotent.
    // Might already be remembered because of type argument store in
    // AllocateArray or any field in CloneContext.
    add_to_remembered_set = false;
  } else if (object->IsArray()) {
    const intptr_t length = Array::LengthOf(static_cast<ArrayPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedArray(length);
  } else if (object->IsContext()) {
    const intptr_t num_context_variables =
        Context::NumVariables(static_cast<ContextPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedContext(
            num_context_variables);
  }

  if (add_to_remembered_set) {
    object->ptr()->AddToRememberedSet(thread);
  }

  // For incremental write barrier elimination, we need to ensure that the
  // allocation ends up in the new space or else the object needs to be added
  // to the deferred marking stack so it will be [re]scanned.
  if (thread->is_marking()) {
    thread->DeferredMarkingStackAddObject(object);
  }

  return static_cast<uword>(object);
}
END_LEAF_RUNTIME_ENTRY

// Instantiate type.
// Arg0: uninstantiated type.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type.
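// For example (illustrative), reifying a type parameter as a value needs the
// type arguments that are in scope:
//
//   class C<T> {
//     Type get typeArg => T;  // May instantiate `T` via this entry.
//   }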
DEFINE_RUNTIME_ENTRY(InstantiateType, 3) {
  AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type.IsNull());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  type = type.InstantiateFrom(instantiator_type_arguments,
                              function_type_arguments, kAllFree, Heap::kOld);
  if (type.IsTypeRef()) {
    type = TypeRef::Cast(type).type();
    ASSERT(!type.IsTypeRef());
    ASSERT(type.IsCanonical());
  }
  ASSERT(!type.IsNull() && type.IsInstantiated());
  arguments.SetReturn(type);
}

// Instantiate type arguments.
// Arg0: uninstantiated type arguments.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type arguments.
DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
  TypeArguments& type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  // Code inlined in the caller should have optimized the case where the
  // instantiator can be reused as type argument vector.
  ASSERT(!type_arguments.IsUninstantiatedIdentity());
  type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
      instantiator_type_arguments, function_type_arguments);
  ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
  arguments.SetReturn(type_arguments);
}

// Check that a subtype relation holds between two types, instantiating them
// from the given type argument vectors first if necessary.
// Arg0: instantiator type arguments
// Arg1: function type arguments
// Arg2: type to be a subtype of the other
// Arg3: type to be a supertype of the other
// Arg4: variable name of the subtype parameter
// No return value.
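// For example (illustrative), this can be used to check a type argument
// against the bound of the corresponding type parameter when that check
// cannot be discharged at compile time.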
DEFINE_RUNTIME_ENTRY(SubtypeCheck, 5) {
  const TypeArguments& instantiator_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& function_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
  AbstractType& supertype =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
  const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));

  ASSERT(!subtype.IsNull() && !subtype.IsTypeRef());
  ASSERT(!supertype.IsNull() && !supertype.IsTypeRef());

  // The supertype or subtype may not be instantiated.
  if (AbstractType::InstantiateAndTestSubtype(
          &subtype, &supertype, instantiator_type_args, function_type_args)) {
    return;
  }

  // Throw a dynamic type error.
  const TokenPosition location = GetCallerLocation();
  Exceptions::CreateAndThrowTypeError(location, subtype, supertype, dst_name);
  UNREACHABLE();
}

// Allocate a new SubtypeTestCache for use in interpreted implicit setters.
// Return value: newly allocated SubtypeTestCache.
DEFINE_RUNTIME_ENTRY(AllocateSubtypeTestCache, 0) {
  ASSERT(FLAG_enable_interpreter);
  arguments.SetReturn(SubtypeTestCache::Handle(zone, SubtypeTestCache::New()));
}

// Allocate a new context large enough to hold the given number of variables.
// Arg0: number of variables.
// Return value: newly allocated context.
DEFINE_RUNTIME_ENTRY(AllocateContext, 1) {
  const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  const Context& context = Context::Handle(
      zone, Context::New(num_variables.Value(), SpaceForRuntimeAllocation()));
  arguments.SetReturn(context);
}

// Make a copy of the given context, including the values of the captured
// variables.
// Arg0: the context to be cloned.
// Return value: newly allocated context.
DEFINE_RUNTIME_ENTRY(CloneContext, 1) {
  const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
  Context& cloned_ctx = Context::Handle(
      zone, Context::New(ctx.num_variables(), SpaceForRuntimeAllocation()));
  cloned_ctx.set_parent(Context::Handle(zone, ctx.parent()));
  Object& inst = Object::Handle(zone);
  for (int i = 0; i < ctx.num_variables(); i++) {
    inst = ctx.At(i);
    cloned_ctx.SetAt(i, inst);
  }
  arguments.SetReturn(cloned_ctx);
}

// Invoke field getter before dispatch.
// Arg0: instance.
// Arg1: field name (may be demangled during call).
// Return value: field value.
DEFINE_RUNTIME_ENTRY(GetFieldForDispatch, 2) {
  ASSERT(FLAG_enable_interpreter);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  String& name = String::CheckedHandle(zone, arguments.ArgAt(1));
  const Class& receiver_class = Class::Handle(zone, receiver.clazz());
  if (Function::IsDynamicInvocationForwarderName(name)) {
    name = Function::DemangleDynamicInvocationForwarderName(name);
    arguments.SetArgAt(1, name);  // Reflect change in arguments.
  }
  const String& getter_name = String::Handle(zone, Field::GetterName(name));
  const int kTypeArgsLen = 0;
  const int kNumArguments = 1;
  ArgumentsDescriptor args_desc(Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, kNumArguments)));
  const Function& getter =
      Function::Handle(zone, Resolver::ResolveDynamicForReceiverClass(
                                 receiver_class, getter_name, args_desc));
  ASSERT(!getter.IsNull());  // An InvokeFieldDispatcher function was created.
  const Array& args = Array::Handle(zone, Array::New(kNumArguments));
  args.SetAt(0, receiver);
  const Object& result =
      Object::Handle(zone, DartEntry::InvokeFunction(getter, args));
  ThrowIfError(result);
  arguments.SetReturn(result);
}

// Resolve 'call' function of receiver.
// Arg0: receiver (not a closure).
// Arg1: arguments descriptor.
// Return value: 'call' function.
DEFINE_RUNTIME_ENTRY(ResolveCallFunction, 2) {
  ASSERT(FLAG_enable_interpreter);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Array& descriptor = Array::CheckedHandle(zone, arguments.ArgAt(1));
  ArgumentsDescriptor args_desc(descriptor);
  ASSERT(!receiver.IsClosure());  // Interpreter tests for closure.
  Class& cls = Class::Handle(zone, receiver.clazz());
  Function& call_function = Function::Handle(
      zone,
      Resolver::ResolveDynamicForReceiverClass(cls, Symbols::Call(), args_desc,
                                               /*allow_add=*/false));
  arguments.SetReturn(call_function);
}

// Helper routine for tracing a type check.
static void PrintTypeCheck(const char* message,
                           const Instance& instance,
                           const AbstractType& type,
                           const TypeArguments& instantiator_type_arguments,
                           const TypeArguments& function_type_arguments,
                           const Bool& result) {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);

  const AbstractType& instance_type =
      AbstractType::Handle(instance.GetType(Heap::kNew));
  ASSERT(instance_type.IsInstantiated() ||
         (instance.IsClosure() && instance_type.IsInstantiated(kCurrentClass)));
  if (type.IsInstantiated()) {
    OS::PrintErr("%s: '%s' %" Pd " %s '%s' %" Pd " (pc: %#" Px ").\n", message,
                 String::Handle(instance_type.Name()).ToCString(),
                 Class::Handle(instance_type.type_class()).id(),
                 (result.raw() == Bool::True().raw()) ? "is" : "is !",
                 String::Handle(type.Name()).ToCString(),
                 Class::Handle(type.type_class()).id(), caller_frame->pc());
  } else {
    // Instantiate type before printing.
    const AbstractType& instantiated_type = AbstractType::Handle(
        type.InstantiateFrom(instantiator_type_arguments,
                             function_type_arguments, kAllFree, Heap::kOld));
    OS::PrintErr("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
                 message, String::Handle(instance_type.Name()).ToCString(),
                 (result.raw() == Bool::True().raw()) ? "is" : "is !",
                 String::Handle(instantiated_type.Name()).ToCString(),
                 String::Handle(type.Name()).ToCString(), caller_frame->pc());
  }
  const Function& function =
      Function::Handle(caller_frame->LookupDartFunction());
  OS::PrintErr(" -> Function %s\n", function.ToFullyQualifiedCString());
}

// This updates the type test cache, an array of entries, each of which holds
// the instance class (or function if the instance is a closure), the instance
// type arguments, the instantiator type arguments, the function type
// arguments, the parent function type arguments and delayed type arguments of
// a closure instance, and the test result. It can be applied to classes with
// type arguments, in which case it contains just the result of the class
// subtype test, not including the evaluation of type arguments.
// This operation is currently very slow (lookup of code is not efficient yet).
static void UpdateTypeTestCache(
    Zone* zone,
    Thread* thread,
    const Instance& instance,
    const AbstractType& type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const Bool& result,
    const SubtypeTestCache& new_cache) {
  ASSERT(!new_cache.IsNull());
  Class& instance_class = Class::Handle(zone);
  if (instance.IsSmi()) {
    instance_class = Smi::Class();
  } else {
    instance_class = instance.clazz();
  }
  // If the type is uninstantiated and refers to parent function type
  // parameters, the function_type_arguments have been canonicalized
  // when concatenated.
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsCanonical());
  auto& instance_class_id_or_function = Object::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  if (instance_class.IsClosureClass()) {
    const auto& closure = Closure::Cast(instance);
    const auto& closure_function = Function::Handle(zone, closure.function());
    instance_class_id_or_function = closure_function.raw();
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
  } else {
    instance_class_id_or_function = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
    }
  }
  {
    SafepointMutexLocker ml(
        thread->isolate_group()->subtype_test_cache_mutex());

    const intptr_t len = new_cache.NumberOfChecks();
    if (len >= FLAG_max_subtype_cache_entries) {
      if (FLAG_trace_type_checks) {
        OS::PrintErr(
            "Not updating subtype test cache as its length reached %d\n",
            FLAG_max_subtype_cache_entries);
      }
      return;
    }
    ASSERT(instance_type_arguments.IsNull() ||
           instance_type_arguments.IsCanonical());
    ASSERT(instantiator_type_arguments.IsNull() ||
           instantiator_type_arguments.IsCanonical());
    ASSERT(function_type_arguments.IsNull() ||
           function_type_arguments.IsCanonical());
    ASSERT(instance_parent_function_type_arguments.IsNull() ||
           instance_parent_function_type_arguments.IsCanonical());
    ASSERT(instance_delayed_type_arguments.IsNull() ||
           instance_delayed_type_arguments.IsCanonical());
    auto& last_instance_class_id_or_function = Object::Handle(zone);
    auto& last_instance_type_arguments = TypeArguments::Handle(zone);
    auto& last_instantiator_type_arguments = TypeArguments::Handle(zone);
    auto& last_function_type_arguments = TypeArguments::Handle(zone);
    auto& last_instance_parent_function_type_arguments =
        TypeArguments::Handle(zone);
    auto& last_instance_delayed_type_arguments = TypeArguments::Handle(zone);
    Bool& last_result = Bool::Handle(zone);
    for (intptr_t i = 0; i < len; ++i) {
      new_cache.GetCheck(
          i, &last_instance_class_id_or_function, &last_instance_type_arguments,
          &last_instantiator_type_arguments, &last_function_type_arguments,
          &last_instance_parent_function_type_arguments,
          &last_instance_delayed_type_arguments, &last_result);
      if ((last_instance_class_id_or_function.raw() ==
           instance_class_id_or_function.raw()) &&
          (last_instance_type_arguments.raw() ==
           instance_type_arguments.raw()) &&
          (last_instantiator_type_arguments.raw() ==
           instantiator_type_arguments.raw()) &&
          (last_function_type_arguments.raw() ==
           function_type_arguments.raw()) &&
          (last_instance_parent_function_type_arguments.raw() ==
           instance_parent_function_type_arguments.raw()) &&
          (last_instance_delayed_type_arguments.raw() ==
           instance_delayed_type_arguments.raw())) {
        // Some other isolate might have updated the cache between the time
        // the entry was found missing and now.
        return;
      }
    }
    new_cache.AddCheck(instance_class_id_or_function, instance_type_arguments,
                       instantiator_type_arguments, function_type_arguments,
                       instance_parent_function_type_arguments,
                       instance_delayed_type_arguments, result);
    if (FLAG_trace_type_checks) {
      AbstractType& test_type = AbstractType::Handle(zone, type.raw());
      if (!test_type.IsInstantiated()) {
        test_type =
            type.InstantiateFrom(instantiator_type_arguments,
                                 function_type_arguments, kAllFree, Heap::kNew);
      }
      const auto& type_class = Class::Handle(zone, test_type.type_class());
      const auto& instance_class_name =
          String::Handle(zone, instance_class.Name());
      OS::PrintErr(
          "  Updated test cache %#" Px " ix: %" Pd
          " with (cid-or-fun:"
          " %#" Px ", type-args: %#" Px ", i-type-args: %#" Px
          ", "
          "f-type-args: %#" Px ", p-type-args: %#" Px
          ", "
          "d-type-args: %#" Px
          ", result: %s)\n"
          "    instance [class: (%#" Px " '%s' cid: %" Pd
          "), type-args: %#" Px
          " %s]\n"
          "    test-type [class: (%#" Px " '%s' cid: %" Pd
          "), i-type-args: %#" Px " %s, f-type-args: %#" Px " %s]\n",
          static_cast<uword>(new_cache.raw()), len,
          static_cast<uword>(instance_class_id_or_function.raw()),
          static_cast<uword>(instance_type_arguments.raw()),
          static_cast<uword>(instantiator_type_arguments.raw()),
          static_cast<uword>(function_type_arguments.raw()),
          static_cast<uword>(instance_parent_function_type_arguments.raw()),
          static_cast<uword>(instance_delayed_type_arguments.raw()),
          result.ToCString(), static_cast<uword>(instance_class.raw()),
          instance_class_name.ToCString(), instance_class.id(),
          static_cast<uword>(instance_type_arguments.raw()),
          instance_type_arguments.ToCString(),
          static_cast<uword>(type_class.raw()),
          String::Handle(zone, type_class.Name()).ToCString(), type_class.id(),
          static_cast<uword>(instantiator_type_arguments.raw()),
          instantiator_type_arguments.ToCString(),
          static_cast<uword>(function_type_arguments.raw()),
          function_type_arguments.ToCString());
    }
  }
}

// Check that the given instance is an instance of the given type.
// Tested instance may be null, because a null test cannot always be inlined,
// e.g. 'null is T' yields true if T = Null, but false if T = bool.
// Arg0: instance being checked.
// Arg1: type.
// Arg2: type arguments of the instantiator of the type.
// Arg3: type arguments of the function of the type.
// Arg4: SubtypeTestCache.
// Return value: true or false.
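// Illustrative Dart code that reaches this entry when the inlined checks and
// the SubtypeTestCache lookup cannot decide the test:
//
//   Object x = ...;
//   if (x is List<int>) { /* ... */ }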
DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const AbstractType& type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const SubtypeTestCache& cache =
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
  ASSERT(type.IsFinalized());
  ASSERT(!type.IsDynamicType());  // No need to check assignment.
  ASSERT(!cache.IsNull());
  const Bool& result = Bool::Get(instance.IsInstanceOf(
      type, instantiator_type_arguments, function_type_arguments));
  if (FLAG_trace_type_checks) {
    PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
                   function_type_arguments, result);
  }
  UpdateTypeTestCache(zone, thread, instance, type, instantiator_type_arguments,
                      function_type_arguments, result, cache);
  arguments.SetReturn(result);
}

// Check that the type of the given instance is a subtype of the given type and
// can therefore be assigned.
// Tested instance may not be null, because a null test is always inlined.
// Arg0: instance being assigned.
// Arg1: type being assigned to.
// Arg2: type arguments of the instantiator of the type being assigned to.
// Arg3: type arguments of the function of the type being assigned to.
// Arg4: name of variable being assigned to.
// Arg5: SubtypeTestCache.
// Arg6: invocation mode (see TypeCheckMode)
// Return value: instance if a subtype, otherwise throw a TypeError.
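// Illustrative Dart code that performs such an assignability check (assuming
// the check is not eliminated and the type testing stub cannot decide it on
// its fast path):
//
//   num n = ...;
//   final i = n as int;  // Throws a TypeError if `n` is not an int.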
DEFINE_RUNTIME_ENTRY(TypeCheck, 7) {
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
  AbstractType& dst_type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  String& dst_name = String::Handle(zone);
  dst_name ^= arguments.ArgAt(4);
  ASSERT(dst_name.IsNull() || dst_name.IsString());

  SubtypeTestCache& cache = SubtypeTestCache::Handle(zone);
  cache ^= arguments.ArgAt(5);
  ASSERT(cache.IsNull() || cache.IsSubtypeTestCache());

  const TypeCheckMode mode = static_cast<TypeCheckMode>(
      Smi::CheckedHandle(zone, arguments.ArgAt(6)).Value());

#if defined(TARGET_ARCH_IA32)
  ASSERT(mode == kTypeCheckFromInline);
#endif

  ASSERT(!dst_type.IsDynamicType());  // No need to check assignment.
  // A null instance is already detected and allowed in inlined code, unless
  // strong checking is enabled.
  ASSERT(!src_instance.IsNull() || isolate->null_safety());
  const bool is_instance_of = src_instance.IsAssignableTo(
      dst_type, instantiator_type_arguments, function_type_arguments);

  if (FLAG_trace_type_checks) {
    PrintTypeCheck("TypeCheck", src_instance, dst_type,
                   instantiator_type_arguments, function_type_arguments,
                   Bool::Get(is_instance_of));
  }
  if (!is_instance_of) {
    // Throw a dynamic type error.
    const TokenPosition location = GetCallerLocation();
    const AbstractType& src_type =
        AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
    if (!dst_type.IsInstantiated()) {
      // Instantiate dst_type before reporting the error.
      dst_type = dst_type.InstantiateFrom(instantiator_type_arguments,
                                          function_type_arguments, kAllFree,
                                          Heap::kNew);
    }
    if (dst_name.IsNull()) {
#if !defined(TARGET_ARCH_IA32)
      // Can only come here from type testing stub.
      ASSERT(mode != kTypeCheckFromInline);

      // Grab the [dst_name] from the pool. It's stored at one pool slot after
      // the subtype-test-cache.
      DartFrameIterator iterator(thread,
                                 StackFrameIterator::kNoCrossThreadIteration);
      StackFrame* caller_frame = iterator.NextFrame();
      ASSERT(!caller_frame->is_interpreted());
      const Code& caller_code =
          Code::Handle(zone, caller_frame->LookupDartCode());
      const ObjectPool& pool =
          ObjectPool::Handle(zone, caller_code.GetObjectPool());
      TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
      const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
      const intptr_t dst_name_idx = stc_pool_idx + 1;
      dst_name ^= pool.ObjectAt(dst_name_idx);
#else
      UNREACHABLE();
#endif
    }

    Exceptions::CreateAndThrowTypeError(location, src_type, dst_type, dst_name);
    UNREACHABLE();
  }

  bool should_update_cache = true;
#if !defined(TARGET_ARCH_IA32)
  bool would_update_cache_if_not_lazy = false;
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (mode == kTypeCheckFromLazySpecializeStub) {
    // Checks against type parameters are done by loading the value of the type
    // parameter and calling its type testing stub.
    // So we have to install a specialized TTS on the value of the type
    // parameter, not the parameter itself.
    if (dst_type.IsTypeParameter()) {
      dst_type = TypeParameter::Cast(dst_type).GetFromTypeArguments(
          instantiator_type_arguments, function_type_arguments);
    }
    if (FLAG_trace_type_checks) {
      OS::PrintErr("  Specializing type testing stub for %s\n",
                   dst_type.ToCString());
    }
    TypeTestingStubGenerator::SpecializeStubFor(thread, dst_type);

    // Only create the cache if we failed to create a specialized TTS and doing
    // the same check would cause an update to the cache.
    would_update_cache_if_not_lazy =
        (!src_instance.IsNull() &&
         dst_type.type_test_stub() ==
             StubCode::DefaultNullableTypeTest().raw()) ||
        dst_type.type_test_stub() == StubCode::DefaultTypeTest().raw();
    should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
  }

  // The fast path of the type testing stub wasn't able to handle the given
  // type, yet the type check passed. This means the fast path was using
  // outdated cid ranges and new classes have appeared since the stub was
  // generated. Re-generate the stub.
  if ((mode == kTypeCheckFromSlowStub) && dst_type.IsType() &&
      (TypeTestingStubGenerator::DefaultCodeForType(dst_type, /*lazy=*/false) !=
       dst_type.type_test_stub()) &&
      dst_type.IsInstantiated()) {
    if (FLAG_trace_type_checks) {
      OS::PrintErr("  Rebuilding type testing stub for %s\n",
                   dst_type.ToCString());
    }
#if defined(DEBUG)
    const auto& old_code = Code::Handle(dst_type.type_test_stub());
#endif
    TypeTestingStubGenerator::SpecializeStubFor(thread, dst_type);
#if defined(DEBUG)
    ASSERT(old_code.raw() != dst_type.type_test_stub());
#endif
    // Only create the cache when we come from a normal stub.
    should_update_cache = false;
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
#endif  // !defined(TARGET_ARCH_IA32)

  if (should_update_cache) {
    if (cache.IsNull()) {
#if !defined(TARGET_ARCH_IA32)
      ASSERT(mode == kTypeCheckFromSlowStub ||
             (mode == kTypeCheckFromLazySpecializeStub &&
              would_update_cache_if_not_lazy));
      // We lazily create [SubtypeTestCache] for those call sites which
      // actually need one and will patch the pool entry.
      DartFrameIterator iterator(thread,
                                 StackFrameIterator::kNoCrossThreadIteration);
      StackFrame* caller_frame = iterator.NextFrame();
      ASSERT(!caller_frame->is_interpreted());
      const Code& caller_code =
          Code::Handle(zone, caller_frame->LookupDartCode());
      const ObjectPool& pool =
          ObjectPool::Handle(zone, caller_code.GetObjectPool());
      TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
      const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();

      // Ensure we do have a STC (lazily create it if not) and all threads use
      // the same STC.
      {
        SafepointMutexLocker ml(isolate->group()->subtype_test_cache_mutex());
        cache ^= pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx);
        if (cache.IsNull()) {
          cache = SubtypeTestCache::New();
          pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache);
        }
      }
#else
      UNREACHABLE();
#endif
    }

    UpdateTypeTestCache(zone, thread, src_instance, dst_type,
                        instantiator_type_arguments, function_type_arguments,
                        Bool::True(), cache);
  }

  arguments.SetReturn(src_instance);
}

// Report that the type of the given object is not bool in conditional context.
// Throw assertion error if the object is null. (cf. Boolean Conversion
// in language Spec.)
// Arg0: bad object.
// Return value: none, throws TypeError or AssertionError.
DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) {
  const TokenPosition location = GetCallerLocation();
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));

  if (src_instance.IsNull()) {
    const Array& args = Array::Handle(zone, Array::New(5));
    args.SetAt(
        0, String::Handle(
               zone,
               String::New(
                   "Failed assertion: boolean expression must not be null")));

    // No source code for this assertion, set url to null.
    args.SetAt(1, String::Handle(zone, String::null()));
    args.SetAt(2, Object::smi_zero());
    args.SetAt(3, Object::smi_zero());
    args.SetAt(4, String::Handle(zone, String::null()));

    Exceptions::ThrowByType(Exceptions::kAssertion, args);
    UNREACHABLE();
  }

  ASSERT(!src_instance.IsBool());
  const Type& bool_interface = Type::Handle(Type::BoolType());
  const AbstractType& src_type =
      AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
  Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface,
                                      Symbols::BooleanExpression());
  UNREACHABLE();
}

DEFINE_RUNTIME_ENTRY(Throw, 1) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::Throw(thread, exception);
}

DEFINE_RUNTIME_ENTRY(ReThrow, 2) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& stacktrace =
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
  Exceptions::ReThrow(thread, exception, stacktrace);
}

// Patches static call in optimized code with the target's entry point.
// Compiles target if necessary.
DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  ASSERT(!caller_frame->is_interpreted());
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  ASSERT(!caller_code.IsNull());
  ASSERT(caller_code.is_optimized());
  const Function& target_function = Function::Handle(
      zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc()));
  const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
  // Before patching verify that we are not repeatedly patching to the same
  // target.
  ASSERT(target_code.raw() !=
         CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code));
  CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code, target_code);
  caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
  if (FLAG_trace_patching) {
    THR_Print("PatchStaticCall: patching caller pc %#" Px
              ""
              " to '%s' new entry point %#" Px " (%s)\n",
              caller_frame->pc(), target_function.ToFullyQualifiedCString(),
              target_code.EntryPoint(),
              target_code.is_optimized() ? "optimized" : "unoptimized");
  }
  arguments.SetReturn(target_code);
#else
  UNREACHABLE();
#endif
}

#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
  UNREACHABLE();
  return;
}
#else
// Gets called from debug stub when code reaches a breakpoint
// set on a runtime stub call.
DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  Code& orig_stub = Code::Handle(zone);
  if (!caller_frame->is_interpreted()) {
    orig_stub = isolate->debugger()->GetPatchedStubAddress(caller_frame->pc());
  }
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
  ThrowIfError(error);
  arguments.SetReturn(orig_stub);
}
#endif

DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) {
#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const Error& error =
      Error::Handle(zone, isolate->debugger()->PauseStepping());
  ThrowIfError(error);
#endif
}

// An instance call of the form o.f(...) could not be resolved. Check if
// there is a getter with the same name. If so, invoke it. If the value is
// a closure, invoke it with the given arguments. If the value is a
// non-closure, attempt to invoke "call" on it.
static bool ResolveCallThroughGetter(const Class& receiver_class,
                                     const String& target_name,
                                     const String& demangled,
                                     const Array& arguments_descriptor,
                                     Function* result) {
  const String& getter_name = String::Handle(Field::GetterName(demangled));
  const int kTypeArgsLen = 0;
  const int kNumArguments = 1;
  ArgumentsDescriptor args_desc(Array::Handle(
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, kNumArguments)));
  const Function& getter =
      Function::Handle(Resolver::ResolveDynamicForReceiverClass(
          receiver_class, getter_name, args_desc));
  if (getter.IsNull() || getter.IsMethodExtractor()) {
    return false;
  }
  // We do this on the target_name, _not_ on the demangled name, so that
  // FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher can detect dynamic
  // calls from the dyn: tag on the name of the dispatcher.
  const Function& target_function =
      Function::Handle(receiver_class.GetInvocationDispatcher(
          target_name, arguments_descriptor,
          FunctionLayout::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
  ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers);
  if (FLAG_trace_ic) {
    OS::PrintErr(
        "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
        receiver_class.ToCString(), receiver_class.id(),
        target_function.IsNull() ? "null" : target_function.ToCString());
  }
  *result = target_function.raw();
  return true;
}

// Handle other invocations (implicit closures, noSuchMethod).
FunctionPtr InlineCacheMissHelper(const Class& receiver_class,
                                  const Array& args_descriptor,
                                  const String& target_name) {
  // Create a demangled version of the target_name, if necessary. This is used
  // for the field getter in ResolveCallThroughGetter and as the target name
  // for the NoSuchMethod dispatcher (if needed).
  const String* demangled = &target_name;
  if (Function::IsDynamicInvocationForwarderName(target_name)) {
    demangled = &String::Handle(
        Function::DemangleDynamicInvocationForwarderName(target_name));
  }
  Function& result = Function::Handle();
  if (!ResolveCallThroughGetter(receiver_class, target_name, *demangled,
                                args_descriptor, &result)) {
    ArgumentsDescriptor desc(args_descriptor);
    const Function& target_function =
        Function::Handle(receiver_class.GetInvocationDispatcher(
            *demangled, args_descriptor,
            FunctionLayout::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
    if (FLAG_trace_ic) {
      OS::PrintErr(
          "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
          receiver_class.ToCString(), receiver_class.id(),
          target_function.IsNull() ? "null" : target_function.ToCString());
    }
    result = target_function.raw();
  }
  // May be null if --no-lazy-dispatchers, in which case dispatch will be
  // handled by NoSuchMethodFromCallStub.
  ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers);
  return result.raw();
}

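// Attempts to rewrite an unoptimized instance call site into a faster form of
// dispatch: monomorphic if exactly one receiver class has been seen, or
// megamorphic once the number of observed classes exceeds
// FLAG_max_polymorphic_checks.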
static void TrySwitchInstanceCall(const ICData& ic_data,
                                  const Function& target_function) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Monomorphic/megamorphic calls only check the receiver CID.
  if (ic_data.NumArgsTested() != 1) return;

  ASSERT(ic_data.rebind_rule() == ICData::kInstance);

  // Monomorphic/megamorphic calls don't record exactness.
  if (ic_data.is_tracking_exactness()) return;

#if !defined(PRODUCT)
  // Monomorphic/megamorphic do not check the isolate's stepping flag.
  if (Isolate::Current()->has_attempted_stepping()) return;
#endif

  Thread* thread = Thread::Current();
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());

  // Monomorphic/megamorphic calls are only for unoptimized code.
  if (caller_frame->is_interpreted()) return;
  Zone* zone = thread->zone();
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  if (caller_code.is_optimized()) return;

  // Code is detached from its function. This will prevent us from resetting
  // the switchable call later because resets are function based and because
  // the ic_data_array belongs to the function instead of the code. This should
  // only happen because of reload, but it sometimes happens with KBC mixed mode
  // probably through a race between foreground and background compilation.
  const Function& caller_function =
      Function::Handle(zone, caller_code.function());
  if (caller_function.unoptimized_code() != caller_code.raw()) {
    return;
  }
#if !defined(PRODUCT)
  // Skip functions that contain breakpoints or when debugger is in single
  // stepping mode.
  if (Debugger::IsDebugging(thread, caller_function)) return;
#endif

  const intptr_t num_checks = ic_data.NumberOfChecks();

  // Monomorphic call.
  if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) {
    // A call site in the monomorphic state does not load the arguments
    // descriptor, so do not allow transition to this state if the callee
    // needs it.
    if (target_function.PrologueNeedsArgumentsDescriptor()) {
      return;
    }

    // Avoid forcing foreground compilation if target function is still
    // interpreted.
    if (FLAG_enable_interpreter && !target_function.HasCode()) {
      return;
    }

    const Array& data = Array::Handle(zone, ic_data.entries());
    const Code& target = Code::Handle(zone, target_function.EnsureHasCode());
    CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, data,
                                     target);
    if (FLAG_trace_ic) {
      OS::PrintErr("Instance call at %" Px
                   " switching to monomorphic dispatch, %s\n",
                   caller_frame->pc(), ic_data.ToCString());
    }
    return;  // Success.
  }

  // Megamorphic call.
  if (FLAG_unopt_megamorphic_calls &&
      (num_checks > FLAG_max_polymorphic_checks)) {
    const String& name = String::Handle(zone, ic_data.target_name());
    const Array& descriptor =
        Array::Handle(zone, ic_data.arguments_descriptor());
    const MegamorphicCache& cache = MegamorphicCache::Handle(
        zone, MegamorphicCacheTable::Lookup(thread, name, descriptor));
    ic_data.set_is_megamorphic(true);
    CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, cache,
                                     StubCode::MegamorphicCall());
    if (FLAG_trace_ic) {
      OS::PrintErr("Instance call at %" Px
                   " switching to megamorphic dispatch, %s\n",
                   caller_frame->pc(), ic_data.ToCString());
    }
    return;  // Success.
  }

#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}

// Perform the subtype test and return the constant function based on the
// result.
static FunctionPtr ComputeTypeCheckTarget(const Instance& receiver,
                                          const AbstractType& type,
                                          const ArgumentsDescriptor& desc) {
  const bool result = receiver.IsInstanceOf(type, Object::null_type_arguments(),
                                            Object::null_type_arguments());
  const ObjectStore* store = Isolate::Current()->object_store();
  const Function& target =
      Function::Handle(result ? store->simple_instance_of_true_function()
                              : store->simple_instance_of_false_function());
  ASSERT(!target.IsNull());
  return target.raw();
}

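// Records the class ids of the checked arguments together with the resolved
// target function in the call site's ICData (including static-type exactness
// where tracked), then attempts to switch the call site to monomorphic or
// megamorphic dispatch.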
static FunctionPtr InlineCacheMissHandlerGivenTargetFunction(
    const GrowableArray<const Instance*>& args,  // Checked arguments only.
    const ICData& ic_data,
    intptr_t count,
    const Function& target_function) {
  if (target_function.IsNull()) {
    return target_function.raw();
  }

  const Instance& receiver = *args[0];

  if (args.length() == 1) {
    if (ic_data.is_tracking_exactness()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      const auto state = receiver.IsNull()
                             ? StaticTypeExactnessState::NotExact()
                             : StaticTypeExactnessState::Compute(
                                   Type::Cast(AbstractType::Handle(
                                       ic_data.receivers_static_type())),
                                   receiver);
      ic_data.AddReceiverCheck(
          receiver.GetClassId(), target_function, count,
          /*exactness=*/state.CollapseSuperTypeExactness());
#else
      UNREACHABLE();
#endif
    } else {
      ic_data.AddReceiverCheck(args[0]->GetClassId(), target_function, count);
    }
  } else {
    GrowableArray<intptr_t> class_ids(args.length());
    ASSERT(ic_data.NumArgsTested() == args.length());
    for (intptr_t i = 0; i < args.length(); i++) {
      class_ids.Add(args[i]->GetClassId());
    }
    ic_data.AddCheck(class_ids, target_function, count);
  }
  if (FLAG_trace_ic_miss_in_optimized || FLAG_trace_ic) {
    DartFrameIterator iterator(Thread::Current(),
                               StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* caller_frame = iterator.NextFrame();
    ASSERT(caller_frame != NULL);
    if (FLAG_trace_ic_miss_in_optimized) {
      const Code& caller = Code::Handle(Code::LookupCode(caller_frame->pc()));
      if (caller.is_optimized()) {
        OS::PrintErr("IC miss in optimized code; call %s -> %s\n",
                     Function::Handle(caller.function()).ToCString(),
                     target_function.ToCString());
      }
    }
    if (FLAG_trace_ic) {
      OS::PrintErr("InlineCacheMissHandler %" Pd " call at %#" Px
                   "' "
                   "adding <%s> id:%" Pd " -> <%s>\n",
                   args.length(), caller_frame->pc(),
                   Class::Handle(receiver.clazz()).ToCString(),
                   receiver.GetClassId(), target_function.ToCString());
    }
  }

  TrySwitchInstanceCall(ic_data, target_function);

  return target_function.raw();
}

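// Resolves the target of an instance call for the receiver's class, falling
// back to getter-based dispatch and noSuchMethod handling when direct
// resolution fails, and records the outcome in the call site's ICData.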
static FunctionPtr InlineCacheMissHandler(
    const GrowableArray<const Instance*>& args,  // Checked arguments only.
    const ICData& ic_data,
    intptr_t count = 1) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const Instance& receiver = *args[0];
  ArgumentsDescriptor arguments_descriptor(
      Array::Handle(zone, ic_data.arguments_descriptor()));
  String& function_name = String::Handle(zone, ic_data.target_name());
  ASSERT(function_name.IsSymbol());

  const Class& receiver_class = Class::Handle(zone, receiver.clazz());
  Function& target_function = Function::Handle(
      zone, Resolver::ResolveDynamicForReceiverClass(
                receiver_class, function_name, arguments_descriptor));

  ObjectStore* store = thread->isolate()->object_store();
  if (target_function.raw() == store->simple_instance_of_function()) {
    // Replace the target function with the constant function.
1359 ASSERT(args.length() == 2);
1360 const AbstractType& type = AbstractType::Cast(*args[1]);
1361 target_function =
1362 ComputeTypeCheckTarget(receiver, type, arguments_descriptor);
1363 }
1364 if (target_function.IsNull()) {
1365 if (FLAG_trace_ic) {
1366 OS::PrintErr("InlineCacheMissHandler NULL function for %s receiver: %s\n",
1367 String::Handle(zone, ic_data.target_name()).ToCString(),
1368 receiver.ToCString());
1369 }
1370 const Array& args_descriptor =
1371 Array::Handle(zone, ic_data.arguments_descriptor());
1372 const String& target_name = String::Handle(zone, ic_data.target_name());
1373 target_function =
1374 InlineCacheMissHelper(receiver_class, args_descriptor, target_name);
1375 }
1376 if (target_function.IsNull()) {
1377 ASSERT(!FLAG_lazy_dispatchers);
1378 return target_function.raw();
1379 }
1380
1381 return InlineCacheMissHandlerGivenTargetFunction(args, ic_data, count,
1382 target_function);
1383}
1384
1385// Handles inline cache misses by updating the IC data array of the call site.
1386// Arg0: Receiver object.
1387// Arg1: IC data object.
1388// Returns: target function with compiled code or null.
1389// Modifies the instance call to hold the updated IC data array.
1390DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) {
1391 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1392 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
1393 RELEASE_ASSERT(!FLAG_precompiled_mode);
1394 GrowableArray<const Instance*> args(1);
1395 args.Add(&receiver);
1396 const Function& result =
1397 Function::Handle(zone, InlineCacheMissHandler(args, ic_data));
1398 arguments.SetReturn(result);
1399}
1400
1401// Handles inline cache misses by updating the IC data array of the call site.
1402// Arg0: Receiver object.
1403// Arg1: Argument after receiver.
1404// Arg2: IC data object.
1405// Returns: target function with compiled code or null.
1406// Modifies the instance call to hold the updated IC data array.
1407DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) {
1408 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1409 const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
1410 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
1411 RELEASE_ASSERT(!FLAG_precompiled_mode);
1412 GrowableArray<const Instance*> args(2);
1413 args.Add(&receiver);
1414 args.Add(&other);
1415 const Function& result =
1416 Function::Handle(zone, InlineCacheMissHandler(args, ic_data));
1417 arguments.SetReturn(result);
1418}
1419
1420// Handles a static call in unoptimized code that has one argument type not
1421// seen before. Compile the target if necessary and update the ICData.
1422// Arg0: argument.
1423// Arg1: IC data object.
1424DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) {
1425 const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1426 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
  // IC data for a static call is prepopulated with the statically known
  // target.
1428 ASSERT(ic_data.NumberOfChecksIs(1));
1429 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1430 target.EnsureHasCode();
1431 ASSERT(!target.IsNull() && target.HasCode());
1432 ic_data.AddReceiverCheck(arg.GetClassId(), target, 1);
1433 if (FLAG_trace_ic) {
1434 DartFrameIterator iterator(thread,
1435 StackFrameIterator::kNoCrossThreadIteration);
1436 StackFrame* caller_frame = iterator.NextFrame();
1437 ASSERT(caller_frame != NULL);
1438 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n",
1439 caller_frame->pc(), target.ToCString(), arg.GetClassId());
1440 }
1441 arguments.SetReturn(target);
1442}
1443
1444// Handles a static call in unoptimized code that has two argument types not
1445// seen before. Compile the target if necessary and update the ICData.
1446// Arg0: argument 0.
1447// Arg1: argument 1.
1448// Arg2: IC data object.
1449DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) {
1450 const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1451 const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
1452 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
  // IC data for a static call is prepopulated with the statically known
  // target.
1454 ASSERT(!ic_data.NumberOfChecksIs(0));
1455 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1456 target.EnsureHasCode();
1457 GrowableArray<intptr_t> cids(2);
1458 cids.Add(arg0.GetClassId());
1459 cids.Add(arg1.GetClassId());
1460 ic_data.AddCheck(cids, target);
1461 if (FLAG_trace_ic) {
1462 DartFrameIterator iterator(thread,
1463 StackFrameIterator::kNoCrossThreadIteration);
1464 StackFrame* caller_frame = iterator.NextFrame();
1465 ASSERT(caller_frame != NULL);
1466 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd
1467 ")\n",
1468 caller_frame->pc(), target.ToCString(), cids[0], cids[1]);
1469 }
1470 arguments.SetReturn(target);
1471}
1472
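// Returns whether every concrete, allocated class with a cid in
// [lower_cid, upper_cid] resolves [name] to [target], i.e. whether that
// receiver-class range can be served by a single target.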
1473static bool IsSingleTarget(Isolate* isolate,
1474 Zone* zone,
1475 intptr_t lower_cid,
1476 intptr_t upper_cid,
1477 const Function& target,
1478 const String& name) {
1479 Class& cls = Class::Handle(zone);
1480 ClassTable* table = isolate->class_table();
1481 Function& other_target = Function::Handle(zone);
1482 for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) {
1483 if (!table->HasValidClassAt(cid)) continue;
1484 cls = table->At(cid);
1485 if (cls.is_abstract()) continue;
1486 if (!cls.is_allocated()) continue;
1487 other_target = Resolver::ResolveDynamicAnyArgs(zone, cls, name,
1488 /*allow_add=*/false);
1489 if (other_target.raw() != target.raw()) {
1490 return false;
1491 }
1492 }
1493 return true;
1494}
1495
1496#if defined(DART_PRECOMPILED_RUNTIME)
1497
1498class SavedUnlinkedCallMapKeyEqualsTraits : public AllStatic {
1499 public:
  static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits"; }
1501 static bool ReportStats() { return false; }
1502
1503 static bool IsMatch(const Object& key1, const Object& key2) {
1504 if (!key1.IsInteger() || !key2.IsInteger()) return false;
1505 return Integer::Cast(key1).Equals(Integer::Cast(key2));
1506 }
1507 static uword Hash(const Object& key) {
1508 return Integer::Cast(key).CanonicalizeHash();
1509 }
1510};
1511
1512using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>;
1513
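// Saves the [unlinked_call] of the call site at [frame_pc] in a map in the
// isolate group, so that the target name and arguments descriptor remain
// available after the call site has been patched to another state.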
1514static void SaveUnlinkedCall(Zone* zone,
1515 Isolate* isolate,
1516 uword frame_pc,
1517 const UnlinkedCall& unlinked_call) {
1518 IsolateGroup* isolate_group = isolate->group();
1519
1520 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1521 if (isolate_group->saved_unlinked_calls() == Array::null()) {
1522 const auto& initial_map =
1523 Array::Handle(zone, HashTables::New<UnlinkedCallMap>(16, Heap::kOld));
1524 isolate_group->set_saved_unlinked_calls(initial_map);
1525 }
1526
1527 UnlinkedCallMap unlinked_call_map(zone,
1528 isolate_group->saved_unlinked_calls());
1529 const auto& pc = Integer::Handle(zone, Integer::NewFromUint64(frame_pc));
1530 // Some other isolate might have updated unlinked_call_map[pc] too, but
1531 // their update should be identical to ours.
1532 const auto& new_or_old_value = UnlinkedCall::Handle(
1533 zone, UnlinkedCall::RawCast(
1534 unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
1535 RELEASE_ASSERT(new_or_old_value.raw() == unlinked_call.raw());
1536 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1537}
1538
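// Returns the [UnlinkedCall] object that was saved for the call site at [pc].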
1539static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone,
1540 Isolate* isolate,
1541 uword pc) {
1542 IsolateGroup* isolate_group = isolate->group();
1543
1544 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1545 ASSERT(isolate_group->saved_unlinked_calls() != Array::null());
1546 UnlinkedCallMap unlinked_call_map(zone,
1547 isolate_group->saved_unlinked_calls());
1548
1549 const auto& pc_integer = Integer::Handle(zone, Integer::NewFromUint64(pc));
1550 const auto& unlinked_call = UnlinkedCall::Cast(
1551 Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer)));
1552 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1553 return unlinked_call.raw();
1554}
1555
// NOTE: Right now we never delete [UnlinkedCall] objects. They are needed
// while a call site is in the Unlinked/Monomorphic/MonomorphicSmiable/
// SingleTarget states.
//
// Theoretically we could free an [UnlinkedCall] object once we transition the
// call site to use an ICData/MegamorphicCache, but that would require careful
// coordination between the deleter and a possible concurrent reader.
//
// To simplify the code we decided not to do that for now: only a very small
// fraction of call sites in AOT use switchable calls, and the name and
// args-descriptor objects are kept alive anyway, so freeing the
// [UnlinkedCall] objects would yield little memory savings.
1568
1569#endif // defined(DART_PRECOMPILED_RUNTIME)
1570
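// Handles a miss of a switchable call: resolves the actual target for the
// receiver and transitions the call site to its next state. Roughly (see the
// Do*Miss handlers below), a call site progresses
//
//   AOT: Unlinked -> Monomorphic / MonomorphicSmiable -> SingleTarget ->
//        ICData-based dispatch -> MegamorphicCache
//   JIT: Monomorphic -> ICData-based inline cache -> MegamorphicCache
//
// where individual states may be skipped, e.g. a call whose target needs the
// arguments descriptor in its prologue goes straight to ICData-based
// dispatch.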
1571class SwitchableCallHandler {
1572 public:
1573 SwitchableCallHandler(Thread* thread,
1574 const Instance& receiver,
1575 NativeArguments arguments,
1576 StackFrame* caller_frame,
1577 const Code& caller_code,
1578 const Function& caller_function)
1579 : isolate_(thread->isolate()),
1580 thread_(thread),
1581 zone_(thread->zone()),
1582 receiver_(receiver),
1583 arguments_(arguments),
1584 caller_frame_(caller_frame),
1585 caller_code_(caller_code),
1586 caller_function_(caller_function),
1587 name_(String::Handle()),
1588 args_descriptor_(Array::Handle()) {}
1589
1590 FunctionPtr ResolveTargetFunction(const Object& data);
1591 void HandleMiss(const Object& old_data,
1592 const Code& old_target,
1593 const Function& target_function);
1594
1595 private:
1596 void DoUnlinkedCall(const UnlinkedCall& unlinked,
1597 const Function& target_function);
1598 bool CanExtendSingleTargetRange(const String& name,
1599 const Function& old_target,
1600 const Function& target_function,
1601 intptr_t* lower,
1602 intptr_t* upper);
1603 void DoMonomorphicMiss(const Object& data, const Function& target_function);
1604#if defined(DART_PRECOMPILED_RUNTIME)
1605 void DoSingleTargetMiss(const SingleTargetCache& data,
1606 const Function& target_function);
#endif // defined(DART_PRECOMPILED_RUNTIME)
1608 void DoICDataMiss(const ICData& data, const Function& target_function);
1609 void DoMegamorphicMiss(const MegamorphicCache& data,
1610 const Function& target_function);
1611
1612 Isolate* isolate_;
1613 Thread* thread_;
1614 Zone* zone_;
1615 const Instance& receiver_;
1616 NativeArguments arguments_;
1617 StackFrame* caller_frame_;
1618 const Code& caller_code_;
1619 const Function& caller_function_;
1620
1621 // Call-site information populated during resolution.
1622 String& name_;
1623 Array& args_descriptor_;
1624 bool is_monomorphic_hit_ = false;
1625};
1626
1627void SwitchableCallHandler::DoUnlinkedCall(const UnlinkedCall& unlinked,
1628 const Function& target_function) {
1629 const String& name = String::Handle(zone_, unlinked.target_name());
1630 const Array& descriptor =
1631 Array::Handle(zone_, unlinked.arguments_descriptor());
1632 const ICData& ic_data =
1633 ICData::Handle(zone_, ICData::New(caller_function_, name, descriptor,
1634 DeoptId::kNone, 1, /* args_tested */
1635 ICData::kInstance));
1636 if (!target_function.IsNull()) {
1637 ic_data.AddReceiverCheck(receiver_.GetClassId(), target_function);
1638 }
1639
1640 Object& object = Object::Handle(zone_, ic_data.raw());
1641 Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().raw());
  // If the target function has optional parameters or is generic, its
  // prologue requires ARGS_DESC_REG to be populated. Yet switchable calls do
  // not populate that register at the call site, which is why we don't
  // transition those call sites to monomorphic, but rather directly to a call
  // via stub (which will populate ARGS_DESC_REG from the ICData).
  //
  // Because of this we also don't generate monomorphic checks for those
  // functions.
1650 if (!target_function.IsNull() &&
1651 !target_function.PrologueNeedsArgumentsDescriptor()) {
1652 // Patch to monomorphic call.
1653 ASSERT(target_function.HasCode());
1654 const Code& target_code =
1655 Code::Handle(zone_, target_function.CurrentCode());
1656 const Smi& expected_cid =
1657 Smi::Handle(zone_, Smi::New(receiver_.GetClassId()));
1658
1659 if (unlinked.can_patch_to_monomorphic()) {
1660 object = expected_cid.raw();
1661 code = target_code.raw();
1662 ASSERT(code.HasMonomorphicEntry());
1663 } else {
1664 object = MonomorphicSmiableCall::New(expected_cid.Value(), target_code);
1665 code = StubCode::MonomorphicSmiableCheck().raw();
1666 }
1667 }
1668 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1669 thread_, caller_frame_->pc(), caller_code_, object, code);
1670
1671 // Return the ICData. The miss stub will jump to continue in the IC lookup
1672 // stub.
1673 arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
1674 arguments_.SetReturn(ic_data);
1675}
1676
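// Returns whether the receiver's cid can be merged into the single-target
// cid range [*lower, *upper]: the old target must match [target_function] and
// every class in the newly covered part of the range must resolve [name] to
// the same target. Widens the range to include the receiver's cid as a side
// effect.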
1677bool SwitchableCallHandler::CanExtendSingleTargetRange(
1678 const String& name,
1679 const Function& old_target,
1680 const Function& target_function,
1681 intptr_t* lower,
1682 intptr_t* upper) {
1683 if (old_target.raw() != target_function.raw()) {
1684 return false;
1685 }
1686 intptr_t unchecked_lower, unchecked_upper;
1687 if (receiver_.GetClassId() < *lower) {
1688 unchecked_lower = receiver_.GetClassId();
1689 unchecked_upper = *lower - 1;
1690 *lower = receiver_.GetClassId();
1691 } else {
1692 unchecked_upper = receiver_.GetClassId();
1693 unchecked_lower = *upper + 1;
1694 *upper = receiver_.GetClassId();
1695 }
1696
1697 return IsSingleTarget(isolate_, zone_, unchecked_lower, unchecked_upper,
1698 target_function, name);
1699}
1700
1701#if !defined(DART_PRECOMPILED_RUNTIME)
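// Looks up the ICData of the instance call at [pc] by finding the deopt id
// of the matching IC-call PC descriptor in [code] and querying the function.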
1702static ICDataPtr FindICDataForInstanceCall(Zone* zone,
1703 const Code& code,
1704 uword pc) {
1705 uword pc_offset = pc - code.PayloadStart();
1706 const PcDescriptors& descriptors =
1707 PcDescriptors::Handle(zone, code.pc_descriptors());
1708 PcDescriptors::Iterator iter(descriptors, PcDescriptorsLayout::kIcCall);
1709 intptr_t deopt_id = -1;
1710 while (iter.MoveNext()) {
1711 if (iter.PcOffset() == pc_offset) {
1712 deopt_id = iter.DeoptId();
1713 break;
1714 }
1715 }
1716 ASSERT(deopt_id != -1);
1717 return Function::Handle(zone, code.function()).FindICData(deopt_id);
1718}
1719#endif // !defined(DART_PRECOMPILED_RUNTIME)
1720
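// Resolves [name] in [receiver_class] for the given arguments [descriptor],
// falling back to a noSuchMethod/invoke-field dispatcher when no regular
// target exists (dispatchers are only created if FLAG_lazy_dispatchers).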
1721static FunctionPtr Resolve(Zone* zone,
1722 const Class& receiver_class,
1723 const String& name,
1724 const Array& descriptor) {
1725 ASSERT(name.IsSymbol());
1726
1727 ArgumentsDescriptor args_desc(descriptor);
1728 Function& target_function =
1729 Function::Handle(zone, Resolver::ResolveDynamicForReceiverClass(
1730 receiver_class, name, args_desc));
1731
1732 if (target_function.IsNull()) {
1733 target_function = InlineCacheMissHelper(receiver_class, descriptor, name);
1734 if (target_function.IsNull()) {
1735 ASSERT(!FLAG_lazy_dispatchers);
1736 }
1737 }
1738
1739 return target_function.raw();
1740}
1741
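// A monomorphic call site saw a receiver of an unexpected class. In AOT,
// extend the site to a SingleTargetCache if possible, otherwise switch it to
// dispatch through ICData; in JIT, patch the instance call back to the IC
// stub.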
1742void SwitchableCallHandler::DoMonomorphicMiss(const Object& data,
1743 const Function& target_function) {
1744#if defined(DART_PRECOMPILED_RUNTIME)
1745 classid_t old_expected_cid;
1746 if (data.IsSmi()) {
1747 old_expected_cid = Smi::Cast(data).Value();
1748 } else {
1749 RELEASE_ASSERT(data.IsMonomorphicSmiableCall());
1750 old_expected_cid = MonomorphicSmiableCall::Cast(data).expected_cid();
1751 }
1752 const bool is_monomorphic_hit = old_expected_cid == receiver_.GetClassId();
1753 const auto& old_receiver_class =
1754 Class::Handle(zone_, isolate_->class_table()->At(old_expected_cid));
1755 const auto& old_target = Function::Handle(
1756 zone_, Resolve(zone_, old_receiver_class, name_, args_descriptor_));
1757
1758 const ICData& ic_data = ICData::Handle(
1759 zone_, ICData::New(caller_function_, name_, args_descriptor_,
1760 DeoptId::kNone, 1, /* args_tested */
1761 ICData::kInstance));
1762 // Add the first target.
1763 if (!old_target.IsNull()) {
1764 ic_data.AddReceiverCheck(old_expected_cid, old_target);
1765 }
1766
1767 if (is_monomorphic_hit) {
    // The site has just been updated to the monomorphic state with the same
    // class id - do nothing in that case: the stub will call through ic data.
1770 arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
1771 arguments_.SetReturn(ic_data);
1772 return;
1773 }
1774
1775 intptr_t lower = old_expected_cid;
1776 intptr_t upper = old_expected_cid;
1777 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
1778 &upper)) {
1779 const SingleTargetCache& cache =
1780 SingleTargetCache::Handle(zone_, SingleTargetCache::New());
1781 const Code& code = Code::Handle(zone_, target_function.CurrentCode());
1782 cache.set_target(code);
1783 cache.set_entry_point(code.EntryPoint());
1784 cache.set_lower_limit(lower);
1785 cache.set_upper_limit(upper);
1786 const Code& stub = StubCode::SingleTargetCall();
1787 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1788 thread_, caller_frame_->pc(), caller_code_, cache, stub);
1789 // Return the ICData. The miss stub will jump to continue in the IC call
1790 // stub.
1791 arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
1792 arguments_.SetReturn(ic_data);
1793 return;
1794 }
1795
1796 // Patch to call through stub.
1797 const Code& stub = StubCode::ICCallThroughCode();
1798 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1799 thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
1800
1801 // Return the ICData. The miss stub will jump to continue in the IC lookup
1802 // stub.
1803 arguments_.SetArgAt(0, stub);
1804 arguments_.SetReturn(ic_data);
1805#else // JIT
1806 const ICData& ic_data = ICData::Handle(
1807 zone_,
1808 FindICDataForInstanceCall(zone_, caller_code_, caller_frame_->pc()));
1809 RELEASE_ASSERT(!ic_data.IsNull());
1810
1811 ASSERT(ic_data.NumArgsTested() == 1);
1812 const Code& stub = ic_data.is_tracking_exactness()
1813 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
1814 : StubCode::OneArgCheckInlineCache();
1815 CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
1816 thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
1817 if (FLAG_trace_ic) {
1818 OS::PrintErr("Instance call at %" Px
1819 " switching to polymorphic dispatch, %s\n",
1820 caller_frame_->pc(), ic_data.ToCString());
1821 }
1822
1823 // ICData can be shared between unoptimized and optimized code, so beware that
1824 // the new receiver class may have already been added through the optimized
1825 // code.
1826 if (!ic_data.HasReceiverClassId(receiver_.GetClassId())) {
1827 GrowableArray<const Instance*> args(1);
1828 args.Add(&receiver_);
1829 // Don't count during insertion because the IC stub we continue through will
1830 // do an increment.
1831 InlineCacheMissHandlerGivenTargetFunction(args, ic_data, /*count=*/0,
1832 target_function);
1833 }
1834 arguments_.SetArgAt(0, stub);
1835 arguments_.SetReturn(ic_data);
1836#endif // defined(DART_PRECOMPILED_RUNTIME)
1837}
1838
1839#if defined(DART_PRECOMPILED_RUNTIME)
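// A single-target call site saw a receiver outside its cid range: widen the
// range if the resolved target still matches, otherwise switch the call site
// to dispatch through ICData.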
1840void SwitchableCallHandler::DoSingleTargetMiss(
1841 const SingleTargetCache& data,
1842 const Function& target_function) {
1843 const Code& old_target_code = Code::Handle(zone_, data.target());
1844 const Function& old_target =
1845 Function::Handle(zone_, Function::RawCast(old_target_code.owner()));
1846
1847 // We lost the original ICData when we patched to the monomorphic case.
1848 const ICData& ic_data = ICData::Handle(
1849 zone_, ICData::New(caller_function_, name_, args_descriptor_,
1850 DeoptId::kNone, 1, /* args_tested */
1851 ICData::kInstance));
1852 if (!target_function.IsNull()) {
1853 ic_data.AddReceiverCheck(receiver_.GetClassId(), target_function);
1854 }
1855
1856 intptr_t lower = data.lower_limit();
1857 intptr_t upper = data.upper_limit();
1858 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
1859 &upper)) {
1860 data.set_lower_limit(lower);
1861 data.set_upper_limit(upper);
1862 // Return the ICData. The single target stub will jump to continue in the
1863 // IC call stub.
1864 arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
1865 arguments_.SetReturn(ic_data);
1866 return;
1867 }
1868
1869 // Call site is not single target, switch to call using ICData.
1870 const Code& stub = StubCode::ICCallThroughCode();
1871 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1872 thread_, caller_frame_->pc(), caller_code_, ic_data, stub);
1873
1874 // Return the ICData. The single target stub will jump to continue in the
1875 // IC call stub.
1876 arguments_.SetArgAt(0, stub);
1877 arguments_.SetReturn(ic_data);
1878}
#endif // defined(DART_PRECOMPILED_RUNTIME)
1880
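// An ICData-dispatched call site missed: patch the site to a monomorphic
// direct call on the first check (where possible), record the new receiver
// class, or switch to a MegamorphicCache once there are more than
// FLAG_max_polymorphic_checks checks.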
1881void SwitchableCallHandler::DoICDataMiss(const ICData& ic_data,
1882 const Function& target_function) {
1883 const String& name = String::Handle(zone_, ic_data.target_name());
1884 const Class& cls = Class::Handle(zone_, receiver_.clazz());
1885 ASSERT(!cls.IsNull());
1886 const Array& descriptor =
1887 Array::CheckedHandle(zone_, ic_data.arguments_descriptor());
1888 ArgumentsDescriptor args_desc(descriptor);
1889 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
1890 OS::PrintErr("ICData miss, class=%s, function<%" Pd ">=%s\n",
1891 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
1892 }
1893
1894 if (target_function.IsNull()) {
1895 arguments_.SetArgAt(0, StubCode::NoSuchMethodDispatcher());
1896 arguments_.SetReturn(ic_data);
1897 return;
1898 }
1899
1900 const intptr_t number_of_checks = ic_data.NumberOfChecks();
1901
1902 if ((number_of_checks == 0) &&
1903 (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) &&
1904 !target_function.PrologueNeedsArgumentsDescriptor()) {
1905 // This call site is unlinked: transition to a monomorphic direct call.
1906 // Note we cannot do this if the target has optional parameters because
1907 // the monomorphic direct call does not load the arguments descriptor.
1908 // We cannot do this if we are still in the middle of precompiling because
1909 // the monomorphic case hides a live instance selector from the
1910 // treeshaker.
1911 const Code& target_code =
1912 Code::Handle(zone_, target_function.EnsureHasCode());
1913 const Smi& expected_cid =
1914 Smi::Handle(zone_, Smi::New(receiver_.GetClassId()));
1915 ASSERT(target_code.HasMonomorphicEntry());
1916 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1917 thread_, caller_frame_->pc(), caller_code_, expected_cid, target_code);
1918 arguments_.SetArgAt(0, target_code);
1919 arguments_.SetReturn(expected_cid);
1920 } else {
    // An IC entry might have been added while we waited to get into the
    // runtime.
1922 GrowableArray<intptr_t> class_ids(1);
1923 class_ids.Add(receiver_.GetClassId());
1924 if (ic_data.FindCheck(class_ids) == -1) {
1925 ic_data.AddReceiverCheck(receiver_.GetClassId(), target_function);
1926 }
1927 if (number_of_checks > FLAG_max_polymorphic_checks) {
1928 // Switch to megamorphic call.
1929 const MegamorphicCache& cache = MegamorphicCache::Handle(
1930 zone_, MegamorphicCacheTable::Lookup(thread_, name, descriptor));
1931 const Code& stub = StubCode::MegamorphicCall();
1932
1933 CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
1934 thread_, caller_frame_->pc(), caller_code_, cache, stub);
1935 arguments_.SetArgAt(0, stub);
1936 arguments_.SetReturn(cache);
1937 } else {
1938 arguments_.SetArgAt(0, StubCode::ICCallThroughCode());
1939 arguments_.SetReturn(ic_data);
1940 }
1941 }
1942}
1943
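// A megamorphic call site missed its cache: insert the resolved target for
// the receiver's class, or continue in the NoSuchMethod dispatcher when no
// target exists.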
1944void SwitchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data,
1945 const Function& target_function) {
1946 const String& name = String::Handle(zone_, data.target_name());
1947 const Class& cls = Class::Handle(zone_, receiver_.clazz());
1948 ASSERT(!cls.IsNull());
1949 const Array& descriptor =
1950 Array::CheckedHandle(zone_, data.arguments_descriptor());
1951 ArgumentsDescriptor args_desc(descriptor);
1952 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
1953 OS::PrintErr("Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
1954 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
1955 }
1956 if (target_function.IsNull()) {
1957 arguments_.SetArgAt(0, StubCode::NoSuchMethodDispatcher());
1958 arguments_.SetReturn(data);
1959 return;
1960 }
1961
1962 // Insert function found into cache.
1963 const Smi& class_id = Smi::Handle(zone_, Smi::New(cls.id()));
1964 data.Insert(class_id, target_function);
1965 arguments_.SetArgAt(0, StubCode::MegamorphicCall());
1966 arguments_.SetReturn(data);
1967}
1968
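// Recovers the target name and arguments descriptor from the call-site data
// (in AOT, saving/loading the original [UnlinkedCall] where needed) and
// resolves the target function for the current receiver's class.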
1969FunctionPtr SwitchableCallHandler::ResolveTargetFunction(const Object& data) {
1970 switch (data.GetClassId()) {
1971 case kUnlinkedCallCid: {
1972 const auto& unlinked_call = UnlinkedCall::Cast(data);
1973
1974#if defined(DART_PRECOMPILED_RUNTIME)
1975 // When transitioning out of UnlinkedCall to other states (e.g.
1976 // Monomorphic, MonomorphicSmiable, SingleTarget) we lose
1977 // name/arg-descriptor in AOT mode and cannot recover it.
1978 //
1979 // Even if we could recover an old target function (which was missed) -
1980 // which we cannot in AOT bare mode - we can still lose the name due to a
1981 // dyn:* call site potentially targeting non-dyn:* targets.
1982 //
1983 // => We will therefore retain the unlinked call here.
1984 //
1985 // In JIT mode we always use ICData from the call site, which has the
1986 // correct name/args-descriptor.
1987 SaveUnlinkedCall(zone_, isolate_, caller_frame_->pc(), unlinked_call);
1988#endif // defined(DART_PRECOMPILED_RUNTIME)
1989
1990 name_ = unlinked_call.target_name();
1991 args_descriptor_ = unlinked_call.arguments_descriptor();
1992 break;
1993 }
1994 case kMonomorphicSmiableCallCid:
1995 FALL_THROUGH;
1996#if defined(DART_PRECOMPILED_RUNTIME)
1997 case kSmiCid:
1998 FALL_THROUGH;
1999 case kSingleTargetCacheCid: {
2000 const auto& unlinked_call = UnlinkedCall::Handle(
2001 zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
2002 name_ = unlinked_call.target_name();
2003 args_descriptor_ = unlinked_call.arguments_descriptor();
2004 break;
2005 }
2006#else
2007 case kArrayCid: {
2008 // ICData three-element array: Smi(receiver CID), Smi(count),
2009 // Function(target). It is the Array from ICData::entries_.
2010 const auto& ic_data = ICData::Handle(
2011 zone_,
2012 FindICDataForInstanceCall(zone_, caller_code_, caller_frame_->pc()));
2013 RELEASE_ASSERT(!ic_data.IsNull());
2014 name_ = ic_data.target_name();
2015 args_descriptor_ = ic_data.arguments_descriptor();
2016 break;
2017 }
2018#endif // defined(DART_PRECOMPILED_RUNTIME)
2019 case kICDataCid:
2020 FALL_THROUGH;
2021 case kMegamorphicCacheCid: {
2022 const CallSiteData& call_site_data = CallSiteData::Cast(data);
2023 name_ = call_site_data.target_name();
2024 args_descriptor_ = call_site_data.arguments_descriptor();
2025 break;
2026 }
2027 default:
2028 UNREACHABLE();
2029 }
2030 const Class& cls = Class::Handle(zone_, receiver_.clazz());
2031 return Resolve(zone_, cls, name_, args_descriptor_);
2032}
2033
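// Dispatches to the handler matching the call site's current state, which is
// encoded in the class id of [old_data].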
2034void SwitchableCallHandler::HandleMiss(const Object& old_data,
2035 const Code& old_code,
2036 const Function& target_function) {
2037 switch (old_data.GetClassId()) {
2038 case kUnlinkedCallCid:
2039 ASSERT(old_code.raw() == StubCode::SwitchableCallMiss().raw());
2040 DoUnlinkedCall(UnlinkedCall::Cast(old_data), target_function);
2041 break;
2042 case kMonomorphicSmiableCallCid:
2043 ASSERT(old_code.raw() == StubCode::MonomorphicSmiableCheck().raw());
2044 FALL_THROUGH;
2045#if defined(DART_PRECOMPILED_RUNTIME)
2046 case kSmiCid:
2047 DoMonomorphicMiss(old_data, target_function);
2048 break;
2049 case kSingleTargetCacheCid:
2050 ASSERT(old_code.raw() == StubCode::SingleTargetCall().raw());
2051 DoSingleTargetMiss(SingleTargetCache::Cast(old_data), target_function);
2052 break;
2053#else
2054 case kArrayCid:
2055 // ICData three-element array: Smi(receiver CID), Smi(count),
2056 // Function(target). It is the Array from ICData::entries_.
2057 DoMonomorphicMiss(old_data, target_function);
2058 break;
#endif // defined(DART_PRECOMPILED_RUNTIME)
2060 case kICDataCid:
2061 ASSERT(old_code.raw() == StubCode::ICCallThroughCode().raw());
2062 DoICDataMiss(ICData::Cast(old_data), target_function);
2063 break;
2064 case kMegamorphicCacheCid:
2065 ASSERT(old_code.raw() == StubCode::MegamorphicCall().raw());
2066 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2067 break;
2068 default:
2069 UNREACHABLE();
2070 }
2071}
2072
// Handles a miss of a switchable call: resolves the new target and
// transitions the call site to its next state.
// Arg1: Receiver.
// Arg0: Stub out.
// Returns: the call-site data (e.g. an ICData) used to continue the call.
2077DEFINE_RUNTIME_ENTRY(SwitchableCallMiss, 2) {
2078 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2079
2080 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
2081 StackFrameIterator::kNoCrossThreadIteration);
2082 StackFrame* exit_frame = iterator.NextFrame();
2083 ASSERT(exit_frame->IsExitFrame());
2084 StackFrame* miss_handler_frame = iterator.NextFrame();
  // This runtime entry can be called either from the miss stub or from the
  // switchable_call_miss "dart" stub/function set up in
  // [MegamorphicCacheTable::InitMissHandler].
2088 ASSERT(miss_handler_frame->IsStubFrame() ||
2089 miss_handler_frame->IsDartFrame());
2090 StackFrame* caller_frame = iterator.NextFrame();
2091 ASSERT(caller_frame->IsDartFrame());
2092 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
2093 const Function& caller_function =
2094 Function::Handle(zone, caller_frame->LookupDartFunction());
2095
2096 Object& old_data = Object::Handle(zone);
2097 Code& old_code = Code::Handle(zone);
2098
2099#if defined(DART_PRECOMPILED_RUNTIME)
  // Grab old_data and do the potentially long-running step of resolving the
  // target function before we stop mutators.
  // This should reduce the amount of time spent with all mutators stopped,
  // hopefully leaving only the code patching to be done then.
2104 old_data =
2105 CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
2106#else
2107 old_code ^= CodePatcher::GetInstanceCallAt(caller_frame->pc(), caller_code,
2108 &old_data);
2109#endif
2110 SwitchableCallHandler handler(thread, receiver, arguments, caller_frame,
2111 caller_code, caller_function);
2112 const Function& target_function =
2113 Function::Handle(zone, handler.ResolveTargetFunction(old_data));
2114 thread->isolate_group()->RunWithStoppedMutators(
2115 [&]() {
2116#if defined(DART_PRECOMPILED_RUNTIME)
2117 old_data = CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(),
2118 caller_code);
2119#if defined(DEBUG)
2120 old_code ^= CodePatcher::GetSwitchableCallTargetAt(caller_frame->pc(),
2121 caller_code);
2122#endif
2123#else
2124 old_code ^= CodePatcher::GetInstanceCallAt(caller_frame->pc(),
2125 caller_code, &old_data);
2126#endif
2127 handler.HandleMiss(old_data, old_code, target_function);
2128 },
2129 /*use_force_growth=*/true);
2130}
2131
// Handles an interpreted interface call cache miss.
2133// Arg0: receiver
2134// Arg1: target name
2135// Arg2: arguments descriptor
2136// Returns: target function (can only be null if !FLAG_lazy_dispatchers)
2137// Modifies the instance call table in current interpreter.
2138DEFINE_RUNTIME_ENTRY(InterpretedInstanceCallMissHandler, 3) {
2139#if defined(DART_PRECOMPILED_RUNTIME)
2140 UNREACHABLE();
2141#else
2142 ASSERT(FLAG_enable_interpreter);
2143 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2144 const String& target_name = String::CheckedHandle(zone, arguments.ArgAt(1));
2145 const Array& arg_desc = Array::CheckedHandle(zone, arguments.ArgAt(2));
2146
2147 ArgumentsDescriptor arguments_descriptor(arg_desc);
2148 Function& target_function = Function::Handle(
2149 zone,
2150 Resolver::ResolveDynamic(receiver, target_name, arguments_descriptor));
2151
2152 // TODO(regis): In order to substitute 'simple_instance_of_function', the 2nd
2153 // arg to the call, the type, is needed.
2154
2155 if (target_function.IsNull()) {
2156 const Class& receiver_class = Class::Handle(zone, receiver.clazz());
2157 target_function =
2158 InlineCacheMissHelper(receiver_class, arg_desc, target_name);
2159 }
2160 ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers);
2161 arguments.SetReturn(target_function);
2162#endif
2163}
2164
2165// Used to find the correct receiver and function to invoke or to fall back to
2166// invoking noSuchMethod when lazy dispatchers are disabled. Returns the
2167// result of the invocation or an Error.
2168static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(
2169 Zone* zone,
2170 const Instance& receiver,
2171 const String& target_name,
2172 const Array& orig_arguments,
2173 const Array& orig_arguments_desc) {
2174 ASSERT(!FLAG_lazy_dispatchers);
2175 const bool is_dynamic_call =
2176 Function::IsDynamicInvocationForwarderName(target_name);
2177 String& demangled_target_name = String::Handle(zone, target_name.raw());
2178 if (is_dynamic_call) {
2179 demangled_target_name =
2180 Function::DemangleDynamicInvocationForwarderName(target_name);
2181 }
2182
2183 Class& cls = Class::Handle(zone, receiver.clazz());
2184 Function& function = Function::Handle(zone);
2185
2186 // Dart distinguishes getters and regular methods and allows their calls
2187 // to mix with conversions, and its selectors are independent of arity. So do
2188 // a zigzagged lookup to see if this call failed because of an arity mismatch,
2189 // need for conversion, or there really is no such method.
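  //
  // For example (illustrative): if `o.foo(1, 2)` misses, `foo` may exist
  // with a different arity (-> invoke noSuchMethod), `foo` may be a getter
  // returning a closure (-> call through the getter: (o.foo)(1, 2)), or the
  // selector may be `get:foo` (-> return a tear-off of method foo).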
2190
2191 const bool is_getter = Field::IsGetterName(demangled_target_name);
2192 if (is_getter) {
2193 // Tear-off of a method
2194 // o.foo (o.get:foo) failed, closurize o.foo() if it exists.
2195 const auto& function_name =
2196 String::Handle(zone, Field::NameFromGetter(demangled_target_name));
2197 while (!cls.IsNull()) {
      // We don't generate dyn:* forwarders for method extractors, so there is
      // no need to try to find a dyn:get:foo first (see the assertion below).
2200 if (function.IsNull()) {
2201 function = cls.LookupDynamicFunction(function_name);
2202 }
2203 if (!function.IsNull()) {
2204#if !defined(DART_PRECOMPILED_RUNTIME)
2205 ASSERT(!kernel::NeedsDynamicInvocationForwarder(Function::Handle(
2206 function.GetMethodExtractor(demangled_target_name))));
2207#endif
2208 const Function& closure_function =
2209 Function::Handle(zone, function.ImplicitClosureFunction());
2210 const Object& result = Object::Handle(
2211 zone, closure_function.ImplicitInstanceClosure(receiver));
2212 return result.raw();
2213 }
2214 cls = cls.SuperClass();
2215 }
2216
2217 // Fall through for noSuchMethod
2218 } else {
    // Call through field.
    // o.foo(...) failed; invoke noSuchMethod if foo exists but has the wrong
    // number of arguments, or try (o.foo).call(...).
2222
2223 if ((target_name.raw() == Symbols::Call().raw()) && receiver.IsClosure()) {
      // Special case: closures are implemented with a call getter instead of
      // a call method, and with lazy dispatchers the field-invocation
      // dispatcher would perform the closure call.
2227 auto& result = Object::Handle(
2228 zone,
2229 DartEntry::ResolveCallable(orig_arguments, orig_arguments_desc));
2230 if (result.IsError()) {
2231 return result.raw();
2232 }
2233 function ^= result.raw();
2234 if (is_dynamic_call && !function.IsNull() &&
2235 !function.CanReceiveDynamicInvocation()) {
2236 ArgumentsDescriptor args_desc(orig_arguments_desc);
2237 result = function.DoArgumentTypesMatch(orig_arguments, args_desc);
2238 if (result.IsError()) {
2239 return result.raw();
2240 }
2241 }
2242 result = DartEntry::InvokeCallable(function, orig_arguments,
2243 orig_arguments_desc);
2244 return result.raw();
2245 }
2246
2247 // Dynamic call sites have to use the dynamic getter as well (if it was
2248 // created).
2249 const auto& getter_name =
2250 String::Handle(zone, Field::GetterName(demangled_target_name));
2251 const auto& dyn_getter_name = String::Handle(
2252 zone, is_dynamic_call
2253 ? Function::CreateDynamicInvocationForwarderName(getter_name)
2254 : getter_name.raw());
2255 ArgumentsDescriptor args_desc(orig_arguments_desc);
2256 while (!cls.IsNull()) {
2257 // If there is a function with the target name but mismatched arguments
2258 // we need to call `receiver.noSuchMethod()`.
2259 function = cls.LookupDynamicFunction(target_name);
2260 if (!function.IsNull()) {
2261 ASSERT(!function.AreValidArguments(args_desc, NULL));
2262 break; // mismatch, invoke noSuchMethod
2263 }
2264 if (is_dynamic_call) {
2265 function = cls.LookupDynamicFunction(demangled_target_name);
2266 if (!function.IsNull()) {
2267 ASSERT(!function.AreValidArguments(args_desc, NULL));
2268 break; // mismatch, invoke noSuchMethod
2269 }
2270 }
2271
2272 // If there is a getter we need to call-through-getter.
2273 if (is_dynamic_call) {
2274 function = cls.LookupDynamicFunction(dyn_getter_name);
2275 }
2276 if (function.IsNull()) {
2277 function = cls.LookupDynamicFunction(getter_name);
2278 }
2279 if (!function.IsNull()) {
2280 const Array& getter_arguments = Array::Handle(Array::New(1));
2281 getter_arguments.SetAt(0, receiver);
2282 const Object& getter_result = Object::Handle(
2283 zone, DartEntry::InvokeFunction(function, getter_arguments));
2284 if (getter_result.IsError()) {
2285 return getter_result.raw();
2286 }
2287 ASSERT(getter_result.IsNull() || getter_result.IsInstance());
2288
2289 orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
2290 auto& result = Object::Handle(
2291 zone,
2292 DartEntry::ResolveCallable(orig_arguments, orig_arguments_desc));
2293 if (result.IsError()) {
2294 return result.raw();
2295 }
2296 function ^= result.raw();
2297 if (is_dynamic_call && !function.IsNull() &&
2298 !function.CanReceiveDynamicInvocation()) {
2299 result = function.DoArgumentTypesMatch(orig_arguments, args_desc);
2300 if (result.IsError()) {
2301 return result.raw();
2302 }
2303 }
2304 result = DartEntry::InvokeCallable(function, orig_arguments,
2305 orig_arguments_desc);
2306 return result.raw();
2307 }
2308 cls = cls.SuperClass();
2309 }
2310 }
2311
2312 const Object& result = Object::Handle(
2313 zone, DartEntry::InvokeNoSuchMethod(receiver, demangled_target_name,
2314 orig_arguments, orig_arguments_desc));
2315 return result.raw();
2316}
2317
2318// Invoke appropriate noSuchMethod or closure from getter.
2319// Arg0: receiver
2320// Arg1: ICData or MegamorphicCache
2321// Arg2: arguments descriptor array
2322// Arg3: arguments array
2323DEFINE_RUNTIME_ENTRY(NoSuchMethodFromCallStub, 4) {
2324 ASSERT(!FLAG_lazy_dispatchers);
2325 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2326 const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
2327 const Array& orig_arguments_desc =
2328 Array::CheckedHandle(zone, arguments.ArgAt(2));
2329 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2330 String& target_name = String::Handle(zone);
2331 if (ic_data_or_cache.IsICData()) {
2332 target_name = ICData::Cast(ic_data_or_cache).target_name();
2333 } else {
2334 ASSERT(ic_data_or_cache.IsMegamorphicCache());
2335 target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
2336 }
2337
2338 const auto& result = Object::Handle(
2339 zone,
2340 InvokeCallThroughGetterOrNoSuchMethod(
2341 zone, receiver, target_name, orig_arguments, orig_arguments_desc));
2342 ThrowIfError(result);
2343 arguments.SetReturn(result);
2344}
2345
// Invoke the appropriate noSuchMethod function.
// Arg0: receiver
// Arg1: function
// Arg2: arguments descriptor array.
// Arg3: arguments array.
2351DEFINE_RUNTIME_ENTRY(NoSuchMethodFromPrologue, 4) {
2352 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2353 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
2354 const Array& orig_arguments_desc =
2355 Array::CheckedHandle(zone, arguments.ArgAt(2));
2356 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2357
2358 String& orig_function_name = String::Handle(zone);
2359 if ((function.kind() == FunctionLayout::kClosureFunction) ||
2360 (function.kind() == FunctionLayout::kImplicitClosureFunction)) {
    // For closures the function name is always 'call'. Replace it with the
    // name of the closurized function so that the exception contains more
    // relevant information.
2364 orig_function_name = function.QualifiedUserVisibleName();
2365 } else {
2366 orig_function_name = function.name();
2367 }
2368
2369 const Object& result = Object::Handle(
2370 zone, DartEntry::InvokeNoSuchMethod(receiver, orig_function_name,
2371 orig_arguments, orig_arguments_desc));
2372 ThrowIfError(result);
2373 arguments.SetReturn(result);
2374}
2375
2376// Invoke appropriate noSuchMethod function (or in the case of no lazy
2377// dispatchers, walk the receiver to find the correct method to call).
2378// Arg0: receiver
2379// Arg1: function name.
2380// Arg2: arguments descriptor array.
2381// Arg3: arguments array.
2382DEFINE_RUNTIME_ENTRY(InvokeNoSuchMethod, 4) {
2383 ASSERT(FLAG_enable_interpreter);
2384 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2385 const String& original_function_name =
2386 String::CheckedHandle(zone, arguments.ArgAt(1));
2387 const Array& orig_arguments_desc =
2388 Array::CheckedHandle(zone, arguments.ArgAt(2));
2389 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2390
2391 auto& result = Object::Handle(zone);
2392 if (!FLAG_lazy_dispatchers) {
2393 // Failing to find the method could be due to the lack of lazy invoke field
2394 // dispatchers, so attempt a deeper search before calling noSuchMethod.
2395 result = InvokeCallThroughGetterOrNoSuchMethod(
2396 zone, receiver, original_function_name, orig_arguments,
2397 orig_arguments_desc);
2398 } else {
2399 result = DartEntry::InvokeNoSuchMethod(receiver, original_function_name,
2400 orig_arguments, orig_arguments_desc);
2401 }
2402 ThrowIfError(result);
2403 arguments.SetReturn(result);
2404}
2405
2406#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2407// The following code is used to stress test
2408// - deoptimization
2409// - debugger stack tracing
2410// - garbage collection
2411// - hot reload
2412static void HandleStackOverflowTestCases(Thread* thread) {
2413 Isolate* isolate = thread->isolate();
2414
2415 if (FLAG_shared_slow_path_triggers_gc) {
2416 isolate->heap()->CollectAllGarbage();
2417 }
2418
2419 bool do_deopt = false;
2420 bool do_stacktrace = false;
2421 bool do_reload = false;
2422 bool do_gc = false;
2423 const intptr_t isolate_reload_every =
2424 isolate->reload_every_n_stack_overflow_checks();
2425 if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
2426 (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
2427 if (!Isolate::IsVMInternalIsolate(isolate)) {
2428 // TODO(turnidge): To make --deoptimize_every and
2429 // --stacktrace-every faster we could move this increment/test to
2430 // the generated code.
2431 int32_t count = thread->IncrementAndGetStackOverflowCount();
2432 if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
2433 do_deopt = true;
2434 }
2435 if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
2436 do_stacktrace = true;
2437 }
2438 if (FLAG_gc_every > 0 && (count % FLAG_gc_every) == 0) {
2439 do_gc = true;
2440 }
2441 if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
2442 do_reload = isolate->CanReload();
2443 }
2444 }
2445 }
2446 if ((FLAG_deoptimize_filter != nullptr) ||
2447 (FLAG_stacktrace_filter != nullptr) || (FLAG_reload_every != 0)) {
2448 DartFrameIterator iterator(thread,
2449 StackFrameIterator::kNoCrossThreadIteration);
2450 StackFrame* frame = iterator.NextFrame();
2451 ASSERT(frame != nullptr);
2452 Code& code = Code::Handle();
2453 Function& function = Function::Handle();
2454 if (frame->is_interpreted()) {
2455 function = frame->LookupDartFunction();
2456 } else {
2457 code = frame->LookupDartCode();
2458 ASSERT(!code.IsNull());
2459 function = code.function();
2460 }
2461 ASSERT(!function.IsNull());
2462 const char* function_name = nullptr;
2463 if ((FLAG_deoptimize_filter != nullptr) ||
2464 (FLAG_stacktrace_filter != nullptr)) {
2465 function_name = function.ToFullyQualifiedCString();
2466 ASSERT(function_name != nullptr);
2467 }
2468 if (!code.IsNull()) {
2469 if (!code.is_optimized() && FLAG_reload_every_optimized) {
2470 // Don't do the reload if we aren't inside optimized code.
2471 do_reload = false;
2472 }
2473 if (code.is_optimized() && FLAG_deoptimize_filter != nullptr &&
2474 strstr(function_name, FLAG_deoptimize_filter) != nullptr &&
2475 !function.ForceOptimize()) {
2476 OS::PrintErr("*** Forcing deoptimization (%s)\n",
2477 function.ToFullyQualifiedCString());
2478 do_deopt = true;
2479 }
2480 }
2481 if (FLAG_stacktrace_filter != nullptr &&
2482 strstr(function_name, FLAG_stacktrace_filter) != nullptr) {
2483 OS::PrintErr("*** Computing stacktrace (%s)\n",
2484 function.ToFullyQualifiedCString());
2485 do_stacktrace = true;
2486 }
2487 }
2488 if (do_deopt) {
2489 // TODO(turnidge): Consider using DeoptimizeAt instead.
2490 DeoptimizeFunctionsOnStack();
2491 }
2492 if (do_reload) {
2493 JSONStream js;
2494 // Maybe adjust the rate of future reloads.
2495 isolate->MaybeIncreaseReloadEveryNStackOverflowChecks();
2496
2497 const char* script_uri;
2498 {
2499 NoReloadScope no_reload(isolate, thread);
2500 const Library& lib =
2501 Library::Handle(isolate->object_store()->_internal_library());
2502 const Class& cls = Class::Handle(
2503 lib.LookupClass(String::Handle(String::New("VMLibraryHooks"))));
2504 const Function& func = Function::Handle(cls.LookupFunction(
2505 String::Handle(String::New("get:platformScript"))));
2506 Object& result = Object::Handle(
2507 DartEntry::InvokeFunction(func, Object::empty_array()));
2508 if (result.IsUnwindError()) {
2509 Exceptions::PropagateError(Error::Cast(result));
2510 }
2511 if (!result.IsInstance()) {
2512 FATAL1("Bad script uri hook: %s", result.ToCString());
2513 }
2514 result = DartLibraryCalls::ToString(Instance::Cast(result));
2515 if (result.IsUnwindError()) {
2516 Exceptions::PropagateError(Error::Cast(result));
2517 }
2518 if (!result.IsString()) {
2519 FATAL1("Bad script uri hook: %s", result.ToCString());
2520 }
2521 script_uri = result.ToCString(); // Zone allocated.
2522 }
2523
2524 // Issue a reload.
2525 bool success = isolate->group()->ReloadSources(&js, true /* force_reload */,
2526 script_uri);
2527 if (!success) {
2528 FATAL1("*** Isolate reload failed:\n%s\n", js.ToCString());
2529 }
2530 }
2531 if (do_stacktrace) {
2532 String& var_name = String::Handle();
2533 Instance& var_value = Instance::Handle();
2534 DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
2535 intptr_t num_frames = stack->Length();
2536 for (intptr_t i = 0; i < num_frames; i++) {
2537 ActivationFrame* frame = stack->FrameAt(i);
2538 int num_vars = 0;
2539 // Variable locations and number are unknown when precompiling.
2540#if !defined(DART_PRECOMPILED_RUNTIME)
2541 // NumLocalVariables() can call EnsureHasUnoptimizedCode() for
2542 // non-interpreted functions.
2543 if (!frame->function().ForceOptimize()) {
2544 if (!frame->IsInterpreted()) {
2545 // Ensure that we have unoptimized code.
2546 frame->function().EnsureHasCompiledUnoptimizedCode();
2547 }
2548 num_vars = frame->NumLocalVariables();
2549 }
2550#endif
2551 TokenPosition unused = TokenPosition::kNoSource;
2552 for (intptr_t v = 0; v < num_vars; v++) {
2553 frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value);
2554 }
2555 }
2556 if (FLAG_stress_async_stacks) {
2557 isolate->debugger()->CollectAwaiterReturnStackTrace();
2558 }
2559 }
2560 if (do_gc) {
2561 isolate->heap()->CollectAllGarbage(Heap::kDebugging);
2562 }
2563}
2564#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2565
2566#if !defined(DART_PRECOMPILED_RUNTIME)
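// Attempts on-stack replacement: compiles an optimized version of the
// function in the current unoptimized frame for the OSR entry matching the
// current pc and, on success, redirects the frame to continue in the
// optimized code.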
2567static void HandleOSRRequest(Thread* thread) {
2568 Isolate* isolate = thread->isolate();
2569 ASSERT(isolate->use_osr());
2570 DartFrameIterator iterator(thread,
2571 StackFrameIterator::kNoCrossThreadIteration);
2572 StackFrame* frame = iterator.NextFrame();
2573 ASSERT(frame != NULL);
2574 const Code& code = Code::ZoneHandle(frame->LookupDartCode());
2575 ASSERT(!code.IsNull());
2576 ASSERT(!code.is_optimized());
2577 const Function& function = Function::Handle(code.function());
2578 ASSERT(!function.IsNull());
2579
2580 // If the code of the frame does not match the function's unoptimized code,
2581 // we bail out since the code was reset by an isolate reload.
2582 if (code.raw() != function.unoptimized_code()) {
2583 return;
2584 }
2585
2586 // Since the code is referenced from the frame and the ZoneHandle,
2587 // it cannot have been removed from the function.
2588 ASSERT(function.HasCode());
2589 // Don't do OSR on intrinsified functions: The intrinsic code expects to be
2590 // called like a regular function and can't be entered via OSR.
2591 if (!Compiler::CanOptimizeFunction(thread, function) ||
2592 function.is_intrinsic()) {
2593 return;
2594 }
2595
2596 // The unoptimized code is on the stack and should never be detached from
2597 // the function at this point.
2598 ASSERT(function.unoptimized_code() != Object::null());
2599 intptr_t osr_id =
2600 Code::Handle(function.unoptimized_code()).GetDeoptIdForOsr(frame->pc());
2601 ASSERT(osr_id != Compiler::kNoOSRDeoptId);
2602 if (FLAG_trace_osr) {
2603 OS::PrintErr("Attempting OSR for %s at id=%" Pd ", count=%" Pd "\n",
2604 function.ToFullyQualifiedCString(), osr_id,
2605 function.usage_counter());
2606 }
2607
2608 // Since the code is referenced from the frame and the ZoneHandle,
2609 // it cannot have been removed from the function.
2610 const Object& result = Object::Handle(
2611 Compiler::CompileOptimizedFunction(thread, function, osr_id));
2612 ThrowIfError(result);
2613
2614 if (!result.IsNull()) {
2615 const Code& code = Code::Cast(result);
2616 uword optimized_entry = code.EntryPoint();
2617 frame->set_pc(optimized_entry);
2618 frame->set_pc_marker(code.raw());
2619 }
2620}
2621#endif // !defined(DART_PRECOMPILED_RUNTIME)
2622
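// Allocates a Mint box, used from shared slow paths. The constant below is a
// placeholder that is guaranteed not to fit in a Smi, ensuring that a Mint
// cell is actually allocated.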
2623DEFINE_RUNTIME_ENTRY(AllocateMint, 0) {
2624 if (FLAG_shared_slow_path_triggers_gc) {
2625 isolate->heap()->CollectAllGarbage();
2626 }
2627 constexpr uint64_t val = 0x7fffffff7fffffff;
2628 ASSERT(!Smi::IsValid(static_cast<int64_t>(val)));
2629 const auto& integer_box = Integer::Handle(zone, Integer::NewFromUint64(val));
2630 arguments.SetReturn(integer_box);
2631};
2632
2633DEFINE_RUNTIME_ENTRY(StackOverflow, 0) {
2634#if defined(USING_SIMULATOR)
2635 uword stack_pos = Simulator::Current()->get_sp();
  // If the simulator was never called (for example, in pure interpreted
  // mode) it may return 0 as the value of SPREG.
2638 if (stack_pos == 0) {
    // Use any reasonable value which would not be treated as a stack
    // overflow.
2641 stack_pos = thread->saved_stack_limit();
2642 }
2643#else
2644 uword stack_pos = OSThread::GetCurrentStackPointer();
2645#endif
2646 // Always clear the stack overflow flags. They are meant for this
2647 // particular stack overflow runtime call and are not meant to
2648 // persist.
2649 uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();
2650
2651 bool interpreter_stack_overflow = false;
2652#if !defined(DART_PRECOMPILED_RUNTIME)
2653 if (FLAG_enable_interpreter) {
    // Do not allocate an interpreter if none has been allocated yet.
2655 Interpreter* interpreter = thread->interpreter();
2656 if (interpreter != NULL) {
2657 interpreter_stack_overflow =
2658 interpreter->get_sp() >= interpreter->overflow_stack_limit();
2659 }
2660 }
2661#endif // !defined(DART_PRECOMPILED_RUNTIME)
2662
2663 // If an interrupt happens at the same time as a stack overflow, we
2664 // process the stack overflow now and leave the interrupt for next
2665 // time.
2666 if (interpreter_stack_overflow || !thread->os_thread()->HasStackHeadroom() ||
2667 IsCalleeFrameOf(thread->saved_stack_limit(), stack_pos)) {
2668 if (FLAG_verbose_stack_overflow) {
2669 OS::PrintErr("Stack overflow in %s\n",
2670 interpreter_stack_overflow ? "interpreter" : "native code");
2671 OS::PrintErr(" Native SP = %" Px ", stack limit = %" Px "\n", stack_pos,
2672 thread->saved_stack_limit());
2673#if !defined(DART_PRECOMPILED_RUNTIME)
2674 if (thread->interpreter() != nullptr) {
2675 OS::PrintErr(" Interpreter SP = %" Px ", stack limit = %" Px "\n",
2676 thread->interpreter()->get_sp(),
2677 thread->interpreter()->overflow_stack_limit());
2678 }
2679#endif // !defined(DART_PRECOMPILED_RUNTIME)
2680
2681 OS::PrintErr("Call stack:\n");
2682 OS::PrintErr("size | frame\n");
2683 StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, thread,
2684 StackFrameIterator::kNoCrossThreadIteration);
2685 uword fp = stack_pos;
2686 StackFrame* frame = frames.NextFrame();
2687 while (frame != NULL) {
2688 if (frame->is_interpreted() == interpreter_stack_overflow) {
2689 uword delta = interpreter_stack_overflow ? (fp - frame->fp())
2690 : (frame->fp() - fp);
2691 fp = frame->fp();
2692 OS::PrintErr("%4" Pd " %s\n", delta, frame->ToCString());
2693 } else {
2694 OS::PrintErr(" %s\n", frame->ToCString());
2695 }
2696 frame = frames.NextFrame();
2697 }
2698 }
2699
2700 // Use the preallocated stack overflow exception to avoid calling
2701 // into dart code.
2702 const Instance& exception =
2703 Instance::Handle(isolate->object_store()->stack_overflow());
2704 Exceptions::Throw(thread, exception);
2705 UNREACHABLE();
2706 }
2707
2708#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2709 HandleStackOverflowTestCases(thread);
2710#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2711
2712 // Handle interrupts:
2713 // - store buffer overflow
2714 // - OOB message (vm-service or dart:isolate)
2715 const Error& error = Error::Handle(thread->HandleInterrupts());
2716 ThrowIfError(error);
2717
2718#if !defined(DART_PRECOMPILED_RUNTIME)
2719 if ((stack_overflow_flags & Thread::kOsrRequest) != 0) {
2720 HandleOSRRequest(thread);
2721 }
2722#else
2723 ASSERT((stack_overflow_flags & Thread::kOsrRequest) == 0);
2724#endif // !defined(DART_PRECOMPILED_RUNTIME)
2725}
2726
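// Traces an IC call in unoptimized code: prints the call pc, the ICData, the
// calling function's usage counter, and the number of cached checks.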
2727DEFINE_RUNTIME_ENTRY(TraceICCall, 2) {
2728 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(0));
2729 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
2730 DartFrameIterator iterator(thread,
2731 StackFrameIterator::kNoCrossThreadIteration);
2732 StackFrame* frame = iterator.NextFrame();
2733 ASSERT(frame != NULL);
2734 OS::PrintErr(
2735 "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
2736 frame->pc(), static_cast<uword>(ic_data.raw()), function.usage_counter(),
2737 ic_data.NumberOfChecks(), function.ToFullyQualifiedCString());
2738}
2739
// This is called from the interpreter when a function's usage counter has
// reached the compilation threshold and the function needs to be compiled.
2742DEFINE_RUNTIME_ENTRY(CompileInterpretedFunction, 1) {
2743#if !defined(DART_PRECOMPILED_RUNTIME)
2744 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
2745 ASSERT(!function.IsNull());
2746 ASSERT(FLAG_enable_interpreter);
2747
2748#if !defined(PRODUCT)
2749 if (Debugger::IsDebugging(thread, function)) {
2750 return;
2751 }
2752#endif // !defined(PRODUCT)
2753
2754 if (FLAG_background_compilation) {
2755 if (!BackgroundCompiler::IsDisabled(isolate,
                                        /* optimizing_compiler = */ false) &&
2757 function.is_background_optimizable()) {
2758 // Ensure background compiler is running, if not start it.
2759 BackgroundCompiler::Start(isolate);
2760 // Reduce the chance of triggering a compilation while the function is
2761 // being compiled in the background. INT32_MIN should ensure that it
      // takes a long time to trigger a compilation.
2763 // Note that the background compilation queue rejects duplicate entries.
2764 function.SetUsageCounter(INT32_MIN);
2765 isolate->background_compiler()->Compile(function);
2766 return;
2767 }
2768 }
2769
2770 // Reset usage counter for future optimization.
2771 function.SetUsageCounter(0);
2772 Object& result =
2773 Object::Handle(zone, Compiler::CompileFunction(thread, function));
2774 ThrowIfError(result);
2775#else
2776 UNREACHABLE();
2777#endif // !DART_PRECOMPILED_RUNTIME
2778}
2779
// This is called from a function that needs to be optimized.
// The requesting function may already be optimized (reoptimization).
// Returns the Code object where execution should continue.
2783DEFINE_RUNTIME_ENTRY(OptimizeInvokedFunction, 1) {
2784#if !defined(DART_PRECOMPILED_RUNTIME)
2785 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
2786 ASSERT(!function.IsNull());
2787 ASSERT(function.HasCode());
2788
2789 if (Compiler::CanOptimizeFunction(thread, function)) {
2790 if (FLAG_background_compilation) {
2791 Field& field = Field::Handle(zone, isolate->GetDeoptimizingBoxedField());
2792 while (!field.IsNull()) {
2793 if (FLAG_trace_optimization || FLAG_trace_field_guards) {
2794 THR_Print("Lazy disabling unboxing of %s\n", field.ToCString());
2795 }
2796 field.set_is_unboxing_candidate(false);
2797 field.DeoptimizeDependentCode();
2798 // Get next field.
2799 field = isolate->GetDeoptimizingBoxedField();
2800 }
2801 if (!BackgroundCompiler::IsDisabled(isolate,
2802 /* optimizing_compiler = */ true) &&
2803 function.is_background_optimizable()) {
2804 // Ensure background compiler is running, if not start it.
2805 BackgroundCompiler::Start(isolate);
2806 // Reduce the chance of triggering a compilation while the function is
2807 // being compiled in the background. INT32_MIN should ensure that it
2808 // takes long time to trigger a compilation.
2809 // Note that the background compilation queue rejects duplicate entries.
2810 function.SetUsageCounter(INT32_MIN);
2811 isolate->optimizing_background_compiler()->Compile(function);
2812 // Continue in the same code.
2813 arguments.SetReturn(function);
2814 return;
2815 }
2816 }
2817
2818 // Reset usage counter for reoptimization before calling optimizer to
2819 // prevent recursive triggering of function optimization.
2820 function.SetUsageCounter(0);
2821 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
2822 if (function.HasOptimizedCode()) {
2823 THR_Print("ReCompiling function: '%s' \n",
2824 function.ToFullyQualifiedCString());
2825 }
2826 }
2827 Object& result = Object::Handle(
2828 zone, Compiler::CompileOptimizedFunction(thread, function));
2829 ThrowIfError(result);
2830 }
2831 arguments.SetReturn(function);
2832#else
2833 UNREACHABLE();
2834#endif // !DART_PRECOMPILED_RUNTIME
2835}
2836
2837// The caller must be a static call in a Dart frame, or an entry frame.
2838// Patch static call to point to valid code's entry point.
2839DEFINE_RUNTIME_ENTRY(FixCallersTarget, 0) {
2840#if !defined(DART_PRECOMPILED_RUNTIME)
2841 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
2842 StackFrameIterator::kNoCrossThreadIteration);
2843 StackFrame* frame = iterator.NextFrame();
2844 ASSERT(frame != NULL);
2845 while (frame->IsStubFrame() || frame->IsExitFrame()) {
2846 frame = iterator.NextFrame();
2847 ASSERT(frame != NULL);
2848 }
2849 if (frame->IsEntryFrame()) {
2850 // Since function's current code is always unpatched, the entry frame always
2851 // calls to unpatched code.
2852 UNREACHABLE();
2853 }
2854 ASSERT(frame->IsDartFrame());
2855 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
2856 RELEASE_ASSERT(caller_code.is_optimized());
2857 const Function& target_function = Function::Handle(
2858 zone, caller_code.GetStaticCallTargetFunctionAt(frame->pc()));
2859
2860 const Code& current_target_code =
2861 Code::Handle(zone, target_function.EnsureHasCode());
2862 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, current_target_code);
2863 caller_code.SetStaticCallTargetCodeAt(frame->pc(), current_target_code);
2864 if (FLAG_trace_patching) {
2865 OS::PrintErr(
2866 "FixCallersTarget: caller %#" Px
2867 " "
2868 "target '%s' -> %#" Px " (%s)\n",
2869 frame->pc(), target_function.ToFullyQualifiedCString(),
2870 current_target_code.EntryPoint(),
2871 current_target_code.is_optimized() ? "optimized" : "unoptimized");
2872 }
2873 ASSERT(!current_target_code.IsDisabled());
2874 arguments.SetReturn(current_target_code);
2875#else
2876 UNREACHABLE();
2877#endif
2878}
2879
2880// The caller must be a monomorphic call from unoptimized code.
2881// Patch call to point to new target.
2882DEFINE_RUNTIME_ENTRY(FixCallersTargetMonomorphic, 0) {
2883#if !defined(DART_PRECOMPILED_RUNTIME)
2884 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
2885 StackFrameIterator::kNoCrossThreadIteration);
2886 StackFrame* frame = iterator.NextFrame();
2887 ASSERT(frame != NULL);
2888 while (frame->IsStubFrame() || frame->IsExitFrame()) {
2889 frame = iterator.NextFrame();
2890 ASSERT(frame != NULL);
2891 }
2892 if (frame->IsEntryFrame()) {
2893 // Since function's current code is always unpatched, the entry frame always
2894 // calls to unpatched code.
2895 UNREACHABLE();
2896 }
2897 ASSERT(frame->IsDartFrame());
2898 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
2899 RELEASE_ASSERT(!caller_code.is_optimized());
2900
2901 Object& cache = Object::Handle(zone);
2902 const Code& old_target_code = Code::Handle(
2903 zone, CodePatcher::GetInstanceCallAt(frame->pc(), caller_code, &cache));
2904 const Function& target_function =
2905 Function::Handle(zone, old_target_code.function());
2906 const Code& current_target_code =
2907 Code::Handle(zone, target_function.EnsureHasCode());
2908 CodePatcher::PatchInstanceCallAt(frame->pc(), caller_code, cache,
2909 current_target_code);
2910 if (FLAG_trace_patching) {
2911 OS::PrintErr(
2912 "FixCallersTargetMonomorphic: caller %#" Px
2913 " "
2914 "target '%s' -> %#" Px " (%s)\n",
2915 frame->pc(), target_function.ToFullyQualifiedCString(),
2916 current_target_code.EntryPoint(),
2917 current_target_code.is_optimized() ? "optimized" : "unoptimized");
2918 }
2919 ASSERT(!current_target_code.IsDisabled());
2920 arguments.SetReturn(current_target_code);
2921#else
2922 UNREACHABLE();
2923#endif
2924}
2925
2926// The caller tried to allocate an instance via an invalidated allocation
2927// stub.
2928DEFINE_RUNTIME_ENTRY(FixAllocationStubTarget, 0) {
2929#if !defined(DART_PRECOMPILED_RUNTIME)
2930 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
2931 StackFrameIterator::kNoCrossThreadIteration);
2932 StackFrame* frame = iterator.NextFrame();
2933 ASSERT(frame != NULL);
2934 while (frame->IsStubFrame() || frame->IsExitFrame()) {
2935 frame = iterator.NextFrame();
2936 ASSERT(frame != NULL);
2937 }
2938 if (frame->IsEntryFrame()) {
2939 // There must be a valid Dart frame.
2940 UNREACHABLE();
2941 }
2942 ASSERT(frame->IsDartFrame());
2943 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
2944 ASSERT(!caller_code.IsNull());
2945 const Code& stub = Code::Handle(
2946 CodePatcher::GetStaticCallTargetAt(frame->pc(), caller_code));
2947 Class& alloc_class = Class::ZoneHandle(zone);
2948 alloc_class ^= stub.owner();
2949 Code& alloc_stub = Code::Handle(zone, alloc_class.allocation_stub());
2950 if (alloc_stub.IsNull()) {
2951 alloc_stub = StubCode::GetAllocationStubForClass(alloc_class);
2952 ASSERT(!alloc_stub.IsDisabled());
2953 }
2954 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, alloc_stub);
2955 caller_code.SetStubCallTargetCodeAt(frame->pc(), alloc_stub);
2956 if (FLAG_trace_patching) {
2957 OS::PrintErr("FixAllocationStubTarget: caller %#" Px
2958 " alloc-class %s "
2959 " -> %#" Px "\n",
2960 frame->pc(), alloc_class.ToCString(), alloc_stub.EntryPoint());
2961 }
2962 arguments.SetReturn(alloc_stub);
2963#else
2964 UNREACHABLE();
2965#endif
2966}
2967
2968const char* DeoptReasonToCString(ICData::DeoptReasonId deopt_reason) {
2969 switch (deopt_reason) {
2970#define DEOPT_REASON_TO_TEXT(name) \
2971 case ICData::kDeopt##name: \
2972 return #name;
2973 DEOPT_REASONS(DEOPT_REASON_TO_TEXT)
2974#undef DEOPT_REASON_TO_TEXT
2975 default:
2976 UNREACHABLE();
2977 return "";
2978 }
2979}
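
// For illustration: for a deopt reason named CheckSmi in DEOPT_REASONS
// (assuming such an entry exists; the authoritative list lives with ICData),
// the macro above expands to:
//
//   case ICData::kDeoptCheckSmi:
//     return "CheckSmi";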

void DeoptimizeAt(const Code& optimized_code, StackFrame* frame) {
  ASSERT(optimized_code.is_optimized());

  // Force-optimized code is optimized code which cannot deoptimize and doesn't
  // have unoptimized code to fall back to.
  ASSERT(!optimized_code.is_force_optimized());

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Function& function = Function::Handle(zone, optimized_code.function());
  const Error& error =
      Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
  if (!error.IsNull()) {
    Exceptions::PropagateError(error);
  }
  const Code& unoptimized_code =
      Code::Handle(zone, function.unoptimized_code());
  ASSERT(!unoptimized_code.IsNull());
  // The switch to unoptimized code may have already occurred.
  if (function.HasOptimizedCode()) {
    function.SwitchToUnoptimizedCode();
  }

  if (frame->IsMarkedForLazyDeopt()) {
    // Deopt already scheduled.
    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp());
    }
  } else {
    uword deopt_pc = frame->pc();
    ASSERT(optimized_code.ContainsInstructionAt(deopt_pc));

#if defined(DEBUG)
    ValidateFrames();
#endif

    // N.B.: Update the pending deopt table before updating the frame. The
    // profiler may attempt a stack walk in between.
    ASSERT(!frame->is_interpreted());
    thread->isolate()->AddPendingDeopt(frame->fp(), deopt_pc);
    frame->MarkForLazyDeopt();

    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
                frame->fp(), deopt_pc);
    }
  }

  // Mark the code as dead (do not GC its embedded objects).
  optimized_code.set_is_alive(false);
}

// Deoptimizes all optimized Dart frames on the current thread's stack,
// skipping force-optimized code, which cannot deoptimize.
void DeoptimizeFunctionsOnStack() {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = iterator.NextFrame();
  Code& optimized_code = Code::Handle();
  while (frame != NULL) {
    if (!frame->is_interpreted()) {
      optimized_code = frame->LookupDartCode();
      if (optimized_code.is_optimized() &&
          !optimized_code.is_force_optimized()) {
        DeoptimizeAt(optimized_code, frame);
      }
    }
    frame = iterator.NextFrame();
  }
}

#if !defined(DART_PRECOMPILED_RUNTIME)
static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters;
static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters;

static void CopySavedRegisters(uword saved_registers_address,
                               fpu_register_t** fpu_registers,
                               intptr_t** cpu_registers) {
  // Tell MemorySanitizer this region is initialized by generated code. This
  // region isn't already (fully) unpoisoned by FrameSetIterator::Unpoison
  // because it is in an exit frame and stack frame iteration doesn't have
  // access to the true SP for exit frames.
  MSAN_UNPOISON(reinterpret_cast<void*>(saved_registers_address),
                kNumberOfSavedFpuRegisters * kFpuRegisterSize +
                    kNumberOfSavedCpuRegisters * kWordSize);

  ASSERT(sizeof(fpu_register_t) == kFpuRegisterSize);
  fpu_register_t* fpu_registers_copy =
      new fpu_register_t[kNumberOfSavedFpuRegisters];
  ASSERT(fpu_registers_copy != NULL);
  for (intptr_t i = 0; i < kNumberOfSavedFpuRegisters; i++) {
    fpu_registers_copy[i] =
        *reinterpret_cast<fpu_register_t*>(saved_registers_address);
    saved_registers_address += kFpuRegisterSize;
  }
  *fpu_registers = fpu_registers_copy;

  ASSERT(sizeof(intptr_t) == kWordSize);
  intptr_t* cpu_registers_copy = new intptr_t[kNumberOfSavedCpuRegisters];
  ASSERT(cpu_registers_copy != NULL);
  for (intptr_t i = 0; i < kNumberOfSavedCpuRegisters; i++) {
    cpu_registers_copy[i] =
        *reinterpret_cast<intptr_t*>(saved_registers_address);
    saved_registers_address += kWordSize;
  }
  *cpu_registers = cpu_registers_copy;
}
#endif

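// A sketch of the saved-register block consumed by CopySavedRegisters above
// and by the last_fp computation in DeoptimizeCopyFrame below, as implied by
// the code here (lower addresses first):
//
//   saved_registers_address -> | FPU registers: kNumberOfSavedFpuRegisters |
//                              |   slots of kFpuRegisterSize bytes each    |
//                              | CPU registers: kNumberOfSavedCpuRegisters |
//                              |   slots of kWordSize bytes each           |
//                              | ... locals of the caller's frame ...      |
//
// last_fp is then derived by stepping past both register areas and adjusting
// by the first-local-from-FP offset of the runtime frame layout.
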
// Copies saved registers and the caller's frame into temporary buffers.
// Returns the stack size of the unoptimized frame.
// The calling code must be optimized, but its function may not have
// optimized code if the code is OSR code, or if the code was invalidated
// through class loading/finalization or a field guard.
DEFINE_LEAF_RUNTIME_ENTRY(intptr_t,
                          DeoptimizeCopyFrame,
                          2,
                          uword saved_registers_address,
                          uword is_lazy_deopt) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  StackZone zone(thread);
  HANDLESCOPE(thread);

  // All registers have been saved below last-fp as if they were locals.
  const uword last_fp =
      saved_registers_address + (kNumberOfSavedCpuRegisters * kWordSize) +
      (kNumberOfSavedFpuRegisters * kFpuRegisterSize) -
      ((runtime_frame_layout.first_local_from_fp + 1) * kWordSize);

  // Get the optimized code and frame that need to be deoptimized.
  DartFrameIterator iterator(last_fp, thread,
                             StackFrameIterator::kNoCrossThreadIteration);

  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);
  const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
  ASSERT(optimized_code.is_optimized());
  const Function& top_function =
      Function::Handle(thread->zone(), optimized_code.function());
  const bool deoptimizing_code = top_function.HasOptimizedCode();
  if (FLAG_trace_deoptimization) {
    const Function& function = Function::Handle(optimized_code.function());
    THR_Print("== Deoptimizing code for '%s', %s, %s\n",
              function.ToFullyQualifiedCString(),
              deoptimizing_code ? "code & frame" : "frame",
              (is_lazy_deopt != 0u) ? "lazy-deopt" : "");
  }

  if (is_lazy_deopt != 0u) {
    uword deopt_pc = isolate->FindPendingDeopt(caller_frame->fp());
    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(),
                deopt_pc);
    }

    // N.B.: Update the frame before updating the pending deopt table. The
    // profiler may attempt a stack walk in between.
    caller_frame->set_pc(deopt_pc);
    ASSERT(caller_frame->pc() == deopt_pc);
    ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc()));
    isolate->ClearPendingDeoptsAtOrBelow(caller_frame->fp());
  } else {
    if (FLAG_trace_deoptimization) {
      THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(),
                caller_frame->pc());
    }
  }

  // Copy the saved registers from the stack.
  fpu_register_t* fpu_registers;
  intptr_t* cpu_registers;
  CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers);

  // Create the DeoptContext.
  DeoptContext* deopt_context = new DeoptContext(
      caller_frame, optimized_code, DeoptContext::kDestIsOriginalFrame,
      fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code);
  isolate->set_deopt_context(deopt_context);

  // Stack size (FP - SP) in bytes.
  return deopt_context->DestStackAdjustment() * kWordSize;
#else
  UNREACHABLE();
  return 0;
#endif  // !DART_PRECOMPILED_RUNTIME
}
END_LEAF_RUNTIME_ENTRY

// The stack has been adjusted to fit all values for the unoptimized frame.
// Fill the unoptimized frame.
DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, 1, uword last_fp) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  StackZone zone(thread);
  HANDLESCOPE(thread);

  DeoptContext* deopt_context = isolate->deopt_context();
  DartFrameIterator iterator(last_fp, thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != NULL);

#if defined(DEBUG)
  {
    // The code from the deopt_context.
    const Code& code = Code::Handle(deopt_context->code());

    // The code from our frame.
    const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
    const Function& function = Function::Handle(optimized_code.function());
    ASSERT(!function.IsNull());

    // The code will be the same as before.
    ASSERT(code.raw() == optimized_code.raw());

    // Some sanity checking of the optimized code.
    ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized());
  }
#endif

  deopt_context->set_dest_frame(caller_frame);
  deopt_context->FillDestFrame();

#else
  UNREACHABLE();
#endif  // !DART_PRECOMPILED_RUNTIME
}
END_LEAF_RUNTIME_ENTRY

// This is the last step of deoptimization; GC can occur here.
// Returns the number of bytes to remove from the expression stack of the
// bottom-most deoptimized frame. Those arguments were artificially injected
// under the return address to keep them discoverable by a GC that can occur
// during the materialization phase.
DEFINE_RUNTIME_ENTRY(DeoptimizeMaterialize, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
#if defined(DEBUG)
  {
    // We may rendezvous for a safepoint at entry or GC from the allocations
    // below. Check that the stack is walkable.
    ValidateFrames();
  }
#endif
  DeoptContext* deopt_context = isolate->deopt_context();
  intptr_t deopt_arg_count = deopt_context->MaterializeDeferredObjects();
  isolate->set_deopt_context(NULL);
  delete deopt_context;

  // The return value tells the deoptimization stub to remove the given number
  // of bytes from the stack.
  arguments.SetReturn(Smi::Handle(Smi::New(deopt_arg_count * kWordSize)));
#else
  UNREACHABLE();
#endif  // !DART_PRECOMPILED_RUNTIME
}

DEFINE_RUNTIME_ENTRY(RewindPostDeopt, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(PRODUCT)
  isolate->debugger()->RewindPostDeopt();
#endif  // !PRODUCT
#endif  // !DART_PRECOMPILED_RUNTIME
  UNREACHABLE();
}

double DartModulo(double left, double right) {
  double remainder = fmod_ieee(left, right);
  if (remainder == 0.0) {
    // We explicitly switch to the positive 0.0 (just in case it was negative).
    remainder = +0.0;
  } else if (remainder < 0.0) {
    if (right < 0) {
      remainder -= right;
    } else {
      remainder += right;
    }
  }
  return remainder;
}
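
// A few worked examples of the semantics above (the values follow directly
// from the code, ignoring floating-point rounding; the result always lands
// in [0, |right|)):
//
//   DartModulo(5.3, 2.0)   ==  1.3   // fmod result already non-negative
//   DartModulo(-5.3, 2.0)  ==  0.7   // -1.3 + 2.0
//   DartModulo(-5.3, -2.0) ==  0.7   // -1.3 - (-2.0)
//   DartModulo(-4.0, 2.0)  == +0.0   // negative zero normalized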

// Update the global type feedback recorded for a field, recording the
// assignment of the given value.
// Arg0: Field object;
// Arg1: Value that is being stored.
DEFINE_RUNTIME_ENTRY(UpdateFieldCid, 2) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  const Object& value = Object::Handle(arguments.ArgAt(1));
  field.RecordStore(value);
#else
  UNREACHABLE();
#endif
}

DEFINE_RUNTIME_ENTRY(InitInstanceField, 2) {
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(1));
  Object& result = Object::Handle(zone, field.InitializeInstance(instance));
  ThrowIfError(result);
  result = instance.GetField(field);
  ASSERT((result.raw() != Object::sentinel().raw()) &&
         (result.raw() != Object::transition_sentinel().raw()));
  arguments.SetReturn(result);
}

DEFINE_RUNTIME_ENTRY(InitStaticField, 1) {
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  Object& result = Object::Handle(zone, field.InitializeStatic());
  ThrowIfError(result);
  result = field.StaticValue();
  ASSERT((result.raw() != Object::sentinel().raw()) &&
         (result.raw() != Object::transition_sentinel().raw()));
  arguments.SetReturn(result);
}

DEFINE_RUNTIME_ENTRY(LateInitializationError, 1) {
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::ThrowLateInitializationError(String::Handle(field.name()));
}

DEFINE_RUNTIME_ENTRY(NotLoaded, 0) {
  // We could just use a trap instruction in the stub, but we get better stack
  // traces when there is an exit frame.
  FATAL("Not loaded");
}

// Use the expected function signatures to help the MSVC compiler resolve
// overloading.
typedef double (*UnaryMathCFunction)(double x);
typedef double (*BinaryMathCFunction)(double x, double y);
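
// Why the casts in the entries below are needed (standard C++ behavior, noted
// here for clarity): <cmath> declares overloads such as pow(float, float),
// pow(double, double), and pow(long double, long double), so a bare &pow is
// ambiguous. The static_cast to BinaryMathCFunction (or UnaryMathCFunction)
// selects the double-precision overload before the pointer is reinterpreted
// as a RuntimeFunction.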

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcPow,
    2,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<BinaryMathCFunction>(&pow)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    DartModulo,
    2,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(
        static_cast<BinaryMathCFunction>(&DartModulo)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcAtan2,
    2,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(
        static_cast<BinaryMathCFunction>(&atan2_ieee)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcFloor,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&floor)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcCeil,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&ceil)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcTrunc,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&trunc)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcRound,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&round)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcCos,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&cos)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcSin,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&sin)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcAsin,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&asin)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcAcos,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&acos)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcTan,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&tan)));

DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    LibcAtan,
    1,
    true /* is_float */,
    reinterpret_cast<RuntimeFunction>(static_cast<UnaryMathCFunction>(&atan)));

// Interpret a function call. Should be called only for non-jitted functions.
// argc indicates the number of arguments, including the type arguments.
// argv points to the first argument.
// If argc < 0, arguments are passed at decreasing memory addresses from argv.
extern "C" uword /*ObjectPtr*/ InterpretCall(uword /*FunctionPtr*/ function_in,
                                             uword /*ArrayPtr*/ argdesc_in,
                                             intptr_t argc,
                                             ObjectPtr* argv,
                                             Thread* thread) {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  FunctionPtr function = static_cast<FunctionPtr>(function_in);
  ArrayPtr argdesc = static_cast<ArrayPtr>(argdesc_in);
  ASSERT(FLAG_enable_interpreter);
  Interpreter* interpreter = Interpreter::Current();
#if defined(DEBUG)
  uword exit_fp = thread->top_exit_frame_info();
  ASSERT(exit_fp != 0);
  ASSERT(thread == Thread::Current());
  // The caller is the InterpretCall stub called from generated code.
  // We stay in the "in generated code" execution state when interpreting code.
  ASSERT(thread->execution_state() == Thread::kThreadInGenerated);
  ASSERT(!Function::HasCode(function));
  ASSERT(Function::HasBytecode(function));
  ASSERT(interpreter != NULL);
#endif
  // Tell MemorySanitizer 'argv' is initialized by generated code.
  if (argc < 0) {
    MSAN_UNPOISON(argv - argc, -argc * sizeof(ObjectPtr));
  } else {
    MSAN_UNPOISON(argv, argc * sizeof(ObjectPtr));
  }
  ObjectPtr result = interpreter->Call(function, argdesc, argc, argv, thread);
  DEBUG_ASSERT(thread->top_exit_frame_info() == exit_fp);
  if (IsErrorClassId(result->GetClassIdMayBeSmi())) {
    // Must not leak handles in the caller's zone.
    HANDLESCOPE(thread);
    // Protect the result in a handle before transitioning, which may trigger
    // GC.
    const Error& error = Error::Handle(Error::RawCast(result));
    // Propagating an error may cause allocation. Check if we need to block for
    // a safepoint by switching to the "in VM" execution state.
    TransitionGeneratedToVM transition(thread);
    Exceptions::PropagateError(error);
  }
  return static_cast<uword>(result);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

uword RuntimeEntry::InterpretCallEntry() {
  uword entry = reinterpret_cast<uword>(InterpretCall);
#if defined(USING_SIMULATOR)
  entry = Simulator::RedirectExternalReference(entry,
                                               Simulator::kLeafRuntimeCall, 5);
#endif
  return entry;
}
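
// Note on the redirection above: when running on a simulated architecture,
// generated code cannot jump to host C++ entry points directly, so the entry
// is wrapped in a simulator redirect that the simulator intercepts. The
// argument count of 5 matches the five parameters of InterpretCall.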

extern "C" void DFLRT_EnterSafepoint(NativeArguments __unusable_) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("%s", "EnterSafepoint");
  Thread* thread = Thread::Current();
  ASSERT(thread->top_exit_frame_info() != 0);
  ASSERT(thread->execution_state() == Thread::kThreadInNative);
  thread->EnterSafepoint();
  TRACE_RUNTIME_CALL("%s", "EnterSafepoint done");
}
DEFINE_RAW_LEAF_RUNTIME_ENTRY(EnterSafepoint, 0, false, &DFLRT_EnterSafepoint);

extern "C" void DFLRT_ExitSafepoint(NativeArguments __unusable_) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("%s", "ExitSafepoint");
  Thread* thread = Thread::Current();
  ASSERT(thread->top_exit_frame_info() != 0);

  ASSERT(thread->execution_state() == Thread::kThreadInVM);
  thread->ExitSafepoint();
  TRACE_RUNTIME_CALL("%s", "ExitSafepoint done");
}
DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitSafepoint, 0, false, &DFLRT_ExitSafepoint);

// Not registered as a runtime entry because we can't use Thread to look it up.
static Thread* GetThreadForNativeCallback(uword callback_id,
                                          uword return_address) {
  Thread* const thread = Thread::Current();
  if (thread == nullptr) {
    FATAL("Cannot invoke native callback outside an isolate.");
  }
  if (thread->no_callback_scope_depth() != 0) {
    FATAL("Cannot invoke native callback when API callbacks are prohibited.");
  }
  if (!thread->IsMutatorThread()) {
    FATAL("Native callbacks must be invoked on the mutator thread.");
  }

  // Set the execution state to VM while waiting for the safepoint to end.
  // This isn't strictly necessary but enables tests to check that we're not
  // in native code anymore. See tests/ffi/function_gc_test.dart for an
  // example.
  thread->set_execution_state(Thread::kThreadInVM);

  thread->ExitSafepoint();
  thread->VerifyCallbackIsolate(callback_id, return_address);

  return thread;
}

#if defined(HOST_OS_WINDOWS)
#pragma intrinsic(_ReturnAddress)
#endif

// This is called directly by NativeEntryInstr. At the moment we enter this
// routine, the caller is generated code in the Isolate heap. Therefore we
// check that the return address (caller) corresponds to the declared callback
// ID's code within this Isolate.
extern "C" Thread* DLRT_GetThreadForNativeCallback(uword callback_id) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("GetThreadForNativeCallback %" Pd, callback_id);
#if defined(HOST_OS_WINDOWS)
  void* return_address = _ReturnAddress();
#else
  void* return_address = __builtin_return_address(0);
#endif
  Thread* return_value = GetThreadForNativeCallback(
      callback_id, reinterpret_cast<uword>(return_address));
  TRACE_RUNTIME_CALL("GetThreadForNativeCallback returning %p", return_value);
  return return_value;
}

// This is called by a native callback trampoline
// (see StubCodeCompiler::GenerateJITCallbackTrampolines). There is no need to
// check the return address because the trampoline will use the callback ID to
// look up the generated code. We still check that the callback ID is valid for
// this isolate.
extern "C" Thread* DLRT_GetThreadForNativeCallbackTrampoline(
    uword callback_id) {
  CHECK_STACK_ALIGNMENT;
  return GetThreadForNativeCallback(callback_id, 0);
}

// This is called directly by EnterHandleScopeInstr.
extern "C" ApiLocalScope* DLRT_EnterHandleScope(Thread* thread) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("EnterHandleScope %p", thread);
  thread->EnterApiScope();
  ApiLocalScope* return_value = thread->api_top_scope();
  TRACE_RUNTIME_CALL("EnterHandleScope returning %p", return_value);
  return return_value;
}
DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    EnterHandleScope,
    1,
    false /* is_float */,
    reinterpret_cast<RuntimeFunction>(&DLRT_EnterHandleScope));

// This is called directly by ExitHandleScopeInstr.
extern "C" void DLRT_ExitHandleScope(Thread* thread) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("ExitHandleScope %p", thread);
  thread->ExitApiScope();
  TRACE_RUNTIME_CALL("ExitHandleScope %s", "done");
}
DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    ExitHandleScope,
    1,
    false /* is_float */,
    reinterpret_cast<RuntimeFunction>(&DLRT_ExitHandleScope));

// This is called directly by AllocateHandleInstr.
extern "C" LocalHandle* DLRT_AllocateHandle(ApiLocalScope* scope) {
  CHECK_STACK_ALIGNMENT;
  TRACE_RUNTIME_CALL("AllocateHandle %p", scope);
  LocalHandle* return_value = scope->local_handles()->AllocateHandle();
  TRACE_RUNTIME_CALL("AllocateHandle returning %p", return_value);
  return return_value;
}
DEFINE_RAW_LEAF_RUNTIME_ENTRY(
    AllocateHandle,
    1,
    false /* is_float */,
    reinterpret_cast<RuntimeFunction>(&DLRT_AllocateHandle));

}  // namespace dart