// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/stub_code.h"

#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/clustered_snapshot.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/flags.h"
#include "vm/heap/safepoint.h"
#include "vm/interpreter.h"
#include "vm/object_store.h"
#include "vm/snapshot.h"
#include "vm/virtual_memory.h"
#include "vm/visitor.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/aot/precompiler.h"
#include "vm/compiler/assembler/assembler.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

DEFINE_FLAG(bool, disassemble_stubs, false, "Disassemble generated stubs.");
DECLARE_FLAG(bool, precompiled_mode);

DECLARE_FLAG(bool, enable_interpreter);

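// The stub table below is populated from the VM_STUB_CODE_LIST X-macro. As a
// sketch (assuming a hypothetical list entry V(Foo)), in JIT mode the entry
// expands to
//   {nullptr, "Foo", compiler::StubCodeCompiler::GenerateFooStub},
// while in the precompiled runtime it carries only the name,
//   {nullptr, "Foo"},
// since the stub code itself is loaded from the snapshot.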
StubCode::StubCodeEntry StubCode::entries_[kNumStubEntries] = {
#if defined(DART_PRECOMPILED_RUNTIME)
#define STUB_CODE_DECLARE(name) {nullptr, #name},
#else
#define STUB_CODE_DECLARE(name) \
  {nullptr, #name, compiler::StubCodeCompiler::Generate##name##Stub},
#endif
    VM_STUB_CODE_LIST(STUB_CODE_DECLARE)
#undef STUB_CODE_DECLARE
};

#if defined(DART_PRECOMPILED_RUNTIME)
void StubCode::Init() {
  // Stubs will be loaded from the snapshot.
  UNREACHABLE();
}

#else

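// JIT mode: generate all stubs eagerly. Every stub is assembled against a
// single ObjectPoolBuilder so that one shared ObjectPool can be built
// afterwards and attached to each stub in a second pass.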
void StubCode::Init() {
  compiler::ObjectPoolBuilder object_pool_builder;

  // Generate all the stubs.
  for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
    entries_[i].code = Code::ReadOnlyHandle();
    *(entries_[i].code) =
        Generate(entries_[i].name, &object_pool_builder, entries_[i].generator);
  }

  const ObjectPool& object_pool =
      ObjectPool::Handle(ObjectPool::NewFromBuilder(object_pool_builder));

  for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
    entries_[i].code->set_object_pool(object_pool.raw());
  }
}
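// Assembles a single stub and finalizes it without attaching an object pool;
// Init() attaches the shared pool to all stubs once generation is complete.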
CodePtr StubCode::Generate(
    const char* name,
    compiler::ObjectPoolBuilder* object_pool_builder,
    void (*GenerateStub)(compiler::Assembler* assembler)) {
  compiler::Assembler assembler(object_pool_builder);
  GenerateStub(&assembler);
  const Code& code = Code::Handle(Code::FinalizeCodeAndNotify(
      name, nullptr, &assembler, Code::PoolAttachment::kNotAttachPool,
      /*optimized=*/false));
#ifndef PRODUCT
  if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
    Disassembler::DisassembleStub(name, code);
  }
#endif  // !PRODUCT
  return code.raw();
}
#endif  // defined(DART_PRECOMPILED_RUNTIME)

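// Clears the cached Code pointers so that HasBeenInitialized() returns false
// again; the handles themselves are not freed here.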
void StubCode::Cleanup() {
  for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
    entries_[i].code = nullptr;
  }
}

bool StubCode::HasBeenInitialized() {
  // Use the AsynchronousGapMarker stub as a canary.
  return entries_[kAsynchronousGapMarkerIndex].code != nullptr;
}

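// Returns whether [pc] lies inside one of the Dart invocation stubs (or, for
// an interpreted frame, at the interpreter's entry-frame marker). This is
// used when walking the stack to recognize the native-to-Dart entry frame.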
bool StubCode::InInvocationStub(uword pc, bool is_interpreted_frame) {
  ASSERT(HasBeenInitialized());
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_enable_interpreter) {
    if (is_interpreted_frame) {
      // Recognize the special marker set up by the interpreter in the entry
      // frame.
      return Interpreter::IsEntryFrameMarker(
          reinterpret_cast<const KBCInstr*>(pc));
    }
    {
      uword entry = StubCode::InvokeDartCodeFromBytecode().EntryPoint();
      uword size = StubCode::InvokeDartCodeFromBytecodeSize();
      if ((pc >= entry) && (pc < (entry + size))) {
        return true;
      }
    }
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  uword entry = StubCode::InvokeDartCode().EntryPoint();
  uword size = StubCode::InvokeDartCodeSize();
  return (pc >= entry) && (pc < (entry + size));
}

bool StubCode::InJumpToFrameStub(uword pc) {
  ASSERT(HasBeenInitialized());
  uword entry = StubCode::JumpToFrame().EntryPoint();
  uword size = StubCode::JumpToFrameSize();
  return (pc >= entry) && (pc < (entry + size));
}

#if !defined(DART_PRECOMPILED_RUNTIME)
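// Packs the PC-relative calls left unresolved during assembly into a
// static-calls table: each call contributes a Smi encoding (call kind, entry
// point, offset) plus its target, using Code::kSCallTableEntryLength slots
// per entry. Returns Array::null() when there is nothing to record.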
ArrayPtr compiler::StubCodeCompiler::BuildStaticCallsTable(
    Zone* zone,
    compiler::UnresolvedPcRelativeCalls* unresolved_calls) {
  if (unresolved_calls->length() == 0) {
    return Array::null();
  }
  const intptr_t array_length =
      unresolved_calls->length() * Code::kSCallTableEntryLength;
  const auto& static_calls_table =
      Array::Handle(zone, Array::New(array_length, Heap::kOld));
  StaticCallsTable entries(static_calls_table);
  auto& kind_type_and_offset = Smi::Handle(zone);
  for (intptr_t i = 0; i < unresolved_calls->length(); i++) {
    auto& unresolved_call = (*unresolved_calls)[i];
    auto call_kind = unresolved_call->is_tail_call() ? Code::kPcRelativeTailCall
                                                     : Code::kPcRelativeCall;
    kind_type_and_offset =
        Smi::New(Code::KindField::encode(call_kind) |
                 Code::EntryPointField::encode(Code::kDefaultEntry) |
                 Code::OffsetField::encode(unresolved_call->offset()));
    auto view = entries[i];
    view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
    view.Set<Code::kSCallTableCodeOrTypeTarget>(unresolved_call->target());
  }
  return static_calls_table.raw();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

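// Returns the allocation stub for [cls], generating and installing it lazily
// under stopped mutators. A few common classes (Array, Context,
// UnhandledException) are served by shared stubs from the object store
// instead of per-class stubs.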
CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
  Thread* thread = Thread::Current();
  auto object_store = thread->isolate()->object_store();
  Zone* zone = thread->zone();
  const Error& error =
      Error::Handle(zone, cls.EnsureIsAllocateFinalized(thread));
  ASSERT(error.IsNull());
  if (cls.id() == kArrayCid) {
    return object_store->allocate_array_stub();
  } else if (cls.id() == kContextCid) {
    return object_store->allocate_context_stub();
  } else if (cls.id() == kUnhandledExceptionCid) {
    return object_store->allocate_unhandled_exception_stub();
  }
  Code& stub = Code::Handle(zone, cls.allocation_stub());
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (stub.IsNull()) {
    compiler::ObjectPoolBuilder object_pool_builder;
    Precompiler* precompiler = Precompiler::Instance();

    compiler::ObjectPoolBuilder* wrapper =
        FLAG_use_bare_instructions && precompiler != nullptr
            ? precompiler->global_object_pool_builder()
            : &object_pool_builder;

    const auto pool_attachment =
        FLAG_precompiled_mode && FLAG_use_bare_instructions
            ? Code::PoolAttachment::kNotAttachPool
            : Code::PoolAttachment::kAttachPool;

    auto& allocate_object_stub = Code::ZoneHandle(zone);
    auto& allocate_object_parametrized_stub = Code::ZoneHandle(zone);
    if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
      allocate_object_stub = object_store->allocate_object_stub();
      allocate_object_parametrized_stub =
          object_store->allocate_object_parametrized_stub();
    }

    compiler::Assembler assembler(wrapper);
    compiler::UnresolvedPcRelativeCalls unresolved_calls;
    const char* name = cls.ToCString();
    compiler::StubCodeCompiler::GenerateAllocationStubForClass(
        &assembler, &unresolved_calls, cls, allocate_object_stub,
        allocate_object_parametrized_stub);

    const auto& static_calls_table =
        Array::Handle(zone, compiler::StubCodeCompiler::BuildStaticCallsTable(
                                zone, &unresolved_calls));

    auto mutator_fun = [&]() {
      stub = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
                                /*optimized=*/false,
                                /*stats=*/nullptr);
      // Check that a background compilation thread has not already installed
      // the stub.
      if (cls.allocation_stub() == Code::null()) {
        stub.set_owner(cls);
        if (!static_calls_table.IsNull()) {
          stub.set_static_calls_target_table(static_calls_table);
        }
        cls.set_allocation_stub(stub);
      }
    };
    auto bg_compiler_fun = [&]() {
      ASSERT(Thread::Current()->IsAtSafepoint());
      stub = cls.allocation_stub();
      // Check if the stub was already generated.
      if (!stub.IsNull()) {
        return;
      }
      stub = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
                                /*optimized=*/false, /*stats=*/nullptr);
      stub.set_owner(cls);
      if (!static_calls_table.IsNull()) {
        stub.set_static_calls_target_table(static_calls_table);
      }
      cls.set_allocation_stub(stub);
    };

    // We have to ensure no mutators are running, because:
    //
    // a) We allocate an instructions object, which might cause us to
    //    temporarily flip page protections (RX -> RW -> RX).
    //
    // b) Only one thread may succeed in installing the allocation stub for
    //    the given class.
    //
    thread->isolate_group()->RunWithStoppedMutators(
        mutator_fun, bg_compiler_fun, /*use_force_growth=*/true);

    // We notify code observers after finalizing the code in order to be
    // outside a [SafepointOperationScope].
    Code::NotifyCodeObservers(name, stub, /*optimized=*/false);
#ifndef PRODUCT
    if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
      Disassembler::DisassembleStub(name, stub);
    }
#endif  // !PRODUCT
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return stub.raw();
}

#if !defined(TARGET_ARCH_IA32)
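// Builds the stub that creates method extractor closures (tear-offs). When
// [pool] is provided (e.g. a global object pool builder in AOT mode), the
// stub is assembled against it and no pool is attached here; otherwise a
// fresh object pool is built and attached to the stub.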
CodePtr StubCode::GetBuildMethodExtractorStub(
    compiler::ObjectPoolBuilder* pool) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  auto thread = Thread::Current();
  auto Z = thread->zone();
  auto object_store = thread->isolate()->object_store();

  const auto& closure_class =
      Class::ZoneHandle(Z, object_store->closure_class());
  const auto& closure_allocation_stub =
      Code::ZoneHandle(Z, StubCode::GetAllocationStubForClass(closure_class));
  const auto& context_allocation_stub = StubCode::AllocateContext();

  compiler::ObjectPoolBuilder object_pool_builder;
  compiler::Assembler assembler(pool != nullptr ? pool : &object_pool_builder);
  compiler::StubCodeCompiler::GenerateBuildMethodExtractorStub(
      &assembler, closure_allocation_stub, context_allocation_stub);

  const char* name = "BuildMethodExtractor";
  const Code& stub = Code::Handle(Code::FinalizeCodeAndNotify(
      name, nullptr, &assembler, Code::PoolAttachment::kNotAttachPool,
      /*optimized=*/false));

  if (pool == nullptr) {
    stub.set_object_pool(ObjectPool::NewFromBuilder(object_pool_builder));
  }

#ifndef PRODUCT
  if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
    Disassembler::DisassembleStub(name, stub);
  }
#endif  // !PRODUCT
  return stub.raw();
#else   // !defined(DART_PRECOMPILED_RUNTIME)
  UNIMPLEMENTED();
  return nullptr;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
#endif  // !defined(TARGET_ARCH_IA32)

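// Maps the number of checked arguments at an unoptimized static call site to
// the corresponding call stub, e.g. UnoptimizedStaticCallEntry(1) returns
// OneArgUnoptimizedStaticCall().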
const Code& StubCode::UnoptimizedStaticCallEntry(intptr_t num_args_tested) {
  switch (num_args_tested) {
    case 0:
      return ZeroArgsUnoptimizedStaticCall();
    case 1:
      return OneArgUnoptimizedStaticCall();
    case 2:
      return TwoArgsUnoptimizedStaticCall();
    default:
      UNIMPLEMENTED();
      return Code::Handle();
  }
}

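// Reverse lookup of a stub name from an entry point: first scans the shared
// stub table, then the isolate-group stubs held in the object store. For the
// latter, a list pair (foo, Foo) in OBJECT_STORE_STUB_CODE_LIST expands
// (sketch) to a check of object_store->foo() returning "_iso_stub_FooStub".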
const char* StubCode::NameOfStub(uword entry_point) {
  for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
    if ((entries_[i].code != nullptr) && !entries_[i].code->IsNull() &&
        (entries_[i].code->EntryPoint() == entry_point)) {
      return entries_[i].name;
    }
  }

  auto object_store = Isolate::Current()->object_store();

#define MATCH(member, name) \
  if (object_store->member() != Code::null() && \
      entry_point == Code::EntryPointOf(object_store->member())) { \
    return "_iso_stub_" #name "Stub"; \
  }
  OBJECT_STORE_STUB_CODE_LIST(MATCH)
  MATCH(build_method_extractor_code, BuildMethodExtractor)
#undef MATCH
  return nullptr;
}

}  // namespace dart