1 | // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #ifndef RUNTIME_VM_COMPILER_STUB_CODE_COMPILER_H_ |
6 | #define RUNTIME_VM_COMPILER_STUB_CODE_COMPILER_H_ |
7 | |
8 | #if defined(DART_PRECOMPILED_RUNTIME) |
9 | #error "AOT runtime should not use compiler sources (including header files)" |
10 | #endif // defined(DART_PRECOMPILED_RUNTIME) |
11 | |
12 | #include "vm/allocation.h" |
13 | #include "vm/compiler/runtime_api.h" |
14 | #include "vm/constants.h" |
15 | #include "vm/growable_array.h" |
16 | #include "vm/stub_code_list.h" |
17 | #include "vm/tagged_pointer.h" |
18 | |
19 | namespace dart { |
20 | |
21 | // Forward declarations. |
22 | class Code; |
23 | |
24 | namespace compiler { |
25 | |
26 | // Forward declarations. |
27 | class Assembler; |
28 | |
// Represents an unresolved PC-relative Call/TailCall: a call site recorded at
// [offset] in a stub's instructions whose displacement to [target] cannot be
// computed until the target code's final address is known.
class UnresolvedPcRelativeCall : public ZoneAllocated {
 public:
  UnresolvedPcRelativeCall(intptr_t offset,
                           const dart::Code& target,
                           bool is_tail_call)
      : offset_(offset), target_(target), is_tail_call_(is_tail_call) {}

  // Offset of the call site within the stub being assembled.
  intptr_t offset() const { return offset_; }
  // The code object this call should eventually target.
  const dart::Code& target() const { return target_; }
  // True for a TailCall site, false for a regular Call site.
  bool is_tail_call() const { return is_tail_call_; }

 private:
  const intptr_t offset_;
  // Held by reference: the referenced Code handle must outlive this object.
  const dart::Code& target_;
  const bool is_tail_call_;
};
46 | |
// The set of PC-relative call sites collected while assembling a single stub;
// consumed by StubCodeCompiler::BuildStaticCallsTable below.
using UnresolvedPcRelativeCalls = GrowableArray<UnresolvedPcRelativeCall*>;
48 | |
49 | class StubCodeCompiler : public AllStatic { |
50 | public: |
51 | #if !defined(TARGET_ARCH_IA32) |
52 | static void ( |
53 | Assembler* assembler, |
54 | const Object& closure_allocation_stub, |
55 | const Object& context_allocation_stub); |
56 | #endif |
57 | |
58 | static ArrayPtr BuildStaticCallsTable( |
59 | Zone* zone, |
60 | compiler::UnresolvedPcRelativeCalls* unresolved_calls); |
61 | |
62 | #define STUB_CODE_GENERATE(name) \ |
63 | static void Generate##name##Stub(Assembler* assembler); |
64 | VM_STUB_CODE_LIST(STUB_CODE_GENERATE) |
65 | #undef STUB_CODE_GENERATE |
66 | |
67 | static void GenerateAllocationStubForClass( |
68 | Assembler* assembler, |
69 | UnresolvedPcRelativeCalls* unresolved_calls, |
70 | const Class& cls, |
71 | const dart::Code& allocate_object, |
72 | const dart::Code& allocat_object_parametrized); |
73 | |
74 | enum Optimized { |
75 | kUnoptimized, |
76 | kOptimized, |
77 | }; |
78 | enum CallType { |
79 | kInstanceCall, |
80 | kStaticCall, |
81 | }; |
82 | enum Exactness { |
83 | kCheckExactness, |
84 | kIgnoreExactness, |
85 | }; |
86 | static void GenerateNArgsCheckInlineCacheStub( |
87 | Assembler* assembler, |
88 | intptr_t num_args, |
89 | const RuntimeEntry& handle_ic_miss, |
90 | Token::Kind kind, |
91 | Optimized optimized, |
92 | CallType type, |
93 | Exactness exactness); |
94 | static void GenerateNArgsCheckInlineCacheStubForEntryKind( |
95 | Assembler* assembler, |
96 | intptr_t num_args, |
97 | const RuntimeEntry& handle_ic_miss, |
98 | Token::Kind kind, |
99 | Optimized optimized, |
100 | CallType type, |
101 | Exactness exactness, |
102 | CodeEntryKind entry_kind); |
103 | static void GenerateUsageCounterIncrement(Assembler* assembler, |
104 | Register temp_reg); |
105 | static void GenerateOptimizedUsageCounterIncrement(Assembler* assembler); |
106 | |
107 | #if defined(TARGET_ARCH_X64) |
108 | static constexpr intptr_t kNativeCallbackTrampolineSize = 10; |
109 | static constexpr intptr_t kNativeCallbackSharedStubSize = 217; |
110 | static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2; |
111 | #elif defined(TARGET_ARCH_IA32) |
112 | static constexpr intptr_t kNativeCallbackTrampolineSize = 10; |
113 | static constexpr intptr_t kNativeCallbackSharedStubSize = 90; |
114 | static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2; |
115 | #elif defined(TARGET_ARCH_ARM) |
116 | static constexpr intptr_t kNativeCallbackTrampolineSize = 12; |
117 | static constexpr intptr_t kNativeCallbackSharedStubSize = 140; |
118 | static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 4; |
119 | #elif defined(TARGET_ARCH_ARM64) |
120 | static constexpr intptr_t kNativeCallbackTrampolineSize = 12; |
121 | static constexpr intptr_t kNativeCallbackSharedStubSize = 268; |
122 | static constexpr intptr_t kNativeCallbackTrampolineStackDelta = 2; |
123 | #endif |
124 | |
125 | static void GenerateJITCallbackTrampolines(Assembler* assembler, |
126 | intptr_t next_callback_id); |
127 | |
128 | // Calculates the offset (in words) from FP to the provided [cpu_register]. |
129 | // |
130 | // Assumes |
131 | // * all [kDartAvailableCpuRegs] followed by saved-PC, saved-FP were |
132 | // pushed on the stack |
133 | // * [cpu_register] is in [kDartAvailableCpuRegs] |
134 | // |
135 | // The intended use of this function is to find registers on the stack which |
136 | // were spilled in the |
137 | // `StubCode::*<stub-name>Shared{With,Without}FpuRegsStub()` |
138 | static intptr_t WordOffsetFromFpToCpuRegister(Register cpu_register); |
139 | |
140 | private: |
141 | // Common function for generating InitLateInstanceField and |
142 | // InitLateFinalInstanceField stubs. |
143 | static void GenerateInitLateInstanceFieldStub(Assembler* assembler, |
144 | bool is_final); |
145 | }; |
146 | |
147 | } // namespace compiler |
148 | |
// Distinguishes the deoptimization entry points: lazily from a return or a
// throw, or eagerly at the deopt point itself.
enum DeoptStubKind { kLazyDeoptFromReturn, kLazyDeoptFromThrow, kEagerDeopt };

// Zap value used to indicate unused CODE_REG in deopt.
static const uword kZapCodeReg = 0xf1f1f1f1;

// Zap value used to indicate unused return address in deopt.
static const uword kZapReturnAddress = 0xe1e1e1e1;
156 | |
157 | } // namespace dart |
158 | |
159 | #endif // RUNTIME_VM_COMPILER_STUB_CODE_COMPILER_H_ |
160 | |