// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_IA32.
#if defined(TARGET_ARCH_IA32)

#include "platform/unaligned.h"
#include "vm/code_patcher.h"
#include "vm/cpu.h"
#include "vm/dart_entry.h"
#include "vm/instructions.h"
#include "vm/object.h"
#include "vm/raw_object.h"

namespace dart {

// The expected pattern of a Dart unoptimized call (static and instance):
//  mov ECX, ic-data
//  mov EDI, target-code-object
//  call target_address (stub)
//  <- return address
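//
// For reference, the byte-level layout this class assumes (offsets relative
// to start_; the call's ModRM/displacement bytes are illustrative, only the
// FF opcode is actually checked below):
//   start_ + 0  : B9 xx xx xx xx   mov ecx, <ic-data>
//   start_ + 5  : BF yy yy yy yy   mov edi, <target-code-object>
//   start_ + 10 : FF 57 zz         call [edi + <entry-point offset>]
//   start_ + 13 : <- return address (kPatternSize)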
class UnoptimizedCall : public ValueObject {
 public:
  explicit UnoptimizedCall(uword return_address)
      : start_(return_address - kPatternSize) {
    ASSERT(IsValid());
  }

  ObjectPtr ic_data() const {
    return *reinterpret_cast<ObjectPtr*>(start_ + 1);
  }

  static const int kMovInstructionSize = 5;
  static const int kCallInstructionSize = 3;
  static const int kPatternSize =
      2 * kMovInstructionSize + kCallInstructionSize;

 private:
  bool IsValid() {
    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
    return (code_bytes[0] == 0xB9) &&
           (code_bytes[2 * kMovInstructionSize] == 0xFF);
  }

  uword return_address() const { return start_ + kPatternSize; }

  uword call_address() const { return start_ + 2 * kMovInstructionSize; }

 protected:
  uword start_;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall);
};

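// A native call uses the same pattern as an unoptimized call above; the
// 32-bit immediate of the first mov (at start_ + 1) is assumed to hold the
// native function pointer instead of ic-data.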
class NativeCall : public UnoptimizedCall {
 public:
  explicit NativeCall(uword return_address) : UnoptimizedCall(return_address) {}

  NativeFunction native_function() const {
    return *reinterpret_cast<NativeFunction*>(start_ + 1);
  }

  void set_native_function(NativeFunction func) const {
    Thread::Current()->isolate_group()->RunWithStoppedMutators([&]() {
      WritableInstructionsScope writable(start_ + 1, sizeof(func));
      *reinterpret_cast<NativeFunction*>(start_ + 1) = func;
    });
  }

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(NativeCall);
};

// b9xxxxxxxx  mov ecx,<data>
// bfyyyyyyyy  mov edi,<target>
// ff5707      call [edi+<monomorphic-entry-offset>]
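//
// The <data> and <target> immediates are embedded in the instruction stream
// at start_ + 1 and start_ + 6, which are not word-aligned in general, hence
// the unaligned loads and stores below.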
class InstanceCall : public UnoptimizedCall {
 public:
  explicit InstanceCall(uword return_address)
      : UnoptimizedCall(return_address) {
#if defined(DEBUG)
    Object& test_data = Object::Handle(data());
    ASSERT(test_data.IsArray() || test_data.IsICData() ||
           test_data.IsMegamorphicCache());
    if (test_data.IsICData()) {
      ASSERT(ICData::Cast(test_data).NumArgsTested() > 0);
    }
#endif  // DEBUG
  }

  ObjectPtr data() const {
    return LoadUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1));
  }
  void set_data(const Object& data) const {
    StoreUnaligned(reinterpret_cast<ObjectPtr*>(start_ + 1), data.raw());
  }

  CodePtr target() const {
    return LoadUnaligned(reinterpret_cast<CodePtr*>(start_ + 6));
  }
  void set_target(const Code& target) const {
    StoreUnaligned(reinterpret_cast<CodePtr*>(start_ + 6), target.raw());
  }

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(InstanceCall);
};

class UnoptimizedStaticCall : public UnoptimizedCall {
 public:
  explicit UnoptimizedStaticCall(uword return_address)
      : UnoptimizedCall(return_address) {
#if defined(DEBUG)
    ICData& test_ic_data = ICData::Handle();
    test_ic_data ^= ic_data();
    ASSERT(test_ic_data.NumArgsTested() >= 0);
#endif  // DEBUG
  }

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall);
};

// The expected pattern of a Dart static call:
//  mov EDX, arguments_descriptor_array (optional in polymorphic calls)
//  mov EDI, Immediate(code_object)
//  call [EDI + entry_point_offset]
//  <- return address
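//
// For reference, the byte layout checked by IsValid() below (offsets relative
// to start_, which excludes the optional EDX move; the call's ModRM and
// displacement bytes are illustrative, only the FF opcode is checked):
//   start_ + 0 : BF yy yy yy yy   mov edi, <code-object>
//   start_ + 5 : FF 57 zz         call [edi + entry_point_offset]
//   start_ + 8 : <- return address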
class StaticCall : public ValueObject {
 public:
  explicit StaticCall(uword return_address)
      : start_(return_address - (kMovInstructionSize + kCallInstructionSize)) {
    ASSERT(IsValid());
  }

  bool IsValid() {
    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
    return (code_bytes[0] == 0xBF) && (code_bytes[5] == 0xFF);
  }

  CodePtr target() const {
    const uword imm = *reinterpret_cast<uword*>(start_ + 1);
    return static_cast<CodePtr>(imm);
  }

  void set_target(const Code& target) const {
    uword* target_addr = reinterpret_cast<uword*>(start_ + 1);
    uword imm = static_cast<uword>(target.raw());
    *target_addr = imm;
    CPU::FlushICache(start_ + 1, sizeof(imm));
  }

  static const int kMovInstructionSize = 5;
  static const int kCallInstructionSize = 3;

 private:
  uword return_address() const {
    return start_ + kMovInstructionSize + kCallInstructionSize;
  }

  uword call_address() const { return start_ + kMovInstructionSize; }

  uword start_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(StaticCall);
};

CodePtr CodePatcher::GetStaticCallTargetAt(uword return_address,
                                           const Code& code) {
  ASSERT(code.ContainsInstructionAt(return_address));
  StaticCall call(return_address);
  return call.target();
}

void CodePatcher::PatchStaticCallAt(uword return_address,
                                    const Code& code,
                                    const Code& new_target) {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const Instructions& instrs = Instructions::Handle(zone, code.instructions());
  thread->isolate_group()->RunWithStoppedMutators([&]() {
    WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
    ASSERT(code.ContainsInstructionAt(return_address));
    StaticCall call(return_address);
    call.set_target(new_target);
  });
}
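
// A minimal usage sketch (hypothetical caller; return_address must point just
// past a patched static call inside code):
//
//   CodePatcher::PatchStaticCallAt(return_address, code, new_target);
//   ASSERT(CodePatcher::GetStaticCallTargetAt(return_address, code) ==
//          new_target.raw());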

void CodePatcher::InsertDeoptimizationCallAt(uword start) {
  UNREACHABLE();
}

CodePtr CodePatcher::GetInstanceCallAt(uword return_address,
                                       const Code& caller_code,
                                       Object* data) {
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  InstanceCall call(return_address);
  if (data != NULL) {
    *data = call.data();
  }
  return call.target();
}

void CodePatcher::PatchInstanceCallAt(uword return_address,
                                      const Code& caller_code,
                                      const Object& data,
                                      const Code& target) {
  auto thread = Thread::Current();
  thread->isolate_group()->RunWithStoppedMutators([&]() {
    PatchInstanceCallAtWithMutatorsStopped(thread, return_address, caller_code,
                                           data, target);
  });
}

void CodePatcher::PatchInstanceCallAtWithMutatorsStopped(
    Thread* thread,
    uword return_address,
    const Code& caller_code,
    const Object& data,
    const Code& target) {
  auto zone = thread->zone();
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  const Instructions& instrs =
      Instructions::Handle(zone, caller_code.instructions());
  WritableInstructionsScope writable(instrs.PayloadStart(), instrs.Size());
  InstanceCall call(return_address);
  call.set_data(data);
  call.set_target(target);
}

FunctionPtr CodePatcher::GetUnoptimizedStaticCallAt(uword return_address,
                                                    const Code& caller_code,
                                                    ICData* ic_data_result) {
  ASSERT(caller_code.ContainsInstructionAt(return_address));
  UnoptimizedStaticCall static_call(return_address);
  ICData& ic_data = ICData::Handle();
  ic_data ^= static_call.ic_data();
  if (ic_data_result != NULL) {
    *ic_data_result = ic_data.raw();
  }
  return ic_data.GetTargetAt(0);
}

void CodePatcher::PatchSwitchableCallAt(uword return_address,
                                        const Code& caller_code,
                                        const Object& data,
                                        const Code& target) {
  // Switchable instance calls are only generated for precompilation.
  UNREACHABLE();
}

void CodePatcher::PatchSwitchableCallAtWithMutatorsStopped(
    Thread* thread,
    uword return_address,
    const Code& caller_code,
    const Object& data,
    const Code& target) {
  // Switchable instance calls are only generated for precompilation.
  UNREACHABLE();
}

CodePtr CodePatcher::GetSwitchableCallTargetAt(uword return_address,
                                               const Code& caller_code) {
  // Switchable instance calls are only generated for precompilation.
  UNREACHABLE();
  return Code::null();
}

ObjectPtr CodePatcher::GetSwitchableCallDataAt(uword return_address,
                                               const Code& caller_code) {
  // Switchable instance calls are only generated for precompilation.
  UNREACHABLE();
  return Object::null();
}

void CodePatcher::PatchNativeCallAt(uword return_address,
                                    const Code& caller_code,
                                    NativeFunction target,
                                    const Code& trampoline) {
  UNREACHABLE();
}

CodePtr CodePatcher::GetNativeCallAt(uword return_address,
                                     const Code& caller_code,
                                     NativeFunction* target) {
  UNREACHABLE();
  return NULL;
}

}  // namespace dart

#endif  // defined TARGET_ARCH_IA32