1// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#ifndef RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_BASE_H_
6#define RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_BASE_H_
7
8#if defined(DART_PRECOMPILED_RUNTIME)
9#error "AOT runtime should not use compiler sources (including header files)"
10#endif // defined(DART_PRECOMPILED_RUNTIME)
11
12#include "platform/assert.h"
13#include "platform/unaligned.h"
14#include "vm/allocation.h"
15#include "vm/compiler/assembler/object_pool_builder.h"
16#include "vm/compiler/runtime_api.h"
17#include "vm/globals.h"
18#include "vm/growable_array.h"
19#include "vm/hash_map.h"
20
21namespace dart {
22
23#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
24DECLARE_FLAG(bool, use_far_branches);
25#endif
26
27class MemoryRegion;
28
29namespace compiler {
30
31// Forward declarations.
32class Assembler;
33class AssemblerFixup;
34class AssemblerBuffer;
35
36class Label : public ZoneAllocated {
37 public:
38 Label() : position_(0), unresolved_(0) {
39#ifdef DEBUG
40 for (int i = 0; i < kMaxUnresolvedBranches; i++) {
41 unresolved_near_positions_[i] = -1;
42 }
43#endif // DEBUG
44 }
45
46 ~Label() {
47 // Assert if label is being destroyed with unresolved branches pending.
48 ASSERT(!IsLinked());
49 ASSERT(!HasNear());
50 }
51
52 // Returns the position for bound and linked labels. Cannot be used
53 // for unused labels.
54 intptr_t Position() const {
55 ASSERT(!IsUnused());
56 return IsBound() ? -position_ - kBias : position_ - kBias;
57 }
58
59 intptr_t LinkPosition() const {
60 ASSERT(IsLinked());
61 return position_ - kBias;
62 }
63
64 intptr_t NearPosition() {
65 ASSERT(HasNear());
66 return unresolved_near_positions_[--unresolved_];
67 }
68
69 bool IsBound() const { return position_ < 0; }
70 bool IsUnused() const { return position_ == 0 && unresolved_ == 0; }
71 bool IsLinked() const { return position_ > 0; }
72 bool HasNear() const { return unresolved_ != 0; }
73
74 private:
75#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
76 static const int kMaxUnresolvedBranches = 20;
77#else
78 static const int kMaxUnresolvedBranches = 1; // Unused on non-Intel.
79#endif
80 // Zero position_ means unused (neither bound nor linked to).
81 // Thus we offset actual positions by the given bias to prevent zero
82 // positions from occurring.
83 // Note: we use target::kWordSize as a bias because on ARM
84 // there are assertions that check that distance is aligned.
85 static constexpr int kBias = 4;
86
87 intptr_t position_;
88 intptr_t unresolved_;
89 intptr_t unresolved_near_positions_[kMaxUnresolvedBranches];
90
91 void Reinitialize() { position_ = 0; }
92
93 void BindTo(intptr_t position) {
94 ASSERT(!IsBound());
95 ASSERT(!HasNear());
96 position_ = -position - kBias;
97 ASSERT(IsBound());
98 }
99
100 void LinkTo(intptr_t position) {
101 ASSERT(!IsBound());
102 position_ = position + kBias;
103 ASSERT(IsLinked());
104 }
105
106 void NearLinkTo(intptr_t position) {
107 ASSERT(!IsBound());
108 ASSERT(unresolved_ < kMaxUnresolvedBranches);
109 unresolved_near_positions_[unresolved_++] = position;
110 }
111
112 friend class Assembler;
113 DISALLOW_COPY_AND_ASSIGN(Label);
114};
115
116// External labels keep a function pointer to allow them
117// to be called from code generated by the assembler.
118class ExternalLabel : public ValueObject {
119 public:
120 explicit ExternalLabel(uword address) : address_(address) {}
121
122 bool is_resolved() const { return address_ != 0; }
123 uword address() const {
124 ASSERT(is_resolved());
125 return address_;
126 }
127
128 private:
129 const uword address_;
130};
131
132// Assembler fixups are positions in generated code that hold relocation
133// information that needs to be processed before finalizing the code
134// into executable memory.
135class AssemblerFixup : public ZoneAllocated {
136 public:
137 virtual void Process(const MemoryRegion& region, intptr_t position) = 0;
138
139 virtual bool IsPointerOffset() const = 0;
140
141 // It would be ideal if the destructor method could be made private,
142 // but the g++ compiler complains when this is subclassed.
143 virtual ~AssemblerFixup() { UNREACHABLE(); }
144
145 private:
146 AssemblerFixup* previous_;
147 intptr_t position_;
148
149 AssemblerFixup* previous() const { return previous_; }
150 void set_previous(AssemblerFixup* previous) { previous_ = previous; }
151
152 intptr_t position() const { return position_; }
153 void set_position(intptr_t position) { position_ = position; }
154
155 friend class AssemblerBuffer;
156};
157
158// Assembler buffers are used to emit binary code. They grow on demand.
// Assembler buffers are used to emit binary code. They grow on demand.
class AssemblerBuffer : public ValueObject {
 public:
  AssemblerBuffer();
  ~AssemblerBuffer();

  // Basic support for emitting, loading, and storing.
  // Appends |value| at the cursor and advances it by sizeof(T). The caller
  // must have ensured capacity via EnsureCapacity (checked in DEBUG mode).
  template <typename T>
  void Emit(T value) {
    ASSERT(HasEnsuredCapacity());
#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
    // Variable-length instructions in ia32/x64 have unaligned immediates.
    StoreUnaligned(reinterpret_cast<T*>(cursor_), value);
#else
    // Other architecture have aligned, fixed-length instructions.
    *reinterpret_cast<T*>(cursor_) = value;
#endif
    cursor_ += sizeof(T);
  }

  // Un-emits the last sizeof(T) bytes by rewinding the cursor; the bytes
  // themselves are left in place but become dead.
  template <typename T>
  void Remit() {
    ASSERT(Size() >= static_cast<intptr_t>(sizeof(T)));
    cursor_ -= sizeof(T);
  }

  // Return address to code at |position| bytes.
  uword Address(intptr_t position) { return contents_ + position; }

  // Reads a value of type T previously emitted at |position|.
  template <typename T>
  T Load(intptr_t position) {
    ASSERT(position >= 0 &&
           position <= (Size() - static_cast<intptr_t>(sizeof(T))));
#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
    // Variable-length instructions in ia32/x64 have unaligned immediates.
    return LoadUnaligned(reinterpret_cast<T*>(contents_ + position));
#else
    // Other architecture have aligned, fixed-length instructions.
    return *reinterpret_cast<T*>(contents_ + position);
#endif
  }

  // Overwrites the value of type T at |position| without moving the cursor
  // (used to patch previously emitted instructions).
  template <typename T>
  void Store(intptr_t position, T value) {
    ASSERT(position >= 0 &&
           position <= (Size() - static_cast<intptr_t>(sizeof(T))));
#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
    // Variable-length instructions in ia32/x64 have unaligned immediates.
    StoreUnaligned(reinterpret_cast<T*>(contents_ + position), value);
#else
    // Other architecture have aligned, fixed-length instructions.
    *reinterpret_cast<T*>(contents_ + position) = value;
#endif
  }

  // Offsets of embedded object pointers. Only valid after the fixups have
  // been processed (i.e. after FinalizeInstructions).
  const ZoneGrowableArray<intptr_t>& pointer_offsets() const {
#if defined(DEBUG)
    ASSERT(fixups_processed_);
#endif
    return *pointer_offsets_;
  }

#if defined(TARGET_ARCH_IA32)
  // Emit an object pointer directly in the code.
  void EmitObject(const Object& object);
#endif

  // Emit a fixup at the current location.
  // Fixups form a chain: each new fixup links back to the previous one.
  void EmitFixup(AssemblerFixup* fixup) {
    fixup->set_previous(fixup_);
    fixup->set_position(Size());
    fixup_ = fixup;
  }

  // Count the fixups that produce a pointer offset, without processing
  // the fixups.
  intptr_t CountPointerOffsets() const;

  // Get the size of the emitted code.
  intptr_t Size() const { return cursor_ - contents_; }
  uword contents() const { return contents_; }

  // Copy the assembled instructions into the specified memory block
  // and apply all fixups.
  void FinalizeInstructions(const MemoryRegion& region);

  // To emit an instruction to the assembler buffer, the EnsureCapacity helper
  // must be used to guarantee that the underlying data area is big enough to
  // hold the emitted instruction. Usage:
  //
  //     AssemblerBuffer buffer;
  //     AssemblerBuffer::EnsureCapacity ensured(&buffer);
  //     ... emit bytes for single instruction ...

#if defined(DEBUG)
  // DEBUG version: verifies (in its destructor, defined elsewhere) that the
  // emitted instruction did not overrun the reserved gap.
  class EnsureCapacity : public ValueObject {
   public:
    explicit EnsureCapacity(AssemblerBuffer* buffer);
    ~EnsureCapacity();

   private:
    AssemblerBuffer* buffer_;
    intptr_t gap_;

    intptr_t ComputeGap() { return buffer_->Capacity() - buffer_->Size(); }
  };

  bool has_ensured_capacity_;
  bool HasEnsuredCapacity() const { return has_ensured_capacity_; }
#else
  // Release version: just grows the buffer when the cursor reaches the limit.
  class EnsureCapacity : public ValueObject {
   public:
    explicit EnsureCapacity(AssemblerBuffer* buffer) {
      if (buffer->cursor() >= buffer->limit()) buffer->ExtendCapacity();
    }
  };

  // When building the C++ tests, assertion code is enabled. To allow
  // asserting that the user of the assembler buffer has ensured the
  // capacity needed for emitting, we add a dummy method in non-debug mode.
  bool HasEnsuredCapacity() const { return true; }
#endif

  // Returns the position in the instruction stream.
  intptr_t GetPosition() const { return cursor_ - contents_; }

  // Discards all emitted code by resetting the cursor to the start.
  void Reset() { cursor_ = contents_; }

 private:
  // The limit is set to kMinimumGap bytes before the end of the data area.
  // This leaves enough space for the longest possible instruction and allows
  // for a single, fast space check per instruction.
  static const intptr_t kMinimumGap = 32;

  uword contents_;  // Start of the data area.
  uword cursor_;    // Next byte to emit.
  uword limit_;     // contents_ + capacity - kMinimumGap.
  AssemblerFixup* fixup_;  // Head of the fixup chain.
  ZoneGrowableArray<intptr_t>* pointer_offsets_;
#if defined(DEBUG)
  bool fixups_processed_;
#endif

  uword cursor() const { return cursor_; }
  uword limit() const { return limit_; }
  // Full capacity of the data area, including the reserved minimum gap.
  intptr_t Capacity() const {
    ASSERT(limit_ >= contents_);
    return (limit_ - contents_) + kMinimumGap;
  }

  // Process the fixup chain.
  void ProcessFixups(const MemoryRegion& region);

  // Compute the limit based on the data area and the capacity. See
  // description of kMinimumGap for the reasoning behind the value.
  static uword ComputeLimit(uword data, intptr_t capacity) {
    return data + capacity - kMinimumGap;
  }

  void ExtendCapacity();

  friend class AssemblerFixup;
};
321
// Whether a call sequence should restore the caller's pool pointer (PP)
// after the call or keep the callee's value.
enum RestorePP { kRestoreCallerPP, kKeepCalleePP };
323
// Base class for the architecture-specific assemblers. Owns the code buffer,
// the list of code comments, and bookkeeping for the prologue and entry-point
// offsets.
class AssemblerBase : public StackResource {
 public:
  // |object_pool_builder| may be nullptr; see HasObjectPoolBuilder().
  explicit AssemblerBase(ObjectPoolBuilder* object_pool_builder)
      : StackResource(ThreadState::Current()),
        prologue_offset_(-1),
        has_monomorphic_entry_(false),
        object_pool_builder_(object_pool_builder) {}
  virtual ~AssemblerBase();

  // Number of bytes emitted so far.
  intptr_t CodeSize() const { return buffer_.Size(); }

  // Address of the emitted code at |offset| bytes from the start.
  uword CodeAddress(intptr_t offset) { return buffer_.Address(offset); }

  bool HasObjectPoolBuilder() const { return object_pool_builder_ != nullptr; }
  // Must only be called when HasObjectPoolBuilder() is true.
  ObjectPoolBuilder& object_pool_builder() { return *object_pool_builder_; }

  // Offset of the prologue, or -1 if not (yet) recorded.
  intptr_t prologue_offset() const { return prologue_offset_; }
  bool has_monomorphic_entry() const { return has_monomorphic_entry_; }

  // Records a printf-style comment at the current code position.
  void Comment(const char* format, ...) PRINTF_ATTRIBUTE(2, 3);
  static bool EmittingComments();

  // Emits an architecture-specific breakpoint instruction.
  virtual void Breakpoint() = 0;

  intptr_t InsertAlignedRelocation(BSS::Relocation reloc);

  void Unimplemented(const char* message);
  void Untested(const char* message);
  void Unreachable(const char* message);
  void Stop(const char* message);

  // Copies the assembled instructions into |region| and applies all fixups.
  void FinalizeInstructions(const MemoryRegion& region) {
    buffer_.FinalizeInstructions(region);
  }

  // Count the fixups that produce a pointer offset, without processing
  // the fixups.
  intptr_t CountPointerOffsets() const { return buffer_.CountPointerOffsets(); }

  // Only valid after FinalizeInstructions; see AssemblerBuffer.
  const ZoneGrowableArray<intptr_t>& GetPointerOffsets() const {
    return buffer_.pointer_offsets();
  }

  // A comment attached to a particular pc offset in the emitted code.
  class CodeComment : public ZoneAllocated {
   public:
    CodeComment(intptr_t pc_offset, const String& comment)
        : pc_offset_(pc_offset), comment_(comment) {}

    intptr_t pc_offset() const { return pc_offset_; }
    const String& comment() const { return comment_; }

   private:
    intptr_t pc_offset_;
    const String& comment_;

    DISALLOW_COPY_AND_ASSIGN(CodeComment);
  };

  const GrowableArray<CodeComment*>& comments() const { return comments_; }

  // Records the current code size as the unchecked entry point. May be
  // called at most once per assembler (asserted below).
  void BindUncheckedEntryPoint() {
    ASSERT(unchecked_entry_offset_ == 0);
    unchecked_entry_offset_ = CodeSize();
  }

  // Returns the offset (from the very beginning of the instructions) to the
  // unchecked entry point (incl. prologue/frame setup, etc.).
  intptr_t UncheckedEntryOffset() const { return unchecked_entry_offset_; }

 protected:
  AssemblerBuffer buffer_;  // Contains position independent code.
  int32_t prologue_offset_;
  bool has_monomorphic_entry_;

  // 0 until BindUncheckedEntryPoint() is called.
  intptr_t unchecked_entry_offset_ = 0;

 private:
  GrowableArray<CodeComment*> comments_;
  ObjectPoolBuilder* object_pool_builder_;
};
404
405} // namespace compiler
406
407} // namespace dart
408
409#endif // RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_BASE_H_
410