// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/ffi/native_location.h"

#include "vm/compiler/backend/il_printer.h"

namespace dart {

namespace compiler {

namespace ffi {

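// Whether a Location of the given kind can be converted into a
// NativeLocation via FromLocation below. Pair locations are not (yet)
// supported; see the TODO in the body.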
bool NativeLocation::LocationCanBeExpressed(Location loc, Representation rep) {
  switch (loc.kind()) {
    case Location::Kind::kRegister:
    case Location::Kind::kFpuRegister:
    case Location::Kind::kStackSlot:
    case Location::Kind::kDoubleStackSlot:
      return true;
    default:
      break;
  }
  if (loc.IsPairLocation()) {
    // TODO(36730): We could possibly consume a pair location as struct.
    return false;
  }
  return false;
}

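// Converts a Location with the given unboxed Representation into a
// zone-allocated NativeLocation; both the payload and container types are
// derived from the representation. A hypothetical use (names illustrative
// only):
//
//   const auto& native_loc =
//       NativeLocation::FromLocation(loc, kUnboxedInt64, zone);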
NativeLocation& NativeLocation::FromLocation(Location loc,
                                             Representation rep,
                                             Zone* zone) {
  // TODO(36730): We could possibly consume a pair location as struct.
  ASSERT(LocationCanBeExpressed(loc, rep));

  const NativeType& native_rep =
      NativeType::FromUnboxedRepresentation(rep, zone);

  switch (loc.kind()) {
    case Location::Kind::kRegister:
      return *new (zone)
          NativeRegistersLocation(native_rep, native_rep, loc.reg());
    case Location::Kind::kFpuRegister:
      return *new (zone)
          NativeFpuRegistersLocation(native_rep, native_rep, loc.fpu_reg());
    case Location::Kind::kStackSlot:
      return *new (zone)
          NativeStackLocation(native_rep, native_rep, loc.base_reg(),
                              loc.stack_index() * compiler::target::kWordSize);
    case Location::Kind::kDoubleStackSlot:
      return *new (zone)
          NativeStackLocation(native_rep, native_rep, loc.base_reg(),
                              loc.stack_index() * compiler::target::kWordSize);
    default:
      break;
  }

  UNREACHABLE();
}

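// Converts one half of a pair Location into a NativeLocation by splitting
// the pair's representation and converting the component at |index|.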
// TODO(36730): Remove when being able to consume as struct.
NativeLocation& NativeLocation::FromPairLocation(Location pair_loc,
                                                 Representation pair_rep,
                                                 intptr_t index,
                                                 Zone* zone) {
  ASSERT(pair_loc.IsPairLocation());
  ASSERT(index == 0 || index == 1);
  const Representation rep =
      NativeType::FromUnboxedRepresentation(pair_rep, zone)
          .Split(index, zone)
          .AsRepresentation();
  const Location loc = pair_loc.AsPairLocation()->At(index);
  return FromLocation(loc, rep, zone);
}

const NativeRegistersLocation& NativeLocation::AsRegisters() const {
  ASSERT(IsRegisters());
  return static_cast<const NativeRegistersLocation&>(*this);
}

const NativeFpuRegistersLocation& NativeLocation::AsFpuRegisters() const {
  ASSERT(IsFpuRegisters());
  return static_cast<const NativeFpuRegistersLocation&>(*this);
}

const NativeStackLocation& NativeLocation::AsStack() const {
  ASSERT(IsStack());
  return static_cast<const NativeStackLocation&>(*this);
}

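// Converts a one- or two-register native location back into a Location
// (or a pair of Locations).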
Location NativeRegistersLocation::AsLocation() const {
  ASSERT(IsExpressibleAsLocation());
  switch (num_regs()) {
    case 1:
      return Location::RegisterLocation(regs_->At(0));
    case 2:
      return Location::Pair(Location::RegisterLocation(regs_->At(0)),
                            Location::RegisterLocation(regs_->At(1)));
  }
  UNREACHABLE();
}

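// Converts a stack-based native location back into one (or a pair of)
// stack-slot Location(s), depending on the payload type and its size.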
Location NativeStackLocation::AsLocation() const {
  ASSERT(IsExpressibleAsLocation());
  if (payload_type().IsInt()) {
    const intptr_t size = payload_type().SizeInBytes();
    const intptr_t size_slots = size / compiler::target::kWordSize;
    switch (size_slots) {
      case 1:
        return Location::StackSlot(offset_in_words(), base_register_);
      case 2:
        return Location::Pair(
            Location::StackSlot(offset_in_words(), base_register_),
            Location::StackSlot(offset_in_words() + 1, base_register_));
    }
  } else {
    ASSERT(payload_type().IsFloat());
    if (payload_type().AsFundamental().representation() == kFloat) {
      return Location::StackSlot(offset_in_words(), base_register_);
    } else {
      ASSERT(payload_type().AsFundamental().representation() == kDouble);
      return Location::DoubleStackSlot(offset_in_words(), base_register_);
    }
  }
  UNREACHABLE();
}
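
// Returns the register at |index| as a new single-register location, with
// the payload and container types split accordingly.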
NativeRegistersLocation& NativeRegistersLocation::Split(intptr_t index,
                                                        Zone* zone) const {
  ASSERT(num_regs() == 2);
  return *new (zone) NativeRegistersLocation(
      payload_type().Split(index, zone), container_type().Split(index, zone),
      reg_at(index));
}

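// Returns the lower (index 0) or upper (index 1) half of this stack
// location, adjusting the byte offset by half the payload size.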
NativeStackLocation& NativeStackLocation::Split(intptr_t index,
                                                Zone* zone) const {
  ASSERT(index == 0 || index == 1);
  const intptr_t size = payload_type().SizeInBytes();

  return *new (zone) NativeStackLocation(
      payload_type().Split(index, zone), container_type().Split(index, zone),
      base_register_, offset_in_bytes_ + size / 2 * index);
}

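// Widens both the payload and the container type to at least 4 bytes.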
NativeLocation& NativeLocation::WidenTo4Bytes(Zone* zone) const {
  return WithOtherNativeType(payload_type().WidenTo4Bytes(zone),
                             container_type().WidenTo4Bytes(zone), zone);
}

#if defined(TARGET_ARCH_ARM)
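// On ARM, widens an S or D FPU register location to the overlapping Q
// register; non-FPU locations are returned unchanged.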
const NativeLocation& NativeLocation::WidenToQFpuRegister(Zone* zone) const {
  if (!IsFpuRegisters()) {
    return *this;
  }
  const auto& fpu_loc = AsFpuRegisters();
  switch (fpu_loc.fpu_reg_kind()) {
    case kQuadFpuReg:
      return *this;
    case kDoubleFpuReg: {
      return *new (zone) NativeFpuRegistersLocation(
          payload_type_, container_type_, QRegisterOf(fpu_loc.fpu_d_reg()));
    }
    case kSingleFpuReg: {
      return *new (zone) NativeFpuRegistersLocation(
          payload_type_, container_type_, QRegisterOf(fpu_loc.fpu_s_reg()));
    }
  }
  UNREACHABLE();
}
#endif  // defined(TARGET_ARCH_ARM)

bool NativeRegistersLocation::Equals(const NativeLocation& other) const {
  if (!other.IsRegisters()) {
    return false;
  }
  const auto& other_regs = other.AsRegisters();
  if (other_regs.num_regs() != num_regs()) {
    return false;
  }
  for (intptr_t i = 0; i < num_regs(); i++) {
    if (other_regs.reg_at(i) != reg_at(i)) {
      return false;
    }
  }
  return true;
}

bool NativeFpuRegistersLocation::Equals(const NativeLocation& other) const {
  if (!other.IsFpuRegisters()) {
    return false;
  }
  return other.AsFpuRegisters().fpu_reg_ == fpu_reg_;
}

bool NativeStackLocation::Equals(const NativeLocation& other) const {
  if (!other.IsStack()) {
    return false;
  }
  const auto& other_stack = other.AsStack();
  if (other_stack.base_register_ != base_register_) {
    return false;
  }
  return other_stack.offset_in_bytes_ == offset_in_bytes_;
}

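// Builds an assembler Address for a stack-based native location from its
// base register and byte offset.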
compiler::Address NativeLocationToStackSlotAddress(
    const NativeStackLocation& loc) {
  return compiler::Address(loc.base_register(), loc.offset_in_bytes());
}

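// Prints the container type, followed by the payload type in brackets when
// it differs from the container type.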
static void PrintRepresentations(BaseTextBuffer* f, const NativeLocation& loc) {
  f->AddString(" ");
  loc.container_type().PrintTo(f);
  if (!loc.container_type().Equals(loc.payload_type())) {
    f->AddString("[");
    loc.payload_type().PrintTo(f);
    f->AddString("]");
  }
}

void NativeLocation::PrintTo(BaseTextBuffer* f) const {
  f->AddString("I");
  PrintRepresentations(f, *this);
}

void NativeRegistersLocation::PrintTo(BaseTextBuffer* f) const {
  if (num_regs() == 1) {
    f->Printf("%s", RegisterNames::RegisterName(regs_->At(0)));
  } else {
    f->AddString("(");
    for (intptr_t i = 0; i < num_regs(); i++) {
      if (i != 0) f->Printf(", ");
      f->Printf("%s", RegisterNames::RegisterName(regs_->At(i)));
    }
    f->AddString(")");
  }
  PrintRepresentations(f, *this);
}

void NativeFpuRegistersLocation::PrintTo(BaseTextBuffer* f) const {
  switch (fpu_reg_kind()) {
    case kQuadFpuReg:
      f->Printf("%s", RegisterNames::FpuRegisterName(fpu_reg()));
      break;
#if defined(TARGET_ARCH_ARM)
    case kDoubleFpuReg:
      f->Printf("%s", RegisterNames::FpuDRegisterName(fpu_d_reg()));
      break;
    case kSingleFpuReg:
      f->Printf("%s", RegisterNames::FpuSRegisterName(fpu_s_reg()));
      break;
#endif  // defined(TARGET_ARCH_ARM)
    default:
      UNREACHABLE();
  }

  PrintRepresentations(f, *this);
}

void NativeStackLocation::PrintTo(BaseTextBuffer* f) const {
  f->Printf("S%+" Pd, offset_in_bytes_);
  PrintRepresentations(f, *this);
}

const char* NativeLocation::ToCString() const {
  char buffer[1024];
  BufferFormatter bf(buffer, 1024);
  PrintTo(&bf);
  return Thread::Current()->zone()->MakeCopyOfString(buffer);
}

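// Size in bytes of an FPU register of the given kind (Q = 16, D = 8, S = 4).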
intptr_t SizeFromFpuRegisterKind(enum FpuRegisterKind kind) {
  switch (kind) {
    case kQuadFpuReg:
      return 16;
    case kDoubleFpuReg:
      return 8;
    case kSingleFpuReg:
      return 4;
  }
  UNREACHABLE();
}
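
// Inverse of SizeFromFpuRegisterKind: maps a size in bytes to the FPU
// register kind of that width.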
enum FpuRegisterKind FpuRegisterKindFromSize(intptr_t size_in_bytes) {
  switch (size_in_bytes) {
    case 16:
      return kQuadFpuReg;
    case 8:
      return kDoubleFpuReg;
    case 4:
      return kSingleFpuReg;
  }
  UNREACHABLE();
}

#if defined(TARGET_ARCH_ARM)
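// Views this FPU register location as a D register; for Q and S registers
// this is the overlapping D register (ARM FPU registers alias each other).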
DRegister NativeFpuRegistersLocation::fpu_as_d_reg() const {
  switch (fpu_reg_kind_) {
    case kQuadFpuReg:
      return EvenDRegisterOf(fpu_reg());
    case kDoubleFpuReg:
      return fpu_d_reg();
    case kSingleFpuReg:
      return DRegisterOf(fpu_s_reg());
  }
  UNREACHABLE();
}

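// Views this FPU register location as an S register, analogous to
// fpu_as_d_reg above.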
SRegister NativeFpuRegistersLocation::fpu_as_s_reg() const {
  switch (fpu_reg_kind_) {
    case kQuadFpuReg:
      return EvenSRegisterOf(EvenDRegisterOf(fpu_reg()));
    case kDoubleFpuReg:
      return EvenSRegisterOf(fpu_d_reg());
    case kSingleFpuReg:
      return fpu_s_reg();
  }
  UNREACHABLE();
}

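// Whether this register occupies the lowest bits of the Q register that
// contains it (i.e. the even D register or the first S register).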
bool NativeFpuRegistersLocation::IsLowestBits() const {
  switch (fpu_reg_kind()) {
    case kQuadFpuReg:
      return true;
    case kDoubleFpuReg: {
      return fpu_d_reg() % 2 == 0;
    }
    case kSingleFpuReg: {
      return fpu_s_reg() % 4 == 0;
    }
  }
  UNREACHABLE();
}
#endif  // defined(TARGET_ARCH_ARM)

}  // namespace ffi

}  // namespace compiler

}  // namespace dart