// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#if defined(DART_PRECOMPILER)

#include "vm/compiler/aot/dispatch_table_generator.h"

#include <memory>

#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/dispatch_table.h"
#include "vm/stub_code.h"
#include "vm/thread.h"

#define Z zone_

namespace dart {
namespace compiler {

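// A half-open interval [begin, end) of class ids.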
class Interval {
 public:
  Interval() : begin_(-1), end_(-1) {}
  Interval(int32_t begin, int32_t end) : begin_(begin), end_(end) {
    ASSERT(end > begin);
  }

  int32_t begin() const { return begin_; }
  void set_begin(int32_t value) { begin_ = value; }

  int32_t end() const { return end_; }
  void set_end(int32_t value) { end_ = value; }

  int32_t length() const { return end_ - begin_; }

  Interval WithOffset(int32_t offset) const {
    return Interval(begin_ + offset, end_ + offset);
  }

  bool IsSame(const Interval other) const {
    return end() == other.end() && begin() == other.begin();
  }

  bool IsBefore(const Interval other) const { return end() <= other.begin(); }

  bool IsAfter(const Interval other) const { return begin() >= other.end(); }

  bool Overlap(const Interval other) const {
    return !IsBefore(other) && !IsAfter(other);
  }

  bool ContainsBeginOf(const Interval other) const {
    return begin() <= other.begin() && other.begin() <= end();
  }

  bool ContainsEndOf(const Interval other) const {
    return begin() <= other.end() && other.end() <= end();
  }

  bool Contains(const Interval other) const {
    return ContainsBeginOf(other) && ContainsEndOf(other);
  }

  void ExtendToIncludeInterval(const Interval& other) {
    if (other.begin() < begin_) begin_ = other.begin();
    if (other.end() > end_) end_ = other.end();
  }

 private:
  int32_t begin_;
  int32_t end_;
};

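// A range of class ids that, for some selector, all share the implementation
// [function] defined by the class [cid], which sits at inheritance depth
// [depth].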
class CidInterval {
 public:
  CidInterval(classid_t cid,
              int16_t depth,
              Interval range,
              const Function* function)
      : cid_(cid), depth_(depth), range_(range), function_(function) {}

  classid_t cid() const { return cid_; }
  int16_t depth() const { return depth_; }
  const Interval& range() const { return range_; }
  Interval& range() { return range_; }
  const Function* function() const { return function_; }

 private:
  classid_t cid_;
  int16_t depth_;
  Interval range_;
  const Function* function_;
};

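// One row of the dispatch table: the cid intervals for which a selector has
// a concrete implementation, together with the table offset the row is
// eventually allocated at.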
class SelectorRow {
 public:
  SelectorRow(Zone* zone, TableSelector* selector)
      : selector_(selector),
        class_ranges_(zone, 0),
        ranges_(zone, 0),
        code_(Code::Handle(zone)) {}

  TableSelector* selector() const { return selector_; }

  int32_t total_size() const { return total_size_; }

  const GrowableArray<Interval>& ranges() const { return ranges_; }

  const GrowableArray<CidInterval>& class_ranges() const {
    return class_ranges_;
  }

  void DefineSelectorImplementationForInterval(classid_t cid,
                                               int16_t depth,
                                               const Interval& range,
                                               const Function* function);
  bool Finalize();

  int32_t CallCount() const { return selector_->call_count; }

  bool IsAllocated() const {
    return selector_->offset != SelectorMap::kInvalidSelectorOffset;
  }

  void AllocateAt(int32_t offset) {
    ASSERT(!IsAllocated());
    selector_->offset = offset;
  }

  void FillTable(ClassTable* class_table, const Array& entries);

 private:
  TableSelector* selector_;
  int32_t total_size_ = 0;

  GrowableArray<CidInterval> class_ranges_;
  GrowableArray<Interval> ranges_;
  Code& code_;
};

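// Packs selector rows into a single table, tracking the unoccupied entries
// as a sorted list of free intervals and placing each row at the first
// offset where all of its ranges fall into free slots.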
class RowFitter {
 public:
  RowFitter() : first_slot_index_(0) { free_slots_.Add(Interval(0, INT_MAX)); }

  // Try to fit a row at the specified offset and return whether it was
  // successful. If successful, the entries taken up by the row are marked
  // internally as occupied. If unsuccessful, next_offset is set to the next
  // potential offset where the row might fit.
  bool TryFit(SelectorRow* row, int32_t offset, int32_t* next_offset);

  // If the row is not already allocated, try to fit it within the given range
  // of offsets and allocate it if successful.
  void FitAndAllocate(SelectorRow* row,
                      int32_t min_offset,
                      int32_t max_offset = INT32_MAX);

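  // The trailing free slot always extends to INT_MAX, so its begin is the
  // number of table entries in use.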
  int32_t TableSize() const { return free_slots_.Last().begin(); }

 private:
  intptr_t MoveForwardToCover(const Interval range, intptr_t slot_index);

  void UpdateFreeSlots(int32_t offset,
                       const GrowableArray<Interval>& ranges,
                       intptr_t slot_index);

  intptr_t FitInFreeSlot(const Interval range, intptr_t slot_index);

  GrowableArray<Interval> free_slots_;
  intptr_t first_slot_index_;
};

void SelectorRow::DefineSelectorImplementationForInterval(
    classid_t cid,
    int16_t depth,
    const Interval& range,
    const Function* function) {
  CidInterval cid_range(cid, depth, range, function);
  class_ranges_.Add(cid_range);
}

bool SelectorRow::Finalize() {
  if (class_ranges_.length() == 0) {
    return false;
  }

  // Make a list of [begin, end) ranges which are disjoint and cover all
  // areas that [class_ranges_] cover (i.e. there can be holes, but no
  // overlap).
  for (intptr_t i = 0; i < class_ranges_.length(); i++) {
    ranges_.Add(class_ranges_[i].range());
  }

  struct IntervalSorter {
    static int Compare(const Interval* a, const Interval* b) {
      if (a->begin() != b->begin()) {
        return a->begin() - b->begin();
      }
      return b->length() - a->length();
    }
  };

  ranges_.Sort(IntervalSorter::Compare);

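  // Compact the sorted list in place: drop ranges contained in an earlier
  // range and merge ranges that exactly abut one. For example, the sorted
  // ranges [1, 5), [2, 3), [5, 7), [9, 10) become [1, 7), [9, 10).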
  intptr_t current_index = 0;
  intptr_t write_index = 1;
  intptr_t read_index = 1;
  for (; read_index < ranges_.length(); read_index++) {
    Interval& current_range = ranges_[current_index];
    Interval& next_range = ranges_[read_index];
    if (current_range.Contains(next_range)) {
      // The next range is contained in the current one; drop it.
    } else if (current_range.end() == next_range.begin()) {
      // The next range directly abuts the current one; merge them and drop
      // the next range.
      current_range.ExtendToIncludeInterval(next_range);
    } else {
      // Keep the next range as its own entry.
      if (read_index != write_index) {
        ranges_[write_index] = ranges_[read_index];
      }
      current_index = write_index;
      write_index++;
    }
  }
  ranges_.TruncateTo(write_index);

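  // The surviving ranges must be sorted and strictly disjoint, with a gap
  // between consecutive ranges (exactly adjacent ones were merged above).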
  for (intptr_t i = 0; i < ranges_.length() - 1; i++) {
    const Interval& a = ranges_[i];
    const Interval& b = ranges_[i + 1];
    ASSERT(a.begin() < b.begin());
    ASSERT(a.end() < b.begin());
  }

  for (intptr_t i = 0; i < ranges_.length(); i++) {
    total_size_ += ranges_[i].length();
  }

  return true;
}

void SelectorRow::FillTable(ClassTable* class_table, const Array& entries) {
  // Define the entries in the table going through the class hierarchy
  // top-down, so that entries for more specific classes overwrite those for
  // more general ones.

  // Sort by depth.
  struct IntervalSorter {
    static int Compare(const CidInterval* a, const CidInterval* b) {
      ASSERT(a == b || a->depth() != b->depth() ||
             !a->range().Overlap(b->range()));
      return a->depth() - b->depth();
    }
  };
  class_ranges_.Sort(IntervalSorter::Compare);

  for (intptr_t i = 0; i < class_ranges_.length(); i++) {
    const CidInterval& cid_range = class_ranges_[i];
    const Interval& range = cid_range.range();
    const Function* function = cid_range.function();
    if (function != nullptr && function->HasCode()) {
      code_ = function->CurrentCode();
      for (classid_t cid = range.begin(); cid < range.end(); cid++) {
        entries.SetAt(selector()->offset + cid, code_);
      }
    }
  }
}

void RowFitter::FitAndAllocate(SelectorRow* row,
                               int32_t min_offset,
                               int32_t max_offset) {
  if (row->IsAllocated()) {
    return;
  }

  int32_t next_offset;

  int32_t offset = min_offset;
  while (offset <= max_offset && !TryFit(row, offset, &next_offset)) {
    offset = next_offset;
  }
  if (offset <= max_offset) {
    row->AllocateAt(offset);
  }
}

bool RowFitter::TryFit(SelectorRow* row, int32_t offset, int32_t* next_offset) {
  const GrowableArray<Interval>& ranges = row->ranges();

  Interval first_range = ranges[0].WithOffset(offset);
  if (first_slot_index_ > 0 &&
      free_slots_[first_slot_index_ - 1].end() >= first_range.end()) {
    // Trying lower offset than last time. Start over in free slots.
    first_slot_index_ = 0;
  }
  first_slot_index_ = MoveForwardToCover(first_range, first_slot_index_);
  intptr_t slot_index = first_slot_index_;

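  // Check that every range of the row, shifted by [offset], falls entirely
  // inside a free slot. On the first range that does not, report the next
  // offset at which that range would start at the beginning of its slot.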
  for (intptr_t index = 0; index < ranges.length(); index++) {
    Interval range = ranges[index].WithOffset(offset);
    slot_index = MoveForwardToCover(range, slot_index);
    ASSERT(slot_index < free_slots_.length());
    const Interval slot = free_slots_[slot_index];
    ASSERT(slot.end() >= range.end());
    if (slot.begin() > range.begin()) {
      *next_offset = offset + slot.begin() - range.begin();
      return false;
    }
  }

  UpdateFreeSlots(offset, ranges, first_slot_index_);
  return true;
}

intptr_t RowFitter::MoveForwardToCover(const Interval range,
                                       intptr_t slot_index) {
  while (free_slots_[slot_index].end() < range.end()) {
    slot_index++;
  }
  return slot_index;
}

void RowFitter::UpdateFreeSlots(int32_t offset,
                                const GrowableArray<Interval>& ranges,
                                intptr_t slot_index) {
  for (intptr_t i = 0; i < ranges.length(); i++) {
    ASSERT(slot_index < free_slots_.length());
    const Interval range = ranges[i].WithOffset(offset);

    ASSERT(!free_slots_[slot_index].IsAfter(range));
    slot_index = MoveForwardToCover(range, slot_index);

    // Assert that we have a valid slot.
    ASSERT(slot_index < free_slots_.length());
    ASSERT(free_slots_[slot_index].Contains(range));

    slot_index = FitInFreeSlot(range, slot_index);
  }

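  // Carving out the ranges must leave only non-empty free slots.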
  for (intptr_t i = 0; i < free_slots_.length(); i++) {
    ASSERT(free_slots_[i].begin() < free_slots_[i].end());
  }
}

intptr_t RowFitter::FitInFreeSlot(const Interval range, intptr_t slot_index) {
  const Interval& slot = free_slots_[slot_index];
  ASSERT(slot.Contains(range));
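  // Carve [range] out of the containing free slot: depending on where the
  // range sits, the slot is split in two, trimmed at one end, or removed
  // entirely. For example, carving [3, 5) out of [0, 10) leaves [0, 3) and
  // [5, 10).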
  if (slot.begin() < range.begin()) {
    Interval free_before = Interval(slot.begin(), range.begin());
    if (slot.end() > range.end()) {
      Interval free_after(range.end(), slot.end());
      free_slots_[slot_index] = free_before;
      free_slots_.InsertAt(slot_index + 1, free_after);
    } else {
      free_slots_[slot_index] = free_before;
      slot_index++;
    }
  } else if (slot.end() <= range.end()) {
    ASSERT(slot.IsSame(range));
    free_slots_.EraseAt(slot_index);
  } else {
    Interval free_after(range.end(), slot.end());
    free_slots_[slot_index] = free_after;
  }
  return slot_index;
}

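// Returns the selector id assigned by the kernel metadata: the getter
// selector id for getters, implicit getters and method extractors, and the
// method-or-setter selector id for everything else.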
int32_t SelectorMap::SelectorId(const Function& interface_target) const {
  kernel::ProcedureAttributesMetadata metadata;
  metadata = kernel::ProcedureAttributesOf(interface_target, Z);
  return interface_target.IsGetterFunction() ||
                 interface_target.IsImplicitGetterFunction() ||
                 interface_target.IsMethodExtractor()
             ? metadata.getter_selector_id
             : metadata.method_or_setter_selector_id;
}

const TableSelector* SelectorMap::GetSelector(
    const Function& interface_target) const {
  const int32_t sid = SelectorId(interface_target);
  if (sid == kInvalidSelectorId) return nullptr;
  const TableSelector* selector = &selectors_[sid];
  if (!selector->IsUsed()) return nullptr;
  if (selector->offset == kInvalidSelectorOffset) return nullptr;
  return selector;
}

void SelectorMap::AddSelector(int32_t call_count,
                              bool called_on_null,
                              bool torn_off) {
  const int32_t added_sid = selectors_.length();
  selectors_.Add(TableSelector(added_sid, call_count, kInvalidSelectorOffset,
                               called_on_null, torn_off));
}

void SelectorMap::SetSelectorProperties(int32_t sid,
                                        bool on_null_interface,
                                        bool requires_args_descriptor) {
  ASSERT(sid < selectors_.length());
  selectors_[sid].on_null_interface |= on_null_interface;
  selectors_[sid].requires_args_descriptor |= requires_args_descriptor;
}

DispatchTableGenerator::DispatchTableGenerator(Zone* zone)
    : zone_(zone),
      classes_(nullptr),
      num_selectors_(-1),
      num_classes_(-1),
      selector_map_(zone) {}

void DispatchTableGenerator::Initialize(ClassTable* table) {
  classes_ = table;

  ReadTableSelectorInfo();
  NumberSelectors();
  SetupSelectorRows();
  ComputeSelectorOffsets();
}

void DispatchTableGenerator::ReadTableSelectorInfo() {
  const auto& object_class = Class::Handle(Z, classes_->At(kInstanceCid));
  const auto& script = Script::Handle(Z, object_class.script());
  const auto& info = KernelProgramInfo::Handle(Z, script.kernel_program_info());
  kernel::TableSelectorMetadata* metadata =
      kernel::TableSelectorMetadataForProgram(info, Z);
  // The metadata is only generated when gen_kernel runs in AOT mode with TFA
  // enabled; its absence is a fatal configuration error.
  if (metadata == nullptr) {
    FATAL(
        "Missing table selector metadata!\n"
        "Probably gen_kernel was run in non-AOT mode or without TFA.\n");
  }
  for (intptr_t i = 0; i < metadata->selectors.length(); i++) {
    const kernel::TableSelectorInfo* info = &metadata->selectors[i];
    selector_map_.AddSelector(info->call_count, info->called_on_null,
                              info->torn_off);
  }
}

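// Walks every concrete dynamic function in the program and records, per
// selector, whether it is defined on the null interface (i.e. on Object) and
// whether its calls need an arguments descriptor (generic functions and
// functions with optional parameters).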
void DispatchTableGenerator::NumberSelectors() {
  num_classes_ = classes_->NumCids();

  Object& obj = Object::Handle(Z);
  Class& klass = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);

  for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
    obj = classes_->At(cid);
    if (obj.IsClass()) {
      klass = Class::RawCast(obj.raw());
      functions = klass.functions();
      if (!functions.IsNull()) {
        for (intptr_t j = 0; j < functions.Length(); j++) {
          function ^= functions.At(j);
          if (function.IsDynamicFunction(/*allow_abstract=*/false)) {
            const bool on_null_interface = klass.IsObjectClass();
            const bool requires_args_descriptor =
                function.IsGeneric() || function.HasOptionalParameters();
            // Get assigned selector ID for this function.
            const int32_t sid = selector_map_.SelectorId(function);
            if (sid == SelectorMap::kInvalidSelectorId) {
              // Probably gen_kernel was run in non-AOT mode or without TFA.
              FATAL("Function has no assigned selector ID.\n");
            }
            selector_map_.SetSelectorProperties(sid, on_null_interface,
                                                requires_args_descriptor);
          }
        }
      }
    }
  }

  num_selectors_ = selector_map_.NumIds();
}

void DispatchTableGenerator::SetupSelectorRows() {
  Object& obj = Object::Handle(Z);
  Class& klass = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);

  // For each class, first compute the ranges of cids that inherit methods
  // from it. This is needed because cids are not guaranteed to be assigned
  // in preorder or without holes, so the concrete subclasses of a class may
  // form several disjoint cid ranges.

  // Make a condensed array which stores parent cids.
  std::unique_ptr<classid_t[]> parent_cids(new classid_t[num_classes_]);
  std::unique_ptr<bool[]> is_concrete_class(new bool[num_classes_]);
  for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
    classid_t parent_cid = kIllegalCid;
    bool concrete = false;
    if (cid > kIllegalCid) {
      obj = classes_->At(cid);
      if (obj.IsClass()) {
        klass = Class::RawCast(obj.raw());
        concrete = !klass.is_abstract();
        klass = klass.SuperClass();
        if (!klass.IsNull()) {
          parent_cid = klass.id();
        }
      }
    }
    parent_cids[cid] = parent_cid;
    is_concrete_class[cid] = concrete;
  }

  // Precompute depth level.
  std::unique_ptr<int16_t[]> cid_depth(new int16_t[num_classes_]);
  for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
    int16_t depth = 0;
    classid_t pcid = cid;
    while (pcid != kIllegalCid) {
      pcid = parent_cids[pcid];
      depth++;
    }
    cid_depth[cid] = depth;
  }

  // Find all ranges of concrete cids that are subclasses of [cid] (including
  // [cid] itself, if it is concrete).
  std::unique_ptr<GrowableArray<Interval>[]> cid_subclass_ranges(
      new GrowableArray<Interval>[num_classes_]());
  for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
    classid_t start = kIllegalCid;
    for (classid_t sub_cid = kIllegalCid + 1; sub_cid < num_classes_;
         sub_cid++) {
      // Is [sub_cid] a subclass of [cid]?
      classid_t pcid = sub_cid;
      while (pcid != kIllegalCid && pcid != cid) {
        pcid = parent_cids[pcid];
      }
      const bool is_subclass = cid == pcid;
      const bool in_range = is_subclass && is_concrete_class[sub_cid];

      if (start == kIllegalCid && in_range) {
        start = sub_cid;
      } else if (start != kIllegalCid && !in_range) {
        Interval range(start, sub_cid);
        cid_subclass_ranges[cid].Add(range);
        start = kIllegalCid;
      }
    }
    if (start != kIllegalCid) {
      Interval range(start, num_classes_);
      cid_subclass_ranges[cid].Add(range);
    }
  }

  // Initialize selector rows.
  SelectorRow* selector_rows = Z->Alloc<SelectorRow>(num_selectors_);
  for (intptr_t i = 0; i < num_selectors_; i++) {
    TableSelector* selector = &selector_map_.selectors_[i];
    new (&selector_rows[i]) SelectorRow(Z, selector);
    if (selector->called_on_null && !selector->on_null_interface) {
      selector_rows[i].DefineSelectorImplementationForInterval(
          kNullCid, 0, Interval(kNullCid, kNullCid + 1), nullptr);
    }
  }

  // Add implementation intervals to the selector rows for all classes that
  // have concrete implementations of the selector.
  for (classid_t cid = kIllegalCid + 1; cid < num_classes_; cid++) {
    obj = classes_->At(cid);
    if (obj.IsClass()) {
      klass = Class::RawCast(obj.raw());
      GrowableArray<Interval>& subclass_cid_ranges = cid_subclass_ranges[cid];

      functions = klass.functions();
      if (!functions.IsNull()) {
        const int16_t depth = cid_depth[cid];
        for (intptr_t j = 0; j < functions.Length(); j++) {
          function ^= functions.At(j);
          if (function.IsDynamicFunction(/*allow_abstract=*/false)) {
            const int32_t sid = selector_map_.SelectorId(function);

            if (sid != SelectorMap::kInvalidSelectorId) {
              auto MakeIntervals = [&](const Function& function, int32_t sid) {
                // A function handle that survives until the table is built.
                auto& function_handle = Function::ZoneHandle(Z, function.raw());

                for (intptr_t i = 0; i < subclass_cid_ranges.length(); i++) {
                  Interval& subclass_cid_range = subclass_cid_ranges[i];
                  selector_rows[sid].DefineSelectorImplementationForInterval(
                      cid, depth, subclass_cid_range, &function_handle);
                }
              };
              MakeIntervals(function, sid);

              if (selector_map_.selectors_[sid].torn_off) {
                const String& method_name = String::Handle(Z, function.name());
                const String& getter_name =
                    String::Handle(Z, Field::GetterName(method_name));
                const Function& tearoff = Function::Handle(
                    Z, function.GetMethodExtractor(getter_name));
                const int32_t tearoff_sid = selector_map_.SelectorId(tearoff);

                if (tearoff_sid != SelectorMap::kInvalidSelectorId) {
                  MakeIntervals(tearoff, tearoff_sid);
                }
              }
            }
          }
        }
      }
    }
  }

  // Retain all selectors that contain implementation intervals.
  for (intptr_t i = 0; i < num_selectors_; i++) {
    const TableSelector& selector = selector_map_.selectors_[i];
    if (selector.IsUsed() && selector_rows[i].Finalize()) {
      table_rows_.Add(&selector_rows[i]);
    }
  }
}

void DispatchTableGenerator::ComputeSelectorOffsets() {
  ASSERT(table_rows_.length() > 0);

  RowFitter fitter;

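  // Rows are allocated in three passes: the most popular rows are tried at
  // the origin element first, the rest are then packed into the small-offset
  // region ordered by call count per table entry, and any remaining rows are
  // placed at large offsets. Smaller offsets can presumably use shorter
  // dispatch sequences in generated code.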
  // Sort the table rows according to popularity, descending.
  struct PopularitySorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->CallCount() - (*a)->CallCount();
    }
  };
  table_rows_.Sort(PopularitySorter::Compare);

  // Try to allocate at optimal offset.
  const int32_t optimal_offset = DispatchTable::OriginElement();
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], optimal_offset, optimal_offset);
  }

  // Sort the table rows according to popularity / size, descending.
  struct PopularitySizeRatioSorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->CallCount() * (*a)->total_size() -
             (*a)->CallCount() * (*b)->total_size();
    }
  };
  table_rows_.Sort(PopularitySizeRatioSorter::Compare);

  // Try to allocate at small offsets.
  const int32_t max_offset = DispatchTable::LargestSmallOffset();
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], 0, max_offset);
  }

  // Sort the table rows according to size, descending.
  struct SizeSorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->total_size() - (*a)->total_size();
    }
  };
  table_rows_.Sort(SizeSorter::Compare);

  // Allocate remaining rows at large offsets.
  const int32_t min_large_offset = DispatchTable::LargestSmallOffset() + 1;
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], min_large_offset);
  }

  table_size_ = fitter.TableSize();
}

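// Builds the backing array of Code objects for the dispatch table. Entries
// not covered by any allocated selector row are left null.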
ArrayPtr DispatchTableGenerator::BuildCodeArray() {
  auto& entries = Array::Handle(zone_, Array::New(table_size_, Heap::kOld));
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    table_rows_[i]->FillTable(classes_, entries);
  }
  entries.MakeImmutable();
  return entries.raw();
}

}  // namespace compiler
}  // namespace dart

#endif  // defined(DART_PRECOMPILER)