// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_HANDLES_IMPL_H_
#define RUNTIME_VM_HANDLES_IMPL_H_

#include "vm/heap/heap.h"
#include "vm/thread.h"
#include "vm/visitor.h"

namespace dart {

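// Visits the object pointers in all allocated zone and scoped handles.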
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    VisitObjectPointers(ObjectPointerVisitor* visitor) {
  // Visit all zone handles.
  HandlesBlock* block = zone_blocks_;
  while (block != NULL) {
    block->VisitObjectPointers(visitor);
    block = block->next_block();
  }

  // Visit all scoped handles.
  VisitScopedHandles(visitor);
}

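// Visits the object pointers in the scoped handle blocks, from the first
// block up to and including the currently active block.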
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    VisitScopedHandles(ObjectPointerVisitor* visitor) {
  HandlesBlock* block = &first_scoped_block_;
  do {
    block->VisitObjectPointers(visitor);
    if (block == scoped_blocks_) {
      return;
    }
    block = block->next_block();
  } while (block != NULL);
  UNREACHABLE();
}

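// Passes every allocated zone and scoped handle to the given HandleVisitor.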
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::Visit(
    HandleVisitor* visitor) {
  // Visit all zone handles.
  HandlesBlock* block = zone_blocks_;
  while (block != NULL) {
    block->Visit(visitor);
    block = block->next_block();
  }

  // Visit all scoped handles.
  block = &first_scoped_block_;
  do {
    block->Visit(visitor);
    block = block->next_block();
  } while (block != NULL);
}

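// Deletes all extra zone and scoped handle blocks and reinitializes the
// first block of each list so the handle areas can be reused.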
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::Reset() {
  // Delete all the extra zone handle blocks allocated and reinit the first
  // zone block.
  if (zone_blocks_ != NULL) {
    DeleteHandleBlocks(zone_blocks_->next_block());
    zone_blocks_->ReInit();
  }

  // Delete all the extra scoped handle blocks allocated and reinit the first
  // scoped block.
  DeleteHandleBlocks(first_scoped_block_.next_block());
  first_scoped_block_.ReInit();
  scoped_blocks_ = &first_scoped_block_;
}

// Figure out the current handle scope using the given Zone and
// allocate a handle in that scope. The function assumes that a
// current handle scope exists and asserts this in DEBUG builds.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
uword Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    AllocateHandle(Zone* zone) {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(thread->top_handle_scope() != NULL);
  ASSERT(thread->MayAllocateHandles());
#endif  // DEBUG
  Handles* handles = zone->handles();
  ASSERT(handles != NULL);
  return handles->AllocateScopedHandle();
}

// The function assumes that 'zone' is the current zone or one of its
// enclosing zones and asserts this in DEBUG builds.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
uword Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    AllocateZoneHandle(Zone* zone) {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  ASSERT(zone->ContainsNestedZone(thread->zone()));
  ASSERT(thread->MayAllocateHandles());
#endif  // DEBUG
  Handles* handles = zone->handles();
  ASSERT(handles != NULL);
  uword address = handles->AllocateHandleInZone();
  return address;
}

// Figure out the current zone using the current Thread and
// check if the specified handle has been allocated in this zone.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
bool Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    IsZoneHandle(uword handle) {
  // TODO(5411412): Accessing the current thread is a performance problem;
  // consider passing it down as a parameter.
  Thread* thread = Thread::Current();
  ASSERT(thread != NULL);
  ASSERT(thread->zone() != NULL);
  Handles* handles = thread->zone()->handles();
  ASSERT(handles != NULL);
  return handles->IsValidZoneHandle(handle);
}

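// Deletes all zone handle blocks and all extra scoped handle blocks; the
// embedded first scoped block is retained and reinitialized.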
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    DeleteAll() {
  // Delete all the zone allocated handle blocks.
  // GCTrace does not need to trace this call to DeleteHandleBlocks,
  // since the individual zone deletions will be caught
  // by instrumentation in the BaseZone destructor.
  DeleteHandleBlocks(zone_blocks_);
  zone_blocks_ = NULL;

  // Delete all the scoped handle blocks.
  scoped_blocks_ = first_scoped_block_.next_block();
  DeleteHandleBlocks(scoped_blocks_);
  first_scoped_block_.ReInit();
  scoped_blocks_ = &first_scoped_block_;
}

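// Deletes the given chain of handle blocks.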
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    DeleteHandleBlocks(HandlesBlock* blocks) {
  while (blocks != NULL) {
    HandlesBlock* block = blocks;
    blocks = blocks->next_block();
    delete block;
  }
}

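// Makes the next scoped handle block the current one, allocating a fresh
// block if the current block has no successor.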
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    SetupNextScopeBlock() {
  if (FLAG_trace_handles) {
    OS::PrintErr("*** Handle Counts for (0x%" Px "):Zone = %d,Scoped = %d\n",
                 reinterpret_cast<intptr_t>(this), CountZoneHandles(),
                 CountScopedHandles());
  }
  if (scoped_blocks_->next_block() == NULL) {
    HandlesBlock* block = new HandlesBlock(NULL);
    if (block == NULL) {
      OUT_OF_MEMORY();
    }
    scoped_blocks_->set_next_block(block);
  }
  scoped_blocks_ = scoped_blocks_->next_block();
  scoped_blocks_->set_next_handle_slot(0);
#if defined(DEBUG)
  scoped_blocks_->ZapFreeHandles();
#endif
}

// Validation of a handle involves iterating through all the handle blocks
// to check if the handle is valid; please use this only in ASSERT code for
// verification purposes.
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
bool Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    IsValidScopedHandle(uword handle) const {
  const HandlesBlock* iterator = &first_scoped_block_;
  while (iterator != NULL) {
    if (iterator->IsValidHandle(handle)) {
      return true;
    }
    iterator = iterator->next_block();
  }
  return false;
}

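// Validation of a zone handle likewise iterates through all the zone
// handle blocks to check if the handle is valid.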
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
bool Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    IsValidZoneHandle(uword handle) const {
  const HandlesBlock* iterator = zone_blocks_;
  while (iterator != NULL) {
    if (iterator->IsValidHandle(handle)) {
      return true;
    }
    iterator = iterator->next_block();
  }
  return false;
}

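// Allocates a new zone handle block and makes it the head of the zone
// block list.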
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    SetupNextZoneBlock() {
  if (FLAG_trace_handles) {
    OS::PrintErr("*** Handle Counts for (0x%" Px "):Zone = %d,Scoped = %d\n",
                 reinterpret_cast<intptr_t>(this), CountZoneHandles(),
                 CountScopedHandles());
  }
  zone_blocks_ = new HandlesBlock(zone_blocks_);
  if (zone_blocks_ == NULL) {
    OUT_OF_MEMORY();
  }
}

#if defined(DEBUG)
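// Verifies that scoped_blocks_ refers to a block in the scoped block list
// and that its next handle slot is within the block's capacity.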
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    VerifyScopedHandleState() {
  HandlesBlock* block = &first_scoped_block_;
  const intptr_t end_index = (kHandleSizeInWords * kHandlesPerChunk);
  do {
    if (scoped_blocks_ == block && block->next_handle_slot() <= end_index) {
      return;
    }
    block = block->next_block();
  } while (block != NULL);
  ASSERT(false);
}

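// Zaps the unused handle slots in the current scoped block and all blocks
// that follow it.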
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    ZapFreeScopedHandles() {
  HandlesBlock* block = scoped_blocks_;
  while (block != NULL) {
    block->ZapFreeHandles();
    block = block->next_block();
  }
}
#endif

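// Counts the scoped handles allocated in the blocks from the first block
// up to and including the currently active block.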
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
int Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    CountScopedHandles() const {
  int count = 0;
  const HandlesBlock* block = &first_scoped_block_;
  do {
    count += block->HandleCount();
    if (block == scoped_blocks_) {
      return count;
    }
    block = block->next_block();
  } while (block != NULL);
  UNREACHABLE();
  return 0;
}

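// Counts the handles allocated in all zone handle blocks.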
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
int Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    CountZoneHandles() const {
  int count = 0;
  const HandlesBlock* block = zone_blocks_;
  while (block != NULL) {
    count += block->HandleCount();
    block = block->next_block();
  }
  return count;
}

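// In DEBUG mode the destructor zaps the block's entire handle area
// (via ReInit) before the memory is released.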
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::HandlesBlock::
    ~HandlesBlock() {
#if defined(DEBUG)
  ReInit();
#endif
}

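// Resets the block to an empty state; in DEBUG mode the freed handle slots
// are zapped.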
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    HandlesBlock::ReInit() {
  next_handle_slot_ = 0;
  next_block_ = NULL;
#if defined(DEBUG)
  ZapFreeHandles();
#endif
}

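// Visits the raw object pointer embedded in each allocated handle of this
// block.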
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    HandlesBlock::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != NULL);
  for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) {
    visitor->VisitPointer(
        reinterpret_cast<ObjectPtr*>(&data_[i + kOffsetOfRawPtr / kWordSize]));
  }
}

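// Passes the address of each allocated handle in this block to the visitor.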
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    HandlesBlock::Visit(HandleVisitor* visitor) {
  ASSERT(visitor != NULL);
  for (intptr_t i = 0; i < next_handle_slot_; i += kHandleSizeInWords) {
    visitor->VisitHandle(reinterpret_cast<uword>(&data_[i]));
  }
}

#if defined(DEBUG)
template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
void Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    HandlesBlock::ZapFreeHandles() {
  // Reinitialize the handle area to some uninitialized value.
  for (intptr_t i = next_handle_slot_;
       i < (kHandleSizeInWords * kHandlesPerChunk); i++) {
    data_[i] = kZapUninitializedWord;
  }
}
#endif

template <int kHandleSizeInWords, int kHandlesPerChunk, int kOffsetOfRawPtr>
int Handles<kHandleSizeInWords, kHandlesPerChunk, kOffsetOfRawPtr>::
    HandlesBlock::HandleCount() const {
  return (next_handle_slot_ / kHandleSizeInWords);
}

}  // namespace dart

#endif  // RUNTIME_VM_HANDLES_IMPL_H_