1 | // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 | // for details. All rights reserved. Use of this source code is governed by a |
3 | // BSD-style license that can be found in the LICENSE file. |
4 | |
5 | #include "vm/snapshot.h" |
6 | |
7 | #include "platform/assert.h" |
8 | #include "vm/bootstrap.h" |
9 | #include "vm/class_finalizer.h" |
10 | #include "vm/dart.h" |
11 | #include "vm/exceptions.h" |
12 | #include "vm/heap/heap.h" |
13 | #include "vm/longjump.h" |
14 | #include "vm/message.h" |
15 | #include "vm/object.h" |
16 | #include "vm/object_store.h" |
17 | #include "vm/snapshot_ids.h" |
18 | #include "vm/stub_code.h" |
19 | #include "vm/symbols.h" |
20 | #include "vm/timeline.h" |
21 | #include "vm/type_testing_stubs.h" |
22 | #include "vm/version.h" |
23 | |
24 | // We currently only expect the Dart mutator to read snapshots. |
25 | #define ASSERT_NO_SAFEPOINT_SCOPE() \ |
26 | isolate()->AssertCurrentThreadIsMutator(); \ |
27 | ASSERT(thread()->no_safepoint_scope_depth() != 0) |
28 | |
29 | namespace dart { |
30 | |
31 | static const int kNumInitialReferences = 32; |
32 | |
33 | static bool IsSingletonClassId(intptr_t class_id) { |
34 | // Check if this is a singleton object class which is shared by all isolates. |
35 | return ((class_id >= kClassCid && class_id <= kUnwindErrorCid) || |
36 | (class_id == kTypeArgumentsCid) || |
37 | (class_id >= kNullCid && class_id <= kVoidCid)); |
38 | } |
39 | |
40 | static bool IsBootstrapedClassId(intptr_t class_id) { |
41 | // Check if this is a class which is created during bootstrapping. |
42 | return (class_id == kObjectCid || |
43 | (class_id >= kInstanceCid && class_id <= kUserTagCid) || |
44 | class_id == kArrayCid || class_id == kImmutableArrayCid || |
45 | IsStringClassId(class_id) || IsTypedDataClassId(class_id) || |
46 | IsExternalTypedDataClassId(class_id) || |
47 | IsTypedDataViewClassId(class_id) || class_id == kNullCid || |
48 | class_id == kNeverCid || class_id == kTransferableTypedDataCid); |
49 | } |
50 | |
51 | static bool IsObjectStoreTypeId(intptr_t index) { |
52 | // Check if this is a type which is stored in the object store. |
53 | static_assert(kFirstTypeArgumentsSnapshotId == kLastTypeSnapshotId + 1, |
54 | "Type and type arguments snapshot ids should be adjacent");
55 | return index >= kFirstTypeSnapshotId && index <= kLastTypeArgumentsSnapshotId; |
56 | } |
57 | |
58 | static bool IsSplitClassId(intptr_t class_id) { |
59 | // Return whether this class is serialized in two steps: first a reference, |
60 | // with sufficient information to allocate a correctly sized object, and then |
61 | // later inline with complete contents. |
62 | return class_id >= kNumPredefinedCids || class_id == kArrayCid || |
63 | class_id == kImmutableArrayCid || class_id == kObjectPoolCid || |
64 | IsImplicitFieldClassId(class_id); |
65 | } |
66 | |
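// Predefined class ids are encoded in the snapshot as object ids offset by
// kClassIdsOffset; these helpers convert between the two encodings.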
67 | static intptr_t ClassIdFromObjectId(intptr_t object_id) { |
68 | ASSERT(object_id > kClassIdsOffset); |
69 | intptr_t class_id = (object_id - kClassIdsOffset); |
70 | return class_id; |
71 | } |
72 | |
73 | static intptr_t ObjectIdFromClassId(intptr_t class_id) { |
74 | ASSERT((class_id > kIllegalCid) && (class_id < kNumPredefinedCids)); |
75 | return (class_id + kClassIdsOffset); |
76 | } |
77 | |
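// Returns the canonical type or type arguments object cached in the object
// store for the given reserved snapshot index.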
78 | static ObjectPtr GetType(ObjectStore* object_store, intptr_t index) { |
79 | switch (index) { |
80 | case kLegacyObjectType: |
81 | return object_store->legacy_object_type(); |
82 | case kNullableObjectType: |
83 | return object_store->nullable_object_type(); |
84 | case kNullType: |
85 | return object_store->null_type(); |
86 | case kNeverType: |
87 | return object_store->never_type(); |
88 | case kLegacyFunctionType: |
89 | return object_store->legacy_function_type(); |
90 | case kLegacyNumberType: |
91 | return object_store->legacy_number_type(); |
92 | case kLegacySmiType: |
93 | return object_store->legacy_smi_type(); |
94 | case kLegacyMintType: |
95 | return object_store->legacy_mint_type(); |
96 | case kLegacyDoubleType: |
97 | return object_store->legacy_double_type(); |
98 | case kLegacyIntType: |
99 | return object_store->legacy_int_type(); |
100 | case kLegacyBoolType: |
101 | return object_store->legacy_bool_type(); |
102 | case kLegacyStringType: |
103 | return object_store->legacy_string_type(); |
104 | case kLegacyArrayType: |
105 | return object_store->legacy_array_type(); |
106 | case kLegacyIntTypeArguments: |
107 | return object_store->type_argument_legacy_int(); |
108 | case kLegacyDoubleTypeArguments: |
109 | return object_store->type_argument_legacy_double(); |
110 | case kLegacyStringTypeArguments: |
111 | return object_store->type_argument_legacy_string(); |
112 | case kLegacyStringDynamicTypeArguments: |
113 | return object_store->type_argument_legacy_string_dynamic(); |
114 | case kLegacyStringLegacyStringTypeArguments: |
115 | return object_store->type_argument_legacy_string_legacy_string(); |
116 | case kNonNullableObjectType: |
117 | return object_store->non_nullable_object_type(); |
118 | case kNonNullableFunctionType: |
119 | return object_store->non_nullable_function_type(); |
120 | case kNonNullableNumberType: |
121 | return object_store->non_nullable_number_type(); |
122 | case kNonNullableSmiType: |
123 | return object_store->non_nullable_smi_type(); |
124 | case kNonNullableMintType: |
125 | return object_store->non_nullable_mint_type(); |
126 | case kNonNullableDoubleType: |
127 | return object_store->non_nullable_double_type(); |
128 | case kNonNullableIntType: |
129 | return object_store->non_nullable_int_type(); |
130 | case kNonNullableBoolType: |
131 | return object_store->non_nullable_bool_type(); |
132 | case kNonNullableStringType: |
133 | return object_store->non_nullable_string_type(); |
134 | case kNonNullableArrayType: |
135 | return object_store->non_nullable_array_type(); |
136 | case kNonNullableIntTypeArguments: |
137 | return object_store->type_argument_non_nullable_int(); |
138 | case kNonNullableDoubleTypeArguments: |
139 | return object_store->type_argument_non_nullable_double(); |
140 | case kNonNullableStringTypeArguments: |
141 | return object_store->type_argument_non_nullable_string(); |
142 | case kNonNullableStringDynamicTypeArguments: |
143 | return object_store->type_argument_non_nullable_string_dynamic(); |
144 | case kNonNullableStringNonNullableStringTypeArguments: |
145 | return object_store |
146 | ->type_argument_non_nullable_string_non_nullable_string(); |
147 | default: |
148 | break; |
149 | } |
150 | UNREACHABLE(); |
151 | return Type::null(); |
152 | } |
153 | |
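// Inverse of GetType(): returns the reserved snapshot index for a canonical
// type or type arguments object cached in the object store, or kInvalidIndex
// if the object is not one of them.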
154 | static intptr_t GetTypeIndex(ObjectStore* object_store, |
155 | const ObjectPtr raw_type) { |
156 | if (raw_type == object_store->legacy_object_type()) { |
157 | return kLegacyObjectType; |
158 | } else if (raw_type == object_store->null_type()) { |
159 | return kNullType; |
160 | } else if (raw_type == object_store->never_type()) { |
161 | return kNeverType; |
162 | } else if (raw_type == object_store->legacy_function_type()) { |
163 | return kLegacyFunctionType; |
164 | } else if (raw_type == object_store->legacy_number_type()) { |
165 | return kLegacyNumberType; |
166 | } else if (raw_type == object_store->legacy_smi_type()) { |
167 | return kLegacySmiType; |
168 | } else if (raw_type == object_store->legacy_mint_type()) { |
169 | return kLegacyMintType; |
170 | } else if (raw_type == object_store->legacy_double_type()) { |
171 | return kLegacyDoubleType; |
172 | } else if (raw_type == object_store->legacy_int_type()) { |
173 | return kLegacyIntType; |
174 | } else if (raw_type == object_store->legacy_bool_type()) { |
175 | return kLegacyBoolType; |
176 | } else if (raw_type == object_store->legacy_string_type()) { |
177 | return kLegacyStringType; |
178 | } else if (raw_type == object_store->legacy_array_type()) { |
179 | return kLegacyArrayType; |
180 | } else if (raw_type == object_store->type_argument_legacy_int()) { |
181 | return kLegacyIntTypeArguments; |
182 | } else if (raw_type == object_store->type_argument_legacy_double()) { |
183 | return kLegacyDoubleTypeArguments; |
184 | } else if (raw_type == object_store->type_argument_legacy_string()) { |
185 | return kLegacyStringTypeArguments; |
186 | } else if (raw_type == object_store->type_argument_legacy_string_dynamic()) { |
187 | return kLegacyStringDynamicTypeArguments; |
188 | } else if (raw_type == |
189 | object_store->type_argument_legacy_string_legacy_string()) { |
190 | return kLegacyStringLegacyStringTypeArguments; |
191 | } else if (raw_type == object_store->non_nullable_object_type()) { |
192 | return kNonNullableObjectType; |
193 | } else if (raw_type == object_store->non_nullable_function_type()) { |
194 | return kNonNullableFunctionType; |
195 | } else if (raw_type == object_store->non_nullable_number_type()) { |
196 | return kNonNullableNumberType; |
197 | } else if (raw_type == object_store->non_nullable_smi_type()) { |
198 | return kNonNullableSmiType; |
199 | } else if (raw_type == object_store->non_nullable_mint_type()) { |
200 | return kNonNullableMintType; |
201 | } else if (raw_type == object_store->non_nullable_double_type()) { |
202 | return kNonNullableDoubleType; |
203 | } else if (raw_type == object_store->non_nullable_int_type()) { |
204 | return kNonNullableIntType; |
205 | } else if (raw_type == object_store->non_nullable_bool_type()) { |
206 | return kNonNullableBoolType; |
207 | } else if (raw_type == object_store->non_nullable_string_type()) { |
208 | return kNonNullableStringType; |
209 | } else if (raw_type == object_store->non_nullable_array_type()) { |
210 | return kNonNullableArrayType; |
211 | } else if (raw_type == object_store->type_argument_non_nullable_int()) { |
212 | return kNonNullableIntTypeArguments; |
213 | } else if (raw_type == object_store->type_argument_non_nullable_double()) { |
214 | return kNonNullableDoubleTypeArguments; |
215 | } else if (raw_type == object_store->type_argument_non_nullable_string()) { |
216 | return kNonNullableStringTypeArguments; |
217 | } else if (raw_type == |
218 | object_store->type_argument_non_nullable_string_dynamic()) { |
219 | return kNonNullableStringDynamicTypeArguments; |
220 | } else if (raw_type == |
221 | object_store |
222 | ->type_argument_non_nullable_string_non_nullable_string()) { |
223 | return kNonNullableStringNonNullableStringTypeArguments; |
224 | } |
225 | return kInvalidIndex; |
226 | } |
227 | |
228 | const char* Snapshot::KindToCString(Kind kind) { |
229 | switch (kind) { |
230 | case kFull:
231 | return "full";
232 | case kFullJIT:
233 | return "full-jit";
234 | case kFullAOT:
235 | return "full-aot";
236 | case kMessage:
237 | return "message";
238 | case kNone:
239 | return "none";
240 | case kInvalid:
241 | default:
242 | return "invalid";
243 | } |
244 | } |
245 | |
246 | const Snapshot* Snapshot::SetupFromBuffer(const void* raw_memory) { |
247 | ASSERT(raw_memory != NULL); |
248 | const Snapshot* snapshot = reinterpret_cast<const Snapshot*>(raw_memory); |
249 | if (!snapshot->check_magic()) { |
250 | return NULL; |
251 | } |
252 | // If the raw length is negative or greater than what the local machine can |
253 | // handle, then signal an error. |
254 | int64_t length = snapshot->large_length(); |
255 | if ((length < 0) || (length > kIntptrMax)) { |
256 | return NULL; |
257 | } |
258 | return snapshot; |
259 | } |
260 | |
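// Smis are written to the stream as their raw tagged value, so reading one
// back only requires validating the tag bits.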
261 | SmiPtr BaseReader::ReadAsSmi() { |
262 | SmiPtr value = static_cast<SmiPtr>(Read<intptr_t>()); |
263 | ASSERT((static_cast<uword>(value) & kSmiTagMask) == kSmiTag); |
264 | return value; |
265 | } |
266 | |
267 | intptr_t BaseReader::ReadSmiValue() { |
268 | return Smi::Value(ReadAsSmi()); |
269 | } |
270 | |
271 | SnapshotReader::SnapshotReader(const uint8_t* buffer, |
272 | intptr_t size, |
273 | Snapshot::Kind kind, |
274 | ZoneGrowableArray<BackRefNode>* backward_refs, |
275 | Thread* thread) |
276 | : BaseReader(buffer, size), |
277 | kind_(kind), |
278 | thread_(thread), |
279 | zone_(thread->zone()), |
280 | heap_(isolate()->heap()), |
281 | old_space_(thread_->isolate()->heap()->old_space()), |
282 | cls_(Class::Handle(zone_)), |
283 | code_(Code::Handle(zone_)), |
284 | instance_(Instance::Handle(zone_)), |
285 | instructions_(Instructions::Handle(zone_)), |
286 | obj_(Object::Handle(zone_)), |
287 | pobj_(PassiveObject::Handle(zone_)), |
288 | array_(Array::Handle(zone_)), |
289 | field_(Field::Handle(zone_)), |
290 | str_(String::Handle(zone_)), |
291 | library_(Library::Handle(zone_)), |
292 | type_(AbstractType::Handle(zone_)), |
293 | type_arguments_(TypeArguments::Handle(zone_)), |
294 | tokens_(GrowableObjectArray::Handle(zone_)), |
295 | data_(ExternalTypedData::Handle(zone_)), |
296 | typed_data_base_(TypedDataBase::Handle(zone_)), |
297 | typed_data_(TypedData::Handle(zone_)), |
298 | typed_data_view_(TypedDataView::Handle(zone_)), |
299 | function_(Function::Handle(zone_)), |
300 | error_(UnhandledException::Handle(zone_)), |
301 | set_class_(Class::ZoneHandle( |
302 | zone_, |
303 | thread_->isolate()->object_store()->linked_hash_set_class())), |
304 | max_vm_isolate_object_id_( |
305 | (Snapshot::IsFull(kind)) |
306 | ? Object::vm_isolate_snapshot_object_table().Length() |
307 | : 0), |
308 | backward_references_(backward_refs), |
309 | types_to_postprocess_(GrowableObjectArray::Handle(zone_)), |
310 | objects_to_rehash_(GrowableObjectArray::Handle(zone_)) {} |
311 | |
312 | ObjectPtr SnapshotReader::ReadObject() { |
313 | // Setup for long jump in case there is an exception while reading. |
314 | LongJumpScope jump; |
315 | if (setjmp(*jump.Set()) == 0) { |
316 | PassiveObject& obj = |
317 | PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject)); |
318 | for (intptr_t i = 0; i < backward_references_->length(); i++) { |
319 | if (!(*backward_references_)[i].is_deserialized()) { |
320 | ReadObjectImpl(kAsInlinedObject); |
321 | (*backward_references_)[i].set_state(kIsDeserialized); |
322 | } |
323 | } |
324 | Object& result = Object::Handle(zone_); |
325 | if (backward_references_->length() > 0) { |
326 | result = (*backward_references_)[0].reference()->raw(); |
327 | } else { |
328 | result = obj.raw(); |
329 | } |
330 | RunDelayedTypePostprocessing(); |
331 | const Object& ok = Object::Handle(zone_, RunDelayedRehashingOfMaps()); |
332 | objects_to_rehash_ = GrowableObjectArray::null(); |
333 | if (!ok.IsNull()) { |
334 | return ok.raw(); |
335 | } |
336 | return result.raw(); |
337 | } else { |
338 | // An error occurred while reading, return the error object. |
339 | return Thread::Current()->StealStickyError(); |
340 | } |
341 | } |
342 | |
343 | void SnapshotReader::EnqueueTypePostprocessing(const AbstractType& type) { |
344 | if (types_to_postprocess_.IsNull()) { |
345 | types_to_postprocess_ = GrowableObjectArray::New(); |
346 | } |
347 | types_to_postprocess_.Add(type); |
348 | } |
349 | |
350 | void SnapshotReader::RunDelayedTypePostprocessing() { |
351 | if (types_to_postprocess_.IsNull()) { |
352 | return; |
353 | } |
354 | |
355 | AbstractType& type = AbstractType::Handle(); |
356 | Code& code = Code::Handle(); |
357 | for (intptr_t i = 0; i < types_to_postprocess_.Length(); ++i) { |
358 | type ^= types_to_postprocess_.At(i); |
359 | code = TypeTestingStubGenerator::DefaultCodeForType(type); |
360 | type.SetTypeTestingStub(code); |
361 | } |
362 | } |
363 | |
364 | void SnapshotReader::EnqueueRehashingOfMap(const LinkedHashMap& map) { |
365 | if (objects_to_rehash_.IsNull()) { |
366 | objects_to_rehash_ = GrowableObjectArray::New(); |
367 | } |
368 | objects_to_rehash_.Add(map); |
369 | } |
370 | |
371 | ObjectPtr SnapshotReader::RunDelayedRehashingOfMaps() { |
372 | if (!objects_to_rehash_.IsNull()) { |
373 | const Library& collections_lib = |
374 | Library::Handle(zone_, Library::CollectionLibrary()); |
375 | const Function& rehashing_function = Function::Handle( |
376 | zone_, |
377 | collections_lib.LookupFunctionAllowPrivate(Symbols::_rehashObjects())); |
378 | ASSERT(!rehashing_function.IsNull()); |
379 | |
380 | const Array& arguments = Array::Handle(zone_, Array::New(1)); |
381 | arguments.SetAt(0, objects_to_rehash_); |
382 | |
383 | return DartEntry::InvokeFunction(rehashing_function, arguments); |
384 | } |
385 | return Object::null(); |
386 | } |
387 | |
388 | ClassPtr SnapshotReader::ReadClassId(intptr_t object_id) { |
389 | ASSERT(!Snapshot::IsFull(kind_)); |
390 | // Read the class header information and lookup the class. |
391 | intptr_t class_header = Read<int32_t>();
392 | ASSERT((class_header & kSmiTagMask) != kSmiTag); |
393 | ASSERT(!IsVMIsolateObject(class_header) || |
394 | !IsSingletonClassId(GetVMIsolateObjectId(class_header))); |
395 | ASSERT((SerializedHeaderTag::decode(class_header) != kObjectId) || |
396 | !IsBootstrapedClassId(SerializedHeaderData::decode(class_header))); |
397 | Class& cls = Class::ZoneHandle(zone(), Class::null()); |
398 | AddBackRef(object_id, &cls, kIsDeserialized); |
399 | // Read the library/class information and lookup the class. |
400 | str_ ^= ReadObjectImpl(class_header, kAsInlinedObject); |
401 | library_ = Library::LookupLibrary(thread(), str_); |
402 | if (library_.IsNull() || !library_.Loaded()) { |
403 | SetReadException( |
404 | "Invalid object found in message: library is not found or loaded.");
405 | } |
406 | str_ ^= ReadObjectImpl(kAsInlinedObject); |
407 | if (str_.raw() == Symbols::TopLevel().raw()) { |
408 | cls = library_.toplevel_class(); |
409 | } else { |
410 | str_ = String::New(String::ScrubName(str_)); |
411 | cls = library_.LookupClassAllowPrivate(str_); |
412 | } |
413 | if (cls.IsNull()) { |
414 | SetReadException("Invalid object found in message: class not found");
415 | } |
416 | cls.EnsureIsFinalized(thread()); |
417 | return cls.raw(); |
418 | } |
419 | |
420 | ObjectPtr SnapshotReader::ReadStaticImplicitClosure(intptr_t object_id, |
421 | intptr_t class_header) {
422 | ASSERT(!Snapshot::IsFull(kind_)); |
423 | |
424 | // First create a function object and associate it with the specified |
425 | // 'object_id'. |
426 | Function& func = Function::Handle(zone(), Function::null()); |
427 | Instance& obj = Instance::ZoneHandle(zone(), Instance::null()); |
428 | AddBackRef(object_id, &obj, kIsDeserialized); |
429 | |
430 | // Read the library/class/function information and lookup the function. |
431 | // Note: WriteStaticImplicitClosure is *not* scrubbing the names before |
432 | // writing them into the snapshot, because scrubbing requires allocation. |
433 | // This means that names we read here might be mangled with private |
434 | // keys. These keys need to be scrubbed before performing lookups |
435 | // otherwise lookups might fail. |
436 | str_ ^= ReadObjectImpl(kAsInlinedObject); |
437 | library_ = Library::LookupLibrary(thread(), str_); |
438 | if (library_.IsNull() || !library_.Loaded()) { |
439 | SetReadException("Invalid Library object found in message.");
440 | } |
441 | str_ ^= ReadObjectImpl(kAsInlinedObject); |
442 | if (str_.Equals(Symbols::TopLevel())) { |
443 | str_ ^= ReadObjectImpl(kAsInlinedObject); |
444 | str_ = String::New(String::ScrubName(str_)); |
445 | func = library_.LookupFunctionAllowPrivate(str_); |
446 | } else { |
447 | str_ = String::New(String::ScrubName(str_)); |
448 | cls_ = library_.LookupClassAllowPrivate(str_); |
449 | if (cls_.IsNull()) { |
450 | OS::PrintErr("Name of class not found %s\n", str_.ToCString());
451 | SetReadException("Invalid Class object found in message.");
452 | } |
453 | cls_.EnsureIsFinalized(thread()); |
454 | str_ ^= ReadObjectImpl(kAsInlinedObject); |
455 | str_ = String::New(String::ScrubName(str_)); |
456 | func = cls_.LookupFunctionAllowPrivate(str_); |
457 | } |
458 | if (func.IsNull()) { |
459 | SetReadException("Invalid function object found in message.");
460 | } |
461 | func = func.ImplicitClosureFunction(); |
462 | ASSERT(!func.IsNull()); |
463 | |
464 | // Return the associated implicit static closure. |
465 | obj = func.ImplicitStaticClosure(); |
466 | return obj.raw(); |
467 | } |
468 | |
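// Object ids assigned by the reader start after the predefined object ids
// and the ids reserved for entries in the VM isolate snapshot object table.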
469 | intptr_t SnapshotReader::NextAvailableObjectId() const { |
470 | return backward_references_->length() + kMaxPredefinedObjectIds + |
471 | max_vm_isolate_object_id_; |
472 | } |
473 | |
474 | void SnapshotReader::SetReadException(const char* msg) { |
475 | const String& error_str = String::Handle(zone(), String::New(msg)); |
476 | const Array& args = Array::Handle(zone(), Array::New(1)); |
477 | args.SetAt(0, error_str); |
478 | Object& result = Object::Handle(zone()); |
479 | const Library& library = Library::Handle(zone(), Library::CoreLibrary()); |
480 | result = DartLibraryCalls::InstanceCreate(library, Symbols::ArgumentError(), |
481 | Symbols::Dot(), args); |
482 | const StackTrace& stacktrace = StackTrace::Handle(zone()); |
483 | const UnhandledException& error = UnhandledException::Handle( |
484 | zone(), UnhandledException::New(Instance::Cast(result), stacktrace)); |
485 | thread()->long_jump_base()->Jump(1, error); |
486 | } |
487 | |
488 | ObjectPtr SnapshotReader::VmIsolateSnapshotObject(intptr_t index) const { |
489 | return Object::vm_isolate_snapshot_object_table().At(index); |
490 | } |
491 | |
492 | bool SnapshotReader::is_vm_isolate() const { |
493 | return isolate() == Dart::vm_isolate(); |
494 | } |
495 | |
496 | ObjectPtr SnapshotReader::ReadObjectImpl(bool as_reference) { |
497 | int64_t header_value = Read<int64_t>();
498 | if ((header_value & kSmiTagMask) == kSmiTag) { |
499 | return NewInteger(header_value); |
500 | } |
501 | ASSERT((header_value <= kIntptrMax) && (header_value >= kIntptrMin)); |
502 | return ReadObjectImpl(static_cast<intptr_t>(header_value), as_reference); |
503 | } |
504 | |
505 | ObjectPtr SnapshotReader::ReadObjectImpl(intptr_t header_value,
506 | bool as_reference) { |
507 | if (IsVMIsolateObject(header_value)) { |
508 | return ReadVMIsolateObject(header_value); |
509 | } |
510 | if (SerializedHeaderTag::decode(header_value) == kObjectId) { |
511 | return ReadIndexedObject(SerializedHeaderData::decode(header_value)); |
512 | } |
513 | ASSERT(SerializedHeaderTag::decode(header_value) == kInlined); |
514 | intptr_t object_id = SerializedHeaderData::decode(header_value); |
515 | if (object_id == kOmittedObjectId) { |
516 | object_id = NextAvailableObjectId(); |
517 | } |
518 | |
519 | // Read the class header information. |
520 | intptr_t class_header = Read<int32_t>();
521 | intptr_t tags = ReadTags(); |
522 | bool read_as_reference = as_reference && !ObjectLayout::IsCanonical(tags); |
523 | intptr_t header_id = SerializedHeaderData::decode(class_header);
524 | if (header_id == kInstanceObjectId) { |
525 | return ReadInstance(object_id, tags, read_as_reference); |
526 | } else if (header_id == kStaticImplicitClosureObjectId) { |
527 | // We skip the tags that have been written as the implicit static |
528 | // closure is going to be created in this isolate or the canonical |
529 | // version already created in the isolate will be used. |
530 | return ReadStaticImplicitClosure(object_id, class_header); |
531 | } |
532 | ASSERT((class_header & kSmiTagMask) != kSmiTag); |
533 | |
534 | intptr_t class_id = LookupInternalClass(class_header); |
535 | switch (class_id) { |
536 | #define SNAPSHOT_READ(clazz) \ |
537 | case clazz::kClassId: { \ |
538 | pobj_ = clazz::ReadFrom(this, object_id, tags, kind_, read_as_reference); \ |
539 | break; \ |
540 | } |
541 | CLASS_LIST_NO_OBJECT(SNAPSHOT_READ) |
542 | #undef SNAPSHOT_READ |
543 | #define SNAPSHOT_READ(clazz) case kTypedData##clazz##Cid: |
544 | |
545 | CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { |
546 | tags = ObjectLayout::ClassIdTag::update(class_id, tags); |
547 | pobj_ = |
548 | TypedData::ReadFrom(this, object_id, tags, kind_, read_as_reference); |
549 | break; |
550 | } |
551 | #undef SNAPSHOT_READ |
552 | #define SNAPSHOT_READ(clazz) case kExternalTypedData##clazz##Cid: |
553 | |
554 | CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { |
555 | tags = ObjectLayout::ClassIdTag::update(class_id, tags); |
556 | pobj_ = ExternalTypedData::ReadFrom(this, object_id, tags, kind_, true); |
557 | break; |
558 | } |
559 | #undef SNAPSHOT_READ |
560 | #define SNAPSHOT_READ(clazz) case kTypedData##clazz##ViewCid: |
561 | |
562 | case kByteDataViewCid: |
563 | CLASS_LIST_TYPED_DATA(SNAPSHOT_READ) { |
564 | tags = ObjectLayout::ClassIdTag::update(class_id, tags); |
565 | pobj_ = TypedDataView::ReadFrom(this, object_id, tags, kind_, true); |
566 | break; |
567 | } |
568 | #undef SNAPSHOT_READ |
569 | #define SNAPSHOT_READ(clazz) case kFfi##clazz##Cid: |
570 | |
571 | CLASS_LIST_FFI(SNAPSHOT_READ) { UNREACHABLE(); } |
572 | #undef SNAPSHOT_READ |
573 | default: |
574 | UNREACHABLE(); |
575 | break; |
576 | } |
577 | return pobj_.raw(); |
578 | } |
579 | |
580 | void SnapshotReader::EnqueueRehashingOfSet(const Object& set) { |
581 | if (objects_to_rehash_.IsNull()) { |
582 | objects_to_rehash_ = GrowableObjectArray::New(); |
583 | } |
584 | objects_to_rehash_.Add(set); |
585 | } |
586 | |
587 | ObjectPtr SnapshotReader::ReadInstance(intptr_t object_id, |
588 | intptr_t tags, |
589 | bool as_reference) { |
590 | // Object is regular dart instance. |
591 | intptr_t instance_size = 0; |
592 | Instance* result = NULL; |
593 | DeserializeState state; |
594 | if (!as_reference) { |
595 | result = reinterpret_cast<Instance*>(GetBackRef(object_id)); |
596 | state = kIsDeserialized; |
597 | } else { |
598 | state = kIsNotDeserialized; |
599 | } |
600 | if (result == NULL) { |
601 | result = &(Instance::ZoneHandle(zone(), Instance::null())); |
602 | AddBackRef(object_id, result, state); |
603 | cls_ ^= ReadObjectImpl(kAsInlinedObject); |
604 | ASSERT(!cls_.IsNull()); |
605 | // Closure instances are handled by Closure::ReadFrom(). |
606 | ASSERT(!cls_.IsClosureClass()); |
607 | instance_size = cls_.host_instance_size(); |
608 | ASSERT(instance_size > 0); |
609 | // Allocate the instance and read in all the fields for the object. |
610 | *result ^= Object::Allocate(cls_.id(), instance_size, Heap::kNew); |
611 | } else { |
612 | cls_ ^= ReadObjectImpl(kAsInlinedObject); |
613 | ASSERT(!cls_.IsNull()); |
614 | instance_size = cls_.host_instance_size(); |
615 | } |
616 | if (cls_.id() == set_class_.id()) { |
617 | EnqueueRehashingOfSet(*result); |
618 | } |
619 | if (!as_reference) { |
620 | // Read all the individual fields for inlined objects. |
621 | intptr_t next_field_offset = cls_.host_next_field_offset(); |
622 | |
623 | intptr_t type_argument_field_offset = |
624 | cls_.host_type_arguments_field_offset(); |
625 | ASSERT(next_field_offset > 0); |
626 | // Instance::NextFieldOffset() returns the offset of the first field in |
627 | // a Dart object. |
628 | bool read_as_reference = ObjectLayout::IsCanonical(tags) ? false : true; |
629 | intptr_t offset = Instance::NextFieldOffset(); |
630 | intptr_t result_cid = result->GetClassId(); |
631 | |
632 | const auto unboxed_fields = |
633 | isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt( |
634 | result_cid); |
635 | |
636 | while (offset < next_field_offset) { |
637 | if (unboxed_fields.Get(offset / kWordSize)) { |
638 | uword* p = reinterpret_cast<uword*>(result->raw_value() - |
639 | kHeapObjectTag + offset); |
640 | // Reads 32 bits of the unboxed value at a time |
641 | *p = ReadWordWith32BitReads(); |
642 | } else { |
643 | pobj_ = ReadObjectImpl(read_as_reference); |
644 | result->SetFieldAtOffset(offset, pobj_); |
645 | if ((offset != type_argument_field_offset) && |
646 | (kind_ == Snapshot::kMessage) && isolate()->use_field_guards() && |
647 | (pobj_.raw() != Object::sentinel().raw())) { |
648 | // TODO(fschneider): Consider hoisting these lookups out of the loop. |
649 | // This would involve creating a handle, since cls_ can't be reused |
650 | // across the call to ReadObjectImpl. |
651 | cls_ = isolate()->class_table()->At(result_cid); |
652 | array_ = cls_.OffsetToFieldMap(); |
653 | field_ ^= array_.At(offset >> kWordSizeLog2); |
654 | ASSERT(!field_.IsNull()); |
655 | ASSERT(field_.HostOffset() == offset); |
656 | obj_ = pobj_.raw(); |
657 | field_.RecordStore(obj_); |
658 | } |
659 | // TODO(fschneider): Verify the guarded cid and length for other kinds |
660 | // of snapshot (kFull, kScript) with asserts. |
661 | } |
662 | offset += kWordSize; |
663 | } |
664 | if (ObjectLayout::IsCanonical(tags)) { |
665 | const char* error_str = NULL; |
666 | *result = result->CheckAndCanonicalize(thread(), &error_str); |
667 | if (error_str != NULL) { |
668 | FATAL1("Failed to canonicalize %s\n", error_str);
669 | } |
670 | ASSERT(!result->IsNull()); |
671 | } |
672 | } |
673 | return result->raw(); |
674 | } |
675 | |
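// Back references are added in increasing object id order, so a new entry
// always lands at the end of the table.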
676 | void SnapshotReader::AddBackRef(intptr_t id, |
677 | Object* obj, |
678 | DeserializeState state) { |
679 | intptr_t index = (id - kMaxPredefinedObjectIds); |
680 | ASSERT(index >= max_vm_isolate_object_id_); |
681 | index -= max_vm_isolate_object_id_; |
682 | ASSERT(index == backward_references_->length()); |
683 | BackRefNode node(obj, state); |
684 | backward_references_->Add(node); |
685 | } |
686 | |
687 | Object* SnapshotReader::GetBackRef(intptr_t id) { |
688 | ASSERT(id >= kMaxPredefinedObjectIds); |
689 | intptr_t index = (id - kMaxPredefinedObjectIds); |
690 | ASSERT(index >= max_vm_isolate_object_id_); |
691 | index -= max_vm_isolate_object_id_; |
692 | if (index < backward_references_->length()) { |
693 | return (*backward_references_)[index].reference(); |
694 | } |
695 | return NULL; |
696 | } |
697 | |
698 | ApiErrorPtr SnapshotReader::VerifyVersionAndFeatures(Isolate* isolate) { |
699 | // If the version string doesn't match, return an error. |
700 | // Note: New things are allocated only if we're going to return an error. |
701 | |
702 | const char* expected_version = Version::SnapshotString(); |
703 | ASSERT(expected_version != NULL); |
704 | const intptr_t version_len = strlen(expected_version); |
705 | if (PendingBytes() < version_len) { |
706 | const intptr_t kMessageBufferSize = 128; |
707 | char message_buffer[kMessageBufferSize]; |
708 | Utils::SNPrint(message_buffer, kMessageBufferSize, |
709 | "No full snapshot version found, expected '%s'",
710 | expected_version); |
711 | // This can also fail while bringing up the VM isolate, so make sure to |
712 | // allocate the error message in old space. |
713 | const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
714 | return ApiError::New(msg, Heap::kOld); |
715 | } |
716 | |
717 | const char* version = reinterpret_cast<const char*>(CurrentBufferAddress()); |
718 | ASSERT(version != NULL); |
719 | if (strncmp(version, expected_version, version_len) != 0) { |
720 | const intptr_t kMessageBufferSize = 256; |
721 | char message_buffer[kMessageBufferSize]; |
722 | char* actual_version = Utils::StrNDup(version, version_len); |
723 | Utils::SNPrint(message_buffer, kMessageBufferSize, |
724 | "Wrong %s snapshot version, expected '%s' found '%s'",
725 | (Snapshot::IsFull(kind_)) ? "full" : "script",
726 | expected_version, actual_version); |
727 | free(actual_version); |
728 | // This can also fail while bringing up the VM isolate, so make sure to |
729 | // allocate the error message in old space. |
730 | const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
731 | return ApiError::New(msg, Heap::kOld); |
732 | } |
733 | Advance(version_len); |
734 | |
735 | const char* expected_features = Dart::FeaturesString(isolate, false, kind_); |
736 | ASSERT(expected_features != NULL); |
737 | const intptr_t expected_len = strlen(expected_features); |
738 | |
739 | const char* features = reinterpret_cast<const char*>(CurrentBufferAddress()); |
740 | ASSERT(features != NULL); |
741 | intptr_t buffer_len = Utils::StrNLen(features, PendingBytes()); |
742 | if ((buffer_len != expected_len) || |
743 | (strncmp(features, expected_features, expected_len) != 0)) { |
744 | const intptr_t kMessageBufferSize = 256; |
745 | char message_buffer[kMessageBufferSize]; |
746 | char* actual_features = |
747 | Utils::StrNDup(features, buffer_len < 128 ? buffer_len : 128); |
748 | Utils::SNPrint(message_buffer, kMessageBufferSize, |
749 | "Snapshot not compatible with the current VM configuration: " |
750 | "the snapshot requires '%s' but the VM has '%s'",
751 | actual_features, expected_features); |
752 | free(const_cast<char*>(expected_features)); |
753 | free(actual_features); |
754 | // This can also fail while bringing up the VM isolate, so make sure to |
755 | // allocate the error message in old space. |
756 | const String& msg = String::Handle(String::New(message_buffer, Heap::kOld)); |
757 | return ApiError::New(msg, Heap::kOld); |
758 | } |
759 | free(const_cast<char*>(expected_features)); |
760 | Advance(expected_len + 1); |
761 | return ApiError::null(); |
762 | } |
763 | |
764 | ObjectPtr SnapshotReader::NewInteger(int64_t value) { |
765 | ASSERT((value & kSmiTagMask) == kSmiTag); |
766 | value = value >> kSmiTagShift; |
767 | if (Smi::IsValid(value)) { |
768 | return Smi::New(static_cast<intptr_t>(value)); |
769 | } |
770 | return Mint::NewCanonical(value); |
771 | } |
772 | |
773 | intptr_t SnapshotReader::LookupInternalClass(intptr_t class_header) {
774 | // If the header is an object Id, lookup singleton VM classes or classes |
775 | // stored in the object store. |
776 | if (IsVMIsolateObject(class_header)) { |
777 | intptr_t class_id = GetVMIsolateObjectId(class_header); |
778 | ASSERT(IsSingletonClassId(class_id)); |
779 | return class_id; |
780 | } |
781 | ASSERT(SerializedHeaderTag::decode(class_header) == kObjectId); |
782 | intptr_t class_id = SerializedHeaderData::decode(class_header); |
783 | ASSERT(IsBootstrapedClassId(class_id) || IsSingletonClassId(class_id)); |
784 | return class_id; |
785 | } |
786 | |
787 | #define READ_VM_SINGLETON_OBJ(id, obj) \ |
788 | if (object_id == id) { \ |
789 | return obj; \ |
790 | } |
791 | |
792 | ObjectPtr SnapshotReader::ReadVMIsolateObject(intptr_t header_value) {
793 | intptr_t object_id = GetVMIsolateObjectId(header_value); |
794 | |
795 | // First check if it is one of the singleton objects. |
796 | READ_VM_SINGLETON_OBJ(kNullObject, Object::null()); |
797 | READ_VM_SINGLETON_OBJ(kSentinelObject, Object::sentinel().raw()); |
798 | READ_VM_SINGLETON_OBJ(kTransitionSentinelObject, |
799 | Object::transition_sentinel().raw()); |
800 | READ_VM_SINGLETON_OBJ(kEmptyArrayObject, Object::empty_array().raw()); |
801 | READ_VM_SINGLETON_OBJ(kZeroArrayObject, Object::zero_array().raw()); |
802 | READ_VM_SINGLETON_OBJ(kDynamicType, Object::dynamic_type().raw()); |
803 | READ_VM_SINGLETON_OBJ(kVoidType, Object::void_type().raw()); |
804 | READ_VM_SINGLETON_OBJ(kEmptyTypeArguments, |
805 | Object::empty_type_arguments().raw()); |
806 | READ_VM_SINGLETON_OBJ(kTrueValue, Bool::True().raw()); |
807 | READ_VM_SINGLETON_OBJ(kFalseValue, Bool::False().raw()); |
808 | READ_VM_SINGLETON_OBJ(kExtractorParameterTypes, |
809 | Object::extractor_parameter_types().raw()); |
810 | READ_VM_SINGLETON_OBJ(kExtractorParameterNames, |
811 | Object::extractor_parameter_names().raw()); |
812 | READ_VM_SINGLETON_OBJ(kEmptyContextScopeObject, |
813 | Object::empty_context_scope().raw()); |
814 | READ_VM_SINGLETON_OBJ(kEmptyObjectPool, Object::empty_object_pool().raw()); |
815 | READ_VM_SINGLETON_OBJ(kEmptyDescriptors, Object::empty_descriptors().raw()); |
816 | READ_VM_SINGLETON_OBJ(kEmptyVarDescriptors, |
817 | Object::empty_var_descriptors().raw()); |
818 | READ_VM_SINGLETON_OBJ(kEmptyExceptionHandlers, |
819 | Object::empty_exception_handlers().raw()); |
820 | |
821 | // Check if it is a double. |
822 | if (object_id == kDoubleObject) { |
823 | ASSERT(kind_ == Snapshot::kMessage); |
824 | return Double::New(ReadDouble()); |
825 | } |
826 | |
827 | // Check it is a singleton class object. |
828 | intptr_t class_id = ClassIdFromObjectId(object_id); |
829 | if (IsSingletonClassId(class_id)) { |
830 | return isolate()->class_table()->At(class_id); // get singleton class. |
831 | } |
832 | |
833 | // Check if it is a singleton Argument descriptor object. |
834 | for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) { |
835 | if (object_id == (kCachedArgumentsDescriptor0 + i)) { |
836 | return ArgumentsDescriptor::cached_args_descriptors_[i]; |
837 | } |
838 | } |
839 | |
840 | // Check if it is a singleton ICData array object. |
841 | for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) { |
842 | if (object_id == (kCachedICDataArray0 + i)) { |
843 | return ICData::cached_icdata_arrays_[i]; |
844 | } |
845 | } |
846 | |
847 | ASSERT(Symbols::IsPredefinedSymbolId(object_id)); |
848 | return Symbols::GetPredefinedSymbol(object_id); // return VM symbol. |
849 | } |
850 | |
851 | ObjectPtr SnapshotReader::ReadIndexedObject(intptr_t object_id) { |
852 | intptr_t class_id = ClassIdFromObjectId(object_id); |
853 | if (IsBootstrapedClassId(class_id)) { |
854 | return isolate()->class_table()->At(class_id); // get singleton class. |
855 | } |
856 | if (IsObjectStoreTypeId(object_id)) { |
857 | return GetType(object_store(), object_id); // return type obj. |
858 | } |
859 | ASSERT(object_id >= kMaxPredefinedObjectIds); |
860 | intptr_t index = (object_id - kMaxPredefinedObjectIds); |
861 | if (index < max_vm_isolate_object_id_) { |
862 | return VmIsolateSnapshotObject(index); |
863 | } |
864 | return GetBackRef(object_id)->raw(); |
865 | } |
866 | |
867 | void SnapshotReader::ArrayReadFrom(intptr_t object_id, |
868 | const Array& result, |
869 | intptr_t len, |
870 | intptr_t tags) { |
871 | // Setup the object fields. |
872 | *TypeArgumentsHandle() ^= ReadObjectImpl(kAsInlinedObject); |
873 | result.SetTypeArguments(*TypeArgumentsHandle()); |
874 | |
875 | bool as_reference = ObjectLayout::IsCanonical(tags) ? false : true; |
876 | for (intptr_t i = 0; i < len; i++) { |
877 | *PassiveObjectHandle() = ReadObjectImpl(as_reference); |
878 | result.SetAt(i, *PassiveObjectHandle()); |
879 | } |
880 | } |
881 | |
882 | MessageSnapshotReader::MessageSnapshotReader(Message* message, Thread* thread) |
883 | : SnapshotReader(message->snapshot(), |
884 | message->snapshot_length(), |
885 | Snapshot::kMessage, |
886 | new ZoneGrowableArray<BackRefNode>(kNumInitialReferences), |
887 | thread), |
888 | finalizable_data_(message->finalizable_data()) {} |
889 | |
890 | MessageSnapshotReader::~MessageSnapshotReader() { |
891 | ResetBackwardReferenceTable(); |
892 | } |
893 | |
894 | SnapshotWriter::SnapshotWriter(Thread* thread, |
895 | Snapshot::Kind kind, |
896 | ReAlloc alloc, |
897 | DeAlloc dealloc, |
898 | intptr_t initial_size, |
899 | ForwardList* forward_list, |
900 | bool can_send_any_object) |
901 | : BaseWriter(alloc, dealloc, initial_size), |
902 | thread_(thread), |
903 | kind_(kind), |
904 | object_store_(isolate()->object_store()), |
905 | class_table_(isolate()->class_table()), |
906 | forward_list_(forward_list), |
907 | exception_type_(Exceptions::kNone), |
908 | exception_msg_(NULL), |
909 | can_send_any_object_(can_send_any_object) { |
910 | ASSERT(forward_list_ != NULL); |
911 | } |
912 | |
913 | void SnapshotWriter::WriteObject(ObjectPtr rawobj) { |
914 | WriteObjectImpl(rawobj, kAsInlinedObject); |
915 | WriteForwardedObjects(); |
916 | } |
917 | |
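// Raw accessors for an object's header tags (and, where the header carries
// it, the hash field).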
918 | uint32_t SnapshotWriter::GetObjectTags(ObjectPtr raw) { |
919 | return raw->ptr()->tags_; |
920 | } |
921 | |
922 | uint32_t SnapshotWriter::GetObjectTags(ObjectLayout* raw) { |
923 | return raw->tags_; |
924 | } |
925 | |
926 | uword SnapshotWriter::GetObjectTagsAndHash(ObjectPtr raw) { |
927 | uword result = raw->ptr()->tags_; |
928 | #if defined(HASH_IN_OBJECT_HEADER) |
929 | result |= static_cast<uword>(raw->ptr()->hash_) << 32; |
930 | #endif |
931 | return result; |
932 | } |
933 | |
934 | #define VM_OBJECT_CLASS_LIST(V) \ |
935 | V(OneByteString) \ |
936 | V(TwoByteString) \ |
937 | V(Mint) \ |
938 | V(Double) \ |
939 | V(ImmutableArray) |
940 | |
941 | #define VM_OBJECT_WRITE(clazz) \ |
942 | case clazz::kClassId: { \ |
943 | object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized); \ |
944 | clazz##Ptr raw_obj = static_cast<clazz##Ptr>(rawobj); \ |
945 | raw_obj->ptr()->WriteTo(this, object_id, kind(), false); \ |
946 | return true; \ |
947 | } |
948 | |
949 | #define WRITE_VM_SINGLETON_OBJ(obj, id) \ |
950 | if (rawobj == obj) { \ |
951 | WriteVMIsolateObject(id); \ |
952 | return true; \ |
953 | } |
954 | |
955 | bool SnapshotWriter::HandleVMIsolateObject(ObjectPtr rawobj) { |
956 | // Check if it is one of the singleton VM objects. |
957 | WRITE_VM_SINGLETON_OBJ(Object::null(), kNullObject); |
958 | WRITE_VM_SINGLETON_OBJ(Object::sentinel().raw(), kSentinelObject); |
959 | WRITE_VM_SINGLETON_OBJ(Object::transition_sentinel().raw(), |
960 | kTransitionSentinelObject); |
961 | WRITE_VM_SINGLETON_OBJ(Object::empty_array().raw(), kEmptyArrayObject); |
962 | WRITE_VM_SINGLETON_OBJ(Object::zero_array().raw(), kZeroArrayObject); |
963 | WRITE_VM_SINGLETON_OBJ(Object::dynamic_type().raw(), kDynamicType); |
964 | WRITE_VM_SINGLETON_OBJ(Object::void_type().raw(), kVoidType); |
965 | WRITE_VM_SINGLETON_OBJ(Object::empty_type_arguments().raw(), |
966 | kEmptyTypeArguments); |
967 | WRITE_VM_SINGLETON_OBJ(Bool::True().raw(), kTrueValue); |
968 | WRITE_VM_SINGLETON_OBJ(Bool::False().raw(), kFalseValue); |
969 | WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_types().raw(), |
970 | kExtractorParameterTypes); |
971 | WRITE_VM_SINGLETON_OBJ(Object::extractor_parameter_names().raw(), |
972 | kExtractorParameterNames); |
973 | WRITE_VM_SINGLETON_OBJ(Object::empty_context_scope().raw(), |
974 | kEmptyContextScopeObject); |
975 | WRITE_VM_SINGLETON_OBJ(Object::empty_object_pool().raw(), kEmptyObjectPool); |
976 | WRITE_VM_SINGLETON_OBJ(Object::empty_descriptors().raw(), kEmptyDescriptors); |
977 | WRITE_VM_SINGLETON_OBJ(Object::empty_var_descriptors().raw(), |
978 | kEmptyVarDescriptors); |
979 | WRITE_VM_SINGLETON_OBJ(Object::empty_exception_handlers().raw(), |
980 | kEmptyExceptionHandlers); |
981 | |
982 | // Check if it is a singleton class object which is shared by |
983 | // all isolates. |
984 | intptr_t id = rawobj->GetClassId(); |
985 | if (id == kClassCid) { |
986 | ClassPtr raw_class = static_cast<ClassPtr>(rawobj); |
987 | intptr_t class_id = raw_class->ptr()->id_; |
988 | if (IsSingletonClassId(class_id)) { |
989 | intptr_t object_id = ObjectIdFromClassId(class_id); |
990 | WriteVMIsolateObject(object_id); |
991 | return true; |
992 | } |
993 | } |
994 | |
995 | // Check if it is a singleton Argument descriptor object. |
996 | for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) { |
997 | if (rawobj == ArgumentsDescriptor::cached_args_descriptors_[i]) { |
998 | WriteVMIsolateObject(kCachedArgumentsDescriptor0 + i); |
999 | return true; |
1000 | } |
1001 | } |
1002 | |
1003 | // Check if it is a singleton ICData array object. |
1004 | for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) { |
1005 | if (rawobj == ICData::cached_icdata_arrays_[i]) { |
1006 | WriteVMIsolateObject(kCachedICDataArray0 + i); |
1007 | return true; |
1008 | } |
1009 | } |
1010 | |
1011 | // In the case of script snapshots or for messages we do not use |
1012 | // the index into the vm isolate snapshot object table, instead we |
1013 | // explicitly write the object out. |
1014 | intptr_t object_id = forward_list_->FindObject(rawobj); |
1015 | if (object_id != -1) { |
1016 | WriteIndexedObject(object_id); |
1017 | return true; |
1018 | } else { |
1019 | // We do this check down here, because it's quite expensive. |
1020 | if (!rawobj->ptr()->InVMIsolateHeap()) { |
1021 | return false; |
1022 | } |
1023 | |
1024 | switch (id) { |
1025 | VM_OBJECT_CLASS_LIST(VM_OBJECT_WRITE) |
1026 | case kTypedDataUint32ArrayCid: { |
1027 | object_id = forward_list_->AddObject(zone(), rawobj, kIsSerialized); |
1028 | TypedDataPtr raw_obj = static_cast<TypedDataPtr>(rawobj); |
1029 | raw_obj->ptr()->WriteTo(this, object_id, kind(), false); |
1030 | return true; |
1031 | } |
1032 | default: |
1033 | OS::PrintErr("class id = %" Pd "\n", id);
1034 | break; |
1035 | } |
1036 | } |
1037 | |
1038 | const Object& obj = Object::Handle(rawobj); |
1039 | FATAL1("Unexpected reference to object in VM isolate: %s\n", obj.ToCString());
1040 | return false; |
1041 | } |
1042 | |
1043 | #undef VM_OBJECT_WRITE |
1044 | |
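// The forward list assigns snapshot object ids to raw objects and records the
// mapping in per-isolate weak tables (one for new space, one for old space).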
1045 | ForwardList::ForwardList(Thread* thread, intptr_t first_object_id) |
1046 | : thread_(thread), |
1047 | first_object_id_(first_object_id), |
1048 | nodes_(), |
1049 | first_unprocessed_object_id_(first_object_id) { |
1050 | ASSERT(first_object_id > 0); |
1051 | isolate()->set_forward_table_new(new WeakTable()); |
1052 | isolate()->set_forward_table_old(new WeakTable()); |
1053 | } |
1054 | |
1055 | ForwardList::~ForwardList() { |
1056 | isolate()->set_forward_table_new(nullptr); |
1057 | isolate()->set_forward_table_old(nullptr); |
1058 | } |
1059 | |
1060 | intptr_t ForwardList::AddObject(Zone* zone, |
1061 | ObjectPtr raw, |
1062 | SerializeState state) { |
1063 | NoSafepointScope no_safepoint; |
1064 | intptr_t object_id = next_object_id(); |
1065 | ASSERT(object_id > 0 && object_id <= kMaxObjectId); |
1066 | const Object& obj = Object::ZoneHandle(zone, raw); |
1067 | Node* node = new Node(&obj, state); |
1068 | ASSERT(node != NULL); |
1069 | nodes_.Add(node); |
1070 | ASSERT(object_id != 0); |
1071 | SetObjectId(raw, object_id); |
1072 | return object_id; |
1073 | } |
1074 | |
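// Returns the id previously assigned to 'raw', or kInvalidIndex if the object
// has not been added to the forward list yet.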
1075 | intptr_t ForwardList::FindObject(ObjectPtr raw) { |
1076 | NoSafepointScope no_safepoint; |
1077 | intptr_t id = GetObjectId(raw); |
1078 | ASSERT(id == 0 || NodeForObjectId(id)->obj()->raw() == raw); |
1079 | return (id == 0) ? static_cast<intptr_t>(kInvalidIndex) : id; |
1080 | } |
1081 | |
1082 | void ForwardList::SetObjectId(ObjectPtr object, intptr_t id) { |
1083 | if (object->IsNewObject()) { |
1084 | isolate()->forward_table_new()->SetValueExclusive(object, id); |
1085 | } else { |
1086 | isolate()->forward_table_old()->SetValueExclusive(object, id); |
1087 | } |
1088 | } |
1089 | |
1090 | intptr_t ForwardList::GetObjectId(ObjectPtr object) { |
1091 | if (object->IsNewObject()) { |
1092 | return isolate()->forward_table_new()->GetValueExclusive(object); |
1093 | } else { |
1094 | return isolate()->forward_table_old()->GetValueExclusive(object); |
1095 | } |
1096 | } |
1097 | |
1098 | bool SnapshotWriter::CheckAndWritePredefinedObject(ObjectPtr rawobj) { |
1099 | // Check if object can be written in one of the following ways: |
1100 | // - Smi: the Smi value is written as is (last bit is not tagged). |
1101 | // - VM internal class (from VM isolate): (index of class in vm isolate | 0x3) |
1102 | // - Object that has already been written: (negative id in stream | 0x3) |
1103 | |
1104 | NoSafepointScope no_safepoint; |
1105 | |
1106 | // First check if it is a Smi (i.e not a heap object). |
1107 | if (!rawobj->IsHeapObject()) { |
1108 | Write<int64_t>(static_cast<intptr_t>(rawobj)); |
1109 | return true; |
1110 | } |
1111 | |
1112 | intptr_t cid = rawobj->GetClassId(); |
1113 | |
1114 | if ((kind_ == Snapshot::kMessage) && (cid == kDoubleCid)) { |
1115 | WriteVMIsolateObject(kDoubleObject); |
1116 | DoublePtr rd = static_cast<DoublePtr>(rawobj); |
1117 | WriteDouble(rd->ptr()->value_); |
1118 | return true; |
1119 | } |
1120 | |
1121 | // Check if object has already been serialized, in that case just write |
1122 | // the object id out. |
1123 | intptr_t object_id = forward_list_->FindObject(rawobj); |
1124 | if (object_id != kInvalidIndex) { |
1125 | WriteIndexedObject(object_id); |
1126 | return true; |
1127 | } |
1128 | |
1129 | // Check if it is a code object in that case just write a Null object |
1130 | // as we do not want code objects in the snapshot. |
1131 | if ((cid == kCodeCid) || (cid == kBytecodeCid)) { |
1132 | WriteVMIsolateObject(kNullObject); |
1133 | return true; |
1134 | } |
1135 | |
1136 | // Now check if it is an object from the VM isolate. These objects are shared |
1137 | // by all isolates. |
1138 | if (HandleVMIsolateObject(rawobj)) { |
1139 | return true; |
1140 | } |
1141 | |
1142 | // Check if classes are not being serialized and it is preinitialized type |
1143 | // or a predefined internal VM class in the object store. |
1144 | // Check if it is an internal VM class which is in the object store. |
1145 | if (cid == kClassCid) { |
1146 | ClassPtr raw_class = static_cast<ClassPtr>(rawobj); |
1147 | intptr_t class_id = raw_class->ptr()->id_; |
1148 | if (IsBootstrapedClassId(class_id)) { |
1149 | intptr_t object_id = ObjectIdFromClassId(class_id); |
1150 | WriteIndexedObject(object_id); |
1151 | return true; |
1152 | } |
1153 | } |
1154 | |
1155 | // Now check it is a preinitialized type object. |
1156 | intptr_t index = GetTypeIndex(object_store(), rawobj); |
1157 | if (index != kInvalidIndex) { |
1158 | WriteIndexedObject(index); |
1159 | return true; |
1160 | } |
1161 | |
1162 | return false; |
1163 | } |
1164 | |
1165 | void SnapshotWriter::WriteObjectImpl(ObjectPtr raw, bool as_reference) { |
1166 | // First check if object can be written as a simple predefined type. |
1167 | if (CheckAndWritePredefinedObject(raw)) { |
1168 | return; |
1169 | } |
1170 | |
1171 | // When we know that we are dealing with leaf or shallow objects we write |
1172 | // these objects inline even when 'as_reference' is true. |
1173 | const bool write_as_reference = as_reference && !raw->ptr()->IsCanonical(); |
1174 | uintptr_t tags = GetObjectTagsAndHash(raw); |
1175 | |
1176 | // Add object to the forward ref list and mark it so that future references |
1177 | // to this object in the snapshot will use this object id. Mark the |
1178 | // serialization state so that we do the right thing when we go through |
1179 | // the forward list. |
1180 | intptr_t class_id = raw->GetClassId(); |
1181 | intptr_t object_id; |
1182 | if (write_as_reference && IsSplitClassId(class_id)) { |
1183 | object_id = forward_list_->AddObject(zone(), raw, kIsNotSerialized); |
1184 | } else { |
1185 | object_id = forward_list_->AddObject(zone(), raw, kIsSerialized); |
1186 | } |
1187 | if (write_as_reference || !IsSplitClassId(class_id)) { |
1188 | object_id = kOmittedObjectId; |
1189 | } |
1190 | WriteMarkedObjectImpl(raw, tags, object_id, write_as_reference); |
1191 | } |
1192 | |
1193 | void SnapshotWriter::WriteMarkedObjectImpl(ObjectPtr raw, |
1194 | intptr_t tags, |
1195 | intptr_t object_id, |
1196 | bool as_reference) { |
1197 | NoSafepointScope no_safepoint; |
1198 | ClassPtr cls = class_table_->At(ObjectLayout::ClassIdTag::decode(tags)); |
1199 | intptr_t class_id = cls->ptr()->id_; |
1200 | ASSERT(class_id == ObjectLayout::ClassIdTag::decode(tags)); |
1201 | if (class_id >= kNumPredefinedCids || IsImplicitFieldClassId(class_id)) { |
1202 | WriteInstance(raw, cls, tags, object_id, as_reference); |
1203 | return; |
1204 | } |
1205 | switch (class_id) { |
1206 | #define SNAPSHOT_WRITE(clazz) \ |
1207 | case clazz::kClassId: { \ |
1208 | clazz##Ptr raw_obj = static_cast<clazz##Ptr>(raw); \ |
1209 | raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference); \ |
1210 | return; \ |
1211 | } |
1212 | |
1213 | CLASS_LIST_NO_OBJECT(SNAPSHOT_WRITE) |
1214 | #undef SNAPSHOT_WRITE |
1215 | #define SNAPSHOT_WRITE(clazz) case kTypedData##clazz##Cid: |
1216 | |
1217 | CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { |
1218 | TypedDataPtr raw_obj = static_cast<TypedDataPtr>(raw); |
1219 | raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference); |
1220 | return; |
1221 | } |
1222 | #undef SNAPSHOT_WRITE |
1223 | #define SNAPSHOT_WRITE(clazz) case kExternalTypedData##clazz##Cid: |
1224 | |
1225 | CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { |
1226 | ExternalTypedDataPtr raw_obj = static_cast<ExternalTypedDataPtr>(raw); |
1227 | raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference); |
1228 | return; |
1229 | } |
1230 | #undef SNAPSHOT_WRITE |
1231 | #define SNAPSHOT_WRITE(clazz) case kTypedData##clazz##ViewCid: |
1232 | |
1233 | case kByteDataViewCid: |
1234 | CLASS_LIST_TYPED_DATA(SNAPSHOT_WRITE) { |
1235 | auto raw_obj = static_cast<TypedDataViewPtr>(raw); |
1236 | raw_obj->ptr()->WriteTo(this, object_id, kind_, as_reference); |
1237 | return; |
1238 | } |
1239 | #undef SNAPSHOT_WRITE |
1240 | |
1241 | #define SNAPSHOT_WRITE(clazz) case kFfi##clazz##Cid: |
1242 | |
1243 | CLASS_LIST_FFI(SNAPSHOT_WRITE) { |
1244 | SetWriteException(Exceptions::kArgument, |
1245 | "Native objects (from dart:ffi) such as Pointers and " |
1246 | "Structs cannot be passed between isolates.");
1247 | UNREACHABLE(); |
1248 | } |
1249 | #undef SNAPSHOT_WRITE |
1250 | default: |
1251 | break; |
1252 | } |
1253 | |
1254 | const Object& obj = Object::Handle(raw); |
1255 | FATAL1("Unexpected object: %s\n", obj.ToCString());
1256 | } |
1257 | |
1258 | class WriteInlinedObjectVisitor : public ObjectVisitor { |
1259 | public: |
1260 | explicit WriteInlinedObjectVisitor(SnapshotWriter* writer) |
1261 | : writer_(writer) {} |
1262 | |
1263 | virtual void VisitObject(ObjectPtr obj) { |
1264 | intptr_t object_id = writer_->forward_list_->FindObject(obj); |
1265 | ASSERT(object_id != kInvalidIndex); |
1266 | intptr_t tags = MessageWriter::GetObjectTagsAndHash(ObjectPtr(obj)); |
1267 | writer_->WriteMarkedObjectImpl(obj, tags, object_id, kAsInlinedObject); |
1268 | } |
1269 | |
1270 | private: |
1271 | SnapshotWriter* writer_; |
1272 | }; |
1273 | |
1274 | void SnapshotWriter::WriteForwardedObjects() { |
1275 | WriteInlinedObjectVisitor visitor(this); |
1276 | forward_list_->SerializeAll(&visitor); |
1277 | } |
1278 | |
1279 | void ForwardList::SerializeAll(ObjectVisitor* writer) { |
1280 | // Write out all objects that were added to the forward list and have |
1281 | // not been serialized yet. These would typically be fields of instance |
1282 | // objects, arrays or immutable arrays (this is done in order to avoid |
1283 | // deep recursive calls to WriteObjectImpl). |
1284 | // NOTE: The forward list might grow as we process the list. |
1285 | #ifdef DEBUG |
1286 | for (intptr_t i = first_object_id(); i < first_unprocessed_object_id_; ++i) { |
1287 | ASSERT(NodeForObjectId(i)->is_serialized()); |
1288 | } |
1289 | #endif // DEBUG |
1290 | for (intptr_t id = first_unprocessed_object_id_; id < next_object_id(); |
1291 | ++id) { |
1292 | if (!NodeForObjectId(id)->is_serialized()) { |
1293 | // Write the object out in the stream. |
1294 | ObjectPtr raw = NodeForObjectId(id)->obj()->raw(); |
1295 | writer->VisitObject(raw); |
1296 | |
1297 | // Mark object as serialized. |
1298 | NodeForObjectId(id)->set_state(kIsSerialized); |
1299 | } |
1300 | } |
1301 | first_unprocessed_object_id_ = next_object_id(); |
1302 | } |
1303 | |
1304 | void SnapshotWriter::WriteClassId(ClassLayout* cls) { |
1305 | ASSERT(!Snapshot::IsFull(kind_)); |
1306 | int class_id = cls->id_; |
1307 | ASSERT(!IsSingletonClassId(class_id) && !IsBootstrapedClassId(class_id)); |
1308 | |
1309 | // Write out the library url and class name. |
1310 | LibraryPtr library = cls->library_; |
1311 | ASSERT(library != Library::null()); |
1312 | WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); |
1313 | WriteObjectImpl(cls->name_, kAsInlinedObject); |
1314 | } |
1315 | |
1316 | void SnapshotWriter::WriteStaticImplicitClosure(intptr_t object_id, |
1317 | FunctionPtr func, |
1318 | intptr_t tags) { |
1319 | // Write out the serialization header value for this object. |
1320 | WriteInlinedObjectHeader(object_id); |
1321 | |
1322 | // Indicate this is a static implicit closure object. |
1323 | Write<int32_t>(SerializedHeaderData::encode(kStaticImplicitClosureObjectId)); |
1324 | |
1325 | // Write out the tags. |
1326 | WriteTags(tags); |
1327 | |
1328 | // Write out the library url, class name and signature function name. |
1329 | ClassPtr cls = GetFunctionOwner(func); |
1330 | ASSERT(cls != Class::null()); |
1331 | LibraryPtr library = cls->ptr()->library_; |
1332 | ASSERT(library != Library::null()); |
1333 | WriteObjectImpl(library->ptr()->url_, kAsInlinedObject); |
1334 | WriteObjectImpl(cls->ptr()->name_, kAsInlinedObject); |
1335 | WriteObjectImpl(func->ptr()->name_, kAsInlinedObject); |
1336 | } |
1337 | |
1338 | void SnapshotWriter::ArrayWriteTo(intptr_t object_id, |
1339 | intptr_t array_kind, |
1340 | intptr_t tags, |
1341 | SmiPtr length, |
1342 | TypeArgumentsPtr type_arguments, |
1343 | ObjectPtr data[], |
1344 | bool as_reference) { |
1345 | if (as_reference) { |
1346 | // Write out the serialization header value for this object. |
1347 | WriteInlinedObjectHeader(kOmittedObjectId); |
1348 | |
1349 | // Write out the class information. |
1350 | WriteIndexedObject(array_kind); |
1351 | WriteTags(tags); |
1352 | |
1353 | // Write out the length field. |
1354 | Write<ObjectPtr>(length); |
1355 | } else { |
1356 | intptr_t len = Smi::Value(length); |
1357 | |
1358 | // Write out the serialization header value for this object. |
1359 | WriteInlinedObjectHeader(object_id); |
1360 | |
1361 | // Write out the class and tags information. |
1362 | WriteIndexedObject(array_kind); |
1363 | WriteTags(tags); |
1364 | |
1365 | // Write out the length field. |
1366 | Write<ObjectPtr>(length); |
1367 | |
1368 | // Write out the type arguments. |
1369 | WriteObjectImpl(type_arguments, kAsInlinedObject); |
1370 | |
1371 | // Write out the individual object ids. |
    bool write_as_reference = !ObjectLayout::IsCanonical(tags);
1373 | for (intptr_t i = 0; i < len; i++) { |
1374 | WriteObjectImpl(data[i], write_as_reference); |
1375 | } |
1376 | } |
1377 | } |
1378 | |
1379 | FunctionPtr SnapshotWriter::IsSerializableClosure(ClosurePtr closure) { |
1380 | // Extract the function object to check if this closure |
1381 | // can be sent in an isolate message. |
1382 | FunctionPtr func = closure->ptr()->function_; |
  // We only allow closures of top-level methods or static functions in a
  // class to be sent in isolate messages.
1385 | if (can_send_any_object() && |
1386 | Function::IsImplicitStaticClosureFunction(func)) { |
1387 | return func; |
1388 | } |
  // This is not a closure of a top-level method or a static function, so
  // throw an exception as such objects cannot be serialized.
1391 | HANDLESCOPE(thread()); |
1392 | |
1393 | const Function& errorFunc = Function::Handle(zone(), func); |
1394 | ASSERT(!errorFunc.IsNull()); |
1395 | |
1396 | // All other closures are errors. |
  char* chars = OS::SCreate(
      thread()->zone(),
      "Illegal argument in isolate message : (object is a closure - %s)",
      errorFunc.ToCString());
1401 | SetWriteException(Exceptions::kArgument, chars); |
1402 | return Function::null(); |
1403 | } |
1404 | |
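// Returns the class that owns |func|, looking through a PatchClass owner if
// necessary.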
1405 | ClassPtr SnapshotWriter::GetFunctionOwner(FunctionPtr func) { |
1406 | ObjectPtr owner = func->ptr()->owner_; |
1407 | uint32_t tags = GetObjectTags(owner); |
1408 | intptr_t class_id = ObjectLayout::ClassIdTag::decode(tags); |
1409 | if (class_id == kClassCid) { |
1410 | return static_cast<ClassPtr>(owner); |
1411 | } |
1412 | ASSERT(class_id == kPatchClassCid); |
1413 | return static_cast<PatchClassPtr>(owner)->ptr()->patched_class_; |
1414 | } |
1415 | |
1416 | void SnapshotWriter::CheckForNativeFields(ClassPtr cls) { |
1417 | if (cls->ptr()->num_native_fields_ != 0) { |
1418 | // We do not allow objects with native fields in an isolate message. |
1419 | HANDLESCOPE(thread()); |
1420 | const Class& clazz = Class::Handle(zone(), cls); |
    char* chars = OS::SCreate(thread()->zone(),
                              "Illegal argument in isolate message"
                              " : (object extends NativeWrapper - %s)",
                              clazz.ToCString());
1425 | SetWriteException(Exceptions::kArgument, chars); |
1426 | } |
1427 | } |
1428 | |
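// Records the pending exception and long-jumps out of the write; the actual
// exception is thrown later by SnapshotWriter::ThrowException().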
1429 | void SnapshotWriter::SetWriteException(Exceptions::ExceptionType type, |
1430 | const char* msg) { |
1431 | set_exception_type(type); |
1432 | set_exception_msg(msg); |
1433 | // The more specific error is set up in SnapshotWriter::ThrowException(). |
1434 | thread()->long_jump_base()->Jump(1, Object::snapshot_writer_error()); |
1435 | } |
1436 | |
1437 | void SnapshotWriter::WriteInstance(ObjectPtr raw, |
1438 | ClassPtr cls, |
1439 | intptr_t tags, |
1440 | intptr_t object_id, |
1441 | bool as_reference) { |
1442 | // Closure instances are handled by ClosureLayout::WriteTo(). |
1443 | ASSERT(!Class::IsClosureClass(cls)); |
1444 | |
1445 | // Check if the instance has native fields and throw an exception if it does. |
1446 | CheckForNativeFields(cls); |
1447 | |
  // The object is a regular Dart instance.
1449 | if (as_reference) { |
1450 | // Write out the serialization header value for this object. |
1451 | WriteInlinedObjectHeader(kOmittedObjectId); |
1452 | |
1453 | // Indicate this is an instance object. |
1454 | Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); |
1455 | WriteTags(tags); |
1456 | |
1457 | // Write out the class information for this object. |
1458 | WriteObjectImpl(cls, kAsInlinedObject); |
1459 | } else { |
1460 | intptr_t next_field_offset = Class::host_next_field_offset_in_words(cls) |
1461 | << kWordSizeLog2; |
1462 | ASSERT(next_field_offset > 0); |
1463 | |
1464 | // Write out the serialization header value for this object. |
1465 | WriteInlinedObjectHeader(object_id); |
1466 | |
1467 | // Indicate this is an instance object. |
1468 | Write<int32_t>(SerializedHeaderData::encode(kInstanceObjectId)); |
1469 | |
1470 | // Write out the tags. |
1471 | WriteTags(tags); |
1472 | |
1473 | // Write out the class information for this object. |
1474 | WriteObjectImpl(cls, kAsInlinedObject); |
1475 | |
1476 | const auto unboxed_fields = |
1477 | isolate()->group()->shared_class_table()->GetUnboxedFieldsMapAt( |
1478 | cls->ptr()->id_); |
1479 | |
1480 | // Write out all the fields for the object. |
1481 | // Instance::NextFieldOffset() returns the offset of the first field in |
1482 | // a Dart object. |
    bool write_as_reference = !ObjectLayout::IsCanonical(tags);
1484 | |
1485 | intptr_t offset = Instance::NextFieldOffset(); |
1486 | while (offset < next_field_offset) { |
1487 | if (unboxed_fields.Get(offset / kWordSize)) { |
        // Write out the unboxed value 32 bits at a time.
1489 | const uword value = *reinterpret_cast<uword*>( |
1490 | reinterpret_cast<uword>(raw->ptr()) + offset); |
1491 | WriteWordWith32BitWrites(value); |
1492 | } else { |
1493 | ObjectPtr raw_obj = *reinterpret_cast<ObjectPtr*>( |
1494 | reinterpret_cast<uword>(raw->ptr()) + offset); |
1495 | WriteObjectImpl(raw_obj, write_as_reference); |
1496 | } |
1497 | offset += kWordSize; |
1498 | } |
1499 | } |
1500 | return; |
1501 | } |
1502 | |
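// Returns whether |library| is one of dart:core, dart:collection or
// dart:typed_data.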
1503 | bool SnapshotWriter::AllowObjectsInDartLibrary(LibraryPtr library) { |
1504 | return (library == object_store()->collection_library() || |
1505 | library == object_store()->core_library() || |
1506 | library == object_store()->typed_data_library()); |
1507 | } |
1508 | |
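// Performs a linear search of the VM isolate snapshot object table and
// returns the object id (biased by kMaxPredefinedObjectIds), or
// kInvalidIndex if |rawobj| is not part of the VM isolate snapshot.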
1509 | intptr_t SnapshotWriter::FindVmSnapshotObject(ObjectPtr rawobj) { |
1510 | intptr_t length = Object::vm_isolate_snapshot_object_table().Length(); |
1511 | for (intptr_t i = 0; i < length; i++) { |
1512 | if (Object::vm_isolate_snapshot_object_table().At(i) == rawobj) { |
1513 | return (i + kMaxPredefinedObjectIds); |
1514 | } |
1515 | } |
1516 | return kInvalidIndex; |
1517 | } |
1518 | |
1519 | void SnapshotWriter::ThrowException(Exceptions::ExceptionType type, |
1520 | const char* msg) { |
1521 | { |
1522 | NoSafepointScope no_safepoint; |
1523 | ErrorPtr error = thread()->StealStickyError(); |
1524 | ASSERT(error == Object::snapshot_writer_error().raw()); |
1525 | } |
1526 | |
1527 | if (msg != NULL) { |
1528 | const String& msg_obj = String::Handle(String::New(msg)); |
1529 | const Array& args = Array::Handle(Array::New(1)); |
1530 | args.SetAt(0, msg_obj); |
1531 | Exceptions::ThrowByType(type, args); |
1532 | } else { |
1533 | Exceptions::ThrowByType(type, Object::empty_array()); |
1534 | } |
1535 | UNREACHABLE(); |
1536 | } |
1537 | |
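// Writes the expected version string (fixed length, without a terminating
// NUL) followed by the features string including its terminating NUL, so a
// reader can reject snapshots produced by a mismatched VM.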
1538 | void SnapshotWriter::WriteVersionAndFeatures() { |
1539 | const char* expected_version = Version::SnapshotString(); |
1540 | ASSERT(expected_version != NULL); |
1541 | const intptr_t version_len = strlen(expected_version); |
1542 | WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len); |
1543 | |
1544 | const char* expected_features = |
1545 | Dart::FeaturesString(Isolate::Current(), false, kind_); |
1546 | ASSERT(expected_features != NULL); |
1547 | const intptr_t features_len = strlen(expected_features); |
1548 | WriteBytes(reinterpret_cast<const uint8_t*>(expected_features), |
1549 | features_len + 1); |
1550 | free(const_cast<char*>(expected_features)); |
1551 | } |
1552 | |
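// Visits a contiguous range of object pointers and writes each one, either
// as a reference or inline depending on |as_references_|.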
1553 | void SnapshotWriterVisitor::VisitPointers(ObjectPtr* first, ObjectPtr* last) { |
1554 | ASSERT(Utils::IsAligned(first, sizeof(*first))); |
1555 | ASSERT(Utils::IsAligned(last, sizeof(*last))); |
1556 | for (ObjectPtr* current = first; current <= last; current++) { |
1557 | ObjectPtr raw_obj = *current; |
1558 | writer_->WriteObjectImpl(raw_obj, as_references_); |
1559 | } |
1560 | } |
1561 | |
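// Message buffers are allocated on the C heap (rather than in a zone) so
// that they can outlive this writer.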
1562 | static uint8_t* malloc_allocator(uint8_t* ptr, |
1563 | intptr_t old_size, |
1564 | intptr_t new_size) { |
1565 | void* new_ptr = realloc(reinterpret_cast<void*>(ptr), new_size); |
1566 | return reinterpret_cast<uint8_t*>(new_ptr); |
1567 | } |
1568 | |
1569 | static void malloc_deallocator(uint8_t* ptr) { |
1570 | free(reinterpret_cast<void*>(ptr)); |
1571 | } |
1572 | |
1573 | MessageWriter::MessageWriter(bool can_send_any_object) |
1574 | : SnapshotWriter(Thread::Current(), |
1575 | Snapshot::kMessage, |
1576 | malloc_allocator, |
1577 | malloc_deallocator, |
1578 | kInitialSize, |
1579 | &forward_list_, |
1580 | can_send_any_object), |
1581 | forward_list_(thread(), kMaxPredefinedObjectIds), |
1582 | finalizable_data_(new MessageFinalizableData()) {} |
1583 | |
1584 | MessageWriter::~MessageWriter() { |
1585 | delete finalizable_data_; |
1586 | } |
1587 | |
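// Serializes |obj| into a new message. On failure the partially written
// buffer is freed and the recorded write exception is thrown; on success
// ownership of the finalizable data is transferred to the message.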
1588 | std::unique_ptr<Message> MessageWriter::WriteMessage( |
1589 | const Object& obj, |
1590 | Dart_Port dest_port, |
1591 | Message::Priority priority) { |
1592 | ASSERT(kind() == Snapshot::kMessage); |
1593 | ASSERT(isolate() != NULL); |
1594 | |
  // Set up a long jump in case there is an exception while writing
  // the message.
1597 | volatile bool has_exception = false; |
1598 | { |
1599 | LongJumpScope jump; |
1600 | if (setjmp(*jump.Set()) == 0) { |
1601 | NoSafepointScope no_safepoint; |
1602 | WriteObject(obj.raw()); |
1603 | } else { |
1604 | FreeBuffer(); |
1605 | has_exception = true; |
1606 | } |
1607 | } |
1608 | if (has_exception) { |
1609 | ThrowException(exception_type(), exception_msg()); |
1610 | } else { |
1611 | finalizable_data_->SerializationSucceeded(); |
1612 | } |
1613 | |
1614 | MessageFinalizableData* finalizable_data = finalizable_data_; |
1615 | finalizable_data_ = NULL; |
1616 | return Message::New(dest_port, buffer(), BytesWritten(), finalizable_data, |
1617 | priority); |
1618 | } |
1619 | |
1620 | } // namespace dart |
1621 | |