// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/backend/slot.h"

#include "vm/compiler/compiler_state.h"
#include "vm/hash_map.h"
#include "vm/parser.h"
#include "vm/scopes.h"

namespace dart {

// Canonicalization cache for Slot objects.
//
// This cache is attached to the CompilerState to ensure that we preserve
// identity of Slot objects during each individual compilation.
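//
// Typical use (a sketch mirroring the call sites below): construct a Slot
// value on the stack and pass it through the cache so that equal slots share
// a single zone allocation for the duration of the compilation:
//
//   const Slot& slot =
//       SlotCache::Instance(thread).Canonicalize(Slot(/* ... */));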
class SlotCache : public ZoneAllocated {
 public:
  // Returns an instance of SlotCache for the current compilation.
  static SlotCache& Instance(Thread* thread) {
    auto result = thread->compiler_state().slot_cache();
    if (result == nullptr) {
      result = new (thread->zone()) SlotCache(thread);
      thread->compiler_state().set_slot_cache(result);
    }
    return *result;
  }

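  // Returns a canonical Slot that compares equal to |value|. The first time a
  // given slot is seen a copy is allocated in the compilation zone and reused
  // for all subsequent requests.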
  const Slot& Canonicalize(const Slot& value) {
    auto result = fields_.LookupValue(&value);
    if (result == nullptr) {
      result = new (zone_) Slot(value);
      fields_.Insert(result);
    }
    return *result;
  }

 private:
  explicit SlotCache(Thread* thread)
      : zone_(thread->zone()), fields_(thread->zone()) {}

  Zone* const zone_;
  DirectChainedHashMap<PointerKeyValueTrait<const Slot> > fields_;
};

#define NATIVE_SLOT_NAME(C, F, id, M) Kind::k##C##_##F
#define NATIVE_TO_STR(C, F, id, M) #C "_" #F
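// Together with the NATIVE_CASE helpers below these macros turn every entry
// of NATIVE_SLOTS_LIST into a matching kind/name pair. For example, the
// entry describing Array.length expands to
//
//   case Kind::kArray_length: return "Array_length";
//
// inside KindToCString.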

const char* Slot::KindToCString(Kind k) {
  switch (k) {
#define NATIVE_CASE(C, U, F, id, M)                                           \
  case NATIVE_SLOT_NAME(C, F, id, M):                                         \
    return NATIVE_TO_STR(C, F, id, M);
    NATIVE_SLOTS_LIST(NATIVE_CASE)
#undef NATIVE_CASE
    case Kind::kTypeArguments:
      return "TypeArguments";
    case Kind::kCapturedVariable:
      return "CapturedVariable";
    case Kind::kDartField:
      return "DartField";
    default:
      UNREACHABLE();
      return nullptr;
  }
}

bool Slot::ParseKind(const char* str, Kind* out) {
  ASSERT(str != nullptr && out != nullptr);
#define NATIVE_CASE(C, U, F, id, M)                                           \
  if (strcmp(str, NATIVE_TO_STR(C, F, id, M)) == 0) {                         \
    *out = NATIVE_SLOT_NAME(C, F, id, M);                                     \
    return true;                                                              \
  }
  NATIVE_SLOTS_LIST(NATIVE_CASE)
#undef NATIVE_CASE
  if (strcmp(str, "TypeArguments") == 0) {
    *out = Kind::kTypeArguments;
    return true;
  }
  if (strcmp(str, "CapturedVariable") == 0) {
    *out = Kind::kCapturedVariable;
    return true;
  }
  if (strcmp(str, "DartField") == 0) {
    *out = Kind::kDartField;
    return true;
  }
  return false;
}

#undef NATIVE_TO_STR
#undef NATIVE_SLOT_NAME

const Slot& Slot::GetNativeSlot(Kind kind) {
  // There is a fixed statically known number of native slots, so we cache
  // them statically.
  static const Slot fields[] = {
#define FIELD_FINAL (IsImmutableBit::encode(true))
#define FIELD_VAR (0)
#define DEFINE_NATIVE_FIELD(ClassName, UnderlyingType, FieldName, cid,        \
                            mutability)                                       \
  Slot(Kind::k##ClassName##_##FieldName, FIELD_##mutability, k##cid##Cid,     \
       compiler::target::ClassName::FieldName##_offset(),                     \
       #ClassName "." #FieldName, nullptr),

      NATIVE_SLOTS_LIST(DEFINE_NATIVE_FIELD)

#undef DEFINE_NATIVE_FIELD
#undef FIELD_VAR
#undef FIELD_FINAL
  };

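  // Native slot kinds are the first values of the Kind enumeration and are
  // declared in the same order as NATIVE_SLOTS_LIST, which makes it possible
  // to use the kind itself as an index into the table above.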
  ASSERT(static_cast<uint8_t>(kind) < ARRAY_SIZE(fields));
  return fields[static_cast<uint8_t>(kind)];
}

// Note: should only be called with cids of array-like classes.
const Slot& Slot::GetLengthFieldForArrayCid(intptr_t array_cid) {
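  // Internal typed data, external typed data and typed data views all share
  // the length field declared on their common TypedDataBase layout.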
  if (IsExternalTypedDataClassId(array_cid) || IsTypedDataClassId(array_cid) ||
      IsTypedDataViewClassId(array_cid)) {
    return GetNativeSlot(Kind::kTypedDataBase_length);
  }
  switch (array_cid) {
    case kGrowableObjectArrayCid:
      return GetNativeSlot(Kind::kGrowableObjectArray_length);

    case kOneByteStringCid:
    case kTwoByteStringCid:
    case kExternalOneByteStringCid:
    case kExternalTwoByteStringCid:
      return GetNativeSlot(Kind::kString_length);

    case kArrayCid:
    case kImmutableArrayCid:
      return GetNativeSlot(Kind::kArray_length);

    default:
      UNREACHABLE();
      return GetNativeSlot(Kind::kArray_length);
  }
}

const Slot& Slot::GetTypeArgumentsSlotAt(Thread* thread, intptr_t offset) {
  ASSERT(offset != Class::kNoTypeArguments);
  return SlotCache::Instance(thread).Canonicalize(Slot(
      Kind::kTypeArguments, IsImmutableBit::encode(true), kTypeArgumentsCid,
      offset, ":type_arguments", /*static_type=*/nullptr));
}

const Slot& Slot::GetTypeArgumentsSlotFor(Thread* thread, const Class& cls) {
  return GetTypeArgumentsSlotAt(
      thread, compiler::target::Class::TypeArgumentsFieldOffset(cls));
}

const Slot& Slot::GetContextVariableSlotFor(Thread* thread,
                                            const LocalVariable& variable) {
  ASSERT(variable.is_captured());
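  // A captured final variable can only be treated as immutable if it is not
  // late: a late final variable is initialized or assigned after it becomes
  // accessible, so loads from its context slot can observe different values
  // over time.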
  return SlotCache::Instance(thread).Canonicalize(
      Slot(Kind::kCapturedVariable,
           IsImmutableBit::encode(variable.is_final() && !variable.is_late()) |
               IsNullableBit::encode(true),
           kDynamicCid,
           compiler::target::Context::variable_offset(variable.index().value()),
           &variable.name(), &variable.type()));
}

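// Returns a slot describing the type argument stored at the given index
// inside a TypeArguments object. All such slots share the ":argument" name
// and are distinguished only by their offset (see Slot::Equals).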
const Slot& Slot::GetTypeArgumentsIndexSlot(Thread* thread, intptr_t index) {
  const intptr_t offset =
      compiler::target::TypeArguments::type_at_offset(index);
  const Slot& slot =
      Slot(Kind::kTypeArgumentsIndex, IsImmutableBit::encode(true), kDynamicCid,
           offset, ":argument", /*static_type=*/nullptr);
  return SlotCache::Instance(thread).Canonicalize(slot);
}

const Slot& Slot::Get(const Field& field,
                      const ParsedFunction* parsed_function) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  intptr_t nullable_cid = kDynamicCid;
  bool is_nullable = true;

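  // Refine the default (kDynamicCid, nullable) classification using
  // recognized pragmas on the field, its static type, and the state collected
  // by field guards, in that order.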
  if (field.has_pragma()) {
    const intptr_t cid = MethodRecognizer::ResultCidFromPragma(field);
    if (cid != kDynamicCid) {
      nullable_cid = cid;
      is_nullable = false;
    } else if (MethodRecognizer::HasNonNullableResultTypeFromPragma(field)) {
      is_nullable = false;
    }
  }

  AbstractType& type = AbstractType::ZoneHandle(zone, field.type());
  if (type.IsStrictlyNonNullable()) {
    is_nullable = false;
  }

  bool used_guarded_state = false;
  if (field.guarded_cid() != kIllegalCid &&
      field.guarded_cid() != kDynamicCid) {
    // Use guarded state if it is more precise than what we already have.
    if (nullable_cid == kDynamicCid) {
      nullable_cid = field.guarded_cid();
      used_guarded_state = true;
    }

    if (is_nullable && !field.is_nullable()) {
      is_nullable = false;
      used_guarded_state = true;
    }
  }

  if (field.needs_load_guard()) {
    // Should be kept in sync with LoadStaticFieldInstr::ComputeType.
    type = Type::DynamicType();
    nullable_cid = kDynamicCid;
    is_nullable = true;
    used_guarded_state = false;
  }

  if (field.is_non_nullable_integer()) {
    is_nullable = false;
  }

  const Slot& slot = SlotCache::Instance(thread).Canonicalize(Slot(
      Kind::kDartField,
      IsImmutableBit::encode((field.is_final() && !field.is_late()) ||
                             field.is_const()) |
          IsNullableBit::encode(is_nullable) |
          IsGuardedBit::encode(used_guarded_state),
      nullable_cid, compiler::target::Field::OffsetOf(field), &field, &type));

  // If the properties of this slot were based on the guarded state, make sure
  // to add the field to the list of guarded fields. Note that during
  // background compilation we might have two field clones with incompatible
  // guarded state - however, both of these clones would correspond to the
  // same slot. That is why we check the is_guarded_field() property of the
  // slot rather than looking at the current guarded state of the field: the
  // guarded state might have been reset to kDynamicCid even though it was
  // something more concrete when the slot was created.
  // Note that we could have created this slot during an unsuccessful inlining
  // attempt where we built and discarded the graph; in that case the guarded
  // fields associated with that graph are also discarded. However, the slot
  // itself stays behind in the compilation-global cache, so we must always
  // try to add it to the list of guarded fields of the current function.
  if (slot.is_guarded_field()) {
    if (thread->isolate()->use_field_guards()) {
      ASSERT(parsed_function != nullptr);
      parsed_function->AddToGuardedFields(&slot.field());
    } else {
      // In precompiled mode we use the guarded_cid field for type information
      // inferred by TFA.
      ASSERT(CompilerState::Current().is_aot());
    }
  }

  return slot;
}

CompileType Slot::ComputeCompileType() const {
  return CompileType::CreateNullable(is_nullable(), nullable_cid());
}

const AbstractType& Slot::static_type() const {
  return static_type_ != nullptr ? *static_type_ : Object::null_abstract_type();
}

const char* Slot::Name() const {
  if (IsLocalVariable()) {
    return DataAs<const String>()->ToCString();
  } else if (IsDartField()) {
    return String::Handle(field().name()).ToCString();
  } else {
    return DataAs<const char>();
  }
}

bool Slot::Equals(const Slot* other) const {
  if (kind_ != other->kind_) {
    return false;
  }

  switch (kind_) {
    case Kind::kTypeArguments:
    case Kind::kTypeArgumentsIndex:
      return (offset_in_bytes_ == other->offset_in_bytes_);

    case Kind::kCapturedVariable:
      return (offset_in_bytes_ == other->offset_in_bytes_) &&
             (flags_ == other->flags_) &&
             (DataAs<const String>()->raw() ==
              other->DataAs<const String>()->raw());

    case Kind::kDartField:
      return (offset_in_bytes_ == other->offset_in_bytes_) &&
             other->DataAs<const Field>()->Original() ==
                 DataAs<const Field>()->Original();

    default:
      UNREACHABLE();
      return false;
  }
}

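// Note: must be kept in sync with Slot::Equals - slots that compare equal
// need to produce the same hash code.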
intptr_t Slot::Hashcode() const {
  intptr_t result = (static_cast<int8_t>(kind_) * 63 + offset_in_bytes_) * 31;
  if (IsDartField()) {
    result += String::Handle(DataAs<const Field>()->name()).Hash();
  } else if (IsLocalVariable()) {
    result += DataAs<const String>()->Hash();
  }
  return result;
}

}  // namespace dart