// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/aot/precompiler.h"

#include "platform/unicode.h"
#include "vm/class_finalizer.h"
#include "vm/code_patcher.h"
#include "vm/compiler/aot/aot_call_specializer.h"
#include "vm/compiler/aot/precompiler_tracer.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/backend/branch_optimizer.h"
#include "vm/compiler/backend/constant_propagator.h"
#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/backend/il_serializer.h"
#include "vm/compiler/backend/inliner.h"
#include "vm/compiler/backend/linearscan.h"
#include "vm/compiler/backend/range_analysis.h"
#include "vm/compiler/backend/redundancy_elimination.h"
#include "vm/compiler/backend/type_propagator.h"
#include "vm/compiler/cha.h"
#include "vm/compiler/compiler_pass.h"
#include "vm/compiler/compiler_state.h"
#include "vm/compiler/frontend/flow_graph_builder.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/dart_entry.h"
#include "vm/exceptions.h"
#include "vm/flags.h"
#include "vm/hash_table.h"
#include "vm/isolate.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/program_visitor.h"
#include "vm/regexp_assembler.h"
#include "vm/regexp_parser.h"
#include "vm/resolver.h"
#include "vm/runtime_entry.h"
#include "vm/symbols.h"
#include "vm/tags.h"
#include "vm/timeline.h"
#include "vm/timer.h"
#include "vm/type_table.h"
#include "vm/type_testing_stubs.h"
#include "vm/version.h"
#include "vm/zone_text_buffer.h"

namespace dart {

#define T (thread())
#define I (isolate())
#define Z (zone())

DEFINE_FLAG(bool, print_unique_targets, false, "Print unique dynamic targets");
DEFINE_FLAG(bool, print_gop, false, "Print global object pool");
DEFINE_FLAG(bool, trace_precompiler, false, "Trace precompiler.");
DEFINE_FLAG(
    int,
    max_speculative_inlining_attempts,
    1,
    "Max number of attempts with speculative inlining (precompilation only)");

DECLARE_FLAG(bool, print_flow_graph);
DECLARE_FLAG(bool, print_flow_graph_optimized);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(bool, trace_bailout);
DECLARE_FLAG(bool, huge_method_cutoff_in_code_size);
DECLARE_FLAG(bool, trace_failed_optimization_attempts);
DECLARE_FLAG(bool, trace_inlining_intervals);
DECLARE_FLAG(int, inlining_hotness);
DECLARE_FLAG(int, inlining_size_threshold);
DECLARE_FLAG(int, inlining_callee_size_threshold);
DECLARE_FLAG(int, inline_getters_setters_smaller_than);
DECLARE_FLAG(int, inlining_depth_threshold);
DECLARE_FLAG(int, inlining_caller_size_threshold);
DECLARE_FLAG(int, inlining_constant_arguments_max_size_threshold);
DECLARE_FLAG(int, inlining_constant_arguments_min_size_threshold);
DECLARE_FLAG(bool, print_instruction_stats);

DEFINE_FLAG(charp,
            serialize_flow_graphs_to,
            nullptr,
            "Serialize flow graphs to the given file");

DEFINE_FLAG(bool,
            populate_llvm_constant_pool,
            false,
            "Add constant pool entries from flow graphs to a special pool "
            "serialized in AOT snapshots (with --serialize_flow_graphs_to)");

Precompiler* Precompiler::singleton_ = nullptr;

#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

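// Helper that runs a single AOT compilation attempt (optimized or
// unoptimized) for one function: Compile() drives the given pipeline over
// parsed_function_ and returns whether code was successfully generated, and
// FinalizeCompilation() installs the generated code on the function.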
class PrecompileParsedFunctionHelper : public ValueObject {
 public:
  PrecompileParsedFunctionHelper(Precompiler* precompiler,
                                 ParsedFunction* parsed_function,
                                 bool optimized)
      : precompiler_(precompiler),
        parsed_function_(parsed_function),
        optimized_(optimized),
        thread_(Thread::Current()) {}

  bool Compile(CompilationPipeline* pipeline);

 private:
  ParsedFunction* parsed_function() const { return parsed_function_; }
  bool optimized() const { return optimized_; }
  Thread* thread() const { return thread_; }
  Isolate* isolate() const { return thread_->isolate(); }

  void FinalizeCompilation(compiler::Assembler* assembler,
                           FlowGraphCompiler* graph_compiler,
                           FlowGraph* flow_graph,
                           CodeStatistics* stats);

  Precompiler* precompiler_;
  ParsedFunction* parsed_function_;
  const bool optimized_;
  Thread* const thread_;

  DISALLOW_COPY_AND_ASSIGN(PrecompileParsedFunctionHelper);
};

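// Propagates an error out of the precompiler: the error becomes the thread's
// sticky error and control unwinds to the matching setjmp (see CompileAll
// below).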
static void Jump(const Error& error) {
  Thread::Current()->long_jump_base()->Jump(1, error);
}

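// Entry point for AOT compilation of a whole program. Any error signalled via
// Jump() during precompilation lands here and is returned as the sticky
// error.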
ErrorPtr Precompiler::CompileAll() {
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    Precompiler precompiler(Thread::Current());
    precompiler.DoCompileAll();
    return Error::null();
  } else {
    return Thread::Current()->StealStickyError();
  }
}

Precompiler::Precompiler(Thread* thread)
    : thread_(thread),
      zone_(NULL),
      isolate_(thread->isolate()),
      changed_(false),
      retain_root_library_caches_(false),
      function_count_(0),
      class_count_(0),
      selector_count_(0),
      dropped_function_count_(0),
      dropped_field_count_(0),
      dropped_class_count_(0),
      dropped_typearg_count_(0),
      dropped_type_count_(0),
      dropped_library_count_(0),
      libraries_(GrowableObjectArray::Handle(I->object_store()->libraries())),
      pending_functions_(
          GrowableObjectArray::Handle(GrowableObjectArray::New())),
      sent_selectors_(),
      seen_functions_(HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      possibly_retained_functions_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      fields_to_retain_(),
      functions_to_retain_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      classes_to_retain_(),
      typeargs_to_retain_(),
      types_to_retain_(),
      typeparams_to_retain_(),
      consts_to_retain_(),
      seen_table_selectors_(),
      error_(Error::Handle()),
      get_runtime_type_is_unique_(false),
      il_serialization_stream_(nullptr) {
  ASSERT(Precompiler::singleton_ == NULL);
  Precompiler::singleton_ = this;
}

Precompiler::~Precompiler() {
  // We have to call Release() in DEBUG mode.
  seen_functions_.Release();
  possibly_retained_functions_.Release();
  functions_to_retain_.Release();

  ASSERT(Precompiler::singleton_ == this);
  Precompiler::singleton_ = NULL;
}

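// Runs the full AOT pipeline: finalize the class hierarchy, precompile
// constructors to gather inlining data, trace and compile everything
// reachable from the roots until a fixed point is reached, and finally drop
// all unreachable functions, fields, types, classes and libraries before
// deduplicating the remaining program.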
void Precompiler::DoCompileAll() {
  {
    StackZone stack_zone(T);
    zone_ = stack_zone.GetZone();

    if (FLAG_use_bare_instructions) {
      // Since we keep the object pool until the end of AOT compilation, it
      // will hang on to its entries until the very end. Therefore we have
      // to use handles which survive that long, so we use [zone_] here.
      global_object_pool_builder_.InitializeWithZone(zone_);
    }

    {
      HANDLESCOPE(T);

      // Make sure class hierarchy is stable before compilation so that CHA
      // can be used. Also ensures lookup of entry points won't miss functions
      // because their class hasn't been finalized yet.
      FinalizeAllClasses();
      ASSERT(Error::Handle(Z, T->sticky_error()).IsNull());

      ClassFinalizer::SortClasses();

      // Collects type usage information which allows us to decide when/how to
      // optimize runtime type tests.
      TypeUsageInfo type_usage_info(T);

      // The cid-ranges of subclasses of a class are e.g. used for is/as checks
      // as well as other type checks.
      HierarchyInfo hierarchy_info(T);

      if (FLAG_use_bare_instructions && FLAG_use_table_dispatch) {
        dispatch_table_generator_ = new compiler::DispatchTableGenerator(Z);
        dispatch_table_generator_->Initialize(I->class_table());
      }

      // Precompile constructors to compute information such as
      // optimized instruction count (used in inlining heuristics).
      ClassFinalizer::ClearAllCode(
          /*including_nonchanging_cids=*/FLAG_use_bare_instructions);

      {
        CompilerState state(thread_, /*is_aot=*/true);
        PrecompileConstructors();
      }

      ClassFinalizer::ClearAllCode(
          /*including_nonchanging_cids=*/FLAG_use_bare_instructions);

      // After this point, it should be safe to serialize flow graphs produced
      // during compilation and add constants to the LLVM constant pool.
      //
      // Check that both the file open and write callbacks are available,
      // though we only use the latter during IL processing.
      if (FLAG_serialize_flow_graphs_to != nullptr &&
          Dart::file_write_callback() != nullptr) {
        if (auto file_open = Dart::file_open_callback()) {
          auto file = file_open(FLAG_serialize_flow_graphs_to, /*write=*/true);
          set_il_serialization_stream(file);
        }
        if (FLAG_populate_llvm_constant_pool) {
          auto const object_store = I->object_store();
          auto& llvm_constants = GrowableObjectArray::Handle(
              Z, GrowableObjectArray::New(16, Heap::kOld));
          auto& llvm_functions = GrowableObjectArray::Handle(
              Z, GrowableObjectArray::New(16, Heap::kOld));
          auto& llvm_constant_hash_table = Array::Handle(
              Z, HashTables::New<FlowGraphSerializer::LLVMPoolMap>(
                     16, Heap::kOld));
          object_store->set_llvm_constant_pool(llvm_constants);
          object_store->set_llvm_function_pool(llvm_functions);
          object_store->set_llvm_constant_hash_table(llvm_constant_hash_table);
        }
      }

      tracer_ = PrecompilerTracer::StartTracingIfRequested(this);

      // All stubs have already been generated, and they all share the same
      // pool. We use that pool to initialize our global object pool, to
      // guarantee that stubs as well as code compiled from here on will have
      // the same pool.
      if (FLAG_use_bare_instructions) {
        // We use an arbitrary stub here to get its object pool (all stubs
        // share the same object pool in bare instructions mode).
        const Code& code = StubCode::InterpretCall();
        const ObjectPool& stub_pool = ObjectPool::Handle(code.object_pool());

        global_object_pool_builder()->Reset();
        stub_pool.CopyInto(global_object_pool_builder());

        // We have various stubs we would like to generate inside the isolate,
        // to ensure the rest of the AOT compilation will use the
        // isolate-specific stubs (callable via pc-relative calls).
        auto& stub_code = Code::Handle();
#define DO(member, name)                                                       \
  stub_code = StubCode::BuildIsolateSpecific##name##Stub(                      \
      global_object_pool_builder());                                           \
  I->object_store()->set_##member(stub_code);
        OBJECT_STORE_STUB_CODE_LIST(DO)
#undef DO
        stub_code =
            StubCode::GetBuildMethodExtractorStub(global_object_pool_builder());
        I->object_store()->set_build_method_extractor_code(stub_code);
      }

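      // Compute the set of dynamic selectors with a unique target (see
      // CollectDynamicFunctionNames below); such sends can later be
      // devirtualized instead of going through dynamic dispatch.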
      CollectDynamicFunctionNames();

      // Start with the allocations and invocations that happen from C++.
      {
        TracingScope scope(this);
        AddRoots();
        AddAnnotatedRoots();
      }

      // With the nnbd experiment enabled, these non-nullable type arguments
      // may not be retained, although they will be used and expected to be
      // canonical.
      AddTypeArguments(
          TypeArguments::Handle(Z, I->object_store()->type_argument_int()));
      AddTypeArguments(
          TypeArguments::Handle(Z, I->object_store()->type_argument_double()));
      AddTypeArguments(
          TypeArguments::Handle(Z, I->object_store()->type_argument_string()));
      AddTypeArguments(TypeArguments::Handle(
          Z, I->object_store()->type_argument_string_dynamic()));
      AddTypeArguments(TypeArguments::Handle(
          Z, I->object_store()->type_argument_string_string()));

      // Compile newly found targets and add their callees until we reach a
      // fixed point.
      Iterate();

      // Replace the default type testing stubs installed on [Type]s with new
      // [Type]-specialized stubs.
      AttachOptimizedTypeTestingStub();

      if (FLAG_use_bare_instructions) {
        // Now we generate the actual object pool instance and attach it to
        // the object store. The AOT runtime will use it from there in the
        // enter dart code stub.
        const auto& pool = ObjectPool::Handle(
            ObjectPool::NewFromBuilder(*global_object_pool_builder()));
        I->object_store()->set_global_object_pool(pool);
        global_object_pool_builder()->Reset();

        if (FLAG_print_gop) {
          THR_Print("Global object pool:\n");
          pool.DebugPrint();
        }
      }

      if (FLAG_serialize_flow_graphs_to != nullptr &&
          Dart::file_write_callback() != nullptr) {
        if (auto file_close = Dart::file_close_callback()) {
          file_close(il_serialization_stream());
        }
        set_il_serialization_stream(nullptr);
        if (FLAG_populate_llvm_constant_pool) {
          // We don't want the Array backing for any mappings in the snapshot,
          // only the pools themselves.
          I->object_store()->set_llvm_constant_hash_table(Array::null_array());

          // Keep any functions, classes, etc. referenced from the LLVM pools,
          // even if they could have been dropped due to not being otherwise
          // needed at runtime.
          const auto& constant_pool = GrowableObjectArray::Handle(
              Z, I->object_store()->llvm_constant_pool());
          auto& object = Object::Handle(Z);
          for (intptr_t i = 0; i < constant_pool.Length(); i++) {
            object = constant_pool.At(i);
            if (object.IsNull()) continue;
            if (object.IsInstance()) {
              AddConstObject(Instance::Cast(object));
            } else if (object.IsField()) {
              AddField(Field::Cast(object));
            } else if (object.IsFunction()) {
              AddFunction(Function::Cast(object));
            }
          }

          const auto& function_pool = GrowableObjectArray::Handle(
              Z, I->object_store()->llvm_function_pool());
          auto& function = Function::Handle(Z);
          for (intptr_t i = 0; i < function_pool.Length(); i++) {
            function ^= function_pool.At(i);
            AddFunction(function);
          }
        }
      }

      if (tracer_ != nullptr) {
        tracer_->Finalize();
        tracer_ = nullptr;
      }

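      // Fix the final set of retained functions (seeded from
      // possibly_retained_functions_), then finalize the dispatch table and
      // rewrite pc-relative call entries to point at the retained code.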
      TraceForRetainedFunctions();
      FinalizeDispatchTable();
      ReplaceFunctionPCRelativeCallEntries();

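      // Drop everything that was not marked as retained above: functions and
      // fields first, then (after tracing the types used by the surviving
      // classes) types, type parameters and type arguments.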
      DropFunctions();
      DropFields();
      TraceTypesFromRetainedClasses();
      DropTypes();
      DropTypeParameters();
      DropTypeArguments();

      // Clear these before dropping classes, as they may hold onto otherwise
      // dead instances of classes we will remove, or onto otherwise unused
      // symbols.
      I->object_store()->set_unique_dynamic_targets(Array::null_array());
      Class& null_class = Class::Handle(Z);
      Function& null_function = Function::Handle(Z);
      Field& null_field = Field::Handle(Z);
      I->object_store()->set_pragma_class(null_class);
      I->object_store()->set_pragma_name(null_field);
      I->object_store()->set_pragma_options(null_field);
      I->object_store()->set_completer_class(null_class);
      I->object_store()->set_symbol_class(null_class);
      I->object_store()->set_compiletime_error_class(null_class);
      I->object_store()->set_growable_list_factory(null_function);
      I->object_store()->set_simple_instance_of_function(null_function);
      I->object_store()->set_simple_instance_of_true_function(null_function);
      I->object_store()->set_simple_instance_of_false_function(null_function);
      I->object_store()->set_async_set_thread_stack_trace(null_function);
      I->object_store()->set_async_star_move_next_helper(null_function);
      I->object_store()->set_complete_on_async_return(null_function);
      I->object_store()->set_async_star_stream_controller(null_class);
      I->object_store()->set_bytecode_attributes(Array::null_array());
      DropMetadata();
      DropLibraryEntries();
    }
    DropClasses();
    DropLibraries();

    Obfuscate();

#if defined(DEBUG)
    const auto& non_visited =
        Function::Handle(Z, FindUnvisitedRetainedFunction());
    if (!non_visited.IsNull()) {
      FATAL1("Code visitor would miss the code for function \"%s\"\n",
             non_visited.ToFullyQualifiedCString());
    }
#endif
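    // Deduplicate the remaining program: identical pieces of code and
    // metadata (e.g. stack maps and pc descriptors) are canonicalized and
    // shared.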
    ProgramVisitor::Dedup(T);

    zone_ = NULL;
  }

  intptr_t symbols_before = -1;
  intptr_t symbols_after = -1;
  intptr_t capacity = -1;
  if (FLAG_trace_precompiler) {
    Symbols::GetStats(I, &symbols_before, &capacity);
  }

  Symbols::Compact();

  if (FLAG_trace_precompiler) {
    Symbols::GetStats(I, &symbols_after, &capacity);
    THR_Print("Precompiled %" Pd " functions,", function_count_);
    THR_Print(" %" Pd " dynamic types,", class_count_);
    THR_Print(" %" Pd " dynamic selectors.\n", selector_count_);

    THR_Print("Dropped %" Pd " functions,", dropped_function_count_);
    THR_Print(" %" Pd " fields,", dropped_field_count_);
    THR_Print(" %" Pd " symbols,", symbols_before - symbols_after);
    THR_Print(" %" Pd " types,", dropped_type_count_);
    THR_Print(" %" Pd " type arguments,", dropped_typearg_count_);
    THR_Print(" %" Pd " classes,", dropped_class_count_);
    THR_Print(" %" Pd " libraries.\n", dropped_library_count_);
  }
}

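// Compiles every generative constructor in the program once, so that later
// inlining decisions have accurate optimized instruction counts to work with.
// The generated code is discarded again by the ClearAllCode call in
// DoCompileAll above.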
void Precompiler::PrecompileConstructors() {
  class ConstructorVisitor : public FunctionVisitor {
   public:
    explicit ConstructorVisitor(Precompiler* precompiler, Zone* zone)
        : precompiler_(precompiler), zone_(zone) {}
    void VisitFunction(const Function& function) {
      if (!function.IsGenerativeConstructor()) return;
      if (function.HasCode()) {
        // Const constructors may have been visited before. Recompile them
        // here to collect type information for their final fields as well.
        function.ClearCode();
      }
      if (FLAG_trace_precompiler) {
        THR_Print("Precompiling constructor %s\n", function.ToCString());
      }
      CompileFunction(precompiler_, Thread::Current(), zone_, function);
    }

   private:
    Precompiler* precompiler_;
    Zone* zone_;
  };

  phase_ = Phase::kCompilingConstructorsForInstructionCounts;
  HANDLESCOPE(T);
  ConstructorVisitor visitor(this, Z);
  ProgramVisitor::WalkProgram(Z, I, &visitor);
  phase_ = Phase::kPreparation;
}

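// Seeds the worklist with everything that can be reached directly from the
// embedder and runtime: the noSuchMethod and call selectors and the root
// library's main closure.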
void Precompiler::AddRoots() {
  // Note that <rootlibrary>.main is not a root. The appropriate main will be
  // discovered through _getMainClosure.

  AddSelector(Symbols::NoSuchMethod());

  AddSelector(Symbols::Call());  // For speed, not correctness.

  const Library& lib = Library::Handle(I->object_store()->root_library());
  if (lib.IsNull()) {
    const String& msg = String::Handle(
        Z, String::New("Cannot find root library in isolate.\n"));
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }

  const String& name = String::Handle(String::New("main"));
  const Object& main_closure = Object::Handle(lib.GetFunctionClosure(name));
  if (main_closure.IsClosure()) {
    if (lib.LookupLocalFunction(name) == Function::null()) {
      // Check whether the function is in the exported namespace of the
      // library; in this case we have to retain the root library caches.
      if (lib.LookupFunctionAllowPrivate(name) != Function::null() ||
          lib.LookupReExport(name) != Object::null()) {
        retain_root_library_caches_ = true;
      }
    }
    AddConstObject(Closure::Cast(main_closure));
  } else if (main_closure.IsError()) {
    const Error& error = Error::Cast(main_closure);
    String& msg =
        String::Handle(Z, String::NewFormatted("Cannot find main closure %s\n",
                                               error.ToErrorCString()));
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }
}

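// Works the compilation queue to a fixed point: compiling a pending function
// can discover new selectors, constants and allocated classes, which in turn
// can make more dynamic functions and callback fields reachable. The Add*
// helpers set changed_ whenever something new is found, so the loop only
// terminates once a full pass makes no progress.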
void Precompiler::Iterate() {
  Function& function = Function::Handle(Z);

  phase_ = Phase::kFixpointCodeGeneration;
  while (changed_) {
    changed_ = false;

    while (pending_functions_.Length() > 0) {
      function ^= pending_functions_.RemoveLast();
      ProcessFunction(function);
    }

    CheckForNewDynamicFunctions();
    CollectCallbackFields();
  }
  phase_ = Phase::kDone;
}

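// For every allocated class, finds instance fields whose declared type is a
// function type and whose name has been sent as a selector. Invoking such a
// field acts like a closure call, so an invoke-field dispatcher is created
// and enqueued for every allocated concrete subclass.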
void Precompiler::CollectCallbackFields() {
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Class& subcls = Class::Handle(Z);
  Array& fields = Array::Handle(Z);
  Field& field = Field::Handle(Z);
  Function& function = Function::Handle(Z);
  Function& dispatcher = Function::Handle(Z);
  Array& args_desc = Array::Handle(Z);
  AbstractType& field_type = AbstractType::Handle(Z);
  String& field_name = String::Handle(Z);
  GrowableArray<intptr_t> cids;

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      if (!cls.is_allocated()) continue;

      fields = cls.fields();
      for (intptr_t k = 0; k < fields.Length(); k++) {
        field ^= fields.At(k);
        if (field.is_static()) continue;
        field_type = field.type();
        if (!field_type.IsFunctionType()) continue;
        field_name = field.name();
        if (!IsSent(field_name)) continue;
        // Create arguments descriptor with fixed parameters from
        // signature of field_type.
        function = Type::Cast(field_type).signature();
        if (function.IsGeneric()) continue;
        if (function.HasOptionalParameters()) continue;
        if (FLAG_trace_precompiler) {
          THR_Print("Found callback field %s\n", field_name.ToCString());
        }

        // TODO(dartbug.com/33549): Update this code to use the size of the
        // parameters when supporting calls to non-static methods with
        // unboxed parameters.
        args_desc =
            ArgumentsDescriptor::NewBoxed(0,  // No type argument vector.
                                          function.num_fixed_parameters());
        cids.Clear();
        if (CHA::ConcreteSubclasses(cls, &cids)) {
          for (intptr_t j = 0; j < cids.length(); ++j) {
            subcls = I->class_table()->At(cids[j]);
            if (subcls.is_allocated()) {
              // Add dispatcher to cls.
              dispatcher = subcls.GetInvocationDispatcher(
                  field_name, args_desc, FunctionLayout::kInvokeFieldDispatcher,
                  /*create_if_absent=*/true);
              if (FLAG_trace_precompiler) {
                THR_Print("Added invoke-field-dispatcher for %s to %s\n",
                          field_name.ToCString(), subcls.ToCString());
              }
              AddFunction(dispatcher);
            }
          }
        }
      }
    }
  }
}

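// Compiles a single queued function and then scans the result for new
// callees. In bare instructions mode only the object pool entries added by
// this particular compilation (i.e. from gop_offset onwards) need scanning.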
void Precompiler::ProcessFunction(const Function& function) {
  const intptr_t gop_offset =
      FLAG_use_bare_instructions ? global_object_pool_builder()->CurrentLength()
                                 : 0;
  RELEASE_ASSERT(!function.HasCode());

  TracingScope tracing_scope(this);
  function_count_++;

  if (FLAG_trace_precompiler) {
    THR_Print("Precompiling %" Pd " %s (%s, %s)\n", function_count_,
              function.ToLibNamePrefixedQualifiedCString(),
              function.token_pos().ToCString(),
              Function::KindToCString(function.kind()));
  }

  ASSERT(!function.is_abstract());
  ASSERT(!function.IsRedirectingFactory());

  error_ = CompileFunction(this, thread_, zone_, function);
  if (!error_.IsNull()) {
    Jump(error_);
  }
  // Used in the JIT to save type-feedback across compilations.
  function.ClearICDataArray();
  AddCalleesOf(function, gop_offset);
}

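// Walks everything the freshly compiled code can reach: static call targets,
// allocation stubs, object pool entries and inlined functions, and records
// the functions, classes, selectors and constants found there.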
void Precompiler::AddCalleesOf(const Function& function, intptr_t gop_offset) {
  ASSERT(function.HasCode());

  const Code& code = Code::Handle(Z, function.CurrentCode());

  Object& entry = Object::Handle(Z);
  Class& cls = Class::Handle(Z);
  Function& target = Function::Handle(Z);

  const Array& table = Array::Handle(Z, code.static_calls_target_table());
  StaticCallsTable static_calls(table);
  for (auto& view : static_calls) {
    entry = view.Get<Code::kSCallTableFunctionTarget>();
    if (entry.IsFunction()) {
      AddFunction(Function::Cast(entry), FLAG_retain_function_objects);
      ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null());
      continue;
    }
    entry = view.Get<Code::kSCallTableCodeOrTypeTarget>();
    if (entry.IsCode() && Code::Cast(entry).IsAllocationStubCode()) {
      cls ^= Code::Cast(entry).owner();
      AddInstantiatedClass(cls);
    }
  }

#if defined(TARGET_ARCH_IA32)
  FATAL("Callee scanning unimplemented for IA32");
#endif

  String& selector = String::Handle(Z);
  // When tracing we want to scan the object pool attached to the code object
  // rather than scanning the global object pool - because we want to include
  // *all* outgoing references in the trace. Scanning the GOP would exclude
  // references that have been deduplicated.
  if (FLAG_use_bare_instructions && !is_tracing()) {
    for (intptr_t i = gop_offset;
         i < global_object_pool_builder()->CurrentLength(); i++) {
      const auto& wrapper_entry = global_object_pool_builder()->EntryAt(i);
      if (wrapper_entry.type() ==
          compiler::ObjectPoolBuilderEntry::kTaggedObject) {
        const auto& entry = *wrapper_entry.obj_;
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  } else {
    const auto& pool = ObjectPool::Handle(Z, code.object_pool());
    auto& entry = Object::Handle(Z);
    for (intptr_t i = 0; i < pool.Length(); i++) {
      if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
        entry = pool.ObjectAt(i);
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  }

  const Array& inlined_functions =
      Array::Handle(Z, code.inlined_id_to_function());
  for (intptr_t i = 0; i < inlined_functions.Length(); i++) {
    target ^= inlined_functions.At(i);
    AddTypesOf(target);
  }
}

static bool IsPotentialClosureCall(const String& selector) {
  return selector.raw() == Symbols::Call().raw() ||
         selector.raw() == Symbols::DynamicCall().raw();
}

void Precompiler::AddCalleesOfHelper(const Object& entry,
                                     String* temp_selector,
                                     Class* temp_cls) {
  if (entry.IsUnlinkedCall()) {
    const auto& call_site = UnlinkedCall::Cast(entry);
    // A dynamic call.
    *temp_selector = call_site.target_name();
    AddSelector(*temp_selector);
    if (IsPotentialClosureCall(*temp_selector)) {
      const Array& arguments_descriptor =
          Array::Handle(Z, call_site.arguments_descriptor());
      AddClosureCall(*temp_selector, arguments_descriptor);
    }
  } else if (entry.IsMegamorphicCache()) {
    // A dynamic call.
    const auto& cache = MegamorphicCache::Cast(entry);
    *temp_selector = cache.target_name();
    AddSelector(*temp_selector);
    if (IsPotentialClosureCall(*temp_selector)) {
      const Array& arguments_descriptor =
          Array::Handle(Z, cache.arguments_descriptor());
      AddClosureCall(*temp_selector, arguments_descriptor);
    }
  } else if (entry.IsField()) {
    // Potential need for field initializer.
    const auto& field = Field::Cast(entry);
    AddField(field);
  } else if (entry.IsInstance()) {
    // Const object, literal or args descriptor.
    const auto& instance = Instance::Cast(entry);
    AddConstObject(instance);
  } else if (entry.IsFunction()) {
    // Local closure function.
    const auto& target = Function::Cast(entry);
    AddFunction(target);
  } else if (entry.IsCode()) {
    const auto& target_code = Code::Cast(entry);
    if (target_code.IsAllocationStubCode()) {
      *temp_cls ^= target_code.owner();
      AddInstantiatedClass(*temp_cls);
    }
  }
}

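// Marks a class as retained and retains the types its declaration refers to:
// implemented interfaces, type parameters, the super type and, for typedef
// classes, the signature function.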
void Precompiler::AddTypesOf(const Class& cls) {
  if (cls.IsNull()) return;
  if (classes_to_retain_.HasKey(&cls)) return;
  classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.raw()));

  Array& interfaces = Array::Handle(Z, cls.interfaces());
  AbstractType& type = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < interfaces.Length(); i++) {
    type ^= interfaces.At(i);
    AddType(type);
  }

  AddTypeArguments(TypeArguments::Handle(Z, cls.type_parameters()));

  type = cls.super_type();
  AddType(type);

  if (cls.IsTypedefClass()) {
    AddTypesOf(Function::Handle(Z, cls.signature_function()));
  }
}

void Precompiler::AddTypesOf(const Function& function) {
  if (function.IsNull()) return;
  if (functions_to_retain_.ContainsKey(function)) return;
  // We don't expect to see a reference to a redirecting factory. Only its
  // target should remain.
  ASSERT(!function.IsRedirectingFactory());
  functions_to_retain_.Insert(function);

  AddTypeArguments(TypeArguments::Handle(Z, function.type_parameters()));

  AbstractType& type = AbstractType::Handle(Z);
  type = function.result_type();
  AddType(type);
  for (intptr_t i = 0; i < function.NumParameters(); i++) {
    type = function.ParameterTypeAt(i);
    AddType(type);
  }
  Code& code = Code::Handle(Z, function.CurrentCode());
  if (code.IsNull()) {
    ASSERT(function.kind() == FunctionLayout::kSignatureFunction);
  } else {
    const ExceptionHandlers& handlers =
        ExceptionHandlers::Handle(Z, code.exception_handlers());
    if (!handlers.IsNull()) {
      Array& types = Array::Handle(Z);
      for (intptr_t i = 0; i < handlers.num_entries(); i++) {
        types = handlers.GetHandledTypes(i);
        for (intptr_t j = 0; j < types.Length(); j++) {
          type ^= types.At(j);
          AddType(type);
        }
      }
    }
  }
  // A function can always be inlined and have only a nested local function
  // remain.
  const Function& parent = Function::Handle(Z, function.parent_function());
  if (!parent.IsNull()) {
    AddTypesOf(parent);
  }
  if (function.IsSignatureFunction() || function.IsClosureFunction()) {
    type = function.ExistingSignatureType();
    if (!type.IsNull()) {
      AddType(type);
    }
  }
  // A class may have all functions inlined except a local function.
  const Class& owner = Class::Handle(Z, function.Owner());
  AddTypesOf(owner);
}

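// Retains a type and everything reachable from it. Type parameters keep their
// bound and their parameterizing function or class; Type objects keep their
// class, type arguments and (for function types) their signature; TypeRefs
// are followed to the type they refer to.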
void Precompiler::AddType(const AbstractType& abstype) {
  if (abstype.IsNull()) return;

  if (abstype.IsTypeParameter()) {
    if (typeparams_to_retain_.HasKey(&TypeParameter::Cast(abstype))) return;
    typeparams_to_retain_.Insert(
        &TypeParameter::ZoneHandle(Z, TypeParameter::Cast(abstype).raw()));

    const AbstractType& type =
        AbstractType::Handle(Z, TypeParameter::Cast(abstype).bound());
    AddType(type);
    const auto& function = Function::Handle(
        Z, TypeParameter::Cast(abstype).parameterized_function());
    AddTypesOf(function);
    const Class& cls =
        Class::Handle(Z, TypeParameter::Cast(abstype).parameterized_class());
    AddTypesOf(cls);
    return;
  }

  if (types_to_retain_.HasKey(&abstype)) return;
  types_to_retain_.Insert(&AbstractType::ZoneHandle(Z, abstype.raw()));

  if (abstype.IsType()) {
    const Type& type = Type::Cast(abstype);
    const Class& cls = Class::Handle(Z, type.type_class());
    AddTypesOf(cls);
    const TypeArguments& vector = TypeArguments::Handle(Z, abstype.arguments());
    AddTypeArguments(vector);
    if (type.IsFunctionType()) {
      const Function& func = Function::Handle(Z, type.signature());
      AddTypesOf(func);
    }
  } else if (abstype.IsTypeRef()) {
    AbstractType& type = AbstractType::Handle(Z);
    type = TypeRef::Cast(abstype).type();
    AddType(type);
  }
}

void Precompiler::AddTypeArguments(const TypeArguments& args) {
  if (args.IsNull()) return;

  if (typeargs_to_retain_.HasKey(&args)) return;
  typeargs_to_retain_.Insert(&TypeArguments::ZoneHandle(Z, args.raw()));

  AbstractType& arg = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < args.Length(); i++) {
    arg = args.TypeAt(i);
    AddType(arg);
  }
}

void Precompiler::AddConstObject(const class Instance& instance) {
  // Types, type parameters, and type arguments require special handling.
  if (instance.IsAbstractType()) {  // Includes type parameter.
    AddType(AbstractType::Cast(instance));
    return;
  } else if (instance.IsTypeArguments()) {
    AddTypeArguments(TypeArguments::Cast(instance));
    return;
  }

  if (instance.raw() == Object::sentinel().raw() ||
      instance.raw() == Object::transition_sentinel().raw()) {
    return;
  }

  Class& cls = Class::Handle(Z, instance.clazz());
  AddInstantiatedClass(cls);

  if (instance.IsClosure()) {
    // An implicit static closure.
    const Function& func =
        Function::Handle(Z, Closure::Cast(instance).function());
    ASSERT(func.is_static());
    AddFunction(func);
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).instantiator_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).function_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).delayed_type_arguments()));
    return;
  }

  if (instance.IsLibraryPrefix()) {
    const LibraryPrefix& prefix = LibraryPrefix::Cast(instance);
    ASSERT(prefix.is_deferred_load());
    const Library& target = Library::Handle(Z, prefix.GetLibrary(0));
    cls = target.toplevel_class();
    if (!classes_to_retain_.HasKey(&cls)) {
      classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.raw()));
    }
    return;
  }

  // Can't ask immediate objects if they're canonical.
  if (instance.IsSmi()) return;

  // Some Instances in the ObjectPool aren't const objects, such as
  // argument descriptors.
  if (!instance.IsCanonical()) return;

  // Constants are canonicalized and we avoid repeated processing of them.
  if (consts_to_retain_.HasKey(&instance)) return;

  consts_to_retain_.Insert(&Instance::ZoneHandle(Z, instance.raw()));

  if (cls.NumTypeArguments() > 0) {
    AddTypeArguments(TypeArguments::Handle(Z, instance.GetTypeArguments()));
  }

  class ConstObjectVisitor : public ObjectPointerVisitor {
   public:
    ConstObjectVisitor(Precompiler* precompiler, Isolate* isolate)
        : ObjectPointerVisitor(isolate->group()),
          precompiler_(precompiler),
          subinstance_(Object::Handle()) {}

    virtual void VisitPointers(ObjectPtr* first, ObjectPtr* last) {
      for (ObjectPtr* current = first; current <= last; current++) {
        subinstance_ = *current;
        if (subinstance_.IsInstance()) {
          precompiler_->AddConstObject(Instance::Cast(subinstance_));
        }
      }
      subinstance_ = Object::null();
    }

   private:
    Precompiler* precompiler_;
    Object& subinstance_;
  };

  ConstObjectVisitor visitor(this, I);
  instance.raw()->ptr()->VisitPointers(&visitor);
}

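// A call site using a closure-call selector can end up invoking any closure
// with a matching shape, so retain the invoke-field dispatcher with the given
// arguments descriptor on the _Closure class.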
void Precompiler::AddClosureCall(const String& call_selector,
                                 const Array& arguments_descriptor) {
  const Class& cache_class =
      Class::Handle(Z, I->object_store()->closure_class());
  const Function& dispatcher =
      Function::Handle(Z, cache_class.GetInvocationDispatcher(
                              call_selector, arguments_descriptor,
                              FunctionLayout::kInvokeFieldDispatcher,
                              /*create_if_absent=*/true));
  AddFunction(dispatcher);
}

void Precompiler::AddField(const Field& field) {
  if (is_tracing()) {
    tracer_->WriteFieldRef(field);
  }

  if (fields_to_retain_.HasKey(&field)) return;

  fields_to_retain_.Insert(&Field::ZoneHandle(Z, field.raw()));

  if (field.is_static()) {
    const Object& value = Object::Handle(Z, field.StaticValue());
    // Should not be in the middle of initialization while precompiling.
    ASSERT(value.raw() != Object::transition_sentinel().raw());

    if (value.raw() != Object::sentinel().raw() &&
        value.raw() != Object::null()) {
      ASSERT(value.IsInstance());
      AddConstObject(Instance::Cast(value));
    }
  }

  if (field.has_nontrivial_initializer() &&
      (field.is_static() || field.is_late())) {
    const Function& initializer =
        Function::ZoneHandle(Z, field.EnsureInitializerFunction());
    AddFunction(initializer);
  }
}

bool Precompiler::MustRetainFunction(const Function& function) {
  // There are some cases where we must retain a function even if there is no
  // directly observable need for its function object at runtime. Here, we
  // check for such cases where the function is not marked with the
  // vm:entry-point pragma (which also forces retention):
  //
  // * Native functions (for LinkNativeCall)
  // * Selector matches a symbol used in Resolver::ResolveDynamic calls
  //   in dart_entry.cc or dart_api_impl.cc.
  // * _Closure.call (used in async stack handling)
  if (function.is_native()) return true;

  // Resolver::ResolveDynamic uses.
  const auto& selector = String::Handle(Z, function.name());
  if (selector.raw() == Symbols::toString().raw()) return true;
  if (selector.raw() == Symbols::AssignIndexToken().raw()) return true;
  if (selector.raw() == Symbols::IndexToken().raw()) return true;
  if (selector.raw() == Symbols::hashCode().raw()) return true;
  if (selector.raw() == Symbols::NoSuchMethod().raw()) return true;
  if (selector.raw() == Symbols::EqualOperator().raw()) return true;

  // Use the same check for _Closure.call as in stack_trace.{h|cc}.
  if (selector.raw() == Symbols::Call().raw()) {
    const auto& name = String::Handle(Z, function.QualifiedScrubbedName());
    if (name.Equals(Symbols::_ClosureCall())) return true;
  }

  // We have to retain functions which can be a target of a SwitchableCall
  // at AOT runtime, since the AOT runtime needs to be able to find the
  // function object in the class.
  if (function.NeedsMonomorphicCheckedEntry(Z) ||
      Function::IsDynamicInvocationForwarderName(function.name())) {
    return true;
  }

  return false;
}

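// Records that a function is reachable and enqueues it for compilation (at
// most once). If `retain` is set, or MustRetainFunction() above demands it,
// the function object itself is also marked as possibly retained in the
// snapshot.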
void Precompiler::AddFunction(const Function& function, bool retain) {
  if (is_tracing()) {
    tracer_->WriteFunctionRef(function);
  }

  if (possibly_retained_functions_.ContainsKey(function)) return;
  if (retain || MustRetainFunction(function)) {
    possibly_retained_functions_.Insert(function);
  }

  if (seen_functions_.ContainsKey(function)) return;
  seen_functions_.Insert(function);
  pending_functions_.Add(function);
  changed_ = true;
}

bool Precompiler::IsSent(const String& selector) {
  if (selector.IsNull()) {
    return false;
  }
  return sent_selectors_.HasKey(&selector);
}

void Precompiler::AddSelector(const String& selector) {
  if (is_tracing()) {
    tracer_->WriteSelectorRef(selector);
  }

  ASSERT(!selector.IsNull());
  if (!IsSent(selector)) {
    sent_selectors_.Insert(&String::ZoneHandle(Z, selector.raw()));
    selector_count_++;
    changed_ = true;

    if (FLAG_trace_precompiler) {
      THR_Print("Enqueueing selector %" Pd " %s\n", selector_count_,
                selector.ToCString());
    }
  }
}

void Precompiler::AddTableSelector(const compiler::TableSelector* selector) {
  ASSERT(FLAG_use_bare_instructions && FLAG_use_table_dispatch);

  if (is_tracing()) {
    tracer_->WriteTableSelectorRef(selector->id);
  }

  if (seen_table_selectors_.HasKey(selector->id)) return;

  seen_table_selectors_.Insert(selector->id);
  changed_ = true;
}

bool Precompiler::IsHitByTableSelector(const Function& function) {
  if (!(FLAG_use_bare_instructions && FLAG_use_table_dispatch)) {
    return false;
  }

  const int32_t selector_id = selector_map()->SelectorId(function);
  if (selector_id == compiler::SelectorMap::kInvalidSelectorId) return false;
  return seen_table_selectors_.HasKey(selector_id);
}

void Precompiler::AddInstantiatedClass(const Class& cls) {
  if (is_tracing()) {
    tracer_->WriteClassInstantiationRef(cls);
  }

  if (cls.is_allocated()) return;

  class_count_++;
  cls.set_is_allocated(true);
  error_ = cls.EnsureIsFinalized(T);
  if (!error_.IsNull()) {
    Jump(error_);
  }

  changed_ = true;

  if (FLAG_trace_precompiler) {
    THR_Print("Allocation %" Pd " %s\n", class_count_, cls.ToCString());
  }

  const Class& superclass = Class::Handle(cls.SuperClass());
  if (!superclass.IsNull()) {
    AddInstantiatedClass(superclass);
  }
}

// Adds all values annotated with @pragma('vm:entry-point') as roots.
void Precompiler::AddAnnotatedRoots() {
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& members = Array::Handle(Z);
  auto& function = Function::Handle(Z);
  auto& function2 = Function::Handle(Z);
  auto& field = Field::Handle(Z);
  auto& metadata = Array::Handle(Z);
  auto& reusable_object_handle = Object::Handle(Z);
  auto& reusable_field_handle = Field::Handle(Z);

  // Lists of fields which need implicit getter/setter/static final getter
  // added.
  auto& implicit_getters = GrowableObjectArray::Handle(Z);
  auto& implicit_setters = GrowableObjectArray::Handle(Z);
  auto& implicit_static_getters = GrowableObjectArray::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      // Check for @pragma on the class itself.
      if (cls.has_pragma()) {
        metadata ^= lib.GetMetadata(cls);
        if (FindEntryPointPragma(isolate(), metadata, &reusable_field_handle,
                                 &reusable_object_handle) ==
            EntryPointPragma::kAlways) {
          AddInstantiatedClass(cls);
        }
      }

      // Check for @pragma on any fields in the class.
      members = cls.fields();
      implicit_getters = GrowableObjectArray::New(members.Length());
      implicit_setters = GrowableObjectArray::New(members.Length());
      implicit_static_getters = GrowableObjectArray::New(members.Length());
      for (intptr_t k = 0; k < members.Length(); ++k) {
        field ^= members.At(k);
        if (field.has_pragma()) {
          metadata ^= lib.GetMetadata(field);
          if (metadata.IsNull()) continue;
          EntryPointPragma pragma =
              FindEntryPointPragma(isolate(), metadata, &reusable_field_handle,
                                   &reusable_object_handle);
          if (pragma == EntryPointPragma::kNever) continue;

          AddField(field);

          if (!field.is_static()) {
            if (pragma != EntryPointPragma::kSetterOnly) {
              implicit_getters.Add(field);
            }
            if (pragma != EntryPointPragma::kGetterOnly) {
              implicit_setters.Add(field);
            }
          } else {
            implicit_static_getters.Add(field);
          }
        }
      }

      // Check for @pragma on any functions in the class.
      members = cls.functions();
      for (intptr_t k = 0; k < members.Length(); k++) {
        function ^= members.At(k);
        if (function.has_pragma()) {
          metadata ^= lib.GetMetadata(function);
          if (metadata.IsNull()) continue;
          auto type =
              FindEntryPointPragma(isolate(), metadata, &reusable_field_handle,
                                   &reusable_object_handle);

          if (type == EntryPointPragma::kAlways ||
              type == EntryPointPragma::kCallOnly) {
            AddFunction(function);
          }

          if ((type == EntryPointPragma::kAlways ||
               type == EntryPointPragma::kGetterOnly) &&
              function.kind() != FunctionLayout::kConstructor &&
              !function.IsSetterFunction()) {
            function2 = function.ImplicitClosureFunction();
            AddFunction(function2);
          }

          if (function.IsGenerativeConstructor()) {
            AddInstantiatedClass(cls);
          }
        }
        if (function.kind() == FunctionLayout::kImplicitGetter &&
            !implicit_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_getters.Length(); ++i) {
            field ^= implicit_getters.At(i);
            if (function.accessor_field() == field.raw()) {
              AddFunction(function);
            }
          }
        }
        if (function.kind() == FunctionLayout::kImplicitSetter &&
            !implicit_setters.IsNull()) {
          for (intptr_t i = 0; i < implicit_setters.Length(); ++i) {
            field ^= implicit_setters.At(i);
            if (function.accessor_field() == field.raw()) {
              AddFunction(function);
            }
          }
        }
        if (function.kind() == FunctionLayout::kImplicitStaticGetter &&
            !implicit_static_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_static_getters.Length(); ++i) {
            field ^= implicit_static_getters.At(i);
            if (function.accessor_field() == field.raw()) {
              AddFunction(function);
            }
          }
        }
      }

      implicit_getters = GrowableObjectArray::null();
      implicit_setters = GrowableObjectArray::null();
      implicit_static_getters = GrowableObjectArray::null();
    }
  }
}

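// Rescans all instance functions of allocated classes and enqueues those that
// have become reachable since the last pass. Besides plain selector sends
// this also covers call-through-getter, tear-off (closurization) and dyn:
// invocation-forwarder variants of each name; it runs repeatedly as part of
// the fixed point in Iterate().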
void Precompiler::CheckForNewDynamicFunctions() {
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);
  Function& function2 = Function::Handle(Z);
  String& selector = String::Handle(Z);
  String& selector2 = String::Handle(Z);
  String& selector3 = String::Handle(Z);
  Field& field = Field::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      if (!cls.is_allocated()) continue;

      functions = cls.functions();
      for (intptr_t k = 0; k < functions.Length(); k++) {
        function ^= functions.At(k);

        if (function.is_static() || function.is_abstract()) continue;

        // Don't bail out early if there is already code, because we may
        // discover that the corresponding getter selector is sent in some
        // later iteration.
        // if (function.HasCode()) continue;

        selector = function.name();
        if (IsSent(selector)) {
          AddFunction(function);
        }
        if (IsHitByTableSelector(function)) {
          AddFunction(function, FLAG_retain_function_objects);
        }

        bool found_metadata = false;
        kernel::ProcedureAttributesMetadata metadata;

        // Handle the implicit call type conversions.
        if (Field::IsGetterName(selector) &&
            (function.kind() != FunctionLayout::kMethodExtractor)) {
          // Call-through-getter.
          // Function is get:foo and somewhere foo (or dyn:foo) is called.
          // Note that we need to skip method extractors (which were
          // potentially created by the DispatchTableGenerator): a call of foo
          // will never hit the method extractor get:foo, because it will hit
          // an existing method foo first.
          selector2 = Field::NameFromGetter(selector);
          if (IsSent(selector2)) {
            AddFunction(function);
          }
          selector2 = Function::CreateDynamicInvocationForwarderName(selector2);
          if (IsSent(selector2)) {
            selector2 =
                Function::CreateDynamicInvocationForwarderName(selector);
            function2 = function.GetDynamicInvocationForwarder(selector2);
            AddFunction(function2);
          }
        } else if (function.kind() == FunctionLayout::kRegularFunction) {
          selector2 = Field::LookupGetterSymbol(selector);
          selector3 = String::null();
          if (!selector2.IsNull()) {
            selector3 =
                Function::CreateDynamicInvocationForwarderName(selector2);
          }
          if (IsSent(selector2) || IsSent(selector3)) {
            metadata = kernel::ProcedureAttributesOf(function, Z);
            found_metadata = true;

            if (metadata.has_tearoff_uses) {
              // Closurization.
              // Function is foo and somewhere get:foo is called.
              function2 = function.ImplicitClosureFunction();
              AddFunction(function2);

              // Add the corresponding method extractor.
              function2 = function.GetMethodExtractor(selector2);
              AddFunction(function2);
            }
          }
        }

        const bool is_getter =
            function.kind() == FunctionLayout::kImplicitGetter ||
            function.kind() == FunctionLayout::kGetterFunction;
        const bool is_setter =
            function.kind() == FunctionLayout::kImplicitSetter ||
            function.kind() == FunctionLayout::kSetterFunction;
        const bool is_regular =
            function.kind() == FunctionLayout::kRegularFunction;
        if (is_getter || is_setter || is_regular) {
          selector2 = Function::CreateDynamicInvocationForwarderName(selector);
          if (IsSent(selector2)) {
            if (function.kind() == FunctionLayout::kImplicitGetter ||
                function.kind() == FunctionLayout::kImplicitSetter) {
              field = function.accessor_field();
              metadata = kernel::ProcedureAttributesOf(field, Z);
            } else if (!found_metadata) {
              metadata = kernel::ProcedureAttributesOf(function, Z);
            }

            if (is_getter) {
              if (metadata.getter_called_dynamically) {
                function2 = function.GetDynamicInvocationForwarder(selector2);
                AddFunction(function2);
              }
            } else {
              if (metadata.method_or_setter_called_dynamically) {
                function2 = function.GetDynamicInvocationForwarder(selector2);
                AddFunction(function2);
              }
            }
          }
        }
      }
    }
  }
}

class NameFunctionsTraits {
 public:
  static const char* Name() { return "NameFunctionsTraits"; }
  static bool ReportStats() { return false; }

  static bool IsMatch(const Object& a, const Object& b) {
    return a.IsString() && b.IsString() &&
           String::Cast(a).Equals(String::Cast(b));
  }
  static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
  static ObjectPtr NewKey(const String& str) { return str.raw(); }
};

typedef UnorderedHashMap<NameFunctionsTraits> Table;

static void AddNameToFunctionsTable(Zone* zone,
                                    Table* table,
                                    const String& fname,
                                    const Function& function) {
  Array& farray = Array::Handle(zone);
  farray ^= table->InsertNewOrGetValue(fname, Array::empty_array());
  farray = Array::Grow(farray, farray.Length() + 1);
  farray.SetAt(farray.Length() - 1, function);
  table->UpdateValue(fname, farray);
}

static void AddNamesToFunctionsTable(Zone* zone,
                                     Table* table,
                                     const String& fname,
                                     const Function& function,
                                     String* mangled_name,
                                     Function* dyn_function) {
  AddNameToFunctionsTable(zone, table, fname, function);

  *dyn_function = function.raw();
  if (kernel::NeedsDynamicInvocationForwarder(function)) {
    *mangled_name = function.name();
    *mangled_name =
        Function::CreateDynamicInvocationForwarderName(*mangled_name);
    *dyn_function = function.GetDynamicInvocationForwarder(*mangled_name,
                                                           /*allow_add=*/true);
  }
  *mangled_name = Function::CreateDynamicInvocationForwarderName(fname);
  AddNameToFunctionsTable(zone, table, *mangled_name, *dyn_function);
}

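// Builds a table mapping every dynamically invocable name (including getter
// and dyn: forwarder variants) to the functions that answer to it. Names with
// exactly one plausible target are collected as unique dynamic targets, which
// allows such sends to be devirtualized later.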
1438 | void Precompiler::CollectDynamicFunctionNames() { |
1439 | if (!FLAG_collect_dynamic_function_names) { |
1440 | return; |
1441 | } |
1442 | auto& lib = Library::Handle(Z); |
1443 | auto& cls = Class::Handle(Z); |
1444 | auto& functions = Array::Handle(Z); |
1445 | auto& function = Function::Handle(Z); |
1446 | auto& fname = String::Handle(Z); |
1447 | auto& farray = Array::Handle(Z); |
1448 | auto& mangled_name = String::Handle(Z); |
1449 | auto& dyn_function = Function::Handle(Z); |
1450 | |
1451 | Table table(HashTables::New<Table>(100)); |
1452 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
1453 | lib ^= libraries_.At(i); |
1454 | ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate); |
1455 | while (it.HasNext()) { |
1456 | cls = it.GetNextClass(); |
1457 | functions = cls.functions(); |
1458 | |
1459 | const intptr_t length = functions.Length(); |
1460 | for (intptr_t j = 0; j < length; j++) { |
1461 | function ^= functions.At(j); |
1462 | if (function.IsDynamicFunction()) { |
1463 | fname = function.name(); |
1464 | if (function.IsSetterFunction() || |
1465 | function.IsImplicitSetterFunction()) { |
1466 | AddNamesToFunctionsTable(zone(), &table, fname, function, |
1467 | &mangled_name, &dyn_function); |
1468 | } else if (function.IsGetterFunction() || |
1469 | function.IsImplicitGetterFunction()) { |
// Enter both the getter and the non-getter name.
1471 | AddNamesToFunctionsTable(zone(), &table, fname, function, |
1472 | &mangled_name, &dyn_function); |
1473 | fname = Field::NameFromGetter(fname); |
1474 | AddNamesToFunctionsTable(zone(), &table, fname, function, |
1475 | &mangled_name, &dyn_function); |
1476 | } else if (function.IsMethodExtractor()) { |
1477 | // Skip. We already add getter names for regular methods below. |
1478 | continue; |
1479 | } else { |
// Regular function. Enter both the getter and the non-getter name.
1481 | AddNamesToFunctionsTable(zone(), &table, fname, function, |
1482 | &mangled_name, &dyn_function); |
1483 | fname = Field::GetterName(fname); |
1484 | AddNamesToFunctionsTable(zone(), &table, fname, function, |
1485 | &mangled_name, &dyn_function); |
1486 | } |
1487 | } |
1488 | } |
1489 | } |
1490 | } |
1491 | |
// Locate all entries with exactly one function.
1493 | Table::Iterator iter(&table); |
1494 | String& key = String::Handle(Z); |
1495 | String& key_demangled = String::Handle(Z); |
1496 | UniqueFunctionsMap functions_map(HashTables::New<UniqueFunctionsMap>(20)); |
1497 | while (iter.MoveNext()) { |
1498 | intptr_t curr_key = iter.Current(); |
1499 | key ^= table.GetKey(curr_key); |
1500 | farray ^= table.GetOrNull(key); |
1501 | ASSERT(!farray.IsNull()); |
1502 | if (farray.Length() == 1) { |
1503 | function ^= farray.At(0); |
1504 | |
// It looks like there is exactly one target for the given name. We still
// have to be careful: e.g. a name like `dyn:get:foo` might resolve to the
// function `foo()` here, while the actual target would be a lazily created
// method extractor `get:foo` for the `foo` function.
//
// We'd like to prevent eager creation of functions which we normally
// create lazily.
// => We disable the unique target optimization if the target belongs to
// the lazily created functions.
1514 | key_demangled = key.raw(); |
1515 | if (Function::IsDynamicInvocationForwarderName(key)) { |
1516 | key_demangled = Function::DemangleDynamicInvocationForwarderName(key); |
1517 | } |
1518 | if (function.name() != key.raw() && |
1519 | function.name() != key_demangled.raw()) { |
1520 | continue; |
1521 | } |
1522 | functions_map.UpdateOrInsert(key, function); |
1523 | } |
1524 | } |
1525 | |
1526 | farray ^= table.GetOrNull(Symbols::GetRuntimeType()); |
1527 | |
1528 | get_runtime_type_is_unique_ = !farray.IsNull() && (farray.Length() == 1); |
1529 | |
1530 | if (FLAG_print_unique_targets) { |
1531 | UniqueFunctionsMap::Iterator unique_iter(&functions_map); |
1532 | while (unique_iter.MoveNext()) { |
1533 | intptr_t curr_key = unique_iter.Current(); |
1534 | function ^= functions_map.GetPayload(curr_key, 0); |
THR_Print("* %s\n", function.ToQualifiedCString());
1536 | } |
THR_Print("%" Pd " of %" Pd " dynamic selectors are unique\n",
1538 | functions_map.NumOccupied(), table.NumOccupied()); |
1539 | } |
1540 | |
1541 | isolate()->object_store()->set_unique_dynamic_targets( |
1542 | functions_map.Release()); |
1543 | table.Release(); |
1544 | } |
1545 | |
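// Marks the types referenced by every possibly retained function, including
// invocation dispatchers and closures, so they survive the drop passes.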
1546 | void Precompiler::TraceForRetainedFunctions() { |
1547 | Library& lib = Library::Handle(Z); |
1548 | Class& cls = Class::Handle(Z); |
1549 | Array& functions = Array::Handle(Z); |
1550 | String& name = String::Handle(Z); |
1551 | Function& function = Function::Handle(Z); |
1552 | Function& function2 = Function::Handle(Z); |
1553 | GrowableObjectArray& closures = GrowableObjectArray::Handle(Z); |
1554 | |
1555 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
1556 | lib ^= libraries_.At(i); |
1557 | ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate); |
1558 | while (it.HasNext()) { |
1559 | cls = it.GetNextClass(); |
1560 | functions = cls.functions(); |
1561 | for (intptr_t j = 0; j < functions.Length(); j++) { |
1562 | function ^= functions.At(j); |
1563 | bool retain = possibly_retained_functions_.ContainsKey(function); |
1564 | if (!retain && function.HasImplicitClosureFunction()) { |
1565 | // It can happen that all uses of an implicit closure inline their |
1566 | // target function, leaving the target function uncompiled. Keep |
1567 | // the target function anyway so we can enumerate it to bind its |
1568 | // static calls, etc. |
1569 | function2 = function.ImplicitClosureFunction(); |
1570 | retain = function2.HasCode(); |
1571 | } |
1572 | if (retain) { |
1573 | function.DropUncompiledImplicitClosureFunction(); |
1574 | AddTypesOf(function); |
1575 | } |
1576 | } |
1577 | |
1578 | { |
1579 | functions = cls.invocation_dispatcher_cache(); |
1580 | InvocationDispatcherTable dispatchers(functions); |
1581 | for (auto dispatcher : dispatchers) { |
1582 | name = dispatcher.Get<Class::kInvocationDispatcherName>(); |
1583 | if (name.IsNull()) break; // Reached last entry. |
1584 | function = dispatcher.Get<Class::kInvocationDispatcherFunction>(); |
1585 | if (possibly_retained_functions_.ContainsKey(function)) { |
1586 | AddTypesOf(function); |
1587 | } |
1588 | } |
1589 | } |
1590 | } |
1591 | } |
1592 | |
1593 | closures = isolate()->object_store()->closure_functions(); |
1594 | for (intptr_t j = 0; j < closures.Length(); j++) { |
1595 | function ^= closures.At(j); |
1596 | bool retain = possibly_retained_functions_.ContainsKey(function); |
1597 | if (retain) { |
1598 | AddTypesOf(function); |
1599 | |
1600 | cls = function.Owner(); |
1601 | AddTypesOf(cls); |
1602 | |
1603 | // It can happen that all uses of a function are inlined, leaving |
1604 | // a compiled local function with an uncompiled parent. Retain such |
1605 | // parents and their enclosing classes and libraries. |
1606 | function = function.parent_function(); |
1607 | while (!function.IsNull()) { |
1608 | AddTypesOf(function); |
1609 | function = function.parent_function(); |
1610 | } |
1611 | } |
1612 | } |
1613 | } |
1614 | |
1615 | void Precompiler::FinalizeDispatchTable() { |
1616 | if (!FLAG_use_bare_instructions || !FLAG_use_table_dispatch) return; |
1617 | // Build the entries used to serialize the dispatch table before |
1618 | // dropping functions, as we may clear references to Code objects. |
1619 | const auto& entries = |
1620 | Array::Handle(Z, dispatch_table_generator_->BuildCodeArray()); |
1621 | I->object_store()->set_dispatch_table_code_entries(entries); |
1622 | // Delete the dispatch table generator to ensure there's no attempt |
1623 | // to add new entries after this point. |
1624 | delete dispatch_table_generator_; |
1625 | dispatch_table_generator_ = nullptr; |
1626 | |
1627 | if (FLAG_retain_function_objects || !FLAG_trace_precompiler) return; |
1628 | |
1629 | FunctionSet printed(HashTables::New<FunctionSet>(/*initial_capacity=*/1024)); |
1630 | auto& code = Code::Handle(Z); |
1631 | auto& function = Function::Handle(Z); |
1632 | for (intptr_t i = 0; i < entries.Length(); i++) { |
1633 | code = Code::RawCast(entries.At(i)); |
1634 | if (code.IsNull()) continue; |
1635 | if (!code.IsFunctionCode()) continue; |
1636 | function = code.function(); |
1637 | ASSERT(!function.IsNull()); |
1638 | if (printed.ContainsKey(function)) continue; |
1639 | if (functions_to_retain_.ContainsKey(function)) continue; |
THR_Print("Dispatch table references code for function to drop: %s\n",
1641 | function.ToLibNamePrefixedQualifiedCString()); |
1642 | printed.Insert(function); |
1643 | } |
1644 | printed.Release(); |
1645 | } |
1646 | |
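// Rewrites pc-relative entries in static call tables to point at the target
// Code objects instead of the target Function objects, which allows the
// function objects to be dropped.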
1647 | void Precompiler::ReplaceFunctionPCRelativeCallEntries() { |
1648 | class StaticCallTableEntryFixer : public CodeVisitor { |
1649 | public: |
1650 | explicit StaticCallTableEntryFixer(Zone* zone) |
1651 | : table_(Array::Handle(zone)), |
1652 | kind_and_offset_(Smi::Handle(zone)), |
1653 | target_function_(Function::Handle(zone)), |
1654 | target_code_(Code::Handle(zone)) {} |
1655 | |
1656 | void VisitCode(const Code& code) { |
1657 | if (!code.IsFunctionCode()) return; |
1658 | table_ = code.static_calls_target_table(); |
1659 | StaticCallsTable static_calls(table_); |
1660 | for (auto& view : static_calls) { |
1661 | kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>(); |
1662 | auto const kind = Code::KindField::decode(kind_and_offset_.Value()); |
1663 | if (kind != Code::kPcRelativeCall) continue; |
1664 | |
1665 | target_function_ = view.Get<Code::kSCallTableFunctionTarget>(); |
1666 | if (target_function_.IsNull()) continue; |
1667 | |
1668 | ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null()); |
1669 | ASSERT(target_function_.HasCode()); |
1670 | target_code_ = target_function_.CurrentCode(); |
1671 | ASSERT(!target_code_.IsStubCode()); |
1672 | view.Set<Code::kSCallTableCodeOrTypeTarget>(target_code_); |
1673 | view.Set<Code::kSCallTableFunctionTarget>(Object::null_function()); |
1674 | if (FLAG_trace_precompiler) { |
THR_Print("Updated static call entry to %s in \"%s\"\n",
1676 | target_function_.ToFullyQualifiedCString(), |
1677 | code.ToCString()); |
1678 | } |
1679 | } |
1680 | } |
1681 | |
1682 | private: |
1683 | Array& table_; |
1684 | Smi& kind_and_offset_; |
1685 | Function& target_function_; |
1686 | Code& target_code_; |
1687 | }; |
1688 | |
1689 | HANDLESCOPE(T); |
1690 | StaticCallTableEntryFixer visitor(Z); |
1691 | ProgramVisitor::WalkProgram(Z, I, &visitor); |
1692 | } |
1693 | |
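// Removes functions that were not marked for retention from class member
// lists, invocation dispatcher caches and the global closure function list,
// detaching any code from the dropped functions.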
1694 | void Precompiler::DropFunctions() { |
1695 | Library& lib = Library::Handle(Z); |
1696 | Class& cls = Class::Handle(Z); |
1697 | Array& functions = Array::Handle(Z); |
1698 | Function& function = Function::Handle(Z); |
1699 | Code& code = Code::Handle(Z); |
1700 | Object& owner = Object::Handle(Z); |
1701 | GrowableObjectArray& retained_functions = GrowableObjectArray::Handle(Z); |
1702 | GrowableObjectArray& closures = GrowableObjectArray::Handle(Z); |
1703 | |
1704 | auto drop_function = [&](const Function& function) { |
1705 | if (function.HasCode()) { |
1706 | code = function.CurrentCode(); |
1707 | function.ClearCode(); |
1708 | // Wrap the owner of the code object in case the code object will be |
1709 | // serialized but the function object will not. |
1710 | owner = code.owner(); |
1711 | owner = WeakSerializationReference::Wrap(Z, owner); |
1712 | code.set_owner(owner); |
1713 | } |
1714 | dropped_function_count_++; |
1715 | if (FLAG_trace_precompiler) { |
THR_Print("Dropping function %s\n",
1717 | function.ToLibNamePrefixedQualifiedCString()); |
1718 | } |
1719 | }; |
1720 | |
1721 | auto& dispatchers_array = Array::Handle(Z); |
1722 | auto& name = String::Handle(Z); |
1723 | auto& desc = Array::Handle(Z); |
1724 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
1725 | lib ^= libraries_.At(i); |
1726 | ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate); |
1727 | while (it.HasNext()) { |
1728 | cls = it.GetNextClass(); |
1729 | functions = cls.functions(); |
1730 | retained_functions = GrowableObjectArray::New(); |
1731 | for (intptr_t j = 0; j < functions.Length(); j++) { |
1732 | function ^= functions.At(j); |
1733 | function.DropUncompiledImplicitClosureFunction(); |
1734 | function.ClearBytecode(); |
1735 | if (functions_to_retain_.ContainsKey(function)) { |
1736 | retained_functions.Add(function); |
1737 | } else { |
1738 | drop_function(function); |
1739 | } |
1740 | } |
1741 | |
1742 | if (retained_functions.Length() > 0) { |
1743 | functions = Array::MakeFixedLength(retained_functions); |
1744 | cls.SetFunctions(functions); |
1745 | } else { |
1746 | cls.SetFunctions(Object::empty_array()); |
1747 | } |
1748 | |
1749 | retained_functions = GrowableObjectArray::New(); |
1750 | { |
1751 | dispatchers_array = cls.invocation_dispatcher_cache(); |
1752 | InvocationDispatcherTable dispatchers(dispatchers_array); |
1753 | for (auto dispatcher : dispatchers) { |
1754 | name = dispatcher.Get<Class::kInvocationDispatcherName>(); |
1755 | if (name.IsNull()) break; // Reached last entry. |
1756 | desc = dispatcher.Get<Class::kInvocationDispatcherArgsDesc>(); |
1757 | function = dispatcher.Get<Class::kInvocationDispatcherFunction>(); |
1758 | if (functions_to_retain_.ContainsKey(function)) { |
1759 | retained_functions.Add(name); |
1760 | retained_functions.Add(desc); |
1761 | retained_functions.Add(function); |
1762 | } else { |
1763 | drop_function(function); |
1764 | } |
1765 | } |
1766 | } |
1767 | if (retained_functions.Length() > 0) { |
1768 | // Last entry must be null. |
1769 | retained_functions.Add(Object::null_object()); |
1770 | retained_functions.Add(Object::null_object()); |
1771 | retained_functions.Add(Object::null_object()); |
1772 | functions = Array::MakeFixedLength(retained_functions); |
1773 | } else { |
1774 | functions = Object::empty_array().raw(); |
1775 | } |
1776 | cls.set_invocation_dispatcher_cache(functions); |
1777 | } |
1778 | } |
1779 | |
1780 | closures = isolate()->object_store()->closure_functions(); |
1781 | retained_functions = GrowableObjectArray::New(); |
1782 | for (intptr_t j = 0; j < closures.Length(); j++) { |
1783 | function ^= closures.At(j); |
1784 | function.ClearBytecode(); |
1785 | if (functions_to_retain_.ContainsKey(function)) { |
1786 | retained_functions.Add(function); |
1787 | } else { |
1788 | drop_function(function); |
1789 | } |
1790 | } |
1791 | isolate()->object_store()->set_closure_functions(retained_functions); |
1792 | } |
1793 | |
1794 | void Precompiler::DropFields() { |
1795 | Library& lib = Library::Handle(Z); |
1796 | Class& cls = Class::Handle(Z); |
1797 | Array& fields = Array::Handle(Z); |
1798 | Field& field = Field::Handle(Z); |
1799 | GrowableObjectArray& retained_fields = GrowableObjectArray::Handle(Z); |
1800 | AbstractType& type = AbstractType::Handle(Z); |
1801 | Function& initializer_function = Function::Handle(Z); |
1802 | |
1803 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
1804 | lib ^= libraries_.At(i); |
1805 | ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate); |
1806 | while (it.HasNext()) { |
1807 | cls = it.GetNextClass(); |
1808 | fields = cls.fields(); |
1809 | retained_fields = GrowableObjectArray::New(); |
1810 | for (intptr_t j = 0; j < fields.Length(); j++) { |
1811 | field ^= fields.At(j); |
1812 | bool retain = fields_to_retain_.HasKey(&field); |
1813 | if (field.HasInitializerFunction()) { |
1814 | initializer_function = field.InitializerFunction(); |
1815 | initializer_function.ClearBytecode(); |
1816 | } |
1817 | #if !defined(PRODUCT) |
1818 | if (field.is_instance() && cls.is_allocated()) { |
1819 | // Keep instance fields so their names are available to graph tools. |
1820 | retain = true; |
1821 | } |
1822 | #endif |
1823 | if (retain) { |
1824 | retained_fields.Add(field); |
1825 | type = field.type(); |
1826 | AddType(type); |
1827 | } else { |
1828 | dropped_field_count_++; |
1829 | if (FLAG_trace_precompiler) { |
THR_Print("Dropping field %s\n", field.ToCString());
1831 | } |
1832 | |
// This cleans up references to the field's current and initial values.
1834 | if (field.is_static()) { |
1835 | field.SetStaticValue(Object::null_instance(), |
1836 | /*save_initial_value=*/true); |
1837 | } |
1838 | } |
1839 | } |
1840 | |
1841 | if (retained_fields.Length() > 0) { |
1842 | fields = Array::MakeFixedLength(retained_fields); |
1843 | cls.SetFields(fields); |
1844 | } else { |
1845 | cls.SetFields(Object::empty_array()); |
1846 | } |
1847 | } |
1848 | } |
1849 | } |
1850 | |
1851 | void Precompiler::AttachOptimizedTypeTestingStub() { |
1852 | Isolate::Current()->heap()->CollectAllGarbage(); |
1853 | GrowableHandlePtrArray<const AbstractType> types(Z, 200); |
1854 | { |
1855 | class TypesCollector : public ObjectVisitor { |
1856 | public: |
1857 | explicit TypesCollector(Zone* zone, |
1858 | GrowableHandlePtrArray<const AbstractType>* types) |
1859 | : type_(AbstractType::Handle(zone)), types_(types) {} |
1860 | |
1861 | void VisitObject(ObjectPtr obj) { |
1862 | if (obj->GetClassId() == kTypeCid || obj->GetClassId() == kTypeRefCid) { |
1863 | type_ ^= obj; |
1864 | types_->Add(type_); |
1865 | } |
1866 | } |
1867 | |
1868 | private: |
1869 | AbstractType& type_; |
1870 | GrowableHandlePtrArray<const AbstractType>* types_; |
1871 | }; |
1872 | |
1873 | HeapIterationScope his(T); |
1874 | TypesCollector visitor(Z, &types); |
1875 | |
1876 | // Find all type objects in this isolate. |
1877 | I->heap()->VisitObjects(&visitor); |
1878 | |
1879 | // Find all type objects in the vm-isolate. |
1880 | Dart::vm_isolate()->heap()->VisitObjects(&visitor); |
1881 | } |
1882 | |
1883 | TypeUsageInfo* type_usage_info = Thread::Current()->type_usage_info(); |
1884 | |
1885 | // At this point we're not generating any new code, so we build a picture of |
1886 | // which types we might type-test against. |
1887 | type_usage_info->BuildTypeUsageInformation(); |
1888 | |
1889 | TypeTestingStubGenerator type_testing_stubs; |
1890 | Code& code = Code::Handle(); |
1891 | for (intptr_t i = 0; i < types.length(); i++) { |
1892 | const AbstractType& type = types.At(i); |
1893 | |
1894 | if (type.InVMIsolateHeap()) { |
1895 | // The only important types in the vm isolate are |
1896 | // "dynamic"/"void"/"Never", which will get their optimized |
1897 | // testing stub installed at creation. |
1898 | continue; |
1899 | } |
1900 | |
1901 | if (type_usage_info->IsUsedInTypeTest(type)) { |
1902 | code = type_testing_stubs.OptimizedCodeForType(type); |
1903 | type.SetTypeTestingStub(code); |
1904 | |
1905 | // Ensure we retain the type. |
1906 | AddType(type); |
1907 | } |
1908 | } |
1909 | |
1910 | ASSERT(Object::dynamic_type().type_test_stub_entry_point() == |
1911 | StubCode::TopTypeTypeTest().EntryPoint()); |
1912 | } |
1913 | |
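// The three Drop passes below share one pattern: de-canonicalize every entry
// that is not marked for retention, then rebuild the canonical table from
// the retained entries alone.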
1914 | void Precompiler::DropTypes() { |
1915 | ObjectStore* object_store = I->object_store(); |
1916 | GrowableObjectArray& retained_types = |
1917 | GrowableObjectArray::Handle(Z, GrowableObjectArray::New()); |
1918 | Array& types_array = Array::Handle(Z); |
1919 | Type& type = Type::Handle(Z); |
1920 | // First drop all the types that are not referenced. |
1921 | { |
1922 | CanonicalTypeSet types_table(Z, object_store->canonical_types()); |
1923 | types_array = HashTables::ToArray(types_table, false); |
1924 | for (intptr_t i = 0; i < types_array.Length(); i++) { |
1925 | type ^= types_array.At(i); |
1926 | bool retain = types_to_retain_.HasKey(&type); |
1927 | if (retain) { |
1928 | retained_types.Add(type); |
1929 | } else { |
1930 | type.ClearCanonical(); |
1931 | dropped_type_count_++; |
1932 | } |
1933 | } |
1934 | types_table.Release(); |
1935 | } |
1936 | |
1937 | // Now construct a new type table and save in the object store. |
1938 | const intptr_t dict_size = |
1939 | Utils::RoundUpToPowerOfTwo(retained_types.Length() * 4 / 3); |
1940 | types_array = HashTables::New<CanonicalTypeSet>(dict_size, Heap::kOld); |
1941 | CanonicalTypeSet types_table(Z, types_array.raw()); |
1942 | bool present; |
1943 | for (intptr_t i = 0; i < retained_types.Length(); i++) { |
1944 | type ^= retained_types.At(i); |
1945 | present = types_table.Insert(type); |
1946 | ASSERT(!present); |
1947 | } |
1948 | object_store->set_canonical_types(types_table.Release()); |
1949 | } |
1950 | |
1951 | void Precompiler::DropTypeParameters() { |
1952 | ObjectStore* object_store = I->object_store(); |
1953 | GrowableObjectArray& retained_typeparams = |
1954 | GrowableObjectArray::Handle(Z, GrowableObjectArray::New()); |
1955 | Array& typeparams_array = Array::Handle(Z); |
1956 | TypeParameter& typeparam = TypeParameter::Handle(Z); |
1957 | // First drop all the type parameters that are not referenced. |
1958 | // Note that we only visit 'free-floating' type parameters and not |
1959 | // declarations of type parameters contained in the 'type_parameters' |
1960 | // array in generic classes and functions. |
1961 | { |
1962 | CanonicalTypeParameterSet typeparams_table( |
1963 | Z, object_store->canonical_type_parameters()); |
1964 | typeparams_array = HashTables::ToArray(typeparams_table, false); |
1965 | for (intptr_t i = 0; i < typeparams_array.Length(); i++) { |
1966 | typeparam ^= typeparams_array.At(i); |
1967 | bool retain = typeparams_to_retain_.HasKey(&typeparam); |
1968 | if (retain) { |
1969 | retained_typeparams.Add(typeparam); |
1970 | } else { |
1971 | typeparam.ClearCanonical(); |
1972 | dropped_typeparam_count_++; |
1973 | } |
1974 | } |
1975 | typeparams_table.Release(); |
1976 | } |
1977 | |
1978 | // Now construct a new type parameter table and save in the object store. |
1979 | const intptr_t dict_size = |
1980 | Utils::RoundUpToPowerOfTwo(retained_typeparams.Length() * 4 / 3); |
1981 | typeparams_array = |
1982 | HashTables::New<CanonicalTypeParameterSet>(dict_size, Heap::kOld); |
1983 | CanonicalTypeParameterSet typeparams_table(Z, typeparams_array.raw()); |
1984 | bool present; |
1985 | for (intptr_t i = 0; i < retained_typeparams.Length(); i++) { |
1986 | typeparam ^= retained_typeparams.At(i); |
1987 | present = typeparams_table.Insert(typeparam); |
1988 | ASSERT(!present); |
1989 | } |
1990 | object_store->set_canonical_type_parameters(typeparams_table.Release()); |
1991 | } |
1992 | |
1993 | void Precompiler::DropTypeArguments() { |
1994 | ObjectStore* object_store = I->object_store(); |
1995 | Array& typeargs_array = Array::Handle(Z); |
1996 | GrowableObjectArray& retained_typeargs = |
1997 | GrowableObjectArray::Handle(Z, GrowableObjectArray::New()); |
1998 | TypeArguments& typeargs = TypeArguments::Handle(Z); |
1999 | // First drop all the type arguments that are not referenced. |
2000 | { |
2001 | CanonicalTypeArgumentsSet typeargs_table( |
2002 | Z, object_store->canonical_type_arguments()); |
2003 | typeargs_array = HashTables::ToArray(typeargs_table, false); |
2004 | for (intptr_t i = 0; i < typeargs_array.Length(); i++) { |
2005 | typeargs ^= typeargs_array.At(i); |
2006 | bool retain = typeargs_to_retain_.HasKey(&typeargs); |
2007 | if (retain) { |
2008 | retained_typeargs.Add(typeargs); |
2009 | } else { |
2010 | typeargs.ClearCanonical(); |
2011 | dropped_typearg_count_++; |
2012 | } |
2013 | } |
2014 | typeargs_table.Release(); |
2015 | } |
2016 | |
2017 | // Now construct a new type arguments table and save in the object store. |
2018 | const intptr_t dict_size = |
2019 | Utils::RoundUpToPowerOfTwo(retained_typeargs.Length() * 4 / 3); |
2020 | typeargs_array = |
2021 | HashTables::New<CanonicalTypeArgumentsSet>(dict_size, Heap::kOld); |
2022 | CanonicalTypeArgumentsSet typeargs_table(Z, typeargs_array.raw()); |
2023 | bool present; |
2024 | for (intptr_t i = 0; i < retained_typeargs.Length(); i++) { |
2025 | typeargs ^= retained_typeargs.At(i); |
2026 | present = typeargs_table.Insert(typeargs); |
2027 | ASSERT(!present); |
2028 | } |
2029 | object_store->set_canonical_type_arguments(typeargs_table.Release()); |
2030 | } |
2031 | |
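// Clears CHA data, filters each class's constants down to the retained
// ones (rehashing the constants table), and records the types of every
// class that survives because it has members, live instances or retained
// constants.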
2032 | void Precompiler::TraceTypesFromRetainedClasses() { |
2033 | auto& lib = Library::Handle(Z); |
2034 | auto& cls = Class::Handle(Z); |
2035 | auto& members = Array::Handle(Z); |
2036 | auto& constants = Array::Handle(Z); |
2037 | auto& retained_constants = GrowableObjectArray::Handle(Z); |
2038 | auto& constant = Instance::Handle(Z); |
2039 | |
2040 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
2041 | lib ^= libraries_.At(i); |
2042 | ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate); |
2043 | while (it.HasNext()) { |
2044 | cls = it.GetNextClass(); |
2045 | |
2046 | // The subclasses/implementors array is only needed for CHA. |
2047 | cls.ClearDirectSubclasses(); |
2048 | cls.ClearDirectImplementors(); |
2049 | |
2050 | bool retain = false; |
2051 | members = cls.fields(); |
2052 | if (members.Length() > 0) { |
2053 | retain = true; |
2054 | } |
2055 | members = cls.functions(); |
2056 | if (members.Length() > 0) { |
2057 | retain = true; |
2058 | } |
2059 | if (cls.is_allocated()) { |
2060 | retain = true; |
2061 | } |
2062 | if (cls.is_enum_class()) { |
2063 | // Enum classes have live instances, so we cannot unregister |
2064 | // them. |
2065 | retain = true; |
2066 | } |
2067 | |
2068 | constants = cls.constants(); |
2069 | retained_constants = GrowableObjectArray::New(); |
2070 | for (intptr_t j = 0; j < constants.Length(); j++) { |
2071 | constant ^= constants.At(j); |
2072 | bool retain = consts_to_retain_.HasKey(&constant); |
2073 | if (retain) { |
2074 | retained_constants.Add(constant); |
2075 | } |
2076 | } |
2077 | intptr_t cid = cls.id(); |
2078 | if (cid == kDoubleCid) { |
2079 | // Rehash. |
2080 | cls.set_constants(Object::empty_array()); |
2081 | for (intptr_t j = 0; j < retained_constants.Length(); j++) { |
2082 | constant ^= retained_constants.At(j); |
2083 | cls.InsertCanonicalDouble(Z, Double::Cast(constant)); |
2084 | } |
2085 | } else if (cid == kMintCid) { |
2086 | // Rehash. |
2087 | cls.set_constants(Object::empty_array()); |
2088 | for (intptr_t j = 0; j < retained_constants.Length(); j++) { |
2089 | constant ^= retained_constants.At(j); |
2090 | cls.InsertCanonicalMint(Z, Mint::Cast(constant)); |
2091 | } |
2092 | } else { |
2093 | // Rehash. |
2094 | cls.set_constants(Object::empty_array()); |
2095 | for (intptr_t j = 0; j < retained_constants.Length(); j++) { |
2096 | constant ^= retained_constants.At(j); |
2097 | cls.InsertCanonicalConstant(Z, constant); |
2098 | } |
2099 | } |
2100 | |
2101 | if (retained_constants.Length() > 0) { |
2102 | ASSERT(retain); // This shouldn't be the reason we keep a class. |
2103 | retain = true; |
2104 | } |
2105 | |
2106 | if (retain) { |
2107 | AddTypesOf(cls); |
2108 | } |
2109 | } |
2110 | } |
2111 | } |
2112 | |
2113 | void Precompiler::DropMetadata() { |
2114 | Library& lib = Library::Handle(Z); |
2115 | const GrowableObjectArray& null_growable_list = |
2116 | GrowableObjectArray::Handle(Z); |
2117 | Array& dependencies = Array::Handle(Z); |
2118 | Namespace& ns = Namespace::Handle(Z); |
2119 | const Field& null_field = Field::Handle(Z); |
2120 | GrowableObjectArray& metadata = GrowableObjectArray::Handle(Z); |
2121 | Field& metadata_field = Field::Handle(Z); |
2122 | |
2123 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
2124 | lib ^= libraries_.At(i); |
2125 | metadata ^= lib.metadata(); |
2126 | for (intptr_t j = 0; j < metadata.Length(); j++) { |
2127 | metadata_field ^= metadata.At(j); |
2128 | if (metadata_field.is_static()) { |
2129 | // Although this field will become garbage after clearing the list |
2130 | // below, we also need to clear its value from the field table. |
2131 | // The value may be an instance of an otherwise dead class, and if |
2132 | // it remains in the field table we can get an instance on the heap |
2133 | // with a deleted class. |
2134 | metadata_field.SetStaticValue(Object::null_instance(), |
2135 | /*save_initial_value=*/true); |
2136 | } |
2137 | } |
2138 | |
2139 | lib.set_metadata(null_growable_list); |
2140 | |
2141 | dependencies = lib.imports(); |
2142 | for (intptr_t j = 0; j < dependencies.Length(); j++) { |
2143 | ns ^= dependencies.At(j); |
2144 | if (!ns.IsNull()) { |
2145 | ns.set_metadata_field(null_field); |
2146 | } |
2147 | } |
2148 | |
2149 | dependencies = lib.exports(); |
2150 | for (intptr_t j = 0; j < dependencies.Length(); j++) { |
2151 | ns ^= dependencies.At(j); |
2152 | if (!ns.IsNull()) { |
2153 | ns.set_metadata_field(null_field); |
2154 | } |
2155 | } |
2156 | } |
2157 | } |
2158 | |
2159 | void Precompiler::DropLibraryEntries() { |
2160 | Library& lib = Library::Handle(Z); |
2161 | Array& dict = Array::Handle(Z); |
2162 | Object& entry = Object::Handle(Z); |
2163 | |
2164 | Array& scripts = Array::Handle(Z); |
2165 | Script& script = Script::Handle(Z); |
2166 | KernelProgramInfo& program_info = KernelProgramInfo::Handle(Z); |
2167 | const TypedData& null_typed_data = TypedData::Handle(Z); |
2168 | const KernelProgramInfo& null_info = KernelProgramInfo::Handle(Z); |
2169 | |
2170 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
2171 | lib ^= libraries_.At(i); |
2172 | |
2173 | dict = lib.dictionary(); |
2174 | intptr_t dict_size = dict.Length() - 1; |
2175 | intptr_t used = 0; |
2176 | for (intptr_t j = 0; j < dict_size; j++) { |
2177 | entry = dict.At(j); |
2178 | if (entry.IsNull()) continue; |
2179 | |
2180 | if (entry.IsClass()) { |
2181 | if (classes_to_retain_.HasKey(&Class::Cast(entry))) { |
2182 | used++; |
2183 | continue; |
2184 | } |
2185 | } else if (entry.IsFunction()) { |
2186 | if (functions_to_retain_.ContainsKey(Function::Cast(entry))) { |
2187 | used++; |
2188 | continue; |
2189 | } |
2190 | } else if (entry.IsField()) { |
2191 | if (fields_to_retain_.HasKey(&Field::Cast(entry))) { |
2192 | used++; |
2193 | continue; |
2194 | } |
2195 | } else if (entry.IsLibraryPrefix()) { |
2196 | // Always drop. |
2197 | } else { |
FATAL1("Unexpected library entry: %s", entry.ToCString());
2199 | } |
2200 | dict.SetAt(j, Object::null_object()); |
2201 | } |
2202 | |
2203 | scripts = lib.LoadedScripts(); |
2204 | if (!scripts.IsNull()) { |
2205 | for (intptr_t i = 0; i < scripts.Length(); ++i) { |
2206 | script = Script::RawCast(scripts.At(i)); |
2207 | program_info = script.kernel_program_info(); |
2208 | if (!program_info.IsNull()) { |
2209 | program_info.set_constants(Array::null_array()); |
2210 | program_info.set_scripts(Array::null_array()); |
2211 | program_info.set_libraries_cache(Array::null_array()); |
2212 | program_info.set_classes_cache(Array::null_array()); |
2213 | program_info.set_bytecode_component(Array::null_array()); |
2214 | } |
2215 | script.set_resolved_url(String::null_string()); |
2216 | script.set_compile_time_constants(Array::null_array()); |
2217 | script.set_line_starts(null_typed_data); |
2218 | script.set_debug_positions(Array::null_array()); |
2219 | script.set_kernel_program_info(null_info); |
2220 | script.set_source(String::null_string()); |
2221 | } |
2222 | } |
2223 | |
2224 | lib.RehashDictionary(dict, used * 4 / 3 + 1); |
2225 | if (!(retain_root_library_caches_ && |
2226 | (lib.raw() == I->object_store()->root_library()))) { |
2227 | lib.DropDependenciesAndCaches(); |
2228 | } |
2229 | } |
2230 | } |
2231 | |
2232 | void Precompiler::DropClasses() { |
2233 | Class& cls = Class::Handle(Z); |
2234 | Array& constants = Array::Handle(Z); |
2235 | |
2236 | // We are about to remove classes from the class table. For this to be safe, |
2237 | // there must be no instances of these classes on the heap, not even |
2238 | // corpses because the class table entry may be used to find the size of |
2239 | // corpses. Request a full GC and wait for the sweeper tasks to finish before |
2240 | // we continue. |
2241 | I->heap()->CollectAllGarbage(); |
2242 | I->heap()->WaitForSweeperTasks(T); |
2243 | |
2244 | ClassTable* class_table = I->class_table(); |
2245 | intptr_t num_cids = class_table->NumCids(); |
2246 | |
2247 | for (intptr_t cid = kNumPredefinedCids; cid < num_cids; cid++) { |
2248 | if (!class_table->IsValidIndex(cid)) continue; |
2249 | if (!class_table->HasValidClassAt(cid)) continue; |
2250 | |
2251 | cls = class_table->At(cid); |
2252 | ASSERT(!cls.IsNull()); |
2253 | |
2254 | if (cls.IsTopLevel()) { |
2255 | // Top-level classes are referenced directly from their library. They |
2256 | // will only be removed as a consequence of an entire library being |
2257 | // removed. |
2258 | continue; |
2259 | } |
2260 | |
2261 | bool retain = classes_to_retain_.HasKey(&cls); |
2262 | if (retain) { |
2263 | continue; |
2264 | } |
2265 | |
2266 | ASSERT(!cls.is_allocated()); |
2267 | constants = cls.constants(); |
2268 | ASSERT(constants.Length() == 0); |
2269 | |
2270 | dropped_class_count_++; |
2271 | if (FLAG_trace_precompiler) { |
THR_Print("Dropping class %" Pd " %s\n", cid, cls.ToCString());
2273 | } |
2274 | |
2275 | class_table->Unregister(cid); |
2276 | cls.set_id(kIllegalCid); // We check this when serializing. |
2277 | } |
2278 | } |
2279 | |
2280 | void Precompiler::DropLibraries() { |
2281 | const GrowableObjectArray& retained_libraries = |
2282 | GrowableObjectArray::Handle(Z, GrowableObjectArray::New()); |
2283 | const Library& root_lib = |
2284 | Library::Handle(Z, I->object_store()->root_library()); |
2285 | Library& lib = Library::Handle(Z); |
2286 | Class& toplevel_class = Class::Handle(Z); |
2287 | |
2288 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
2289 | lib ^= libraries_.At(i); |
2290 | intptr_t entries = 0; |
2291 | DictionaryIterator it(lib); |
2292 | while (it.HasNext()) { |
2293 | entries++; |
2294 | it.GetNext(); |
2295 | } |
2296 | bool retain = false; |
2297 | if (entries > 0) { |
2298 | retain = true; |
2299 | } else if (lib.is_dart_scheme()) { |
2300 | // The core libraries are referenced from the object store. |
2301 | retain = true; |
2302 | } else if (lib.raw() == root_lib.raw()) { |
2303 | // The root library might have no surviving members if it only exports |
2304 | // main from another library. It will still be referenced from the object |
2305 | // store, so retain it. |
2306 | retain = true; |
2307 | } else { |
2308 | // A type for a top-level class may be referenced from an object pool as |
2309 | // part of an error message. |
2310 | toplevel_class = lib.toplevel_class(); |
2311 | if (classes_to_retain_.HasKey(&toplevel_class)) { |
2312 | retain = true; |
2313 | } |
2314 | } |
2315 | |
2316 | if (retain) { |
2317 | lib.set_index(retained_libraries.Length()); |
2318 | retained_libraries.Add(lib); |
2319 | } else { |
2320 | toplevel_class = lib.toplevel_class(); |
2321 | |
2322 | I->class_table()->UnregisterTopLevel(toplevel_class.id()); |
2323 | toplevel_class.set_id(kIllegalCid); // We check this when serializing. |
2324 | |
2325 | dropped_library_count_++; |
2326 | lib.set_index(-1); |
2327 | if (FLAG_trace_precompiler) { |
THR_Print("Dropping library %s\n", lib.ToCString());
2329 | } |
2330 | } |
2331 | } |
2332 | |
2333 | Library::RegisterLibraries(T, retained_libraries); |
2334 | libraries_ = retained_libraries.raw(); |
2335 | } |
2336 | |
2337 | // Traits for the HashTable template. |
2338 | struct CodeKeyTraits { |
2339 | static uint32_t Hash(const Object& key) { return Code::Cast(key).Size(); } |
static const char* Name() { return "CodeKeyTraits"; }
2341 | static bool IsMatch(const Object& x, const Object& y) { |
2342 | return x.raw() == y.raw(); |
2343 | } |
2344 | static bool ReportStats() { return false; } |
2345 | }; |
2346 | |
2347 | typedef UnorderedHashSet<CodeKeyTraits> CodeSet; |
2348 | |
2349 | #if defined(DEBUG) |
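// Debug-only check: walks all code reachable from the program and returns a
// retained function whose code was never visited, or null if none exists.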
2350 | FunctionPtr Precompiler::FindUnvisitedRetainedFunction() { |
2351 | class CodeChecker : public CodeVisitor { |
2352 | public: |
2353 | CodeChecker() |
2354 | : visited_code_(HashTables::New<CodeSet>(/*initial_capacity=*/1024)) {} |
2355 | ~CodeChecker() { visited_code_.Release(); } |
2356 | |
2357 | const CodeSet& visited() const { return visited_code_; } |
2358 | |
2359 | void VisitCode(const Code& code) { visited_code_.Insert(code); } |
2360 | |
2361 | private: |
2362 | CodeSet visited_code_; |
2363 | }; |
2364 | |
2365 | CodeChecker visitor; |
2366 | ProgramVisitor::WalkProgram(Z, I, &visitor); |
2367 | const CodeSet& visited = visitor.visited(); |
2368 | |
2369 | FunctionSet::Iterator it(&functions_to_retain_); |
2370 | Function& function = Function::Handle(Z); |
2371 | Code& code = Code::Handle(Z); |
2372 | while (it.MoveNext()) { |
2373 | function ^= functions_to_retain_.GetKey(it.Current()); |
2374 | if (!function.HasCode()) continue; |
2375 | code = function.CurrentCode(); |
2376 | if (!visited.ContainsKey(code)) return function.raw(); |
2377 | } |
2378 | return Function::null(); |
2379 | } |
2380 | #endif |
2381 | |
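// Renames script URLs and library names/URLs, commits the obfuscation
// mappings, and then moves the map out of the heap so it is not serialized
// into the snapshot.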
2382 | void Precompiler::Obfuscate() { |
2383 | if (!I->obfuscate()) { |
2384 | return; |
2385 | } |
2386 | |
2387 | class ScriptsCollector : public ObjectVisitor { |
2388 | public: |
2389 | explicit ScriptsCollector(Zone* zone, |
2390 | GrowableHandlePtrArray<const Script>* scripts) |
2391 | : script_(Script::Handle(zone)), scripts_(scripts) {} |
2392 | |
2393 | void VisitObject(ObjectPtr obj) { |
2394 | if (obj->GetClassId() == kScriptCid) { |
2395 | script_ ^= obj; |
2396 | scripts_->Add(Script::Cast(script_)); |
2397 | } |
2398 | } |
2399 | |
2400 | private: |
2401 | Script& script_; |
2402 | GrowableHandlePtrArray<const Script>* scripts_; |
2403 | }; |
2404 | |
2405 | GrowableHandlePtrArray<const Script> scripts(Z, 100); |
2406 | Isolate::Current()->heap()->CollectAllGarbage(); |
2407 | { |
2408 | HeapIterationScope his(T); |
2409 | ScriptsCollector visitor(Z, &scripts); |
2410 | I->heap()->VisitObjects(&visitor); |
2411 | } |
2412 | |
2413 | { |
// Note: when this object is destroyed it will commit obfuscation
// mappings into the ObjectStore. Hence the block around it - to
// ensure that the destructor is called before we save the obfuscation
// mappings and clear the ObjectStore.
2418 | Obfuscator obfuscator(T, /*private_key=*/String::Handle(Z)); |
2419 | String& str = String::Handle(Z); |
2420 | for (intptr_t i = 0; i < scripts.length(); i++) { |
2421 | const Script& script = scripts.At(i); |
2422 | |
2423 | str = script.url(); |
2424 | str = Symbols::New(T, str); |
2425 | str = obfuscator.Rename(str, /*atomic=*/true); |
2426 | script.set_url(str); |
2427 | } |
2428 | |
2429 | Library& lib = Library::Handle(); |
2430 | for (intptr_t i = 0; i < libraries_.Length(); i++) { |
2431 | lib ^= libraries_.At(i); |
2432 | if (!lib.is_dart_scheme()) { |
2433 | str = lib.name(); |
2434 | str = obfuscator.Rename(str, /*atomic=*/true); |
2435 | lib.set_name(str); |
2436 | |
2437 | str = lib.url(); |
2438 | str = Symbols::New(T, str); |
2439 | str = obfuscator.Rename(str, /*atomic=*/true); |
2440 | lib.set_url(str); |
2441 | } |
2442 | } |
2443 | Library::RegisterLibraries(T, libraries_); |
2444 | } |
2445 | |
2446 | // Obfuscation is done. Move obfuscation map into malloced memory. |
2447 | I->set_obfuscation_map(Obfuscator::SerializeMap(T)); |
2448 | |
2449 | // Discard obfuscation mappings to avoid including them into snapshot. |
2450 | I->object_store()->set_obfuscation_map(Array::Handle(Z)); |
2451 | } |
2452 | |
2453 | void Precompiler::FinalizeAllClasses() { |
2454 | // Create a fresh Zone because kernel reading during class finalization |
2455 | // may create zone handles. Those handles may prevent garbage collection of |
2456 | // otherwise unreachable constants of dropped classes, which would |
2457 | // cause assertion failures during GC after classes are dropped. |
2458 | StackZone stack_zone(thread()); |
2459 | HANDLESCOPE(thread()); |
2460 | |
2461 | error_ = Library::FinalizeAllClasses(); |
2462 | if (!error_.IsNull()) { |
2463 | Jump(error_); |
2464 | } |
2465 | I->set_all_classes_finalized(true); |
2466 | } |
2467 | |
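// Creates the deopt info, finalizes the Code object together with all of
// its descriptor tables, and installs it as the function's code.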
2468 | void PrecompileParsedFunctionHelper::FinalizeCompilation( |
2469 | compiler::Assembler* assembler, |
2470 | FlowGraphCompiler* graph_compiler, |
2471 | FlowGraph* flow_graph, |
2472 | CodeStatistics* stats) { |
2473 | const Function& function = parsed_function()->function(); |
2474 | Zone* const zone = thread()->zone(); |
2475 | |
2476 | // CreateDeoptInfo uses the object pool and needs to be done before |
2477 | // FinalizeCode. |
2478 | const Array& deopt_info_array = |
2479 | Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler)); |
2480 | // Allocates instruction object. Since this occurs only at safepoint, |
2481 | // there can be no concurrent access to the instruction page. |
2482 | const auto pool_attachment = FLAG_use_bare_instructions |
2483 | ? Code::PoolAttachment::kNotAttachPool |
2484 | : Code::PoolAttachment::kAttachPool; |
2485 | const Code& code = Code::Handle( |
2486 | Code::FinalizeCodeAndNotify(function, graph_compiler, assembler, |
2487 | pool_attachment, optimized(), stats)); |
2488 | code.set_is_optimized(optimized()); |
2489 | code.set_owner(function); |
2490 | if (!function.IsOptimizable()) { |
2491 | // A function with huge unoptimized code can become non-optimizable |
2492 | // after generating unoptimized code. |
2493 | function.set_usage_counter(INT32_MIN); |
2494 | } |
2495 | |
2496 | graph_compiler->FinalizePcDescriptors(code); |
2497 | code.set_deopt_info_array(deopt_info_array); |
2498 | |
2499 | graph_compiler->FinalizeStackMaps(code); |
2500 | graph_compiler->FinalizeVarDescriptors(code); |
2501 | graph_compiler->FinalizeExceptionHandlers(code); |
2502 | graph_compiler->FinalizeCatchEntryMovesMap(code); |
2503 | graph_compiler->FinalizeStaticCallTargetsTable(code); |
2504 | graph_compiler->FinalizeCodeSourceMap(code); |
2505 | |
2506 | if (optimized()) { |
2507 | // Installs code while at safepoint. |
2508 | ASSERT(thread()->IsMutatorThread()); |
2509 | function.InstallOptimizedCode(code); |
2510 | } else { // not optimized. |
2511 | function.set_unoptimized_code(code); |
2512 | function.AttachCode(code); |
2513 | } |
2514 | } |
2515 | |
2516 | // Generate allocation stubs referenced by AllocateObject instructions. |
2517 | static void GenerateNecessaryAllocationStubs(FlowGraph* flow_graph) { |
2518 | for (auto block : flow_graph->reverse_postorder()) { |
2519 | for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) { |
2520 | if (auto allocation = it.Current()->AsAllocateObject()) { |
2521 | StubCode::GetAllocationStubForClass(allocation->cls()); |
2522 | } |
2523 | } |
2524 | } |
2525 | } |
2526 | |
2527 | // Return false if bailed out. |
2528 | bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) { |
2529 | ASSERT(CompilerState::Current().is_aot()); |
2530 | if (optimized() && !parsed_function()->function().IsOptimizable()) { |
2531 | // All functions compiled by precompiler must be optimizable. |
2532 | UNREACHABLE(); |
2533 | return false; |
2534 | } |
2535 | volatile bool is_compiled = false; |
2536 | Zone* const zone = thread()->zone(); |
2537 | HANDLESCOPE(thread()); |
2538 | |
2539 | // We may reattempt compilation if the function needs to be assembled using |
2540 | // far branches on ARM. In the else branch of the setjmp call, done is set to |
2541 | // false, and use_far_branches is set to true if there is a longjmp from the |
2542 | // ARM assembler. In all other paths through this while loop, done is set to |
2543 | // true. use_far_branches is always false on ia32 and x64. |
2544 | bool done = false; |
2545 | // volatile because the variable may be clobbered by a longjmp. |
2546 | volatile bool use_far_branches = false; |
2547 | SpeculativeInliningPolicy speculative_policy( |
2548 | true, FLAG_max_speculative_inlining_attempts); |
2549 | |
2550 | while (!done) { |
2551 | LongJumpScope jump; |
2552 | const intptr_t val = setjmp(*jump.Set()); |
2553 | if (val == 0) { |
2554 | FlowGraph* flow_graph = nullptr; |
2555 | ZoneGrowableArray<const ICData*>* ic_data_array = nullptr; |
2556 | const Function& function = parsed_function()->function(); |
2557 | |
2558 | CompilerState compiler_state(thread(), /*is_aot=*/true, |
2559 | CompilerState::ShouldTrace(function)); |
2560 | |
2561 | { |
2562 | ic_data_array = new (zone) ZoneGrowableArray<const ICData*>(); |
2563 | |
TIMELINE_DURATION(thread(), CompilerVerbose, "BuildFlowGraph");
2565 | flow_graph = |
2566 | pipeline->BuildFlowGraph(zone, parsed_function(), ic_data_array, |
2567 | Compiler::kNoOSRDeoptId, optimized()); |
2568 | } |
2569 | |
2570 | if (optimized()) { |
2571 | flow_graph->PopulateWithICData(function); |
2572 | } |
2573 | |
2574 | const bool print_flow_graph = |
2575 | (FLAG_print_flow_graph || |
2576 | (optimized() && FLAG_print_flow_graph_optimized)) && |
2577 | FlowGraphPrinter::ShouldPrint(function); |
2578 | |
2579 | if (print_flow_graph && !optimized()) { |
FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
2581 | } |
2582 | |
2583 | CompilerPassState pass_state(thread(), flow_graph, &speculative_policy, |
2584 | precompiler_); |
2585 | pass_state.reorder_blocks = |
2586 | FlowGraph::ShouldReorderBlocks(function, optimized()); |
2587 | |
2588 | if (function.ForceOptimize()) { |
2589 | ASSERT(optimized()); |
TIMELINE_DURATION(thread(), CompilerVerbose, "OptimizationPasses");
2591 | flow_graph = CompilerPass::RunForceOptimizedPipeline(CompilerPass::kAOT, |
2592 | &pass_state); |
2593 | } else if (optimized()) { |
TIMELINE_DURATION(thread(), CompilerVerbose, "OptimizationPasses");
2595 | |
2596 | pass_state.inline_id_to_function.Add(&function); |
// We do not add the token position now because we don't know the
// position of the inlined call until later. A side effect of this
// is that the length of |inline_id_to_function| is always larger
// than the length of |inline_id_to_token_pos| by one.
// The top scope function has no caller (-1). We use -1 because we
// expect all other token positions to be at an inlined call.
2604 | pass_state.caller_inline_id.Add(-1); |
2605 | |
2606 | AotCallSpecializer call_specializer(precompiler_, flow_graph, |
2607 | &speculative_policy); |
2608 | pass_state.call_specializer = &call_specializer; |
2609 | |
2610 | flow_graph = CompilerPass::RunPipeline(CompilerPass::kAOT, &pass_state); |
2611 | } |
2612 | |
2613 | ASSERT(pass_state.inline_id_to_function.length() == |
2614 | pass_state.caller_inline_id.length()); |
2615 | |
2616 | ASSERT(!FLAG_use_bare_instructions || precompiler_ != nullptr); |
2617 | |
2618 | if (FLAG_use_bare_instructions) { |
2619 | // When generating code in bare instruction mode all code objects |
2620 | // share the same global object pool. To reduce interleaving of |
2621 | // unrelated object pool entries from different code objects |
2622 | // we attempt to pregenerate stubs referenced by the code |
2623 | // we are going to generate. |
2624 | // |
// Reducing interleaving means reducing recompilations triggered by
// failure to commit the local object pool into the global object pool.
2627 | GenerateNecessaryAllocationStubs(flow_graph); |
2628 | } |
2629 | |
// Even in bare instructions mode we don't directly add objects into
// the global object pool because code generation can bail out
// (e.g. due to speculative optimization or branch offsets being
// too big). If we were adding objects into the global pool directly
// these recompilations would leave dead entries behind.
// Instead we add objects into an intermediary pool which gets
// committed into the global object pool at the end of the compilation.
// This makes the assumption that the global object pool itself does not
// grow during code generation - unfortunately this is not the case
// because we might have nested code generation (i.e. we might generate
// some stubs). If this indeed happens we retry the compilation
// (see the TryCommitToParent invocation below).
2642 | compiler::ObjectPoolBuilder object_pool_builder( |
2643 | FLAG_use_bare_instructions |
2644 | ? precompiler_->global_object_pool_builder() |
2645 | : nullptr); |
2646 | compiler::Assembler assembler(&object_pool_builder, use_far_branches); |
2647 | |
2648 | CodeStatistics* function_stats = NULL; |
2649 | if (FLAG_print_instruction_stats) { |
2650 | // At the moment we are leaking CodeStatistics objects for |
2651 | // simplicity because this is just a development mode flag. |
2652 | function_stats = new CodeStatistics(&assembler); |
2653 | } |
2654 | |
2655 | FlowGraphCompiler graph_compiler( |
2656 | &assembler, flow_graph, *parsed_function(), optimized(), |
2657 | &speculative_policy, pass_state.inline_id_to_function, |
2658 | pass_state.inline_id_to_token_pos, pass_state.caller_inline_id, |
2659 | ic_data_array, function_stats); |
2660 | { |
TIMELINE_DURATION(thread(), CompilerVerbose, "CompileGraph");
2662 | graph_compiler.CompileGraph(); |
2663 | } |
2664 | { |
TIMELINE_DURATION(thread(), CompilerVerbose, "FinalizeCompilation");
2666 | ASSERT(thread()->IsMutatorThread()); |
2667 | FinalizeCompilation(&assembler, &graph_compiler, flow_graph, |
2668 | function_stats); |
2669 | } |
2670 | |
2671 | if (precompiler_->phase() == |
2672 | Precompiler::Phase::kFixpointCodeGeneration) { |
2673 | for (intptr_t i = 0; i < graph_compiler.used_static_fields().length(); |
2674 | i++) { |
2675 | precompiler_->AddField(*graph_compiler.used_static_fields().At(i)); |
2676 | } |
2677 | |
2678 | const GrowableArray<const compiler::TableSelector*>& call_selectors = |
2679 | graph_compiler.dispatch_table_call_targets(); |
2680 | for (intptr_t i = 0; i < call_selectors.length(); i++) { |
2681 | precompiler_->AddTableSelector(call_selectors[i]); |
2682 | } |
2683 | } else { |
2684 | // We should not be generating code outside of these two specific |
2685 | // precompilation phases. |
2686 | RELEASE_ASSERT( |
2687 | precompiler_->phase() == |
2688 | Precompiler::Phase::kCompilingConstructorsForInstructionCounts); |
2689 | } |
2690 | |
2691 | // In bare instructions mode try adding all entries from the object |
2692 | // pool into the global object pool. This might fail if we have |
2693 | // nested code generation (i.e. we generated some stubs) which means |
2694 | // that some of the object indices we used are already occupied in the |
2695 | // global object pool. |
2696 | // |
2697 | // In this case we simply retry compilation assuming that we are not |
2698 | // going to hit this problem on the second attempt. |
2699 | // |
2700 | // Note: currently we can't assume that two compilations of the same |
2701 | // method will lead to the same IR due to instability of inlining |
2702 | // heuristics (under some conditions we might end up inlining |
2703 | // more aggressively on the second attempt). |
2704 | if (FLAG_use_bare_instructions && |
2705 | !object_pool_builder.TryCommitToParent()) { |
2706 | done = false; |
2707 | continue; |
2708 | } |
2709 | // Exit the loop and the function with the correct result value. |
2710 | is_compiled = true; |
2711 | done = true; |
2712 | } else { |
2713 | // We bailed out or we encountered an error. |
2714 | const Error& error = Error::Handle(thread()->StealStickyError()); |
2715 | |
2716 | if (error.raw() == Object::branch_offset_error().raw()) { |
2717 | // Compilation failed due to an out of range branch offset in the |
2718 | // assembler. We try again (done = false) with far branches enabled. |
2719 | done = false; |
2720 | ASSERT(!use_far_branches); |
2721 | use_far_branches = true; |
2722 | } else if (error.raw() == Object::speculative_inlining_error().raw()) { |
2723 | // The return value of setjmp is the deopt id of the check instruction |
2724 | // that caused the bailout. |
2725 | done = false; |
2726 | if (!speculative_policy.AllowsSpeculativeInlining()) { |
2727 | // Assert that we don't repeatedly retry speculation. |
2728 | UNREACHABLE(); |
2729 | } |
2730 | if (!speculative_policy.AddBlockedDeoptId(val)) { |
2731 | if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) { |
THR_Print("Disabled speculative inlining after %" Pd " attempts.\n",
2733 | speculative_policy.length()); |
2734 | } |
2735 | } |
2736 | } else { |
2737 | // If the error isn't due to an out of range branch offset, we don't |
2738 | // try again (done = true), and indicate that we did not finish |
2739 | // compiling (is_compiled = false). |
2740 | if (FLAG_trace_bailout) { |
THR_Print("%s\n", error.ToErrorCString());
2742 | } |
2743 | done = true; |
2744 | } |
2745 | |
2746 | if (error.IsLanguageError() && |
2747 | (LanguageError::Cast(error).kind() == Report::kBailout)) { |
2748 | // Discard the error if it was not a real error, but just a bailout. |
2749 | } else { |
2750 | // Otherwise, continue propagating. |
2751 | thread()->set_sticky_error(error); |
2752 | } |
2753 | is_compiled = false; |
2754 | } |
2755 | } |
2756 | return is_compiled; |
2757 | } |
2758 | |
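// Parses and compiles |function| under a longjmp-based error handler;
// returns the sticky error on failure and Error::null() on success.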
2759 | static ErrorPtr PrecompileFunctionHelper(Precompiler* precompiler, |
2760 | CompilationPipeline* pipeline, |
2761 | const Function& function, |
2762 | bool optimized) { |
2763 | // Check that we optimize, except if the function is not optimizable. |
2764 | ASSERT(CompilerState::Current().is_aot()); |
2765 | ASSERT(!function.IsOptimizable() || optimized); |
2766 | ASSERT(!function.HasCode()); |
2767 | LongJumpScope jump; |
2768 | if (setjmp(*jump.Set()) == 0) { |
2769 | Thread* const thread = Thread::Current(); |
2770 | StackZone stack_zone(thread); |
2771 | Zone* const zone = stack_zone.GetZone(); |
2772 | const bool trace_compiler = |
2773 | FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized); |
Timer per_compile_timer(trace_compiler, "Compilation time");
2775 | per_compile_timer.Start(); |
2776 | |
2777 | ParsedFunction* parsed_function = new (zone) |
2778 | ParsedFunction(thread, Function::ZoneHandle(zone, function.raw())); |
2779 | if (trace_compiler) { |
THR_Print("Precompiling %sfunction: '%s' @ token %" Pd ", size %" Pd "\n",
(optimized ? "optimized " : ""),
2782 | function.ToFullyQualifiedCString(), function.token_pos().Pos(), |
2783 | (function.end_token_pos().Pos() - function.token_pos().Pos())); |
2784 | } |
2785 | { |
2786 | HANDLESCOPE(thread); |
2787 | pipeline->ParseFunction(parsed_function); |
2788 | } |
2789 | |
2790 | PrecompileParsedFunctionHelper helper(precompiler, parsed_function, |
2791 | optimized); |
2792 | const bool success = helper.Compile(pipeline); |
2793 | if (!success) { |
2794 | // We got an error during compilation. |
2795 | const Error& error = Error::Handle(thread->StealStickyError()); |
2796 | ASSERT(error.IsLanguageError() && |
2797 | LanguageError::Cast(error).kind() != Report::kBailout); |
2798 | return error.raw(); |
2799 | } |
2800 | |
2801 | per_compile_timer.Stop(); |
2802 | |
2803 | if (trace_compiler) { |
THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n",
2805 | function.ToFullyQualifiedCString(), |
2806 | Code::Handle(function.CurrentCode()).PayloadStart(), |
2807 | Code::Handle(function.CurrentCode()).Size(), |
2808 | per_compile_timer.TotalElapsedTime()); |
2809 | } |
2810 | |
2811 | if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) { |
2812 | Code& code = Code::Handle(function.CurrentCode()); |
2813 | Disassembler::DisassembleCode(function, code, optimized); |
2814 | } else if (FLAG_disassemble_optimized && optimized && |
2815 | FlowGraphPrinter::ShouldPrint(function)) { |
2816 | Code& code = Code::Handle(function.CurrentCode()); |
2817 | Disassembler::DisassembleCode(function, code, true); |
2818 | } |
2819 | return Error::null(); |
2820 | } else { |
2821 | Thread* const thread = Thread::Current(); |
2822 | StackZone stack_zone(thread); |
2823 | // We got an error during compilation. |
2824 | const Error& error = Error::Handle(thread->StealStickyError()); |
2825 | // Precompilation may encounter compile-time errors. |
2826 | // Do not attempt to optimize functions that can cause errors. |
2827 | function.set_is_optimizable(false); |
2828 | return error.raw(); |
2829 | } |
2830 | UNREACHABLE(); |
2831 | return Error::null(); |
2832 | } |
2833 | |
2834 | ErrorPtr Precompiler::CompileFunction(Precompiler* precompiler, |
2835 | Thread* thread, |
2836 | Zone* zone, |
2837 | const Function& function) { |
2838 | VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId); |
TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "CompileFunction", function);
2840 | |
2841 | ASSERT(CompilerState::Current().is_aot()); |
2842 | const bool optimized = function.IsOptimizable(); // False for natives. |
2843 | DartCompilationPipeline pipeline; |
2844 | if (precompiler->is_tracing()) { |
2845 | precompiler->tracer_->WriteCompileFunctionEvent(function); |
2846 | } |
2847 | |
2848 | return PrecompileFunctionHelper(precompiler, &pipeline, function, optimized); |
2849 | } |
2850 | |
2851 | Obfuscator::Obfuscator(Thread* thread, const String& private_key) |
2852 | : state_(NULL) { |
2853 | Isolate* isolate = thread->isolate(); |
2854 | Zone* zone = thread->zone(); |
2855 | if (!isolate->obfuscate()) { |
2856 | // Nothing to do. |
2857 | return; |
2858 | } |
2859 | |
// Create ObfuscationState from ObjectStore::obfuscation_map().
2861 | ObjectStore* store = thread->isolate()->object_store(); |
2862 | Array& obfuscation_state = Array::Handle(zone, store->obfuscation_map()); |
2863 | |
2864 | if (store->obfuscation_map() == Array::null()) { |
2865 | // We are just starting the obfuscation. Create initial state. |
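// Saved state is an array of kSavedStateSize slots: the last generated
// name lives at kSavedStateNameIndex and the renames map at
// kSavedStateRenamesIndex (see ObfuscationState::SaveState below).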
2866 | const int kInitialPrivateCapacity = 256; |
2867 | obfuscation_state = Array::New(kSavedStateSize); |
obfuscation_state.SetAt(
    kSavedStateRenamesIndex,
    Array::Handle(zone, HashTables::New<ObfuscationMap>(
                            kInitialPrivateCapacity, Heap::kOld)));
2871 | } |
2872 | |
2873 | state_ = new (zone) ObfuscationState(thread, obfuscation_state, private_key); |
2874 | |
2875 | if (store->obfuscation_map() == Array::null()) { |
2876 | // We are just starting the obfuscation. Initialize the renaming map. |
2877 | // Note: InitializeRenamingMap uses state_. |
2878 | InitializeRenamingMap(isolate); |
2879 | } |
2880 | } |
2881 | |
2882 | Obfuscator::~Obfuscator() { |
2883 | if (state_ != NULL) { |
2884 | state_->SaveState(); |
2885 | } |
2886 | } |
2887 | |
2888 | void Obfuscator::InitializeRenamingMap(Isolate* isolate) { |
2889 | // Prevent renaming of all pseudo-keywords and operators. |
// Note: not all pseudo-keywords are mentioned in DART_KEYWORD_LIST
// (for example 'hide', 'show' and async-related keywords are omitted).
// Those are protected from renaming as part of the predefined symbols
// below.
2893 | #define PREVENT_RENAMING(name, value, priority, attr) \ |
2894 | do { \ |
2895 | if (Token::CanBeOverloaded(Token::name) || \ |
2896 | ((Token::attr & Token::kPseudoKeyword) != 0)) { \ |
2897 | PreventRenaming(value); \ |
2898 | } \ |
2899 | } while (0); |
2900 | |
2901 | DART_TOKEN_LIST(PREVENT_RENAMING) |
2902 | DART_KEYWORD_LIST(PREVENT_RENAMING) |
2903 | #undef PREVENT_RENAMING |
2904 | |
// 'this' is normally a keyword token, but when it occurs inside a string
// interpolation it would be obfuscated without this exclusion.
PreventRenaming("this");
2908 | |
2909 | // Protect all symbols from renaming. |
2910 | #define PREVENT_RENAMING(name, value) PreventRenaming(value); |
2911 | PREDEFINED_SYMBOLS_LIST(PREVENT_RENAMING) |
2912 | #undef PREVENT_RENAMING |
2913 | |
2914 | // Protect NativeFieldWrapperClassX names from being obfuscated. Those |
2915 | // classes are created manually by the runtime system. |
// TODO(dartbug.com/30524) instead call Obfuscator::Rename from the place
// where these are created.
PreventRenaming("NativeFieldWrapperClass1");
PreventRenaming("NativeFieldWrapperClass2");
PreventRenaming("NativeFieldWrapperClass3");
PreventRenaming("NativeFieldWrapperClass4");
2922 | |
2923 | // Prevent renaming of ClassID.cid* fields. These fields are injected by |
2924 | // runtime. |
// TODO(dartbug.com/30524) instead call Obfuscator::Rename from the place
// where these are created.
2927 | #define CLASS_LIST_WITH_NULL(V) \ |
2928 | V(Null) \ |
2929 | CLASS_LIST_NO_OBJECT(V) |
2930 | #define PREVENT_RENAMING(clazz) PreventRenaming("cid" #clazz); |
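// The invocation below expands to PreventRenaming("cidNull") plus one such
// call for every class in CLASS_LIST_NO_OBJECT.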
2931 | CLASS_LIST_WITH_NULL(PREVENT_RENAMING) |
2932 | #undef PREVENT_RENAMING |
2933 | #undef CLASS_LIST_WITH_NULL |
2934 | |
2935 | // Prevent renaming of methods that are looked up by method recognizer. |
// TODO(dartbug.com/30524) instead call Obfuscator::Rename from the place
// where these are looked up.
2938 | #define PREVENT_RENAMING(class_name, function_name, recognized_enum, \ |
2939 | fingerprint) \ |
2940 | do { \ |
2941 | PreventRenaming(#class_name); \ |
2942 | PreventRenaming(#function_name); \ |
2943 | } while (0); |
2944 | RECOGNIZED_LIST(PREVENT_RENAMING) |
2945 | #undef PREVENT_RENAMING |
2946 | |
// Likewise prevent renaming of classes and methods in the polymorphic
// target list; these are also looked up by name.
// TODO(dartbug.com/30524) instead call Obfuscator::Rename from the place
// where these are looked up.
2950 | #define PREVENT_RENAMING(class_name, function_name, recognized_enum, \ |
2951 | fingerprint) \ |
2952 | do { \ |
2953 | PreventRenaming(#class_name); \ |
2954 | PreventRenaming(#function_name); \ |
2955 | } while (0); |
2956 | POLYMORPHIC_TARGET_LIST(PREVENT_RENAMING) |
2957 | #undef PREVENT_RENAMING |
2958 | |
// These are not mentioned by entry points but are still looked up by name.
// (They are not listed as entry points because they are no longer needed
// after compilation.)
PreventRenaming("_resolveScriptUri");
2963 | |
// The precompiler looks up "main" by name.
// TODO(dartbug.com/30524) instead call Obfuscator::Rename from the place
// where these are created.
PreventRenaming("main");
2968 | |
2969 | // Fast path for common conditional import. See Deobfuscate method. |
PreventRenaming("dart");
PreventRenaming("library");
PreventRenaming("io");
PreventRenaming("html");
2974 | |
2975 | // Looked up by name via "DartUtils::GetDartType". |
PreventRenaming("_RandomAccessFileOpsImpl");
PreventRenaming("_NamespaceImpl");
2978 | } |
2979 | |
2980 | StringPtr Obfuscator::ObfuscationState::RenameImpl(const String& name, |
2981 | bool atomic) { |
2982 | ASSERT(name.IsSymbol()); |
2983 | |
2984 | renamed_ ^= renames_.GetOrNull(name); |
2985 | if (renamed_.IsNull()) { |
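// No cached rename for this symbol yet: build one and memoize it so the
// same name always maps to the same rename.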
2986 | renamed_ = BuildRename(name, atomic); |
2987 | renames_.UpdateOrInsert(name, renamed_); |
2988 | } |
2989 | return renamed_.raw(); |
2990 | } |
2991 | |
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
2996 | |
2997 | void Obfuscator::PreventRenaming(const char* name) { |
// For constructor names of the form Class.name, skip the class name (if
// any) and the dot.
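// For example, a hypothetical constructor name "Foo.bar" is protected as
// "bar", and "get:length" would be protected as "length".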
2999 | const char* dot = strchr(name, '.'); |
3000 | if (dot != NULL) { |
3001 | name = dot + 1; |
3002 | } |
3003 | |
3004 | // Empty name: do nothing. |
3005 | if (name[0] == '\0') { |
3006 | return; |
3007 | } |
3008 | |
3009 | // Skip get: and set: prefixes. |
3010 | if (strncmp(name, kGetterPrefix, kGetterPrefixLength) == 0) { |
3011 | name = name + kGetterPrefixLength; |
3012 | } else if (strncmp(name, kSetterPrefix, kSetterPrefixLength) == 0) { |
3013 | name = name + kSetterPrefixLength; |
3014 | } |
3015 | |
3016 | state_->PreventRenaming(name); |
3017 | } |
3018 | |
3019 | void Obfuscator::ObfuscationState::SaveState() { |
3020 | saved_state_.SetAt(kSavedStateNameIndex, String::Handle(String::New(name_))); |
3021 | saved_state_.SetAt(kSavedStateRenamesIndex, renames_.Release()); |
3022 | thread_->isolate()->object_store()->set_obfuscation_map(saved_state_); |
3023 | } |
3024 | |
3025 | void Obfuscator::ObfuscationState::PreventRenaming(const char* name) { |
3026 | string_ = Symbols::New(thread_, name); |
3027 | PreventRenaming(string_); |
3028 | } |
3029 | |
3030 | void Obfuscator::ObfuscationState::PreventRenaming(const String& name) { |
3031 | renames_.UpdateOrInsert(name, name); |
3032 | } |
3033 | |
3034 | void Obfuscator::ObfuscationState::NextName() { |
3035 | // We apply the following rules: |
3036 | // |
3037 | // inc(a) = b, ... , inc(z) = A, ..., inc(Z) = a & carry. |
3038 | // |
3039 | for (intptr_t i = 0;; i++) { |
3040 | const char digit = name_[i]; |
3041 | if (digit == '\0') { |
3042 | name_[i] = 'a'; |
3043 | } else if (digit < 'Z') { |
3044 | name_[i]++; |
3045 | } else if (digit == 'Z') { |
3046 | name_[i] = 'a'; |
3047 | continue; // Carry. |
3048 | } else if (digit < 'z') { |
3049 | name_[i]++; |
3050 | } else { |
3051 | name_[i] = 'A'; |
3052 | } |
3053 | break; |
3054 | } |
3055 | } |
3056 | |
3057 | StringPtr Obfuscator::ObfuscationState::NewAtomicRename( |
3058 | bool should_be_private) { |
3059 | do { |
3060 | NextName(); |
renamed_ = Symbols::NewFormatted(thread_, "%s%s",
                                 should_be_private ? "_" : "", name_);
3063 | // Must check if our generated name clashes with something that will |
3064 | // have an identity renaming. |
3065 | } while (renames_.GetOrNull(renamed_) == renamed_.raw()); |
3066 | return renamed_.raw(); |
3067 | } |
3068 | |
3069 | StringPtr Obfuscator::ObfuscationState::BuildRename(const String& name, |
3070 | bool atomic) { |
3071 | if (atomic) { |
3072 | return NewAtomicRename(name.CharAt(0) == '_'); |
3073 | } |
3074 | |
3075 | intptr_t start = 0; |
3076 | intptr_t end = name.Length(); |
3077 | |
3078 | // Follow the rules: |
3079 | // |
3080 | // Rename(get:foo) = get:Rename(foo). |
3081 | // Rename(set:foo) = set:Rename(foo). |
3082 | // |
3083 | bool is_getter = false; |
3084 | bool is_setter = false; |
3085 | if (Field::IsGetterName(name)) { |
3086 | is_getter = true; |
3087 | start = kGetterPrefixLength; |
3088 | } else if (Field::IsSetterName(name)) { |
3089 | is_setter = true; |
3090 | start = kSetterPrefixLength; |
3091 | } |
3092 | |
3093 | // Follow the rule: |
3094 | // |
3095 | // Rename(_ident@key) = Rename(_ident)@private_key_. |
3096 | // |
3097 | const bool is_private = name.CharAt(start) == '_'; |
3098 | if (is_private) { |
3099 | // Find the first '@'. |
3100 | intptr_t i = start; |
3101 | while (i < name.Length() && name.CharAt(i) != '@') { |
3102 | i++; |
3103 | } |
3104 | end = i; |
3105 | } |
3106 | |
3107 | if (is_getter || is_setter || is_private) { |
3108 | string_ = Symbols::New(thread_, name, start, end - start); |
3109 | // It's OK to call RenameImpl() recursively because 'string_' is used |
3110 | // only if atomic == false. |
3111 | string_ = RenameImpl(string_, /*atomic=*/true); |
3112 | if (is_private && (end < name.Length())) { |
3113 | string_ = Symbols::FromConcat(thread_, string_, private_key_); |
3114 | } |
3115 | if (is_getter) { |
3116 | return Symbols::FromGet(thread_, string_); |
3117 | } else if (is_setter) { |
3118 | return Symbols::FromSet(thread_, string_); |
3119 | } |
3120 | return string_.raw(); |
3121 | } else { |
3122 | return NewAtomicRename(is_private); |
3123 | } |
3124 | } |
3125 | |
3126 | void Obfuscator::Deobfuscate(Thread* thread, |
3127 | const GrowableObjectArray& pieces) { |
3128 | const Array& obfuscation_state = Array::Handle( |
3129 | thread->zone(), thread->isolate()->object_store()->obfuscation_map()); |
3130 | if (obfuscation_state.IsNull()) { |
3131 | return; |
3132 | } |
3133 | |
3134 | const Array& renames = Array::Handle( |
3135 | thread->zone(), GetRenamesFromSavedState(obfuscation_state)); |
3136 | |
3137 | ObfuscationMap renames_map(renames.raw()); |
3138 | String& piece = String::Handle(); |
3139 | for (intptr_t i = 0; i < pieces.Length(); i++) { |
3140 | piece ^= pieces.At(i); |
3141 | ASSERT(piece.IsSymbol()); |
3142 | |
3143 | // Fast path: skip '.' |
3144 | if (piece.raw() == Symbols::Dot().raw()) { |
3145 | continue; |
3146 | } |
3147 | |
3148 | // Fast path: check if piece has an identity obfuscation. |
3149 | if (renames_map.GetOrNull(piece) == piece.raw()) { |
3150 | continue; |
3151 | } |
3152 | |
// Search through the whole obfuscation map until a matching value is found.
// We use linear search instead of building a reverse mapping because we
// assume that the Deobfuscate() method is almost never called.
3156 | ObfuscationMap::Iterator it(&renames_map); |
3157 | while (it.MoveNext()) { |
3158 | const intptr_t entry = it.Current(); |
3159 | if (renames_map.GetPayload(entry, 0) == piece.raw()) { |
3160 | piece ^= renames_map.GetKey(entry); |
3161 | pieces.SetAt(i, piece); |
3162 | break; |
3163 | } |
3164 | } |
3165 | } |
3166 | renames_map.Release(); |
3167 | } |
3168 | |
3169 | static const char* StringToCString(const String& str) { |
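// Returns a NUL-terminated UTF-8 copy of 'str'; the buffer is allocated
// with new[] and ownership passes to the caller.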
3170 | const intptr_t len = Utf8::Length(str); |
3171 | char* result = new char[len + 1]; |
3172 | str.ToUTF8(reinterpret_cast<uint8_t*>(result), len); |
3173 | result[len] = 0; |
3174 | return result; |
3175 | } |
3176 | |
3177 | const char** Obfuscator::SerializeMap(Thread* thread) { |
3178 | const Array& obfuscation_state = Array::Handle( |
3179 | thread->zone(), thread->isolate()->object_store()->obfuscation_map()); |
3180 | if (obfuscation_state.IsNull()) { |
3181 | return NULL; |
3182 | } |
3183 | |
3184 | const Array& renames = Array::Handle( |
3185 | thread->zone(), GetRenamesFromSavedState(obfuscation_state)); |
3186 | ObfuscationMap renames_map(renames.raw()); |
3187 | |
3188 | const char** result = new const char*[renames_map.NumOccupied() * 2 + 1]; |
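// Layout of the result: {original_0, renamed_0, original_1, renamed_1,
// ..., NULL}.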
3189 | intptr_t idx = 0; |
3190 | String& str = String::Handle(); |
3191 | |
3192 | ObfuscationMap::Iterator it(&renames_map); |
3193 | while (it.MoveNext()) { |
3194 | const intptr_t entry = it.Current(); |
3195 | str ^= renames_map.GetKey(entry); |
3196 | result[idx++] = StringToCString(str); |
3197 | str ^= renames_map.GetPayload(entry, 0); |
3198 | result[idx++] = StringToCString(str); |
3199 | } |
3200 | result[idx++] = NULL; |
3201 | renames_map.Release(); |
3202 | |
3203 | return result; |
3204 | } |
3205 | |
3206 | #endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32) |
3207 | |
3208 | } // namespace dart |
3209 | |