// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/runtime_api.h"
#include "vm/globals.h"

// For `StubCodeCompiler::GenerateAllocateUnhandledExceptionStub`
#include "vm/compiler/backend/il.h"

#define SHOULD_NOT_INCLUDE_RUNTIME

#include "vm/compiler/stub_code_compiler.h"

#include "vm/compiler/assembler/assembler.h"

#define __ assembler->

namespace dart {

namespace compiler {

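// Returns the word offset, relative to FP, of the stack slot holding
// `cpu_register` when all registers in kDartAvailableCpuRegs are pushed in
// ascending register-number order on top of the saved FP and return address.
//
// Worked example with a hypothetical available set (the actual contents of
// kDartAvailableCpuRegs are architecture-specific): if the set were
// {R0, R2, R5}, the offsets would be R0 -> 2, R2 -> 3, and R5 -> 4, since
// the first two slots are always taken by the saved FP and the saved PC.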
intptr_t StubCodeCompiler::WordOffsetFromFpToCpuRegister(
    Register cpu_register) {
  ASSERT(RegisterSet::Contains(kDartAvailableCpuRegs, cpu_register));

  // Skip FP + saved PC.
  intptr_t slots_from_fp = 2;
  for (intptr_t i = 0; i < kNumberOfCpuRegisters; i++) {
    Register reg = static_cast<Register>(i);
    if (reg == cpu_register) break;
    if (RegisterSet::Contains(kDartAvailableCpuRegs, reg)) {
      slots_from_fp++;
    }
  }
  return slots_from_fp;
}

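// The initialization stubs below share one calling pattern for runtime
// entries: push NullObject() to reserve a stack slot for the result, push the
// arguments, call the runtime entry, drop the arguments, and pop the result
// into its ABI register. For this stub the stack under the runtime call is:
//
//   null (result slot)
//   field              <- top of stack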
void StubCodeCompiler::GenerateInitStaticFieldStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for result.
  __ PushRegister(InitStaticFieldABI::kFieldReg);
  __ CallRuntime(kInitStaticFieldRuntimeEntry, /*argument_count=*/1);
  __ Drop(1);
  __ PopRegister(InitStaticFieldABI::kResultReg);
  __ LeaveStubFrame();
  __ Ret();
}

void StubCodeCompiler::GenerateInitInstanceFieldStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for result.
  __ PushRegister(InitInstanceFieldABI::kInstanceReg);
  __ PushRegister(InitInstanceFieldABI::kFieldReg);
  __ CallRuntime(kInitInstanceFieldRuntimeEntry, /*argument_count=*/2);
  __ Drop(2);
  __ PopRegister(InitInstanceFieldABI::kResultReg);
  __ LeaveStubFrame();
  __ Ret();
}

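// Generates the body shared by the InitLateInstanceField and
// InitLateFinalInstanceField stubs. At the Dart level the generated code
// behaves roughly like the following sketch (`sentinel` marks an
// uninitialized late field; the names are illustrative, not real API):
//
//   value = field.initializerFunction(instance);
//   if (is_final && instance[field.offset] != sentinel) {
//     throw LateInitializationError(field);
//   }
//   instance[field.offset] = value;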
void StubCodeCompiler::GenerateInitLateInstanceFieldStub(Assembler* assembler,
                                                         bool is_final) {
  const Register kFunctionReg = InitLateInstanceFieldInternalRegs::kFunctionReg;
  const Register kInstanceReg = InitInstanceFieldABI::kInstanceReg;
  const Register kFieldReg = InitInstanceFieldABI::kFieldReg;
  const Register kAddressReg = InitLateInstanceFieldInternalRegs::kAddressReg;
  const Register kScratchReg = InitLateInstanceFieldInternalRegs::kScratchReg;

  __ EnterStubFrame();
  // Save the instance and field; both are needed again after the
  // initializer call.
  __ PushRegisterPair(kInstanceReg, kFieldReg);

  // Call the initializer function with the instance as its sole argument.
  __ PushRegister(kInstanceReg);

  static_assert(
      InitInstanceFieldABI::kResultReg == CallingConventions::kReturnReg,
      "Result is a return value from initializer");

  __ LoadField(kFunctionReg,
               FieldAddress(InitInstanceFieldABI::kFieldReg,
                            target::Field::initializer_function_offset()));
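  // In bare-instructions AOT mode, calls go directly through entry points and
  // neither a code object nor an arguments descriptor needs to be set up. In
  // all other modes the callee expects CODE_REG (and, when the interpreter is
  // enabled, a real arguments descriptor in ARGS_DESC_REG) to be populated.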
  if (!FLAG_precompiled_mode || !FLAG_use_bare_instructions) {
    __ LoadField(CODE_REG,
                 FieldAddress(kFunctionReg, target::Function::code_offset()));
    if (FLAG_enable_interpreter) {
      // InterpretCall stub needs arguments descriptor for all function calls.
      __ LoadObject(ARGS_DESC_REG,
                    CastHandle<Object>(OneArgArgumentsDescriptor()));
    } else {
      // Load a GC-safe value for the arguments descriptor (unused but tagged).
      __ LoadImmediate(ARGS_DESC_REG, 0);
    }
  }
  __ Call(FieldAddress(kFunctionReg, target::Function::entry_point_offset()));
  __ Drop(1);  // Drop argument.

  __ PopRegisterPair(kInstanceReg, kFieldReg);
  // Re-load the field's offset within the instance and compute the address
  // of the field.
  __ LoadField(
      kScratchReg,
      FieldAddress(kFieldReg, target::Field::host_offset_or_field_id_offset()));
  __ LoadFieldAddressForRegOffset(kAddressReg, kInstanceReg, kScratchReg);

  Label throw_exception;
  if (is_final) {
    // A late final field may only be initialized once: throw if the field no
    // longer contains the sentinel value.
    __ LoadMemoryValue(kScratchReg, kAddressReg, 0);
    __ CompareObject(kScratchReg, SentinelObject());
    __ BranchIf(NOT_EQUAL, &throw_exception);
  }

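  // Store the result into the field. StoreIntoObject performs the store with
  // a write barrier, since the initializer may have returned a new-space
  // value that is being stored into a potentially old-space instance.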
#if defined(TARGET_ARCH_IA32)
  // On IA32 StoreIntoObject clobbers the value register, so the scratch
  // register is used in StoreIntoObject to preserve kResultReg.
  __ MoveRegister(kScratchReg, InitInstanceFieldABI::kResultReg);
  __ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0), kScratchReg);
#else
  __ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0),
                     InitInstanceFieldABI::kResultReg);
#endif  // defined(TARGET_ARCH_IA32)

  __ LeaveStubFrame();
  __ Ret();

  if (is_final) {
    __ Bind(&throw_exception);
    __ PushObject(NullObject());  // Make room for (unused) result.
    __ PushRegister(kFieldReg);
    __ CallRuntime(kLateInitializationErrorRuntimeEntry,
                   /*argument_count=*/1);
    __ Breakpoint();
  }
}

void StubCodeCompiler::GenerateInitLateInstanceFieldStub(Assembler* assembler) {
  GenerateInitLateInstanceFieldStub(assembler, /*is_final=*/false);
}

void StubCodeCompiler::GenerateInitLateFinalInstanceFieldStub(
    Assembler* assembler) {
  GenerateInitLateInstanceFieldStub(assembler, /*is_final=*/true);
}

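// The following stubs call runtime entries that do not return, so instead of
// a LeaveStubFrame()/Ret() epilogue each ends in a Breakpoint() that traps if
// the runtime call ever does return.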
void StubCodeCompiler::GenerateThrowStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for (unused) result.
  __ PushRegister(ThrowABI::kExceptionReg);
  __ CallRuntime(kThrowRuntimeEntry, /*argument_count=*/1);
  __ Breakpoint();
}

void StubCodeCompiler::GenerateReThrowStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for (unused) result.
  __ PushRegister(ReThrowABI::kExceptionReg);
  __ PushRegister(ReThrowABI::kStackTraceReg);
  __ CallRuntime(kReThrowRuntimeEntry, /*argument_count=*/2);
  __ Breakpoint();
}

void StubCodeCompiler::GenerateAssertBooleanStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for (unused) result.
  __ PushRegister(AssertBooleanABI::kObjectReg);
  __ CallRuntime(kNonBoolTypeErrorRuntimeEntry, /*argument_count=*/1);
  __ Breakpoint();
}

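// Implements `obj is T` checks that reach the runtime: the five values
// described by TypeTestABI (instance, destination type, instantiator and
// function type argument vectors, and the SubtypeTestCache) are passed to the
// Instanceof runtime entry, and the boolean result is returned in
// TypeTestABI::kResultReg.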
void StubCodeCompiler::GenerateInstanceOfStub(Assembler* assembler) {
  __ EnterStubFrame();
  __ PushObject(NullObject());  // Make room for the result.
  __ PushRegister(TypeTestABI::kInstanceReg);
  __ PushRegister(TypeTestABI::kDstTypeReg);
  __ PushRegister(TypeTestABI::kInstantiatorTypeArgumentsReg);
  __ PushRegister(TypeTestABI::kFunctionTypeArgumentsReg);
  __ PushRegister(TypeTestABI::kSubtypeTestCacheReg);
  __ CallRuntime(kInstanceofRuntimeEntry, /*argument_count=*/5);
  __ Drop(5);
  __ PopRegister(TypeTestABI::kResultReg);
  __ LeaveStubFrame();
  __ Ret();
}

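// Unlike the stubs above, generating this stub requires access to runtime
// state (the current thread and its isolate's class table), which is why
// il.h is included near the top of this file.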
// The UnhandledException class lives in the VM isolate, so it cannot cache
// an allocation stub for itself. Instead, we cache it in the stub code list.
void StubCodeCompiler::GenerateAllocateUnhandledExceptionStub(
    Assembler* assembler) {
  Thread* thread = Thread::Current();
  auto class_table = thread->isolate()->class_table();
  ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
  const auto& cls = Class::ZoneHandle(thread->zone(),
                                      class_table->At(kUnhandledExceptionCid));
  ASSERT(!cls.IsNull());

  GenerateAllocationStubForClass(assembler, nullptr, cls,
                                 Code::Handle(Code::null()),
                                 Code::Handle(Code::null()));
}

}  // namespace compiler

}  // namespace dart