1/*
2 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 */
23
24#include "precompiled.hpp"
25#include "asm/macroAssembler.inline.hpp"
26#include "code/codeBlob.hpp"
27#include "gc/z/zBarrier.inline.hpp"
28#include "gc/z/zBarrierSet.hpp"
29#include "gc/z/zBarrierSetAssembler.hpp"
30#include "gc/z/zBarrierSetRuntime.hpp"
31#include "memory/resourceArea.hpp"
32#include "runtime/stubCodeGenerator.hpp"
33#include "utilities/macros.hpp"
34#ifdef COMPILER1
35#include "c1/c1_LIRAssembler.hpp"
36#include "c1/c1_MacroAssembler.hpp"
37#include "gc/z/c1/zBarrierSetC1.hpp"
38#endif // COMPILER1
39
// Value-initialize (zero) the per-register slow-path stub tables; they are
// populated later by barrier_stubs_init().
ZBarrierSetAssembler::ZBarrierSetAssembler() :
    _load_barrier_slow_stub(),
    _load_barrier_weak_slow_stub() {}
43
44#ifdef PRODUCT
45#define BLOCK_COMMENT(str) /* nothing */
46#else
47#define BLOCK_COMMENT(str) __ block_comment(str)
48#endif
49
50#undef __
51#define __ masm->
52
53static void call_vm(MacroAssembler* masm,
54 address entry_point,
55 Register arg0,
56 Register arg1) {
57 // Setup arguments
58 if (arg1 == c_rarg0) {
59 if (arg0 == c_rarg1) {
60 __ xchgptr(c_rarg1, c_rarg0);
61 } else {
62 __ movptr(c_rarg1, arg1);
63 __ movptr(c_rarg0, arg0);
64 }
65 } else {
66 if (arg0 != c_rarg0) {
67 __ movptr(c_rarg0, arg0);
68 }
69 if (arg1 != c_rarg1) {
70 __ movptr(c_rarg1, arg1);
71 }
72 }
73
74 // Call VM
75 __ MacroAssembler::call_VM_leaf_base(entry_point, 2);
76}
77
// Emits the ZGC load barrier for an oop load: loads the oop from src into
// dst, tests it against the thread-local address bad mask, and on failure
// calls into the VM runtime to heal the oop. The (possibly healed) oop is
// left in dst.
//
// dst        - destination register for the loaded oop
// src        - memory operand holding the oop field
// tmp1       - scratch register; if noreg, r12 is saved, used and restored
// tmp_thread - only forwarded to the default implementation in the
//              no-barrier case (r15 always holds the thread on x86-64)
void ZBarrierSetAssembler::load_at(MacroAssembler* masm,
                                   DecoratorSet decorators,
                                   BasicType type,
                                   Register dst,
                                   Address src,
                                   Register tmp1,
                                   Register tmp_thread) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed - delegate to the default implementation
    BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::load_at {");

  // Allocate scratch register. If the caller did not supply one,
  // commandeer r12 and preserve its original value around the barrier.
  Register scratch = tmp1;
  if (tmp1 == noreg) {
    scratch = r12;
    __ push(scratch);
  }

  assert_different_registers(dst, scratch);

  Label done;

  //
  // Fast Path
  //

  // Load address
  __ lea(scratch, src);

  // Load oop at address
  __ movptr(dst, Address(scratch, 0));

  // Test address bad mask - if no bad bits are set the oop is good
  // and nothing more needs to be done
  __ testptr(dst, address_bad_mask_from_thread(r15_thread));
  __ jcc(Assembler::zero, done);

  //
  // Slow path
  //

  // Save registers that the runtime call below may clobber
  // (rax is handled separately after the call, see below)
  __ push(rax);
  __ push(rcx);
  __ push(rdx);
  __ push(rdi);
  __ push(rsi);
  __ push(r8);
  __ push(r9);
  __ push(r10);
  __ push(r11);

  // We may end up here from generate_native_wrapper, then the method may have
  // floats as arguments, and we must spill them before calling the VM runtime
  // leaf. From the interpreter all floats are passed on the stack.
  assert(Argument::n_float_register_parameters_j == 8, "Assumption");
  const int xmm_size = wordSize * 2;  // 16 bytes per xmm register
  const int xmm_spill_size = xmm_size * Argument::n_float_register_parameters_j;
  __ subptr(rsp, xmm_spill_size);
  __ movdqu(Address(rsp, xmm_size * 7), xmm7);
  __ movdqu(Address(rsp, xmm_size * 6), xmm6);
  __ movdqu(Address(rsp, xmm_size * 5), xmm5);
  __ movdqu(Address(rsp, xmm_size * 4), xmm4);
  __ movdqu(Address(rsp, xmm_size * 3), xmm3);
  __ movdqu(Address(rsp, xmm_size * 2), xmm2);
  __ movdqu(Address(rsp, xmm_size * 1), xmm1);
  __ movdqu(Address(rsp, xmm_size * 0), xmm0);

  // Call VM with the bad oop (dst) and the field address (scratch)
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), dst, scratch);

  // Restore registers (in reverse order of the spills above)
  __ movdqu(xmm0, Address(rsp, xmm_size * 0));
  __ movdqu(xmm1, Address(rsp, xmm_size * 1));
  __ movdqu(xmm2, Address(rsp, xmm_size * 2));
  __ movdqu(xmm3, Address(rsp, xmm_size * 3));
  __ movdqu(xmm4, Address(rsp, xmm_size * 4));
  __ movdqu(xmm5, Address(rsp, xmm_size * 5));
  __ movdqu(xmm6, Address(rsp, xmm_size * 6));
  __ movdqu(xmm7, Address(rsp, xmm_size * 7));
  __ addptr(rsp, xmm_spill_size);

  __ pop(r11);
  __ pop(r10);
  __ pop(r9);
  __ pop(r8);
  __ pop(rsi);
  __ pop(rdi);
  __ pop(rdx);
  __ pop(rcx);

  // The healed oop is returned in rax. If dst is rax, simply discard the
  // saved rax stack slot; otherwise move the result to dst and restore rax.
  if (dst == rax) {
    __ addptr(rsp, wordSize);
  } else {
    __ movptr(dst, rax);
    __ pop(rax);
  }

  __ bind(done);

  // Restore scratch register (only pushed when the caller supplied none)
  if (tmp1 == noreg) {
    __ pop(scratch);
  }

  BLOCK_COMMENT("} ZBarrierSetAssembler::load_at");
}
188
189#ifdef ASSERT
190
191void ZBarrierSetAssembler::store_at(MacroAssembler* masm,
192 DecoratorSet decorators,
193 BasicType type,
194 Address dst,
195 Register src,
196 Register tmp1,
197 Register tmp2) {
198 BLOCK_COMMENT("ZBarrierSetAssembler::store_at {");
199
200 // Verify oop store
201 if (type == T_OBJECT || type == T_ARRAY) {
202 // Note that src could be noreg, which means we
203 // are storing null and can skip verification.
204 if (src != noreg) {
205 Label done;
206 __ testptr(src, address_bad_mask_from_thread(r15_thread));
207 __ jcc(Assembler::zero, done);
208 __ stop("Verify oop store failed");
209 __ should_not_reach_here();
210 __ bind(done);
211 }
212 }
213
214 // Store value
215 BarrierSetAssembler::store_at(masm, decorators, type, dst, src, tmp1, tmp2);
216
217 BLOCK_COMMENT("} ZBarrierSetAssembler::store_at");
218}
219
220#endif // ASSERT
221
// Emits the ZGC arraycopy prologue: calls the array variant of the runtime
// load barrier with the source address and element count, so the whole
// source array is healed before the copy proceeds. All general purpose
// registers are preserved via pusha/popa.
void ZBarrierSetAssembler::arraycopy_prologue(MacroAssembler* masm,
                                              DecoratorSet decorators,
                                              BasicType type,
                                              Register src,
                                              Register dst,
                                              Register count) {
  if (!ZBarrierSet::barrier_needed(decorators, type)) {
    // Barrier not needed
    return;
  }

  BLOCK_COMMENT("ZBarrierSetAssembler::arraycopy_prologue {");

  // Save registers
  __ pusha();

  // Call VM with the source address and element count
  call_vm(masm, ZBarrierSetRuntime::load_barrier_on_oop_array_addr(), src, count);

  // Restore registers
  __ popa();

  BLOCK_COMMENT("} ZBarrierSetAssembler::arraycopy_prologue");
}
246
// Resolves a jobject to an oop while remaining in native code. After the
// default resolution, the resolved oop is tested against the per-JNIEnv
// address bad mask; if any bad bit is set we branch to slowpath (no healing
// is attempted here).
void ZBarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm,
                                                         Register jni_env,
                                                         Register obj,
                                                         Register tmp,
                                                         Label& slowpath) {
  BLOCK_COMMENT("ZBarrierSetAssembler::try_resolve_jobject_in_native {");

  // Resolve jobject
  BarrierSetAssembler::try_resolve_jobject_in_native(masm, jni_env, obj, tmp, slowpath);

  // Test address bad mask - a non-zero result means the oop is bad
  __ testptr(obj, address_bad_mask_from_jni_env(jni_env));
  __ jcc(Assembler::notZero, slowpath);

  BLOCK_COMMENT("} ZBarrierSetAssembler::try_resolve_jobject_in_native");
}
263
264#ifdef COMPILER1
265
266#undef __
267#define __ ce->masm()->
268
// Emits the inline C1 load barrier test: tests the loaded oop against the
// thread-local address bad mask, setting the condition flags for the
// caller-emitted branch to the slow path stub.
void ZBarrierSetAssembler::generate_c1_load_barrier_test(LIR_Assembler* ce,
                                                         LIR_Opr ref) const {
  __ testptr(ref->as_register(), address_bad_mask_from_thread(r15_thread));
}
273
// Emits the out-of-line C1 slow path for a load barrier: materializes the
// field address, passes the bad oop and its address as stub parameters to
// the register-saving runtime stub, and moves the healed oop (returned in
// rax) into the stub's result register.
void ZBarrierSetAssembler::generate_c1_load_barrier_stub(LIR_Assembler* ce,
                                                         ZLoadBarrierStubC1* stub) const {
  // Stub entry
  __ bind(*stub->entry());

  Register ref = stub->ref()->as_register();
  Register ref_addr = noreg;
  Register tmp = noreg;

  if (stub->tmp()->is_valid()) {
    // Load address into tmp register
    ce->leal(stub->ref_addr(), stub->tmp());
    ref_addr = tmp = stub->tmp()->as_pointer_register();
  } else {
    // Address already in register
    ref_addr = stub->ref_addr()->as_address_ptr()->base()->as_pointer_register();
  }

  assert_different_registers(ref, ref_addr, noreg);

  // Save rax unless it is the result or tmp register
  if (ref != rax && tmp != rax) {
    __ push(rax);
  }

  // Setup arguments and call runtime stub. The two reserved stack slots
  // hold the outgoing parameters written by store_parameter().
  __ subptr(rsp, 2 * BytesPerWord);
  ce->store_parameter(ref_addr, 1);
  ce->store_parameter(ref, 0);
  __ call(RuntimeAddress(stub->runtime_stub()));
  __ addptr(rsp, 2 * BytesPerWord);

  // Verify result
  __ verify_oop(rax, "Bad oop");

  // Move result into place
  if (ref != rax) {
    __ movptr(ref, rax);
  }

  // Restore rax unless it is the result or tmp register
  if (ref != rax && tmp != rax) {
    __ pop(rax);
  }

  // Stub exit
  __ jmp(*stub->continuation());
}
322
323#undef __
324#define __ sasm->
325
// Generates the shared C1 runtime stub called by the load barrier slow path.
// Saves all live registers, loads the two stub parameters, calls the ZGC
// runtime leaf, and returns with the healed oop in rax (rax is deliberately
// excluded from the register restore).
void ZBarrierSetAssembler::generate_c1_load_barrier_runtime_stub(StubAssembler* sasm,
                                                                 DecoratorSet decorators) const {
  // Enter and save registers
  __ enter();
  __ save_live_registers_no_oop_map(true /* save_fpu_registers */);

  // Setup arguments
  __ load_parameter(1, c_rarg1);  // field address
  __ load_parameter(0, c_rarg0);  // bad oop
  // Call VM
  __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);

  // Restore registers and return (result stays in rax)
  __ restore_live_registers_except_rax(true /* restore_fpu_registers */);
  __ leave();
  __ ret(0);
}
344
345#endif // COMPILER1
346
347#undef __
348#define __ cgen->assembler()->
349
350// Generates a register specific stub for calling
351// ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
352// ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
353//
354// The raddr register serves as both input and output for this stub. When the stub is
355// called the raddr register contains the object field address (oop*) where the bad oop
356// was loaded from, which caused the slow path to be taken. On return from the stub the
357// raddr register contains the good/healed oop returned from
358// ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded() or
359// ZBarrierSetRuntime::load_barrier_on_weak_oop_field_preloaded().
360static address generate_load_barrier_stub(StubCodeGenerator* cgen, Register raddr, DecoratorSet decorators) {
361 // Don't generate stub for invalid registers
362 if (raddr == rsp || raddr == r15) {
363 return NULL;
364 }
365
366 // Create stub name
367 char name[64];
368 const bool weak = (decorators & ON_WEAK_OOP_REF) != 0;
369 os::snprintf(name, sizeof(name), "zgc_load_barrier%s_stub_%s", weak ? "_weak" : "", raddr->name());
370
371 __ align(CodeEntryAlignment);
372 StubCodeMark mark(cgen, "StubRoutines", os::strdup(name, mtCode));
373 address start = __ pc();
374
375 // Save live registers
376 if (raddr != rax) {
377 __ push(rax);
378 }
379 if (raddr != rcx) {
380 __ push(rcx);
381 }
382 if (raddr != rdx) {
383 __ push(rdx);
384 }
385 if (raddr != rsi) {
386 __ push(rsi);
387 }
388 if (raddr != rdi) {
389 __ push(rdi);
390 }
391 if (raddr != r8) {
392 __ push(r8);
393 }
394 if (raddr != r9) {
395 __ push(r9);
396 }
397 if (raddr != r10) {
398 __ push(r10);
399 }
400 if (raddr != r11) {
401 __ push(r11);
402 }
403
404 // Setup arguments
405 if (raddr != c_rarg1) {
406 __ movq(c_rarg1, raddr);
407 }
408 __ movq(c_rarg0, Address(raddr, 0));
409
410 // Call barrier function
411 __ call_VM_leaf(ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators), c_rarg0, c_rarg1);
412
413 // Move result returned in rax to raddr, if needed
414 if (raddr != rax) {
415 __ movq(raddr, rax);
416 }
417
418 // Restore saved registers
419 if (raddr != r11) {
420 __ pop(r11);
421 }
422 if (raddr != r10) {
423 __ pop(r10);
424 }
425 if (raddr != r9) {
426 __ pop(r9);
427 }
428 if (raddr != r8) {
429 __ pop(r8);
430 }
431 if (raddr != rdi) {
432 __ pop(rdi);
433 }
434 if (raddr != rsi) {
435 __ pop(rsi);
436 }
437 if (raddr != rdx) {
438 __ pop(rdx);
439 }
440 if (raddr != rcx) {
441 __ pop(rcx);
442 }
443 if (raddr != rax) {
444 __ pop(rax);
445 }
446
447 __ ret(0);
448
449 return start;
450}
451
452#undef __
453
454static void barrier_stubs_init_inner(const char* label, const DecoratorSet decorators, address* stub) {
455 const int nregs = RegisterImpl::number_of_registers;
456 const int code_size = nregs * 128; // Rough estimate of code size
457
458 ResourceMark rm;
459
460 CodeBuffer buf(BufferBlob::create(label, code_size));
461 StubCodeGenerator cgen(&buf);
462
463 for (int i = 0; i < nregs; i++) {
464 const Register reg = as_Register(i);
465 stub[i] = generate_load_barrier_stub(&cgen, reg, decorators);
466 }
467}
468
// Generates the per-register slow-path stubs for both strong and weak oop
// load barriers, populating the two tables initialized by the constructor.
void ZBarrierSetAssembler::barrier_stubs_init() {
  barrier_stubs_init_inner("zgc_load_barrier_stubs", ON_STRONG_OOP_REF, _load_barrier_slow_stub);
  barrier_stubs_init_inner("zgc_load_barrier_weak_stubs", ON_WEAK_OOP_REF, _load_barrier_weak_slow_stub);
}
473
// Returns the strong oop load barrier slow-path stub for the given register,
// indexed by the register's encoding.
address ZBarrierSetAssembler::load_barrier_slow_stub(Register reg) {
  return _load_barrier_slow_stub[reg->encoding()];
}
477
// Returns the weak oop load barrier slow-path stub for the given register,
// indexed by the register's encoding.
address ZBarrierSetAssembler::load_barrier_weak_slow_stub(Register reg) {
  return _load_barrier_weak_slow_stub[reg->encoding()];
}
481