1/*
2 * Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_OOPS_ACCESSBACKEND_HPP
26#define SHARE_OOPS_ACCESSBACKEND_HPP
27
28#include "gc/shared/barrierSetConfig.hpp"
29#include "memory/allocation.hpp"
30#include "metaprogramming/conditional.hpp"
31#include "metaprogramming/decay.hpp"
32#include "metaprogramming/enableIf.hpp"
33#include "metaprogramming/integralConstant.hpp"
34#include "metaprogramming/isFloatingPoint.hpp"
35#include "metaprogramming/isIntegral.hpp"
36#include "metaprogramming/isPointer.hpp"
37#include "metaprogramming/isSame.hpp"
38#include "metaprogramming/isVolatile.hpp"
39#include "oops/accessDecorators.hpp"
40#include "oops/oopsHierarchy.hpp"
41#include "utilities/debug.hpp"
42#include "utilities/globalDefinitions.hpp"
43
44
// This metafunction returns either oop or narrowOop depending on whether
// an access needs to use compressed oops or not.
template <DecoratorSet decorators>
struct HeapOopType: AllStatic {
  // Compression is required only when the access both asks for conversion
  // (INTERNAL_CONVERT_COMPRESSED_OOP) and the runtime has been resolved to
  // actually use compressed oops (INTERNAL_RT_USE_COMPRESSED_OOPS).
  static const bool needs_oop_compress = HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value &&
                                         HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value;
  // narrowOop for compressed heap slots, oop otherwise.
  typedef typename Conditional<needs_oop_compress, narrowOop, oop>::type type;
};
53
namespace AccessInternal {
  // The different access primitives that can be runtime dispatched.
  // The *_AT variants address a field as (oop base, byte offset) rather
  // than as a raw address.
  enum BarrierType {
    BARRIER_STORE,
    BARRIER_STORE_AT,
    BARRIER_LOAD,
    BARRIER_LOAD_AT,
    BARRIER_ATOMIC_CMPXCHG,
    BARRIER_ATOMIC_CMPXCHG_AT,
    BARRIER_ATOMIC_XCHG,
    BARRIER_ATOMIC_XCHG_AT,
    BARRIER_ARRAYCOPY,
    BARRIER_CLONE,
    BARRIER_RESOLVE,
    BARRIER_EQUALS
  };

  // True iff the access passes oop values (INTERNAL_VALUE_IS_OOP) of type oop
  // while the in-heap representation is narrowOop, i.e. an explicit
  // compress/decompress conversion step is required.
  template <DecoratorSet decorators, typename T>
  struct MustConvertCompressedOop: public IntegralConstant<bool,
    HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value &&
    IsSame<typename HeapOopType<decorators>::type, narrowOop>::value &&
    IsSame<T, oop>::value> {};

  // This metafunction returns an appropriate oop type if the value is oop-like
  // and otherwise returns the same type T.
  template <DecoratorSet decorators, typename T>
  struct EncodedType: AllStatic {
    typedef typename Conditional<
      HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
      typename HeapOopType<decorators>::type, T>::type type;
  };

  // Computes the address of the (possibly compressed) oop field located
  // byte_offset bytes from the passed-in base oop.
  template <DecoratorSet decorators>
  inline typename HeapOopType<decorators>::type*
  oop_field_addr(oop base, ptrdiff_t byte_offset) {
    return reinterpret_cast<typename HeapOopType<decorators>::type*>(
             reinterpret_cast<intptr_t>((void*)base) + byte_offset);
  }

  // This metafunction returns whether it is possible for a type T to require
  // locking to support wide atomics or not.
  template <typename T>
#ifdef SUPPORTS_NATIVE_CX8
  // Native 8-byte CAS available: no access ever needs the lock fallback.
  struct PossiblyLockedAccess: public IntegralConstant<bool, false> {};
#else
  // Without native wide CAS, accesses wider than 4 bytes may need locking.
  struct PossiblyLockedAccess: public IntegralConstant<bool, (sizeof(T) > 4)> {};
#endif

  // The function pointer signatures, per barrier kind, that the runtime
  // dispatch mechanism (RuntimeDispatch below) patches and invokes.
  template <DecoratorSet decorators, typename T>
  struct AccessFunctionTypes {
    typedef T (*load_at_func_t)(oop base, ptrdiff_t offset);
    typedef void (*store_at_func_t)(oop base, ptrdiff_t offset, T value);
    typedef T (*atomic_cmpxchg_at_func_t)(T new_value, oop base, ptrdiff_t offset, T compare_value);
    typedef T (*atomic_xchg_at_func_t)(T new_value, oop base, ptrdiff_t offset);

    typedef T (*load_func_t)(void* addr);
    typedef void (*store_func_t)(void* addr, T value);
    typedef T (*atomic_cmpxchg_func_t)(T new_value, void* addr, T compare_value);
    typedef T (*atomic_xchg_func_t)(T new_value, void* addr);

    typedef bool (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length);
    typedef void (*clone_func_t)(oop src, oop dst, size_t size);
    typedef oop (*resolve_func_t)(oop obj);
    typedef bool (*equals_func_t)(oop o1, oop o2);
  };

  // Type-erased (void) element type: only arraycopy is supported.
  template <DecoratorSet decorators>
  struct AccessFunctionTypes<decorators, void> {
    typedef bool (*arraycopy_func_t)(arrayOop src_obj, size_t src_offset_in_bytes, void* src,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, void* dst,
                                     size_t length);
  };

  // Maps a (decorators, T, barrier) triple to the matching function pointer
  // type from AccessFunctionTypes; specializations generated by macro below.
  template <DecoratorSet decorators, typename T, BarrierType barrier> struct AccessFunction {};

#define ACCESS_GENERATE_ACCESS_FUNCTION(bt, func) \
  template <DecoratorSet decorators, typename T> \
  struct AccessFunction<decorators, T, bt>: AllStatic{ \
    typedef typename AccessFunctionTypes<decorators, T>::func type; \
  }
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE, store_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_STORE_AT, store_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD, load_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_LOAD_AT, load_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG, atomic_cmpxchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_CMPXCHG_AT, atomic_cmpxchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG, atomic_xchg_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ATOMIC_XCHG_AT, atomic_xchg_at_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_ARRAYCOPY, arraycopy_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_CLONE, clone_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_RESOLVE, resolve_func_t);
  ACCESS_GENERATE_ACCESS_FUNCTION(BARRIER_EQUALS, equals_func_t);
#undef ACCESS_GENERATE_ACCESS_FUNCTION

  // Resolve the accessor function to be used for the given access; defined
  // out-of-line. resolve_oop_barrier is the variant for oop values.
  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_barrier();

  template <DecoratorSet decorators, typename T, BarrierType barrier_type>
  typename AccessFunction<decorators, T, barrier_type>::type resolve_oop_barrier();

  // Scoped locker used by the locked fallback for wide atomics (see the
  // atomic_*_maybe_locked members of RawAccessBarrier); defined out-of-line.
  class AccessLocker {
  public:
    AccessLocker();
    ~AccessLocker();
  };
  // Whether wide (8 byte) atomics need the AccessLocker fallback at runtime.
  bool wide_atomic_needs_locking();

  // Raw address of the field at the given byte offset from base.
  void* field_addr(oop base, ptrdiff_t offset);

  // Forward calls to Copy:: in the cpp file to reduce dependencies and allow
  // faster build times, given how frequently included access is.
  void arraycopy_arrayof_conjoint_oops(void* src, void* dst, size_t length);
  void arraycopy_conjoint_oops(oop* src, oop* dst, size_t length);
  void arraycopy_conjoint_oops(narrowOop* src, narrowOop* dst, size_t length);

  void arraycopy_disjoint_words(void* src, void* dst, size_t length);
  void arraycopy_disjoint_words_atomic(void* src, void* dst, size_t length);

  template<typename T>
  void arraycopy_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_arrayof_conjoint(T* src, T* dst, size_t length);
  template<typename T>
  void arraycopy_conjoint_atomic(T* src, T* dst, size_t length);
}
180
// This mask specifies what decorators are relevant for raw accesses. When passing
// accesses to the raw layer, irrelevant decorators are removed.
// (Used when hardwiring AS_RAW accesses to RawAccessBarrier.)
const DecoratorSet RAW_DECORATOR_MASK = INTERNAL_DECORATOR_MASK | MO_DECORATOR_MASK |
                                        ARRAYCOPY_DECORATOR_MASK | IS_NOT_NULL;
185
// The RawAccessBarrier performs raw accesses with additional knowledge of
// memory ordering, so that OrderAccess/Atomic is called when necessary.
// It additionally handles compressed oops, and hence is not completely "raw"
// strictly speaking.
template <DecoratorSet decorators>
class RawAccessBarrier: public AllStatic {
protected:
  // Raw address of the field at byte_offset from base.
  static inline void* field_addr(oop base, ptrdiff_t byte_offset) {
    return AccessInternal::field_addr(base, byte_offset);
  }

protected:
  // Only encode if INTERNAL_VALUE_IS_OOP
  // (compress oop -> narrowOop); defined out-of-line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value,
    typename HeapOopType<idecorators>::type>::type
  encode_internal(T value);

  // No conversion required: the value is passed through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  encode_internal(T value) {
    return value;
  }

  // Encode a value into its in-heap representation (narrowOop when a
  // compressed-oop conversion applies, otherwise identity).
  template <typename T>
  static inline typename AccessInternal::EncodedType<decorators, T>::type
  encode(T value) {
    return encode_internal<decorators, T>(value);
  }

  // Only decode if INTERNAL_VALUE_IS_OOP
  // (decompress narrowOop -> oop); defined out-of-line.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(typename HeapOopType<idecorators>::type value);

  // No conversion required: the value is passed through unchanged.
  template <DecoratorSet idecorators, typename T>
  static inline typename EnableIf<
    !AccessInternal::MustConvertCompressedOop<idecorators, T>::value, T>::type
  decode_internal(T value) {
    return value;
  }

  // Decode a value from its in-heap representation (inverse of encode()).
  template <typename T>
  static inline T decode(typename AccessInternal::EncodedType<decorators, T>::type value) {
    return decode_internal<decorators, T>(value);
  }

protected:
  // load_internal is overloaded per memory-ordering decorator; the MO_SEQ_CST,
  // MO_ACQUIRE and MO_RELAXED variants are defined out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_ACQUIRE>::value, T>::type
  load_internal(void* addr);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  load_internal(void* addr);

  // MO_VOLATILE: load through a volatile-qualified lvalue.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_VOLATILE>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<const volatile T*>(addr);
  }

  // MO_UNORDERED: plain load with no ordering constraints.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value, T>::type
  load_internal(void* addr) {
    return *reinterpret_cast<T*>(addr);
  }

  // store_internal is overloaded per memory-ordering decorator; the
  // MO_SEQ_CST, MO_RELEASE and MO_RELAXED variants are defined out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELEASE>::value>::type
  store_internal(void* addr, T value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value>::type
  store_internal(void* addr, T value);

  // MO_VOLATILE: store through a volatile-qualified lvalue. The cast of the
  // assignment result silences warnings about reading a volatile lvalue.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_VOLATILE>::value>::type
  store_internal(void* addr, T value) {
    (void)const_cast<T&>(*reinterpret_cast<volatile T*>(addr) = value);
  }

  // MO_UNORDERED: plain store with no ordering constraints.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    HasDecorator<ds, MO_UNORDERED>::value>::type
  store_internal(void* addr, T value) {
    *reinterpret_cast<T*>(addr) = value;
  }

  // Compare-and-exchange; MO_SEQ_CST and MO_RELAXED variants are defined
  // out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_cmpxchg_internal(T new_value, void* addr, T compare_value);

  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_RELAXED>::value, T>::type
  atomic_cmpxchg_internal(T new_value, void* addr, T compare_value);

  // Atomic exchange; only an MO_SEQ_CST variant exists, defined out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    HasDecorator<ds, MO_SEQ_CST>::value, T>::type
  atomic_xchg_internal(T new_value, void* addr);

  // The following *_locked mechanisms serve the purpose of handling atomic operations
  // that are larger than a machine can handle, and then possibly opt for using
  // a slower path using a mutex to perform the operation.

  // Type can never need locking: dispatch straight to the atomic primitive.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(T new_value, void* addr, T compare_value) {
    return atomic_cmpxchg_internal<ds>(new_value, addr, compare_value);
  }

  // Type may need locking; slow-path variant defined out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_cmpxchg_maybe_locked(T new_value, void* addr, T compare_value);

  // Type can never need locking: dispatch straight to the atomic primitive.
  template <DecoratorSet ds, typename T>
  static inline typename EnableIf<
    !AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(T new_value, void* addr) {
    return atomic_xchg_internal<ds>(new_value, addr);
  }

  // Type may need locking; slow-path variant defined out-of-line.
  template <DecoratorSet ds, typename T>
  static typename EnableIf<
    AccessInternal::PossiblyLockedAccess<T>::value, T>::type
  atomic_xchg_maybe_locked(T new_value, void* addr);

public:
  // Primitive accesses on raw addresses. The memory-ordering decorator in
  // `decorators` selects which *_internal overload is used.
  template <typename T>
  static inline void store(void* addr, T value) {
    store_internal<decorators>(addr, value);
  }

  template <typename T>
  static inline T load(void* addr) {
    return load_internal<decorators, T>(addr);
  }

  template <typename T>
  static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    return atomic_cmpxchg_maybe_locked<decorators>(new_value, addr, compare_value);
  }

  template <typename T>
  static inline T atomic_xchg(T new_value, void* addr) {
    return atomic_xchg_maybe_locked<decorators>(new_value, addr);
  }

  template <typename T>
  static bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length);

  // oop_* variants additionally encode/decode compressed oops; all defined
  // out-of-line.
  template <typename T>
  static void oop_store(void* addr, T value);
  template <typename T>
  static void oop_store_at(oop base, ptrdiff_t offset, T value);

  template <typename T>
  static T oop_load(void* addr);
  template <typename T>
  static T oop_load_at(oop base, ptrdiff_t offset);

  template <typename T>
  static T oop_atomic_cmpxchg(T new_value, void* addr, T compare_value);
  template <typename T>
  static T oop_atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value);

  template <typename T>
  static T oop_atomic_xchg(T new_value, void* addr);
  template <typename T>
  static T oop_atomic_xchg_at(T new_value, oop base, ptrdiff_t offset);

  // *_at variants: compute the field address from (base, offset) and forward
  // to the raw-address access above.
  template <typename T>
  static void store_at(oop base, ptrdiff_t offset, T value) {
    store(field_addr(base, offset), value);
  }

  template <typename T>
  static T load_at(oop base, ptrdiff_t offset) {
    return load<T>(field_addr(base, offset));
  }

  template <typename T>
  static T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg(new_value, field_addr(base, offset), compare_value);
  }

  template <typename T>
  static T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    return atomic_xchg(new_value, field_addr(base, offset));
  }

  template <typename T>
  static bool oop_arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                            arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                            size_t length);

  static void clone(oop src, oop dst, size_t size);

  // The raw layer has no GC barriers: resolve is identity ...
  static oop resolve(oop obj) { return obj; }

  // ... and equality is plain pointer comparison.
  static bool equals(oop o1, oop o2) { return (void*)o1 == (void*)o2; }
};
415
416// Below is the implementation of the first 4 steps of the template pipeline:
417// * Step 1: Set default decorators and decay types. This step gets rid of CV qualifiers
418// and sets default decorators to sensible values.
419// * Step 2: Reduce types. This step makes sure there is only a single T type and not
420// multiple types. The P type of the address and T type of the value must
421// match.
422// * Step 3: Pre-runtime dispatch. This step checks whether a runtime call can be
423// avoided, and in that case avoids it (calling raw accesses or
424// primitive accesses in a build that does not require primitive GC barriers)
425// * Step 4: Runtime-dispatch. This step performs a runtime dispatch to the corresponding
426// BarrierSet::AccessBarrier accessor that attaches GC-required barriers
427// to the access.
428
429namespace AccessInternal {
  // Default: any oop-like type canonicalizes to oop ...
  template <typename T>
  struct OopOrNarrowOopInternal: AllStatic {
    typedef oop type;
  };

  // ... except narrowOop, which stays narrowOop.
  template <>
  struct OopOrNarrowOopInternal<narrowOop>: AllStatic {
    typedef narrowOop type;
  };

  // This metafunction returns a canonicalized oop/narrowOop type for a passed
  // in oop-like types passed in from oop_* overloads where the user has sworn
  // that the passed in values should be oop-like (e.g. oop, oopDesc*, arrayOop,
  // narrowOop, instanceOopDesc*, and random other things).
  // In the oop_* overloads, it must hold that if the passed in type T is not
  // narrowOop, then it by contract has to be one of many oop-like types implicitly
  // convertible to oop, and hence returns oop as the canonical oop type.
  // If it turns out it was not, then the implicit conversion to oop will fail
  // to compile, as desired.
  template <typename T>
  struct OopOrNarrowOop: AllStatic {
    // Decay strips CV-qualifiers/references before the canonicalization.
    typedef typename OopOrNarrowOopInternal<typename Decay<T>::type>::type type;
  };
453
454 inline void* field_addr(oop base, ptrdiff_t byte_offset) {
455 return reinterpret_cast<void*>(reinterpret_cast<intptr_t>((void*)base) + byte_offset);
456 }
  // Step 4: Runtime dispatch
  // The RuntimeDispatch class is responsible for performing a runtime dispatch of the
  // accessor. This is required when the access either depends on whether compressed oops
  // is being used, or it depends on which GC implementation was chosen (e.g. requires GC
  // barriers). The way it works is that a function pointer initially pointing to an
  // accessor resolution function gets called for each access. Upon first invocation,
  // it resolves which accessor to be used in future invocations and patches the
  // function pointer to this new accessor.

  // Primary template; only the per-BarrierType specializations below are used.
  template <DecoratorSet decorators, typename T, BarrierType type>
  struct RuntimeDispatch: AllStatic {};

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE>::type func_t;
    // Patched from store_init to the resolved accessor on first call.
    static func_t _store_func;

    static void store_init(void* addr, T value);

    static inline void store(void* addr, T value) {
      _store_func(addr, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_STORE_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type func_t;
    // Patched from store_at_init to the resolved accessor on first call.
    static func_t _store_at_func;

    static void store_at_init(oop base, ptrdiff_t offset, T value);

    static inline void store_at(oop base, ptrdiff_t offset, T value) {
      _store_at_func(base, offset, value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD>::type func_t;
    // Patched from load_init to the resolved accessor on first call.
    static func_t _load_func;

    static T load_init(void* addr);

    static inline T load(void* addr) {
      return _load_func(addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type func_t;
    // Patched from load_at_init to the resolved accessor on first call.
    static func_t _load_at_func;

    static T load_at_init(oop base, ptrdiff_t offset);

    static inline T load_at(oop base, ptrdiff_t offset) {
      return _load_at_func(base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type func_t;
    // Patched from atomic_cmpxchg_init to the resolved accessor on first call.
    static func_t _atomic_cmpxchg_func;

    static T atomic_cmpxchg_init(T new_value, void* addr, T compare_value);

    static inline T atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      return _atomic_cmpxchg_func(new_value, addr, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type func_t;
    // Patched from atomic_cmpxchg_at_init to the resolved accessor on first call.
    static func_t _atomic_cmpxchg_at_func;

    static T atomic_cmpxchg_at_init(T new_value, oop base, ptrdiff_t offset, T compare_value);

    static inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return _atomic_cmpxchg_at_func(new_value, base, offset, compare_value);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type func_t;
    // Patched from atomic_xchg_init to the resolved accessor on first call.
    static func_t _atomic_xchg_func;

    static T atomic_xchg_init(T new_value, void* addr);

    static inline T atomic_xchg(T new_value, void* addr) {
      return _atomic_xchg_func(new_value, addr);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type func_t;
    // Patched from atomic_xchg_at_init to the resolved accessor on first call.
    static func_t _atomic_xchg_at_func;

    static T atomic_xchg_at_init(T new_value, oop base, ptrdiff_t offset);

    static inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return _atomic_xchg_at_func(new_value, base, offset);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type func_t;
    // Patched from arraycopy_init to the resolved accessor on first call.
    static func_t _arraycopy_func;

    static bool arraycopy_init(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                               arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                               size_t length);

    static inline bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                 arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                 size_t length) {
      return _arraycopy_func(src_obj, src_offset_in_bytes, src_raw,
                             dst_obj, dst_offset_in_bytes, dst_raw,
                             length);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_CLONE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_CLONE>::type func_t;
    // Patched from clone_init to the resolved accessor on first call.
    static func_t _clone_func;

    static void clone_init(oop src, oop dst, size_t size);

    static inline void clone(oop src, oop dst, size_t size) {
      _clone_func(src, dst, size);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_RESOLVE>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type func_t;
    // Patched from resolve_init to the resolved accessor on first call.
    static func_t _resolve_func;

    static oop resolve_init(oop obj);

    static inline oop resolve(oop obj) {
      return _resolve_func(obj);
    }
  };

  template <DecoratorSet decorators, typename T>
  struct RuntimeDispatch<decorators, T, BARRIER_EQUALS>: AllStatic {
    typedef typename AccessFunction<decorators, T, BARRIER_EQUALS>::type func_t;
    // Patched from equals_init to the resolved accessor on first call.
    static func_t _equals_func;

    static bool equals_init(oop o1, oop o2);

    static inline bool equals(oop o1, oop o2) {
      return _equals_func(o1, o2);
    }
  };
618
  // Initialize the function pointers to point to the resolving function.
  // Each *_init resolver patches the pointer to the resolved accessor on
  // its first invocation (see the Step 4 comment above).
  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE>::_store_func = &store_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_STORE_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::_store_at_func = &store_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD>::_load_func = &load_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_LOAD_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::_load_at_func = &load_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::_atomic_cmpxchg_func = &atomic_cmpxchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::_atomic_cmpxchg_at_func = &atomic_cmpxchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::_atomic_xchg_func = &atomic_xchg_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
  RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
  RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_CLONE>::type
  RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_RESOLVE>::type
  RuntimeDispatch<decorators, T, BARRIER_RESOLVE>::_resolve_func = &resolve_init;

  template <DecoratorSet decorators, typename T>
  typename AccessFunction<decorators, T, BARRIER_EQUALS>::type
  RuntimeDispatch<decorators, T, BARRIER_EQUALS>::_equals_func = &equals_init;
667
668 // Step 3: Pre-runtime dispatching.
669 // The PreRuntimeDispatch class is responsible for filtering the barrier strength
670 // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
671 // dispatch point. Otherwise it goes through a runtime check if hardwiring was
672 // not possible.
673 struct PreRuntimeDispatch: AllStatic {
674 template<DecoratorSet decorators>
675 struct CanHardwireRaw: public IntegralConstant<
676 bool,
677 !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
678 !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
679 HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
680 {};
681
682 static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;
683
684 template<DecoratorSet decorators>
685 static bool is_hardwired_primitive() {
686 return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
687 !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
688 }
689
690 template <DecoratorSet decorators, typename T>
691 inline static typename EnableIf<
692 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
693 store(void* addr, T value) {
694 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
695 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
696 Raw::oop_store(addr, value);
697 } else {
698 Raw::store(addr, value);
699 }
700 }
701
702 template <DecoratorSet decorators, typename T>
703 inline static typename EnableIf<
704 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
705 store(void* addr, T value) {
706 if (UseCompressedOops) {
707 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
708 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
709 } else {
710 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
711 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
712 }
713 }
714
715 template <DecoratorSet decorators, typename T>
716 inline static typename EnableIf<
717 !HasDecorator<decorators, AS_RAW>::value>::type
718 store(void* addr, T value) {
719 if (is_hardwired_primitive<decorators>()) {
720 const DecoratorSet expanded_decorators = decorators | AS_RAW;
721 PreRuntimeDispatch::store<expanded_decorators>(addr, value);
722 } else {
723 RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
724 }
725 }
726
727 template <DecoratorSet decorators, typename T>
728 inline static typename EnableIf<
729 HasDecorator<decorators, AS_RAW>::value>::type
730 store_at(oop base, ptrdiff_t offset, T value) {
731 store<decorators>(field_addr(base, offset), value);
732 }
733
734 template <DecoratorSet decorators, typename T>
735 inline static typename EnableIf<
736 !HasDecorator<decorators, AS_RAW>::value>::type
737 store_at(oop base, ptrdiff_t offset, T value) {
738 if (is_hardwired_primitive<decorators>()) {
739 const DecoratorSet expanded_decorators = decorators | AS_RAW;
740 PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
741 } else {
742 RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
743 }
744 }
745
746 template <DecoratorSet decorators, typename T>
747 inline static typename EnableIf<
748 HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
749 load(void* addr) {
750 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
751 if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
752 return Raw::template oop_load<T>(addr);
753 } else {
754 return Raw::template load<T>(addr);
755 }
756 }
757
758 template <DecoratorSet decorators, typename T>
759 inline static typename EnableIf<
760 HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
761 load(void* addr) {
762 if (UseCompressedOops) {
763 const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
764 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
765 } else {
766 const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
767 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
768 }
769 }
770
771 template <DecoratorSet decorators, typename T>
772 inline static typename EnableIf<
773 !HasDecorator<decorators, AS_RAW>::value, T>::type
774 load(void* addr) {
775 if (is_hardwired_primitive<decorators>()) {
776 const DecoratorSet expanded_decorators = decorators | AS_RAW;
777 return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
778 } else {
779 return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
780 }
781 }
782
    // load_at for raw accesses: compute the field address from base + offset
    // and fall through to the address-based load dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      return load<decorators, T>(field_addr(base, offset));
    }
789
    // load_at for accesses without AS_RAW: hardwired primitives are
    // redirected to the raw path at compile time; otherwise dispatch through
    // the runtime BARRIER_LOAD_AT entry.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    load_at(oop base, ptrdiff_t offset) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
      }
    }
801
    // atomic_cmpxchg for raw accesses with statically known compressed-oops
    // mode: select the oop- or primitive-flavoured raw barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
      } else {
        return Raw::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }
813
    // atomic_cmpxchg for raw accesses where the compressed-oops mode is not
    // statically known: branch on UseCompressedOops and re-dispatch with the
    // mode hardwired into the decorators.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      }
    }
826
    // atomic_cmpxchg for accesses without AS_RAW: hardwired primitives take
    // the raw path at compile time; otherwise use runtime dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg(T new_value, void* addr, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
      }
    }
838
    // atomic_cmpxchg_at for raw accesses: compute the field address and fall
    // through to the address-based atomic_cmpxchg dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
    }
845
    // atomic_cmpxchg_at for accesses without AS_RAW: hardwired primitives are
    // redirected to the raw path at compile time; otherwise dispatch through
    // the runtime BARRIER_ATOMIC_CMPXCHG_AT entry.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
      }
    }
857
    // atomic_xchg for raw accesses with statically known compressed-oops
    // mode: select the oop- or primitive-flavoured raw barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(new_value, addr);
      } else {
        return Raw::atomic_xchg(new_value, addr);
      }
    }
869
    // atomic_xchg for raw accesses where the compressed-oops mode is not
    // statically known: branch on UseCompressedOops and re-dispatch with the
    // mode hardwired into the decorators.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      }
    }
882
    // atomic_xchg for accesses without AS_RAW: hardwired primitives take the
    // raw path at compile time; otherwise use runtime dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg(T new_value, void* addr) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
      }
    }
894
    // atomic_xchg_at for raw accesses: compute the field address and fall
    // through to the address-based atomic_xchg dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value, T>::type
    atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
      return atomic_xchg<decorators>(new_value, field_addr(base, offset));
    }
901
902 template <DecoratorSet decorators, typename T>
903 inline static typename EnableIf<
904 !HasDecorator<decorators, AS_RAW>::value, T>::type
905 atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
906 if (is_hardwired_primitive<decorators>()) {
907 const DecoratorSet expanded_decorators = decorators | AS_RAW;
908 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, base, offset);
909 } else {
910 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::atomic_xchg_at(new_value, base, offset);
911 }
912 }
913
    // arraycopy for raw accesses with statically known compressed-oops mode:
    // select the oop- or primitive-flavoured raw barrier.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                  length);
      } else {
        return Raw::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                              dst_obj, dst_offset_in_bytes, dst_raw,
                              length);
      }
    }
931
    // arraycopy for raw accesses where the compressed-oops mode is not
    // statically known: branch on UseCompressedOops and re-dispatch with the
    // mode hardwired into the decorators.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (UseCompressedOops) {
        const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      } else {
        const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      }
    }
950
    // arraycopy for accesses without AS_RAW: hardwired primitives take the
    // raw path at compile time; otherwise use runtime dispatch.
    template <DecoratorSet decorators, typename T>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value, bool>::type
    arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
              arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
              size_t length) {
      if (is_hardwired_primitive<decorators>()) {
        const DecoratorSet expanded_decorators = decorators | AS_RAW;
        return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                                  dst_obj, dst_offset_in_bytes, dst_raw,
                                                                  length);
      } else {
        return RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::arraycopy(src_obj, src_offset_in_bytes, src_raw,
                                                                            dst_obj, dst_offset_in_bytes, dst_raw,
                                                                            length);
      }
    }
968
    // clone for raw accesses: go straight to the raw barrier.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value>::type
    clone(oop src, oop dst, size_t size) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      Raw::clone(src, dst, size);
    }
976
977 template <DecoratorSet decorators>
978 inline static typename EnableIf<
979 !HasDecorator<decorators, AS_RAW>::value>::type
980 clone(oop src, oop dst, size_t size) {
981 RuntimeDispatch<decorators, oop, BARRIER_CLONE>::clone(src, dst, size);
982 }
983
    // resolve when the barrier set has a to-space invariant: the raw barrier
    // suffices.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
    resolve(oop obj) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::resolve(obj);
    }
991
992 template <DecoratorSet decorators>
993 inline static typename EnableIf<
994 !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, oop>::type
995 resolve(oop obj) {
996 return RuntimeDispatch<decorators, oop, BARRIER_RESOLVE>::resolve(obj);
997 }
998
    // equals when the access is raw or the barrier set has a to-space
    // invariant: raw comparison suffices.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      HasDecorator<decorators, AS_RAW>::value || HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
    equals(oop o1, oop o2) {
      typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
      return Raw::equals(o1, o2);
    }
1006
    // equals otherwise: go through the runtime-dispatched BARRIER_EQUALS
    // entry.
    template <DecoratorSet decorators>
    inline static typename EnableIf<
      !HasDecorator<decorators, AS_RAW>::value && !HasDecorator<decorators, INTERNAL_BT_TO_SPACE_INVARIANT>::value, bool>::type
    equals(oop o1, oop o2) {
      return RuntimeDispatch<decorators, oop, BARRIER_EQUALS>::equals(o1, o2);
    }
1013 };
1014
1015 // Step 2: Reduce types.
1016 // Enforce that for non-oop types, T and P have to be strictly the same.
1017 // P is the type of the address and T is the type of the values.
  // As for oop types, it is allowed to send T in {narrowOop, oop} and
1019 // P in {narrowOop, oop, HeapWord*}. The following rules apply according to
1020 // the subsequent table. (columns are P, rows are T)
1021 // | | HeapWord | oop | narrowOop |
1022 // | oop | rt-comp | hw-none | hw-comp |
1023 // | narrowOop | x | x | hw-none |
1024 //
1025 // x means not allowed
1026 // rt-comp means it must be checked at runtime whether the oop is compressed.
1027 // hw-none means it is statically known the oop will not be compressed.
1028 // hw-comp means it is statically known the oop will be compressed.
1029
  // store_reduce_types, T == P case: no conversion needed, dispatch as-is.
  template <DecoratorSet decorators, typename T>
  inline void store_reduce_types(T* addr, T value) {
    PreRuntimeDispatch::store<decorators>(addr, value);
  }
1034
  // store of an oop through a narrowOop address: statically known to be
  // compressed (hw-comp in the table above).
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1041
  // store of a narrowOop through a narrowOop address: statically known
  // compressed representation.
  template <DecoratorSet decorators>
  inline void store_reduce_types(narrowOop* addr, narrowOop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1048
  // store of an oop through a HeapWord* address: whether the oop is
  // compressed must be resolved at runtime (rt-comp in the table above).
  template <DecoratorSet decorators>
  inline void store_reduce_types(HeapWord* addr, oop value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    PreRuntimeDispatch::store<expanded_decorators>(addr, value);
  }
1054
  // atomic_cmpxchg_reduce_types, T == P case: no conversion needed.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
    return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
  }
1059
  // cmpxchg of an oop through a narrowOop address: statically known to be
  // compressed.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1066
  // cmpxchg of a narrowOop through a narrowOop address: statically known
  // compressed representation.
  template <DecoratorSet decorators>
  inline narrowOop atomic_cmpxchg_reduce_types(narrowOop new_value, narrowOop* addr, narrowOop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1073
  // cmpxchg of an oop through a HeapWord* address: compression is resolved
  // at runtime.
  template <DecoratorSet decorators>
  inline oop atomic_cmpxchg_reduce_types(oop new_value,
                                         HeapWord* addr,
                                         oop compare_value) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
  }
1081
1082 template <DecoratorSet decorators, typename T>
1083 inline T atomic_xchg_reduce_types(T new_value, T* addr) {
1084 const DecoratorSet expanded_decorators = decorators;
1085 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
1086 }
1087
  // xchg of an oop through a narrowOop address: statically known to be
  // compressed.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1094
  // xchg of a narrowOop through a narrowOop address: statically known
  // compressed representation.
  template <DecoratorSet decorators>
  inline narrowOop atomic_xchg_reduce_types(narrowOop new_value, narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1101
  // xchg of an oop through a HeapWord* address: compression is resolved at
  // runtime.
  template <DecoratorSet decorators>
  inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
  }
1107
  // load_reduce_types, T == P case: no conversion needed.
  template <DecoratorSet decorators, typename T>
  inline T load_reduce_types(T* addr) {
    return PreRuntimeDispatch::load<decorators, T>(addr);
  }
1112
  // load through a narrowOop address: statically known to be compressed;
  // the result type is oop or narrowOop depending on the requested T.
  template <DecoratorSet decorators, typename T>
  inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
  }
1119
  // load of an oop through a HeapWord* address: compression is resolved at
  // runtime.
  template <DecoratorSet decorators, typename T>
  inline oop load_reduce_types(HeapWord* addr) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
  }
1125
  // arraycopy_reduce_types, matching element type case: no conversion
  // needed.
  template <DecoratorSet decorators, typename T>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, T* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                                     size_t length) {
    return PreRuntimeDispatch::arraycopy<decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                     dst_obj, dst_offset_in_bytes, dst_raw,
                                                     length);
  }
1134
  // arraycopy over HeapWord* element addresses: compression is resolved at
  // runtime.
  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, HeapWord* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, HeapWord* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                                              length);
  }
1144
  // arraycopy over narrowOop* element addresses: statically known
  // compressed representation.
  template <DecoratorSet decorators>
  inline bool arraycopy_reduce_types(arrayOop src_obj, size_t src_offset_in_bytes, narrowOop* src_raw,
                                     arrayOop dst_obj, size_t dst_offset_in_bytes, narrowOop* dst_raw,
                                     size_t length) {
    const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                             INTERNAL_RT_USE_COMPRESSED_OOPS;
    return PreRuntimeDispatch::arraycopy<expanded_decorators>(src_obj, src_offset_in_bytes, src_raw,
                                                              dst_obj, dst_offset_in_bytes, dst_raw,
                                                              length);
  }
1155
1156 // Step 1: Set default decorators. This step remembers if a type was volatile
1157 // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
1158 // memory ordering is set for the access, and the implied decorator rules
1159 // are applied to select sensible defaults for decorators that have not been
1160 // explicitly set. For example, default object referent strength is set to strong.
1161 // This step also decays the types passed in (e.g. getting rid of CV qualifiers
  // and references from the types). This step also performs some type verification
1163 // that the passed in types make sense.
1164
  // Compile-time check that T is an acceptable value type for a primitive
  // access (pointer, integral or floating point), unless the value is an
  // oop, which has already been validated elsewhere.
  template <DecoratorSet decorators, typename T>
  static void verify_types(){
    // If this fails to compile, then you have sent in something that is
    // not recognized as a valid primitive type to a primitive Access function.
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // oops have already been validated
                   (IsPointer<T>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // not allowed primitive type
  }
1173
  // Step-1 entry point for address-based stores: verify the value type,
  // decay P/T, default the memory ordering to MO_VOLATILE for volatile
  // addresses, then reduce the P/T combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline void store(P* addr, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    store_reduce_types<expanded_decorators>(const_cast<DecayedP*>(addr), decayed_value);
  }
1187
  // Step-1 entry point for base+offset stores: verify and decay T, expand
  // implied decorators (adding compressed-oop conversion awareness for oop
  // values), then hand off to the pre-runtime dispatch layer.
  template <DecoratorSet decorators, typename T>
  inline void store_at(oop base, ptrdiff_t offset, T value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT decayed_value = value;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, decayed_value);
  }
1198
  // Step-1 entry point for address-based loads: verify the value type,
  // decay P/T (mapping oop values through OopOrNarrowOop), default the
  // memory ordering to MO_VOLATILE for volatile addresses, then reduce the
  // P/T combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T load(P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // If a volatile address is passed in but no memory ordering decorator,
    // set the memory ordering to MO_VOLATILE by default.
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (IsVolatile<P>::value && !HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_VOLATILE | decorators) : decorators>::value;
    return load_reduce_types<expanded_decorators, DecayedT>(const_cast<DecayedP*>(addr));
  }
1213
  // Step-1 entry point for base+offset loads: verify and decay T (mapping
  // oop values through OopOrNarrowOop), expand implied decorators, then
  // hand off to the pre-runtime dispatch layer.
  template <DecoratorSet decorators, typename T>
  inline T load_at(oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Conditional<HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value,
                                 typename OopOrNarrowOop<T>::type,
                                 typename Decay<T>::type>::type DecayedT;
    // Expand the decorators (figure out sensible defaults)
    // Potentially remember if we need compressed oop awareness
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::load_at<expanded_decorators, DecayedT>(base, offset);
  }
1227
  // Step-1 entry point for address-based cmpxchg: verify the value type,
  // decay P/T, default the memory ordering to MO_SEQ_CST when none is
  // given, then reduce the P/T combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_cmpxchg(T new_value, P* addr, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    return atomic_cmpxchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                            const_cast<DecayedP*>(addr),
                                                            compare_decayed_value);
  }
1242
  // Step-1 entry point for base+offset cmpxchg: verify and decay T, default
  // the memory ordering to MO_SEQ_CST when none is given, add
  // compressed-oop conversion awareness for oop values, then hand off to
  // the pre-runtime dispatch layer.
  template <DecoratorSet decorators, typename T>
  inline T atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    DecayedT compare_decayed_value = compare_value;
    // Determine default memory ordering
    const DecoratorSet expanded_decorators = DecoratorFixup<
      (!HasDecorator<decorators, MO_DECORATOR_MASK>::value) ?
      (MO_SEQ_CST | decorators) : decorators>::value;
    // Potentially remember that we need compressed oop awareness
    const DecoratorSet final_decorators = expanded_decorators |
                                          (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                           INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE);
    return PreRuntimeDispatch::atomic_cmpxchg_at<final_decorators>(new_decayed_value, base,
                                                                   offset, compare_decayed_value);
  }
1260
  // Step-1 entry point for address-based xchg: verify the value type, decay
  // P/T, force MO_SEQ_CST ordering, then reduce the P/T combination.
  template <DecoratorSet decorators, typename P, typename T>
  inline T atomic_xchg(T new_value, P* addr) {
    verify_types<decorators, T>();
    typedef typename Decay<P>::type DecayedP;
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST>::value;
    return atomic_xchg_reduce_types<expanded_decorators>(new_decayed_value,
                                                         const_cast<DecayedP*>(addr));
  }
1272
  // Step-1 entry point for base+offset xchg: verify and decay T, force
  // MO_SEQ_CST ordering, add compressed-oop conversion awareness for oop
  // values, then hand off to the pre-runtime dispatch layer.
  template <DecoratorSet decorators, typename T>
  inline T atomic_xchg_at(T new_value, oop base, ptrdiff_t offset) {
    verify_types<decorators, T>();
    typedef typename Decay<T>::type DecayedT;
    DecayedT new_decayed_value = new_value;
    // atomic_xchg is only available in SEQ_CST flavour.
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | MO_SEQ_CST |
                                             (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ?
                                              INTERNAL_CONVERT_COMPRESSED_OOP : DECORATORS_NONE)>::value;
    return PreRuntimeDispatch::atomic_xchg_at<expanded_decorators>(new_decayed_value, base, offset);
  }
1284
  // Step-1 entry point for arraycopy: statically check the element type
  // (void is allowed for type-erased copies), add IS_ARRAY and IN_HEAP to
  // the decorators, then reduce the element-type combination.
  template <DecoratorSet decorators, typename T>
  inline bool arraycopy(arrayOop src_obj, size_t src_offset_in_bytes, const T* src_raw,
                        arrayOop dst_obj, size_t dst_offset_in_bytes, T* dst_raw,
                        size_t length) {
    STATIC_ASSERT((HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value ||
                   (IsSame<T, void>::value || IsIntegral<T>::value) ||
                    IsFloatingPoint<T>::value)); // arraycopy allows type erased void elements
    typedef typename Decay<T>::type DecayedT;
    const DecoratorSet expanded_decorators = DecoratorFixup<decorators | IS_ARRAY | IN_HEAP>::value;
    return arraycopy_reduce_types<expanded_decorators>(src_obj, src_offset_in_bytes, const_cast<DecayedT*>(src_raw),
                                                       dst_obj, dst_offset_in_bytes, const_cast<DecayedT*>(dst_raw),
                                                       length);
  }
1298
1299 template <DecoratorSet decorators>
1300 inline void clone(oop src, oop dst, size_t size) {
1301 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1302 PreRuntimeDispatch::clone<expanded_decorators>(src, dst, size);
1303 }
1304
1305 template <DecoratorSet decorators>
1306 inline oop resolve(oop obj) {
1307 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1308 return PreRuntimeDispatch::resolve<expanded_decorators>(obj);
1309 }
1310
1311 template <DecoratorSet decorators>
1312 inline bool equals(oop o1, oop o2) {
1313 const DecoratorSet expanded_decorators = DecoratorFixup<decorators>::value;
1314 return PreRuntimeDispatch::equals<expanded_decorators>(o1, o2);
1315 }
1316
1317 // Infer the type that should be returned from an Access::oop_load.
  // Proxy returned by Access::oop_load: defers the actual load until the
  // result is converted to oop or narrowOop (or compared), so the correct
  // value type can be inferred from the use site.
  template <typename P, DecoratorSet decorators>
  class OopLoadProxy: public StackObj {
  private:
    P *const _addr;  // address the deferred load reads from
  public:
    OopLoadProxy(P* addr) : _addr(addr) {}

    // Load as an uncompressed oop.
    inline operator oop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, oop>(_addr);
    }

    // Load as a compressed oop.
    inline operator narrowOop() {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, narrowOop>(_addr);
    }

    // Comparison operators perform the load and compare the result.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load<decorators | INTERNAL_VALUE_IS_OOP, P, T>(_addr) != other;
    }
  };
1343
1344 // Infer the type that should be returned from an Access::load_at.
  // Proxy returned by Access::load_at: defers the actual load until the
  // result is converted to some T (or compared), so the value type can be
  // inferred from the use site.
  template <DecoratorSet decorators>
  class LoadAtProxy: public StackObj {
  private:
    const oop _base;          // object holding the field
    const ptrdiff_t _offset;  // field offset within the object, in bytes
  public:
    LoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // Load as the requested type T.
    template <typename T>
    inline operator T() const {
      return load_at<decorators, T>(_base, _offset);
    }

    // Comparison operators perform the load and compare the result.
    template <typename T>
    inline bool operator ==(const T& other) const { return load_at<decorators, T>(_base, _offset) == other; }

    template <typename T>
    inline bool operator !=(const T& other) const { return load_at<decorators, T>(_base, _offset) != other; }
  };
1364
1365 // Infer the type that should be returned from an Access::oop_load_at.
  // Proxy returned by Access::oop_load_at: defers the actual load until the
  // result is converted to oop or narrowOop (or compared), so the correct
  // value type can be inferred from the use site.
  template <DecoratorSet decorators>
  class OopLoadAtProxy: public StackObj {
  private:
    const oop _base;          // object holding the field
    const ptrdiff_t _offset;  // field offset within the object, in bytes
  public:
    OopLoadAtProxy(oop base, ptrdiff_t offset) : _base(base), _offset(offset) {}

    // Load as an uncompressed oop.
    inline operator oop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, oop>(_base, _offset);
    }

    // Load as a compressed oop.
    inline operator narrowOop() const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, narrowOop>(_base, _offset);
    }

    // Comparison operators perform the load and compare the result.
    template <typename T>
    inline bool operator ==(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) == other;
    }

    template <typename T>
    inline bool operator !=(const T& other) const {
      return load_at<decorators | INTERNAL_VALUE_IS_OOP, T>(_base, _offset) != other;
    }
  };
1392}
1393
1394#endif // SHARE_OOPS_ACCESSBACKEND_HPP
1395