/*
 * Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "jni.h"
#include "jvm.h"
#include "classfile/classFileStream.hpp"
#include "classfile/classLoader.hpp"
#include "classfile/vmSymbols.hpp"
#include "jfr/jfrEvents.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/access.inline.hpp"
#include "oops/fieldStreams.hpp"
#include "oops/objArrayOop.inline.hpp"
#include "oops/oop.inline.hpp"
#include "oops/typeArrayOop.inline.hpp"
#include "prims/unsafe.hpp"
#include "runtime/atomic.hpp"
#include "runtime/globals.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.inline.hpp"
#include "runtime/jniHandles.inline.hpp"
#include "runtime/orderAccess.hpp"
#include "runtime/reflection.hpp"
#include "runtime/thread.hpp"
#include "runtime/threadSMR.hpp"
#include "runtime/vm_version.hpp"
#include "services/threadService.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"
#include "utilities/dtrace.hpp"
#include "utilities/macros.hpp"

/**
 * Implementation of the jdk.internal.misc.Unsafe class
 */

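// An upper bound (in bytes) on the size of any Java heap object: the header
// of a double array plus max_jint double elements. Used by
// assert_field_offset_sane() below to sanity-check offsets passed in from Java.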
#define MAX_OBJECT_SIZE \
  ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
    + ((julong)max_jint * sizeof(double)) )

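// UNSAFE_ENTRY wraps JVM_ENTRY (a full native-to-VM transition, so the thread
// becomes safepoint-visible and the body may safepoint or throw), while
// UNSAFE_LEAF wraps JVM_LEAF (no state transition). UNSAFE_END closes either.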
#define UNSAFE_ENTRY(result_type, header) \
  JVM_ENTRY(static result_type, header)

#define UNSAFE_LEAF(result_type, header) \
  JVM_LEAF(static result_type, header)

#define UNSAFE_END JVM_END


static inline void* addr_from_java(jlong addr) {
  // This assert fails in a variety of ways on 32-bit systems.
  // It is impossible to predict whether native code that converts
  // pointers to longs will sign-extend or zero-extend the addresses.
  //assert(addr == (uintptr_t)addr, "must not be odd high bits");
  return (void*)(uintptr_t)addr;
}

static inline jlong addr_to_java(void* p) {
  assert(p == (void*)(uintptr_t)p, "must not be odd high bits");
  return (uintptr_t)p;
}


// Note: The VM's obj_field and related accessors use byte-scaled
// ("unscaled") offsets, just as the unsafe methods do.

// However, the method Unsafe.fieldOffset explicitly declines to
// guarantee this. The field offset values manipulated by the Java user
// through the Unsafe API are opaque cookies that just happen to be byte
// offsets. We represent this state of affairs by passing the cookies
// through conversion functions when going between the VM and the Unsafe API.
// The conversion functions just happen to be no-ops at present.

static inline jlong field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}

static inline jlong field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}
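
// For illustration only: if the cookie scheme ever became non-trivial, e.g.
// biased by some hypothetical constant OFFSET_COOKIE_BIAS, only these two
// conversions (and their externally callable twins below) would need to
// change, along the lines of
//   field_offset_to_byte_offset(c)   -> c - OFFSET_COOKIE_BIAS
//   field_offset_from_byte_offset(b) -> b + OFFSET_COOKIE_BIAS
// Everything else treats the cookie as opaque, but Unsafe_ArrayIndexScale0
// below does require the mapping to be linear in the byte offset.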

static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
    if (byte_offset == (jint)byte_offset) {
      void* ptr_plus_disp = (address)p + byte_offset;
      assert(p->field_addr_raw((jint)byte_offset) == ptr_plus_disp,
             "raw [ptr+disp] must be consistent with oop::field_addr_raw");
    }
    jlong p_size = HeapWordSize * (jlong)(p->size());
    assert(byte_offset < p_size, "Unsafe access: offset " INT64_FORMAT " > object's size " INT64_FORMAT, (int64_t)byte_offset, (int64_t)p_size);
  }
#endif
}

static inline void* index_oop_from_field_offset_long(oop p, jlong field_offset) {
  assert_field_offset_sane(p, field_offset);
  jlong byte_offset = field_offset_to_byte_offset(field_offset);

  if (p != NULL) {
    p = Access<>::resolve(p);
  }

  if (sizeof(char*) == sizeof(jint)) { // (this constant folds!)
    return (address)p + (jint) byte_offset;
  } else {
    return (address)p + byte_offset;
  }
}

// Externally callable versions:
// (Use these in compiler intrinsics which emulate unsafe primitives.)
jlong Unsafe_field_offset_to_byte_offset(jlong field_offset) {
  return field_offset;
}
jlong Unsafe_field_offset_from_byte_offset(jlong byte_offset) {
  return byte_offset;
}


///// Data read/writes on the Java heap and in native (off-heap) memory

/**
 * Helper class for accessing memory.
 *
 * Normalizes values and wraps accesses in
 * JavaThread::doing_unsafe_access() if needed.
 */
template <typename T>
class MemoryAccess : StackObj {
  JavaThread* _thread;
  oop _obj;
  ptrdiff_t _offset;

  // Resolves and returns the address of the memory access.
  // This raw memory access may fault, so we make sure it happens within the
  // guarded scope by making the access volatile at least. Since the store
  // of Thread::set_doing_unsafe_access() is also volatile, these accesses
  // can not be reordered by the compiler. Therefore, if the access triggers
  // a fault, we will know that Thread::doing_unsafe_access() returns true.
  volatile T* addr() {
    void* addr = index_oop_from_field_offset_long(_obj, _offset);
    return static_cast<volatile T*>(addr);
  }

  template <typename U>
  U normalize_for_write(U x) {
    return x;
  }

  jboolean normalize_for_write(jboolean x) {
    return x & 1;
  }

  template <typename U>
  U normalize_for_read(U x) {
    return x;
  }

  jboolean normalize_for_read(jboolean x) {
    return x != 0;
  }

  /**
   * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
   */
  class GuardUnsafeAccess {
    JavaThread* _thread;

  public:
    GuardUnsafeAccess(JavaThread* thread) : _thread(thread) {
      // native/off-heap access which may raise SIGBUS if accessing
      // memory mapped file data in a region of the file which has
      // been truncated and is now invalid
      _thread->set_doing_unsafe_access(true);
    }

    ~GuardUnsafeAccess() {
      _thread->set_doing_unsafe_access(false);
    }
  };

public:
  MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
    : _thread(thread), _obj(JNIHandles::resolve(obj)), _offset((ptrdiff_t)offset) {
    assert_field_offset_sane(_obj, offset);
  }

  T get() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      T ret = RawAccess<>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }


  T get_volatile() {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      volatile T ret = RawAccess<MO_SEQ_CST>::load(addr());
      return normalize_for_read(ret);
    } else {
      T ret = HeapAccess<MO_SEQ_CST>::load_at(_obj, _offset);
      return normalize_for_read(ret);
    }
  }

  void put_volatile(T x) {
    if (_obj == NULL) {
      GuardUnsafeAccess guard(_thread);
      RawAccess<MO_SEQ_CST>::store(addr(), normalize_for_write(x));
    } else {
      HeapAccess<MO_SEQ_CST>::store_at(_obj, _offset, normalize_for_write(x));
    }
  }
};
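
// Typical use, as generated by the accessor entry points below:
//   MemoryAccess<jint>(thread, obj, offset).get();
//   MemoryAccess<jint>(thread, obj, offset).put_volatile(x);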

// These functions allow a null base pointer with an arbitrary address.
// But if the base pointer is non-null, the offset should make some sense.
// That is, it should be in the range [0, MAX_OBJECT_SIZE].
UNSAFE_ENTRY(jobject, Unsafe_GetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_GetReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop v = HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_load_at(p, offset);
  return JNIHandles::make_local(env, v);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_PutReferenceVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  HeapAccess<MO_SEQ_CST | ON_UNKNOWN_OOP_REF>::oop_store_at(p, offset, x);
} UNSAFE_END

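// Fetches a reference from a raw native address; the address is expected to
// hold an uncompressed oop regardless of whether compressed oops are in use.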
UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe, jlong addr)) {
  oop v = *(oop*) (address) addr;
  return JNIHandles::make_local(env, v);
} UNSAFE_END

#define DEFINE_GETSETOOP(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP.

DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);

#undef DEFINE_GETSETOOP

#define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
  return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
} UNSAFE_END \
 \
UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
  MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
} UNSAFE_END \
 \
// END DEFINE_GETSETOOP_VOLATILE.

DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);

#undef DEFINE_GETSETOOP_VOLATILE

UNSAFE_LEAF(void, Unsafe_LoadFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::acquire();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_StoreFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::release();
} UNSAFE_END

UNSAFE_LEAF(void, Unsafe_FullFence(JNIEnv *env, jobject unsafe)) {
  OrderAccess::fence();
} UNSAFE_END

////// Allocation requests

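// Allocates an instance via JNI AllocObject, i.e. without running any constructor.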
UNSAFE_ENTRY(jobject, Unsafe_AllocateInstance(JNIEnv *env, jobject unsafe, jclass cls)) {
  ThreadToNativeFromVM ttnfv(thread);
  return env->AllocObject(cls);
} UNSAFE_END

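// Note: size validation, the zero-size short-circuit, and throwing
// OutOfMemoryError when an allocation fails are expected to be handled by the
// Java-side caller (jdk.internal.misc.Unsafe.allocateMemory); these entry
// points only round the size up to a HeapWord multiple and call the os allocator.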
UNSAFE_ENTRY(jlong, Unsafe_AllocateMemory0(JNIEnv *env, jobject unsafe, jlong size)) {
  size_t sz = (size_t)size;

  sz = align_up(sz, HeapWordSize);
  void* x = os::malloc(sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ReallocateMemory0(JNIEnv *env, jobject unsafe, jlong addr, jlong size)) {
  void* p = addr_from_java(addr);
  size_t sz = (size_t)size;
  sz = align_up(sz, HeapWordSize);

  void* x = os::realloc(p, sz, mtOther);

  return addr_to_java(x);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
  void* p = addr_from_java(addr);

  os::free(p);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
  size_t sz = (size_t)size;

  oop base = JNIHandles::resolve(obj);
  void* p = index_oop_from_field_offset_long(base, offset);

  Copy::fill_to_memory_atomic(p, sz, value);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
  size_t sz = (size_t)size;

  oop srcp = JNIHandles::resolve(srcObj);
  oop dstp = JNIHandles::resolve(dstObj);

  void* src = index_oop_from_field_offset_long(srcp, srcOffset);
  void* dst = index_oop_from_field_offset_long(dstp, dstOffset);

  Copy::conjoint_memory_atomic(src, dst, sz);
} UNSAFE_END

// This function is a leaf because, when the source and destination are both in native
// memory, the copy may potentially be very large and we don't want to disable GC if we
// can avoid it. If either the source or the destination (or both) is on the heap, the
// function enters the VM using JVM_ENTRY_FROM_LEAF.
UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
  size_t sz = (size_t)size;
  size_t esz = (size_t)elemSize;

  if (srcObj == NULL && dstObj == NULL) {
    // Both src & dst are in native memory
    address src = (address)srcOffset;
    address dst = (address)dstOffset;

    Copy::conjoint_swap(src, dst, sz, esz);
  } else {
    // At least one of src/dst are on heap, transition to VM to access raw pointers

    JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
      oop srcp = JNIHandles::resolve(srcObj);
      oop dstp = JNIHandles::resolve(dstObj);

      address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
      address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);

      Copy::conjoint_swap(src, dst, sz, esz);
    } JVM_END
  }
} UNSAFE_END

////// Random queries

static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");
  assert(name != NULL, "name must not be NULL");

  ResourceMark rm(THREAD);
  char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));

  InstanceKlass* k = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));

  jint offset = -1;
  for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
    Symbol *name = fs.name();
    if (name->equals(utf_name)) {
      offset = fs.offset();
      break;
    }
  }
  if (offset < 0) {
    THROW_0(vmSymbols::java_lang_InternalError());
  }
  return field_offset_from_byte_offset(offset);
}

static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
  assert(field != NULL, "field must not be NULL");

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  Klass* k = java_lang_Class::as_Klass(mirror);
  int slot = java_lang_reflect_Field::slot(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if (must_be_static >= 0) {
    int really_is_static = ((modifiers & JVM_ACC_STATIC) != 0);
    if (must_be_static != really_is_static) {
      THROW_0(vmSymbols::java_lang_IllegalArgumentException());
    }
  }

  int offset = InstanceKlass::cast(k)->field_offset(slot);
  return field_offset_from_byte_offset(offset);
}

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 0, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_ObjectFieldOffset1(JNIEnv *env, jobject unsafe, jclass c, jstring name)) {
  return find_field_offset(c, name, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobject field)) {
  return find_field_offset(field, 1, THREAD);
} UNSAFE_END

UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
  assert(field != NULL, "field must not be NULL");

  // Note: In this VM implementation, a field address is always a short
  // offset from the base of a klass metaobject. Thus, the full dynamic
  // range of the return type is never used. However, some implementations
  // might put the static field inside an array shared by many classes,
  // or even at a fixed address, in which case the address could be quite
  // large. In that last case, this function would return NULL, since
  // the address would operate alone, without any base pointer.

  oop reflected = JNIHandles::resolve_non_null(field);
  oop mirror = java_lang_reflect_Field::clazz(reflected);
  int modifiers = java_lang_reflect_Field::modifiers(reflected);

  if ((modifiers & JVM_ACC_STATIC) == 0) {
    THROW_0(vmSymbols::java_lang_IllegalArgumentException());
  }

  return JNIHandles::make_local(env, mirror);
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);

  Klass* klass = java_lang_Class::as_Klass(mirror);
  if (klass != NULL && klass->should_be_initialized()) {
    InstanceKlass* k = InstanceKlass::cast(klass);
    k->initialize(CHECK);
  }
}
UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* klass = java_lang_Class::as_Klass(mirror);

  if (klass != NULL && klass->should_be_initialized()) {
    return true;
  }

  return false;
}
UNSAFE_END

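// Computes, for an array class, the byte offset of element 0 (base) and the
// byte size of each element (scale); throws InvalidClassException if the
// given class is not an array class.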
static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
  assert(clazz != NULL, "clazz must not be NULL");

  oop mirror = JNIHandles::resolve_non_null(clazz);
  Klass* k = java_lang_Class::as_Klass(mirror);

  if (k == NULL || !k->is_array_klass()) {
    THROW(vmSymbols::java_lang_InvalidClassException());
  } else if (k->is_objArray_klass()) {
    base = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
    scale = heapOopSize;
  } else if (k->is_typeArray_klass()) {
    TypeArrayKlass* tak = TypeArrayKlass::cast(k);
    base = tak->array_header_in_bytes();
    assert(base == arrayOopDesc::base_offset_in_bytes(tak->element_type()), "array_header_size semantics ok");
    scale = (1 << tak->log2_element_size());
  } else {
    ShouldNotReachHere();
  }
}

UNSAFE_ENTRY(jint, Unsafe_ArrayBaseOffset0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  return field_offset_from_byte_offset(base);
} UNSAFE_END


UNSAFE_ENTRY(jint, Unsafe_ArrayIndexScale0(JNIEnv *env, jobject unsafe, jclass clazz)) {
  int base = 0, scale = 0;
  getBaseAndScale(base, scale, clazz, CHECK_0);

  // This VM packs both fields and array elements down to the byte.
  // But watch out: If this changes, so that array references for
  // a given primitive type (say, T_BOOLEAN) use different memory units
  // than fields, this method MUST return zero for such arrays.
  // For example, the VM used to store sub-word sized fields in full
  // words in the object layout, so that accessors like getByte(Object,int)
  // did not really do what one might expect for arrays. Therefore,
  // this function used to report a zero scale factor, so that the user
  // would know not to attempt to access sub-word array elements.
  // // Code for unpacked fields:
  // if (scale < wordSize) return 0;

  // The following allows for a pretty general fieldOffset cookie scheme,
  // but requires it to be linear in byte offset.
  return field_offset_from_byte_offset(scale) - field_offset_from_byte_offset(0);
} UNSAFE_END


static inline void throw_new(JNIEnv *env, const char *ename) {
  jclass cls = env->FindClass(ename);
  if (env->ExceptionCheck()) {
    env->ExceptionClear();
    tty->print_cr("Unsafe: cannot throw %s because FindClass has failed", ename);
    return;
  }

  env->ThrowNew(cls, NULL);
}

static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
  // Code lifted from JDK 1.3 ClassLoader.c

  jbyte *body;
  char *utfName = NULL;
  jclass result = 0;
  char buf[128];

  assert(data != NULL, "Class bytes must not be NULL");
  assert(length >= 0, "length must not be negative: %d", length);

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  body = NEW_C_HEAP_ARRAY(jbyte, length, mtInternal);
  if (body == NULL) {
    throw_new(env, "java/lang/OutOfMemoryError");
    return 0;
  }

  env->GetByteArrayRegion(data, offset, length, body);
  if (env->ExceptionOccurred()) {
    goto free_body;
  }

  if (name != NULL) {
    uint len = env->GetStringUTFLength(name);
    int unicode_len = env->GetStringLength(name);

    if (len >= sizeof(buf)) {
      utfName = NEW_C_HEAP_ARRAY(char, len + 1, mtInternal);
      if (utfName == NULL) {
        throw_new(env, "java/lang/OutOfMemoryError");
        goto free_body;
      }
    } else {
      utfName = buf;
    }

    env->GetStringUTFRegion(name, 0, unicode_len, utfName);

    for (uint i = 0; i < len; i++) {
      if (utfName[i] == '.') utfName[i] = '/';
    }
  }

  result = JVM_DefineClass(env, utfName, loader, body, length, pd);

  if (utfName && utfName != buf) {
    FREE_C_HEAP_ARRAY(char, utfName);
  }

 free_body:
  FREE_C_HEAP_ARRAY(jbyte, body);
  return result;
}


UNSAFE_ENTRY(jclass, Unsafe_DefineClass0(JNIEnv *env, jobject unsafe, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd)) {
  ThreadToNativeFromVM ttnfv(thread);

  return Unsafe_DefineClass_impl(env, name, data, offset, length, loader, pd);
} UNSAFE_END


// define a class but do not make it known to the class loader or system dictionary
// - host_class: supplies context for linkage, access control, protection domain, and class loader
//     if host_class is itself anonymous then it is replaced with its host class.
// - data: bytes of a class file, a raw memory address (length gives the number of bytes)
// - cp_patches: where non-null entries exist, they replace corresponding CP entries in data

// When you load an anonymous class U, it works as if you changed its name just before loading,
// to a name that you will never use again. Since the name is lost, no other class can directly
// link to any member of U. Just after U is loaded, the only way to use it is reflectively,
// through java.lang.Class methods like Class.newInstance.

// The package of an anonymous class must either match its host class's package or be in the
// unnamed package. If it is in the unnamed package then it will be put in its host class's
// package.
//

// Access checks for linkage sites within U continue to follow the same rules as for named classes.
// An anonymous class also has special privileges to access any member of its host class.
// This is the main reason why this loading operation is unsafe. The purpose of this is to
// allow language implementations to simulate "open classes"; a host class in effect gets
// new code when an anonymous class is loaded alongside it. A less convenient but more
// standard way to do this is with reflection, which can also be set to ignore access
// restrictions.

// Access into an anonymous class is possible only through reflection. Therefore, there
// are no special access rules for calling into an anonymous class. The relaxed access
// rule for the host class is applied in the opposite direction: a host class may
// reflectively access one of its anonymous classes.

// If you load the same bytecodes twice, you get two different classes. You can reload
// the same bytecodes with or without varying CP patches.

// By using the CP patching array, you can have a new anonymous class U2 refer to an older one U1.
// The bytecodes for U2 should refer to U1 by a symbolic name (doesn't matter what the name is).
// The CONSTANT_Class entry for that name can be patched to refer directly to U1.

// This allows, for example, U2 to use U1 as a superclass or super-interface, or as
// an outer class (so that U2 is an anonymous inner class of anonymous U1).
// It is not possible for a named class, or an older anonymous class, to refer by
// name (via its CP) to a newer anonymous class.

// CP patching may also be used to modify (i.e., hack) the names of methods, classes,
// or type descriptors used in the loaded anonymous class.

// Finally, CP patching may be used to introduce "live" objects into the constant pool,
// instead of "dead" strings. A compiled statement like println((Object)"hello") can
// be changed to println(greeting), where greeting is an arbitrary object created before
// the anonymous class is loaded. This is useful in dynamic languages, in which
// various kinds of metaobjects must be introduced as constants into bytecode.
// Note the cast (Object), which tells the verifier to expect an arbitrary object,
// not just a literal string. For such ldc instructions, the verifier uses the
// type Object instead of String, if the loaded constant is not in fact a String.

static InstanceKlass*
Unsafe_DefineAnonymousClass_impl(JNIEnv *env,
                                 jclass host_class, jbyteArray data, jobjectArray cp_patches_jh,
                                 u1** temp_alloc,
                                 TRAPS) {
  assert(host_class != NULL, "host_class must not be NULL");
  assert(data != NULL, "data must not be NULL");

  if (UsePerfData) {
    ClassLoader::unsafe_defineClassCallCounter()->inc();
  }

  jint length = typeArrayOop(JNIHandles::resolve_non_null(data))->length();
  assert(length >= 0, "class_bytes_length must not be negative: %d", length);

  int class_bytes_length = (int) length;

  u1* class_bytes = NEW_C_HEAP_ARRAY(u1, length, mtInternal);
  if (class_bytes == NULL) {
    THROW_0(vmSymbols::java_lang_OutOfMemoryError());
  }

  // caller responsible to free it:
  *temp_alloc = class_bytes;

  ArrayAccess<>::arraycopy_to_native(arrayOop(JNIHandles::resolve_non_null(data)), typeArrayOopDesc::element_offset<jbyte>(0),
                                     reinterpret_cast<jbyte*>(class_bytes), length);

  objArrayHandle cp_patches_h;
  if (cp_patches_jh != NULL) {
    oop p = JNIHandles::resolve_non_null(cp_patches_jh);
    assert(p->is_objArray(), "cp_patches must be an object[]");
    cp_patches_h = objArrayHandle(THREAD, (objArrayOop)p);
  }

  const Klass* host_klass = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(host_class));

  // Make sure it's the real host class, not another anonymous class.
  while (host_klass != NULL && host_klass->is_instance_klass() &&
         InstanceKlass::cast(host_klass)->is_unsafe_anonymous()) {
    host_klass = InstanceKlass::cast(host_klass)->unsafe_anonymous_host();
  }

  // Primitive types have NULL Klass* fields in their java.lang.Class instances.
  if (host_klass == NULL) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), "Host class is null");
  }

  assert(host_klass->is_instance_klass(), "Host class must be an instance class");

  const char* host_source = host_klass->external_name();
  Handle host_loader(THREAD, host_klass->class_loader());
  Handle host_domain(THREAD, host_klass->protection_domain());

  GrowableArray<Handle>* cp_patches = NULL;

  if (cp_patches_h.not_null()) {
    int alen = cp_patches_h->length();

    for (int i = alen-1; i >= 0; i--) {
      oop p = cp_patches_h->obj_at(i);
      if (p != NULL) {
        Handle patch(THREAD, p);

        if (cp_patches == NULL) {
          cp_patches = new GrowableArray<Handle>(i+1, i+1, Handle());
        }

        cp_patches->at_put(i, patch);
      }
    }
  }

  ClassFileStream st(class_bytes, class_bytes_length, host_source, ClassFileStream::verify);

  Symbol* no_class_name = NULL;
  Klass* anonk = SystemDictionary::parse_stream(no_class_name,
                                                host_loader,
                                                host_domain,
                                                &st,
                                                InstanceKlass::cast(host_klass),
                                                cp_patches,
                                                CHECK_NULL);
  if (anonk == NULL) {
    return NULL;
  }

  return InstanceKlass::cast(anonk);
}

UNSAFE_ENTRY(jclass, Unsafe_DefineAnonymousClass0(JNIEnv *env, jobject unsafe, jclass host_class, jbyteArray data, jobjectArray cp_patches_jh)) {
  ResourceMark rm(THREAD);

  jobject res_jh = NULL;
  u1* temp_alloc = NULL;

  InstanceKlass* anon_klass = Unsafe_DefineAnonymousClass_impl(env, host_class, data, cp_patches_jh, &temp_alloc, THREAD);
  if (anon_klass != NULL) {
    res_jh = JNIHandles::make_local(env, anon_klass->java_mirror());
  }

  // try/finally clause:
  if (temp_alloc != NULL) {
    FREE_C_HEAP_ARRAY(u1, temp_alloc);
  }

  // The anonymous class loader data has been artificially kept alive to
  // this point. The mirror and any instances of this class have to keep
  // it alive afterwards.
  if (anon_klass != NULL) {
    anon_klass->class_loader_data()->dec_keep_alive();
  }

  // let caller initialize it as needed...

  return (jclass) res_jh;
} UNSAFE_END



UNSAFE_ENTRY(void, Unsafe_ThrowException(JNIEnv *env, jobject unsafe, jthrowable thr)) {
  ThreadToNativeFromVM ttnfv(thread);
  env->Throw(thr);
} UNSAFE_END

// JSR166 ------------------------------------------------------------------

UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop res = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return JNIHandles::make_local(env, res);
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e);
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject e_h, jobject x_h)) {
  oop x = JNIHandles::resolve(x_h);
  oop e = JNIHandles::resolve(e_h);
  oop p = JNIHandles::resolve(obj);
  assert_field_offset_sane(p, offset);
  oop ret = HeapAccess<ON_UNKNOWN_OOP_REF>::oop_atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e);
  return oopDesc::equals(ret, e);
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
  oop p = JNIHandles::resolve(obj);
  if (p == NULL) {
    volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
    return RawAccess<>::atomic_cmpxchg(x, addr, e) == e;
  } else {
    assert_field_offset_sane(p, offset);
    return HeapAccess<>::atomic_cmpxchg_at(x, p, (ptrdiff_t)offset, e) == e;
  }
} UNSAFE_END

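// Posts a JFR ThreadPark event describing the park blocker and either the
// relative timeout (nanos) or the absolute deadline (epoch millis); whichever
// of the two does not apply is passed as min_jlong.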
static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
  assert(event != NULL, "invariant");
  assert(event->should_commit(), "invariant");
  event->set_parkedClass((obj != NULL) ? obj->klass() : NULL);
  event->set_timeout(timeout_nanos);
  event->set_until(until_epoch_millis);
  event->set_address((obj != NULL) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
  event->commit();
}

UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute, jlong time)) {
  HOTSPOT_THREAD_PARK_BEGIN((uintptr_t) thread->parker(), (int) isAbsolute, time);
  EventThreadPark event;

  JavaThreadParkedState jtps(thread, time != 0);
  thread->parker()->park(isAbsolute != 0, time);
  if (event.should_commit()) {
    const oop obj = thread->current_park_blocker();
    if (time == 0) {
      post_thread_park_event(&event, obj, min_jlong, min_jlong);
    } else {
      if (isAbsolute != 0) {
        post_thread_park_event(&event, obj, min_jlong, time);
      } else {
        post_thread_park_event(&event, obj, time, min_jlong);
      }
    }
  }
  HOTSPOT_THREAD_PARK_END((uintptr_t) thread->parker());
} UNSAFE_END

UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
  Parker* p = NULL;

  if (jthread != NULL) {
    ThreadsListHandle tlh;
    JavaThread* thr = NULL;
    oop java_thread = NULL;
    (void) tlh.cv_internal_thread_to_JavaThread(jthread, &thr, &java_thread);
    if (java_thread != NULL) {
      // This is a valid oop.
      if (thr != NULL) {
        // The JavaThread is alive.
        p = thr->parker();
      }
    }
  } // ThreadsListHandle is destroyed here.

  // 'p' points to type-stable-memory if non-NULL. If the target
  // thread terminates before we get here the new user of this
  // Parker will get a 'spurious' unpark - which is perfectly valid.
  if (p != NULL) {
    HOTSPOT_THREAD_UNPARK((uintptr_t) p);
    p->unpark();
  }
} UNSAFE_END

UNSAFE_ENTRY(jint, Unsafe_GetLoadAverage0(JNIEnv *env, jobject unsafe, jdoubleArray loadavg, jint nelem)) {
  const int max_nelem = 3;
  double la[max_nelem];
  jint ret;

  typeArrayOop a = typeArrayOop(JNIHandles::resolve_non_null(loadavg));
  assert(a->is_typeArray(), "must be type array");

  ret = os::loadavg(la, nelem);
  if (ret == -1) {
    return -1;
  }

  // if successful, ret is the number of samples actually retrieved.
  assert(ret >= 0 && ret <= max_nelem, "Unexpected loadavg return value");
  switch(ret) {
    case 3: a->double_at_put(2, (jdouble)la[2]); // fall through
    case 2: a->double_at_put(1, (jdouble)la[1]); // fall through
    case 1: a->double_at_put(0, (jdouble)la[0]); break;
  }

  return ret;
} UNSAFE_END


/// JVM_RegisterUnsafeMethods

#define ADR "J"

#define LANG "Ljava/lang/"

#define OBJ LANG "Object;"
#define CLS LANG "Class;"
#define FLD LANG "reflect/Field;"
#define THR LANG "Throwable;"

#define DC_Args LANG "String;[BII" LANG "ClassLoader;" "Ljava/security/ProtectionDomain;"
#define DAC_Args CLS "[B[" OBJ

#define CC (char*)  /*cast a literal from (const char*)*/
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)

#define DECLARE_GETPUTOOP(Type, Desc) \
  {CC "get" #Type, CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type)}, \
  {CC "put" #Type, CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type)}, \
  {CC "get" #Type "Volatile", CC "(" OBJ "J)" #Desc, FN_PTR(Unsafe_Get##Type##Volatile)}, \
  {CC "put" #Type "Volatile", CC "(" OBJ "J" #Desc ")V", FN_PTR(Unsafe_Put##Type##Volatile)}


static JNINativeMethod jdk_internal_misc_Unsafe_methods[] = {
  {CC "getReference", CC "(" OBJ "J)" OBJ "", FN_PTR(Unsafe_GetReference)},
  {CC "putReference", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReference)},
  {CC "getReferenceVolatile", CC "(" OBJ "J)" OBJ, FN_PTR(Unsafe_GetReferenceVolatile)},
  {CC "putReferenceVolatile", CC "(" OBJ "J" OBJ ")V", FN_PTR(Unsafe_PutReferenceVolatile)},

  {CC "getUncompressedObject", CC "(" ADR ")" OBJ, FN_PTR(Unsafe_GetUncompressedObject)},

  DECLARE_GETPUTOOP(Boolean, Z),
  DECLARE_GETPUTOOP(Byte, B),
  DECLARE_GETPUTOOP(Short, S),
  DECLARE_GETPUTOOP(Char, C),
  DECLARE_GETPUTOOP(Int, I),
  DECLARE_GETPUTOOP(Long, J),
  DECLARE_GETPUTOOP(Float, F),
  DECLARE_GETPUTOOP(Double, D),

  {CC "allocateMemory0", CC "(J)" ADR, FN_PTR(Unsafe_AllocateMemory0)},
  {CC "reallocateMemory0", CC "(" ADR "J)" ADR, FN_PTR(Unsafe_ReallocateMemory0)},
  {CC "freeMemory0", CC "(" ADR ")V", FN_PTR(Unsafe_FreeMemory0)},

  {CC "objectFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_ObjectFieldOffset0)},
  {CC "objectFieldOffset1", CC "(" CLS LANG "String;)J", FN_PTR(Unsafe_ObjectFieldOffset1)},
  {CC "staticFieldOffset0", CC "(" FLD ")J", FN_PTR(Unsafe_StaticFieldOffset0)},
  {CC "staticFieldBase0", CC "(" FLD ")" OBJ, FN_PTR(Unsafe_StaticFieldBase0)},
  {CC "ensureClassInitialized0", CC "(" CLS ")V", FN_PTR(Unsafe_EnsureClassInitialized0)},
  {CC "arrayBaseOffset0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayBaseOffset0)},
  {CC "arrayIndexScale0", CC "(" CLS ")I", FN_PTR(Unsafe_ArrayIndexScale0)},

  {CC "defineClass0", CC "(" DC_Args ")" CLS, FN_PTR(Unsafe_DefineClass0)},
  {CC "allocateInstance", CC "(" CLS ")" OBJ, FN_PTR(Unsafe_AllocateInstance)},
  {CC "throwException", CC "(" THR ")V", FN_PTR(Unsafe_ThrowException)},
  {CC "compareAndSetReference", CC "(" OBJ "J" OBJ "" OBJ ")Z", FN_PTR(Unsafe_CompareAndSetReference)},
  {CC "compareAndSetInt", CC "(" OBJ "J""I""I"")Z", FN_PTR(Unsafe_CompareAndSetInt)},
  {CC "compareAndSetLong", CC "(" OBJ "J""J""J"")Z", FN_PTR(Unsafe_CompareAndSetLong)},
  {CC "compareAndExchangeReference", CC "(" OBJ "J" OBJ "" OBJ ")" OBJ, FN_PTR(Unsafe_CompareAndExchangeReference)},
  {CC "compareAndExchangeInt", CC "(" OBJ "J""I""I"")I", FN_PTR(Unsafe_CompareAndExchangeInt)},
  {CC "compareAndExchangeLong", CC "(" OBJ "J""J""J"")J", FN_PTR(Unsafe_CompareAndExchangeLong)},

  {CC "park", CC "(ZJ)V", FN_PTR(Unsafe_Park)},
  {CC "unpark", CC "(" OBJ ")V", FN_PTR(Unsafe_Unpark)},

  {CC "getLoadAverage0", CC "([DI)I", FN_PTR(Unsafe_GetLoadAverage0)},

  {CC "copyMemory0", CC "(" OBJ "J" OBJ "JJ)V", FN_PTR(Unsafe_CopyMemory0)},
  {CC "copySwapMemory0", CC "(" OBJ "J" OBJ "JJJ)V", FN_PTR(Unsafe_CopySwapMemory0)},
  {CC "setMemory0", CC "(" OBJ "JJB)V", FN_PTR(Unsafe_SetMemory0)},

  {CC "defineAnonymousClass0", CC "(" DAC_Args ")" CLS, FN_PTR(Unsafe_DefineAnonymousClass0)},

  {CC "shouldBeInitialized0", CC "(" CLS ")Z", FN_PTR(Unsafe_ShouldBeInitialized0)},

  {CC "loadFence", CC "()V", FN_PTR(Unsafe_LoadFence)},
  {CC "storeFence", CC "()V", FN_PTR(Unsafe_StoreFence)},
  {CC "fullFence", CC "()V", FN_PTR(Unsafe_FullFence)},
};

#undef CC
#undef FN_PTR

#undef ADR
#undef LANG
#undef OBJ
#undef CLS
#undef FLD
#undef THR
#undef DC_Args
#undef DAC_Args

#undef DECLARE_GETPUTOOP


// This function is exported, used by NativeLookup.
// The Unsafe_xxx functions above are called only from the interpreter.
// The optimizer looks at names and signatures to recognize
// individual functions.

JVM_ENTRY(void, JVM_RegisterJDKInternalMiscUnsafeMethods(JNIEnv *env, jclass unsafeclass)) {
  ThreadToNativeFromVM ttnfv(thread);

  int ok = env->RegisterNatives(unsafeclass, jdk_internal_misc_Unsafe_methods, sizeof(jdk_internal_misc_Unsafe_methods)/sizeof(JNINativeMethod));
  guarantee(ok == 0, "register jdk.internal.misc.Unsafe natives");
} JVM_END