src/share/vm/prims/unsafe.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8161720 Sdiff src/share/vm/prims

src/share/vm/prims/unsafe.cpp

Print this page




 133 
 134 ///// Data read/writes on the Java heap and in native (off-heap) memory
 135 
 136 /**
 137  * Helper class for accessing memory.
 138  *
 139  * Normalizes values and wraps accesses in
 140  * JavaThread::doing_unsafe_access() if needed.
 141  */
 142 class MemoryAccess : StackObj {
 143   JavaThread* _thread;   // current thread; used to toggle doing_unsafe_access()
 144   jobject _obj;          // base object handle; NULL for native/off-heap access
 145   jlong _offset;         // byte offset from the resolved base
 146 
 147   // Resolves and returns the address of the memory access
 148   void* addr() {
 149     return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
 150   }
 151 
      // Identity for all types except jboolean (see overload below).
 152   template <typename T>
 153   T normalize(T x) {
 154     return x;
 155   }
 156 
      // Keep only the low bit so a stored jboolean is always 0 or 1.
      // NOTE(review): normalization happens only on the write path here;
      // get()/get_volatile() return raw memory contents, so a jboolean read
      // can observe an out-of-range byte. The revised version of this class
      // (right-hand side of this webrev) adds a matching normalize_for_read.
 157   jboolean normalize(jboolean x) {
 158     return x & 1;
 159   }
 160 









 161   /**
 162    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 163    */
 164   class GuardUnsafeAccess {
 165     JavaThread* _thread;
 166     bool _active;
 167 
 168   public:
      // NOTE(review): the parameter name _obj shadows the enclosing class's
      // field of the same name; plain 'obj' would be clearer.
 169     GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
 170       if (JNIHandles::resolve(_obj) == NULL) {
 171         // native/off-heap access which may raise SIGBUS if accessing
 172         // memory mapped file data in a region of the file which has
 173         // been truncated and is now invalid
 174         _thread->set_doing_unsafe_access(true);
 175         _active = true;
 176       } else {
 177         _active = false;
 178       }
 179     }
 180 
 181     ~GuardUnsafeAccess() {
 182       if (_active) {
 183         _thread->set_doing_unsafe_access(false);
 184       }
 185     }
 186   };
 187 
 188 public:
 189   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
 190     : _thread(thread), _obj(obj), _offset(offset) {
 191   }
 192 
      // Plain (non-volatile) read of a T at (obj, offset).
 193   template <typename T>
 194   T get() {
 195     GuardUnsafeAccess guard(_thread, _obj);
 196 
 197     T* p = (T*)addr();
 198 
 199     T x = *p;
 200 
 201     return x;
 202   }
 203 
      // Plain (non-volatile) write of a T; the value is normalized first.
 204   template <typename T>
 205   void put(T x) {
 206     GuardUnsafeAccess guard(_thread, _obj);
 207 
 208     T* p = (T*)addr();
 209 
 210     *p = normalize(x);
 211   }
 212 
 213 
      // Volatile read: acquire-ordered load, preceded by a full fence on
      // platforms where support_IRIW_for_not_multiple_copy_atomic_cpu is set.
 214   template <typename T>
 215   T get_volatile() {
 216     GuardUnsafeAccess guard(_thread, _obj);
 217 
 218     T* p = (T*)addr();
 219 
 220     if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 221       OrderAccess::fence();
 222     }
 223 
 224     T x = OrderAccess::load_acquire((volatile T*)p);
 225 
 226     return x;
 227   }
 228 
      // Volatile write: release store followed by a full fence.
 229   template <typename T>
 230   void put_volatile(T x) {
 231     GuardUnsafeAccess guard(_thread, _obj);
 232 
 233     T* p = (T*)addr();
 234 
 235     OrderAccess::release_store_fence((volatile T*)p, normalize(x));
 236   }
 237 
 238 
      // Fallback when SUPPORTS_NATIVE_CX8 is absent (presumably no native
      // 8-byte atomic op on the platform -- verify per port): 64-bit accesses
      // are serialized under UnsafeJlong_lock instead.
 239 #ifndef SUPPORTS_NATIVE_CX8
 240   jlong get_jlong_locked() {
 241     GuardUnsafeAccess guard(_thread, _obj);
 242 
 243     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 244 
 245     jlong* p = (jlong*)addr();
 246 
 247     jlong x = Atomic::load(p);
 248 
 249     return x;
 250   }
 251 
 252   void put_jlong_locked(jlong x) {
 253     GuardUnsafeAccess guard(_thread, _obj);
 254 
 255     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 256 
 257     jlong* p = (jlong*)addr();
 258 
 259     Atomic::store(normalize(x),  p);
 260   }
 261 #endif
 262 };
 263 
 264 // Get/PutObject must be special-cased, since it works with handles.
 265 
 266 // These functions allow a null base pointer with an arbitrary address.
 267 // But if the base pointer is non-null, the offset should make some sense.
 268 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 269 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 270   oop p = JNIHandles::resolve(obj);
 271   oop v;
 272 
 273   if (UseCompressedOops) {
 274     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 275     v = oopDesc::decode_heap_oop(n);
 276   } else {
 277     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 278   }
 279 




 133 
 134 ///// Data read/writes on the Java heap and in native (off-heap) memory
 135 
 136 /**
 137  * Helper class for accessing memory.
 138  *
 139  * Normalizes values and wraps accesses in
 140  * JavaThread::doing_unsafe_access() if needed.
 141  */
 142 class MemoryAccess : StackObj {
 143   JavaThread* _thread;   // current thread; used to toggle doing_unsafe_access()
 144   jobject _obj;          // base object handle; NULL for native/off-heap access
 145   jlong _offset;         // byte offset from the resolved base
 146 
 147   // Resolves and returns the address of the memory access
 148   void* addr() {
 149     return index_oop_from_field_offset_long(JNIHandles::resolve(_obj), _offset);
 150   }
 151 
      // Identity on the write path for all types except jboolean.
 152   template <typename T>
 153   T normalize_for_write(T x) {
 154     return x;
 155   }
 156 
      // Keep only the low bit so a stored jboolean is always 0 or 1.
 157   jboolean normalize_for_write(jboolean x) {
 158     return x & 1;
 159   }
 160 
      // Identity on the read path for all types except jboolean.
 161   template <typename T>
 162   T normalize_for_read(T x) {
 163     return x;
 164   }
 165 
      // Canonicalize whatever byte is actually in memory: any non-zero value
      // reads back as 1 (true), so callers never observe an out-of-range
      // jboolean even if the raw byte was written by other means.
 166   jboolean normalize_for_read(jboolean x) {
 167     return x != 0;
 168   }
 169 
 170   /**
 171    * Helper class to wrap memory accesses in JavaThread::doing_unsafe_access()
 172    */
 173   class GuardUnsafeAccess {
 174     JavaThread* _thread;
 175     bool _active;
 176 
 177   public:
      // NOTE(review): the parameter name _obj shadows the enclosing class's
      // field of the same name; plain 'obj' would be clearer.
 178     GuardUnsafeAccess(JavaThread* thread, jobject _obj) : _thread(thread) {
 179       if (JNIHandles::resolve(_obj) == NULL) {
 180         // native/off-heap access which may raise SIGBUS if accessing
 181         // memory mapped file data in a region of the file which has
 182         // been truncated and is now invalid
 183         _thread->set_doing_unsafe_access(true);
 184         _active = true;
 185       } else {
 186         _active = false;
 187       }
 188     }
 189 
 190     ~GuardUnsafeAccess() {
 191       if (_active) {
 192         _thread->set_doing_unsafe_access(false);
 193       }
 194     }
 195   };
 196 
 197 public:
 198   MemoryAccess(JavaThread* thread, jobject obj, jlong offset)
 199     : _thread(thread), _obj(obj), _offset(offset) {
 200   }
 201 
      // Plain (non-volatile) read; result is normalized (see normalize_for_read).
 202   template <typename T>
 203   T get() {
 204     GuardUnsafeAccess guard(_thread, _obj);
 205 
 206     T* p = (T*)addr();
 207 
 208     T x = normalize_for_read(*p);
 209 
 210     return x;
 211   }
 212 
      // Plain (non-volatile) write; the value is normalized before storing.
 213   template <typename T>
 214   void put(T x) {
 215     GuardUnsafeAccess guard(_thread, _obj);
 216 
 217     T* p = (T*)addr();
 218 
 219     *p = normalize_for_write(x);
 220   }
 221 
 222 
      // Volatile read: acquire-ordered load, preceded by a full fence on
      // platforms where support_IRIW_for_not_multiple_copy_atomic_cpu is set;
      // the loaded value is normalized before being returned.
 223   template <typename T>
 224   T get_volatile() {
 225     GuardUnsafeAccess guard(_thread, _obj);
 226 
 227     T* p = (T*)addr();
 228 
 229     if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
 230       OrderAccess::fence();
 231     }
 232 
 233     T x = OrderAccess::load_acquire((volatile T*)p);
 234 
 235     return normalize_for_read(x);
 236   }
 237 
      // Volatile write: release store followed by a full fence.
 238   template <typename T>
 239   void put_volatile(T x) {
 240     GuardUnsafeAccess guard(_thread, _obj);
 241 
 242     T* p = (T*)addr();
 243 
 244     OrderAccess::release_store_fence((volatile T*)p, normalize_for_write(x));
 245   }
 246 
 247 
      // Fallback when SUPPORTS_NATIVE_CX8 is absent (presumably no native
      // 8-byte atomic op on the platform -- verify per port): 64-bit accesses
      // are serialized under UnsafeJlong_lock instead.
 248 #ifndef SUPPORTS_NATIVE_CX8
 249   jlong get_jlong_locked() {
 250     GuardUnsafeAccess guard(_thread, _obj);
 251 
 252     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 253 
 254     jlong* p = (jlong*)addr();
 255 
 256     jlong x = Atomic::load(p);
 257 
 258     return x;
 259   }
 260 
 261   void put_jlong_locked(jlong x) {
 262     GuardUnsafeAccess guard(_thread, _obj);
 263 
 264     MutexLockerEx mu(UnsafeJlong_lock, Mutex::_no_safepoint_check_flag);
 265 
 266     jlong* p = (jlong*)addr();
 267 
 268     Atomic::store(normalize_for_write(x),  p);
 269   }
 270 #endif
 271 };
 272 
 273 // Get/PutObject must be special-cased, since it works with handles.
 274 
 275 // These functions allow a null base pointer with an arbitrary address.
 276 // But if the base pointer is non-null, the offset should make some sense.
 277 // That is, it should be in the range [0, MAX_OBJECT_SIZE].
 278 UNSAFE_ENTRY(jobject, Unsafe_GetObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
 279   oop p = JNIHandles::resolve(obj);
 280   oop v;
 281 
 282   if (UseCompressedOops) {
 283     narrowOop n = *(narrowOop*)index_oop_from_field_offset_long(p, offset);
 284     v = oopDesc::decode_heap_oop(n);
 285   } else {
 286     v = *(oop*)index_oop_from_field_offset_long(p, offset);
 287   }
 288 


src/share/vm/prims/unsafe.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File