src/hotspot/share/runtime/synchronizer.cpp

 241     lock->set_displaced_header(markWord::unused_mark());
 242 
 243     if (owner == NULL && Atomic::replace_if_null(Self, &(m->_owner))) {
 244       assert(m->_recursions == 0, "invariant");
 245       return true;
 246     }
 247   }
 248 
 249   // Note that we could inflate in quick_enter.
 250   // This is likely a useful optimization.
 251   // Critically, in quick_enter() we must not:
 252   // -- perform bias revocation, or
 253   // -- block indefinitely, or
 254   // -- reach a safepoint
 255 
 256   return false;        // revert to slow-path
 257 }
 258 
 259 // -----------------------------------------------------------------------------
 260 // Monitor Enter/Exit
 261 // The interpreter and compiler use some assembly copies of this code. Make sure
 262 // to update that code if the following function is changed. The implementation
 263 // is extremely sensitive to race conditions. Be careful.
 264 
 265 void ObjectSynchronizer::enter(Handle obj, BasicLock* lock, TRAPS) {
 266   if (UseBiasedLocking) {
 267     if (!SafepointSynchronize::is_at_safepoint()) {
 268       BiasedLocking::revoke(obj, THREAD);
 269     } else {
 270       BiasedLocking::revoke_at_safepoint(obj);
 271     }
 272   }
 273 
 274   markWord mark = obj->mark();
 275   assert(!mark.has_bias_pattern(), "should not see bias pattern here");
 276 
 277   if (mark.is_neutral()) {
 278     // Anticipate successful CAS -- the ST of the displaced mark must
 279     // be visible <= the ST performed by the CAS.
 280     lock->set_displaced_header(mark);
 281     if (mark == obj()->cas_set_mark(markWord::from_pointer(lock), mark)) {
 282       return;
 283     }




 241     lock->set_displaced_header(markWord::unused_mark());
 242 
 243     if (owner == NULL && Atomic::replace_if_null(Self, &(m->_owner))) {
 244       assert(m->_recursions == 0, "invariant");
 245       return true;
 246     }
 247   }
 248 
 249   // Note that we could inflate in quick_enter.
 250   // This is likely a useful optimization.
 251   // Critically, in quick_enter() we must not:
 252   // -- perform bias revocation, or
 253   // -- block indefinitely, or
 254   // -- reach a safepoint
 255 
 256   return false;        // revert to slow-path
 257 }
 258 
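To make the quick_enter() fast path above concrete, here is a minimal standalone sketch of the "claim the monitor only if it is currently unowned" step. It uses std::atomic<void*> rather than HotSpot's Atomic::replace_if_null(), and the ToyMonitor type and try_quick_enter() name are invented for illustration; they are not part of the JDK sources.

#include <atomic>
#include <cassert>

// Illustrative stand-in for an inflated monitor: 'owner' is nullptr while the
// monitor is unowned, and holds an opaque thread identity once acquired.
struct ToyMonitor {
  std::atomic<void*> owner{nullptr};
  int recursions{0};
};

// Mirrors the shape of the fast path: succeed only if the owner field is
// still null, installing 'self' with a single compare-and-swap. No blocking,
// no safepoint, no bias revocation -- on contention we simply report failure
// so the caller can fall back to the slow path.
static bool try_quick_enter(ToyMonitor* m, void* self) {
  void* expected = nullptr;
  if (m->owner.compare_exchange_strong(expected, self)) {
    assert(m->recursions == 0);   // a freshly acquired monitor is not recursive
    return true;
  }
  return false;
}

On failure the CAS leaves the observed owner value in 'expected'; the sketch ignores it, just as the code above simply returns false and reverts to the slow path.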
 259 // -----------------------------------------------------------------------------
 260 // Monitor Enter/Exit
 261 // The interpreter and compiler assembly code tries to lock using the fast path
 262 // of this algorithm. Make sure to update that code if the following function is
 263 // changed. The implementation is extremely sensitive to race conditions. Be careful.
 264 
 265 void ObjectSynchronizer::enter(Handle obj, BasicLock* lock, TRAPS) {
 266   if (UseBiasedLocking) {
 267     if (!SafepointSynchronize::is_at_safepoint()) {
 268       BiasedLocking::revoke(obj, THREAD);
 269     } else {
 270       BiasedLocking::revoke_at_safepoint(obj);
 271     }
 272   }
 273 
 274   markWord mark = obj->mark();
 275   assert(!mark.has_bias_pattern(), "should not see bias pattern here");
 276 
 277   if (mark.is_neutral()) {
 278     // Anticipate successful CAS -- the ST of the displaced mark must
 279     // be visible <= the ST performed by the CAS.
 280     lock->set_displaced_header(mark);
 281     if (mark == obj()->cas_set_mark(markWord::from_pointer(lock), mark)) {
 282       return;
 283     }
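The stack-locking fast path at lines 277-283 depends on ordering: the displaced mark is stored into the on-stack BasicLock before the CAS publishes a pointer to that lock record in the object's mark word, so any thread that observes the new mark can also see the saved header. The following standalone sketch illustrates that store-then-CAS pattern with a simplified uintptr_t mark word; ToyOop, ToyBasicLock and try_stack_lock() are invented names, and the real markWord encoding and memory-ordering machinery in HotSpot are more involved.

#include <atomic>
#include <cstdint>

// Simplified stand-ins: a 'mark word' is just a uintptr_t here, and an
// unlocked ("neutral") mark is whatever value the caller passes in.
struct ToyBasicLock {
  std::uintptr_t displaced_header{0};   // saved copy of the object's mark
};

struct ToyOop {
  std::atomic<std::uintptr_t> mark{0};
};

// Store the displaced header first, then try to install a pointer to the
// on-stack lock record as the new mark. The CAS uses release semantics on
// success, so the earlier store to 'displaced_header' becomes visible to any
// thread that later reads the new mark value with acquire semantics.
static bool try_stack_lock(ToyOop* obj, ToyBasicLock* lock,
                           std::uintptr_t neutral_mark) {
  lock->displaced_header = neutral_mark;            // ST of the displaced mark
  std::uintptr_t expected = neutral_mark;
  return obj->mark.compare_exchange_strong(
      expected,
      reinterpret_cast<std::uintptr_t>(lock),       // new mark: pointer to lock
      std::memory_order_acq_rel,                    // success ordering
      std::memory_order_relaxed);                   // failure ordering
}

A false return corresponds to the case above where the CAS does not see the expected neutral mark, and the caller must continue with the slower inflation path.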

