474
// Static initialization of the lazily-resolved dispatch function pointers.
// Each pointer starts out pointing at its corresponding *_init resolver
// function; presumably the resolver installs the concrete barrier function
// on first use — TODO confirm against the *_init definitions.
template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_CLONE>::type
RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;
486
487 // Step 3: Pre-runtime dispatching.
488 // The PreRuntimeDispatch class is responsible for filtering the barrier strength
489 // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
490 // dispatch point. Otherwise it goes through a runtime check if hardwiring was
491 // not possible.
492 struct PreRuntimeDispatch: AllStatic {
  // Returns true if a raw access can be bound statically without consulting
  // UseCompressedOops at runtime: either the access is primitive, no
  // compressed-oop conversion was requested (oop* address), or the use of
  // compressed oops is already statically known (narrowOop* address).
  template<DecoratorSet decorators>
  static bool can_hardwire_raw() {
    return !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
           !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
           HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value; // we can infer we use compressed oops (narrowOop* address)
  }

  // The decorator bits that control compressed-oop handling; set or cleared
  // together once the runtime UseCompressedOops answer is known.
  static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;

  // True for primitive (non-oop) accesses that did not request barriers on
  // primitives; such accesses can be hardwired straight to raw accesses.
  template<DecoratorSet decorators>
  static bool is_hardwired_primitive() {
    return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
           !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
  }
507
  // store, AS_RAW case: hardwire the raw barrier call when possible.
  // Otherwise consult UseCompressedOops once, bake the answer into the
  // decorator set and recurse, so the recursive instantiation hardwires.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value>::type
  store(void* addr, T value) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (can_hardwire_raw<decorators>()) {
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        Raw::oop_store(addr, value);
      } else {
        Raw::store(addr, value);
      }
    } else if (UseCompressedOops) {
      // Compressed oops are in use: record that statically and retry.
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    } else {
      // No compressed oops: drop the conversion decorators and retry.
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    }
  }

  // store, non-raw case: hardwired primitive accesses are promoted to
  // AS_RAW and re-dispatched; everything else goes through the runtime
  // dispatch table for BARRIER_STORE.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value>::type
  store(void* addr, T value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    } else {
      RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
    }
  }
539
  // store_at, AS_RAW case: resolve the field address from base + offset and
  // delegate to the address-based store.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value>::type
  store_at(oop base, ptrdiff_t offset, T value) {
    store<decorators>(field_addr(base, offset), value);
  }

  // store_at, non-raw case: promote hardwired primitive accesses to AS_RAW,
  // otherwise dispatch through the runtime table for BARRIER_STORE_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value>::type
  store_at(oop base, ptrdiff_t offset, T value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
    } else {
      RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
    }
  }
558
  // load, AS_RAW case: hardwire the raw load when possible; otherwise
  // consult UseCompressedOops once, fold the answer into the decorators and
  // recurse so the recursive instantiation hardwires.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  load(void* addr) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (can_hardwire_raw<decorators>()) {
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::template oop_load<T>(addr);
      } else {
        return Raw::template load<T>(addr);
      }
    } else if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    }
  }

  // load, non-raw case: promote hardwired primitive loads to AS_RAW;
  // otherwise dispatch through the runtime table for BARRIER_LOAD.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  load(void* addr) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
    }
  }
590
  // load_at, AS_RAW case: resolve the field address from base + offset and
  // delegate to the address-based load.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  load_at(oop base, ptrdiff_t offset) {
    return load<decorators, T>(field_addr(base, offset));
  }

  // load_at, non-raw case: promote hardwired primitive loads to AS_RAW,
  // otherwise dispatch through the runtime table for BARRIER_LOAD_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  load_at(oop base, ptrdiff_t offset) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
    }
  }
609
  // atomic_cmpxchg, AS_RAW case: hardwire the raw compare-and-exchange when
  // possible; otherwise consult UseCompressedOops once, fold the answer into
  // the decorators and recurse so the recursive instantiation hardwires.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (can_hardwire_raw<decorators>()) {
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
      } else {
        return Raw::atomic_cmpxchg(new_value, addr, compare_value);
      }
    } else if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
    }
  }
629
630 template <DecoratorSet decorators, typename T>
631 inline static typename EnableIf<
632 !HasDecorator<decorators, AS_RAW>::value, T>::type
633 atomic_cmpxchg(T new_value, void* addr, T compare_value) {
634 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
635 if (is_hardwired_primitive<decorators>()) {
636 const DecoratorSet expanded_decorators = decorators | AS_RAW;
637 return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
638 } else {
639 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
640 }
641 }
  // atomic_cmpxchg_at, AS_RAW case: resolve the field address and delegate
  // to the address-based atomic_cmpxchg.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
  }

  // atomic_cmpxchg_at, non-raw case: promote hardwired primitives to
  // AS_RAW, otherwise use the runtime table for BARRIER_ATOMIC_CMPXCHG_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
    }
  }
661
  // atomic_xchg, AS_RAW case: hardwire the raw exchange when possible;
  // otherwise consult UseCompressedOops once, fold the answer into the
  // decorators and recurse so the recursive instantiation hardwires.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_xchg(T new_value, void* addr) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (can_hardwire_raw<decorators>()) {
      if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
        return Raw::oop_atomic_xchg(new_value, addr);
      } else {
        return Raw::atomic_xchg(new_value, addr);
      }
    } else if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    }
  }

  // atomic_xchg, non-raw case: promote hardwired primitive accesses to
  // AS_RAW; otherwise dispatch through the runtime table for
  // BARRIER_ATOMIC_XCHG.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_xchg(T new_value, void* addr) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
    }
  }
693
781 // | narrowOop | x | x | hw-none |
782 //
783 // x means not allowed
784 // rt-comp means it must be checked at runtime whether the oop is compressed.
785 // hw-none means it is statically known the oop will not be compressed.
786 // hw-comp means it is statically known the oop will be compressed.
787
// store type reduction: matching value/address types need no conversion and
// pass the decorators through unchanged.
template <DecoratorSet decorators, typename T>
inline void store_reduce_types(T* addr, T value) {
  PreRuntimeDispatch::store<decorators>(addr, value);
}

// narrowOop* address: compressed-oop use is statically known, so both
// conversion decorators are hardwired in.
template <DecoratorSet decorators>
inline void store_reduce_types(narrowOop* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}

// HeapWord* address: whether the oop is compressed must be resolved at
// runtime, so only the conversion request is recorded.
template <DecoratorSet decorators>
inline void store_reduce_types(HeapWord* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}
805
// atomic_cmpxchg type reduction: matching types pass straight through.
template <DecoratorSet decorators, typename T>
inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
  return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
}

// narrowOop* address: compressed-oop use is statically known.
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value, HeapWord* addr, oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}
823
824 template <DecoratorSet decorators, typename T>
825 inline T atomic_xchg_reduce_types(T new_value, T* addr) {
826 const DecoratorSet expanded_decorators = decorators;
827 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
828 }
829
// narrowOop* address: compressed-oop use is statically known.
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}
842
// load type reduction: matching types pass straight through.
template <DecoratorSet decorators, typename T>
inline T load_reduce_types(T* addr) {
  return PreRuntimeDispatch::load<decorators, T>(addr);
}

// narrowOop* address: compressed-oop use is statically known; decompresses
// to an oop result.
template <DecoratorSet decorators, typename T>
inline oop load_reduce_types(narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP | INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators, typename T>
inline oop load_reduce_types(HeapWord* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
}
859
860 // Step 1: Set default decorators. This step remembers if a type was volatile
861 // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
862 // memory ordering is set for the access, and the implied decorator rules
863 // are applied to select sensible defaults for decorators that have not been
864 // explicitly set. For example, default object referent strength is set to strong.
865 // This step also decays the types passed in (e.g. getting rid of CV qualifiers
// and references from the types). This step also performs some type verification
867 // that the passed in types make sense.
868
869 template <DecoratorSet decorators, typename T>
870 static void verify_types(){
871 // If this fails to compile, then you have sent in something that is
|
474
// Static initialization of the lazily-resolved dispatch function pointers.
// Each pointer starts out pointing at its corresponding *_init resolver
// function; presumably the resolver installs the concrete barrier function
// on first use — TODO confirm against the *_init definitions.
template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ATOMIC_XCHG_AT>::type
RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG_AT>::_atomic_xchg_at_func = &atomic_xchg_at_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_ARRAYCOPY>::type
RuntimeDispatch<decorators, T, BARRIER_ARRAYCOPY>::_arraycopy_func = &arraycopy_init;

template <DecoratorSet decorators, typename T>
typename AccessFunction<decorators, T, BARRIER_CLONE>::type
RuntimeDispatch<decorators, T, BARRIER_CLONE>::_clone_func = &clone_init;
486
487 // Step 3: Pre-runtime dispatching.
488 // The PreRuntimeDispatch class is responsible for filtering the barrier strength
489 // decorators. That is, for AS_RAW, it hardwires the accesses without a runtime
490 // dispatch point. Otherwise it goes through a runtime check if hardwiring was
491 // not possible.
492 struct PreRuntimeDispatch: AllStatic {
  // Compile-time predicate: a raw access can be bound statically (without a
  // runtime UseCompressedOops check) when the access is primitive, when no
  // compressed-oop conversion was requested (oop* address), or when the use
  // of compressed oops is already statically known (narrowOop* address).
  template<DecoratorSet decorators>
  struct CanHardwireRaw: public IntegralConstant<
    bool,
    !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value || // primitive access
    !HasDecorator<decorators, INTERNAL_CONVERT_COMPRESSED_OOP>::value || // don't care about compressed oops (oop* address)
    HasDecorator<decorators, INTERNAL_RT_USE_COMPRESSED_OOPS>::value> // we can infer we use compressed oops (narrowOop* address)
  {};

  // The decorator bits that control compressed-oop handling; set or cleared
  // together once the runtime UseCompressedOops answer is known.
  static const DecoratorSet convert_compressed_oops = INTERNAL_RT_USE_COMPRESSED_OOPS | INTERNAL_CONVERT_COMPRESSED_OOP;

  // True for primitive (non-oop) accesses that did not request barriers on
  // primitives; such accesses can be hardwired straight to raw accesses.
  template<DecoratorSet decorators>
  static bool is_hardwired_primitive() {
    return !HasDecorator<decorators, INTERNAL_BT_BARRIER_ON_PRIMITIVES>::value &&
           !HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value;
  }
508
  // store, raw-and-hardwirable case: call the raw barrier directly; the
  // oop/primitive choice is a compile-time decorator test.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value>::type
  store(void* addr, T value) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
      Raw::oop_store(addr, value);
    } else {
      Raw::store(addr, value);
    }
  }

  // store, raw-but-not-hardwirable case: consult UseCompressedOops once,
  // bake the answer into the decorator set and recurse so the recursive
  // instantiation selects the hardwired overload above.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value>::type
  store(void* addr, T value) {
    if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    }
  }

  // store, non-raw case: hardwired primitive accesses are promoted to
  // AS_RAW and re-dispatched; everything else goes through the runtime
  // dispatch table for BARRIER_STORE.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value>::type
  store(void* addr, T value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      PreRuntimeDispatch::store<expanded_decorators>(addr, value);
    } else {
      RuntimeDispatch<decorators, T, BARRIER_STORE>::store(addr, value);
    }
  }
545
  // store_at, AS_RAW case: resolve the field address from base + offset and
  // delegate to the address-based store.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value>::type
  store_at(oop base, ptrdiff_t offset, T value) {
    store<decorators>(field_addr(base, offset), value);
  }

  // store_at, non-raw case: promote hardwired primitive accesses to AS_RAW,
  // otherwise dispatch through the runtime table for BARRIER_STORE_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value>::type
  store_at(oop base, ptrdiff_t offset, T value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      PreRuntimeDispatch::store_at<expanded_decorators>(base, offset, value);
    } else {
      RuntimeDispatch<decorators, T, BARRIER_STORE_AT>::store_at(base, offset, value);
    }
  }
564
  // load, raw-and-hardwirable case: call the raw barrier directly.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
  load(void* addr) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
      return Raw::template oop_load<T>(addr);
    } else {
      return Raw::template load<T>(addr);
    }
  }

  // load, raw-but-not-hardwirable case: consult UseCompressedOops once,
  // fold the answer into the decorators and recurse so the recursive
  // instantiation selects the hardwired overload above.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
  load(void* addr) {
    if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    }
  }

  // load, non-raw case: promote hardwired primitive loads to AS_RAW;
  // otherwise dispatch through the runtime table for BARRIER_LOAD.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  load(void* addr) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::load<expanded_decorators, T>(addr);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_LOAD>::load(addr);
    }
  }
601
  // load_at, AS_RAW case: resolve the field address from base + offset and
  // delegate to the address-based load.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  load_at(oop base, ptrdiff_t offset) {
    return load<decorators, T>(field_addr(base, offset));
  }

  // load_at, non-raw case: promote hardwired primitive loads to AS_RAW,
  // otherwise dispatch through the runtime table for BARRIER_LOAD_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  load_at(oop base, ptrdiff_t offset) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::load_at<expanded_decorators, T>(base, offset);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_LOAD_AT>::load_at(base, offset);
    }
  }
620
  // atomic_cmpxchg, raw-and-hardwirable case: call the raw barrier
  // directly; the oop/primitive choice is a compile-time decorator test.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
  atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
      return Raw::oop_atomic_cmpxchg(new_value, addr, compare_value);
    } else {
      return Raw::atomic_cmpxchg(new_value, addr, compare_value);
    }
  }

  // atomic_cmpxchg, raw-but-not-hardwirable case: consult UseCompressedOops
  // once, fold the answer into the decorators and recurse so the recursive
  // instantiation selects the hardwired overload above.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
  atomic_cmpxchg(T new_value, void* addr, T compare_value) {
    if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
    }
  }
645
646 template <DecoratorSet decorators, typename T>
647 inline static typename EnableIf<
648 !HasDecorator<decorators, AS_RAW>::value, T>::type
649 atomic_cmpxchg(T new_value, void* addr, T compare_value) {
650 typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
651 if (is_hardwired_primitive<decorators>()) {
652 const DecoratorSet expanded_decorators = decorators | AS_RAW;
653 return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
654 } else {
655 return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG>::atomic_cmpxchg(new_value, addr, compare_value);
656 }
657 }
  // atomic_cmpxchg_at, AS_RAW case: resolve the field address and delegate
  // to the address-based atomic_cmpxchg.
  // NOTE(review): the template header line of this overload falls outside
  // the visible chunk.
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    return atomic_cmpxchg<decorators>(new_value, field_addr(base, offset), compare_value);
  }

  // atomic_cmpxchg_at, non-raw case: promote hardwired primitives to
  // AS_RAW, otherwise use the runtime table for BARRIER_ATOMIC_CMPXCHG_AT.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_cmpxchg_at(T new_value, oop base, ptrdiff_t offset, T compare_value) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::atomic_cmpxchg_at<expanded_decorators>(new_value, base, offset, compare_value);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_CMPXCHG_AT>::atomic_cmpxchg_at(new_value, base, offset, compare_value);
    }
  }
677
  // atomic_xchg, raw-and-hardwirable case: call the raw barrier directly.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && CanHardwireRaw<decorators>::value, T>::type
  atomic_xchg(T new_value, void* addr) {
    typedef RawAccessBarrier<decorators & RAW_DECORATOR_MASK> Raw;
    if (HasDecorator<decorators, INTERNAL_VALUE_IS_OOP>::value) {
      return Raw::oop_atomic_xchg(new_value, addr);
    } else {
      return Raw::atomic_xchg(new_value, addr);
    }
  }

  // atomic_xchg, raw-but-not-hardwirable case: consult UseCompressedOops
  // once, fold the answer into the decorators and recurse so the recursive
  // instantiation selects the hardwired overload above.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    HasDecorator<decorators, AS_RAW>::value && !CanHardwireRaw<decorators>::value, T>::type
  atomic_xchg(T new_value, void* addr) {
    if (UseCompressedOops) {
      const DecoratorSet expanded_decorators = decorators | convert_compressed_oops;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    } else {
      const DecoratorSet expanded_decorators = decorators & ~convert_compressed_oops;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    }
  }

  // atomic_xchg, non-raw case: promote hardwired primitive accesses to
  // AS_RAW; otherwise dispatch through the runtime table for
  // BARRIER_ATOMIC_XCHG.
  template <DecoratorSet decorators, typename T>
  inline static typename EnableIf<
    !HasDecorator<decorators, AS_RAW>::value, T>::type
  atomic_xchg(T new_value, void* addr) {
    if (is_hardwired_primitive<decorators>()) {
      const DecoratorSet expanded_decorators = decorators | AS_RAW;
      return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
    } else {
      return RuntimeDispatch<decorators, T, BARRIER_ATOMIC_XCHG>::atomic_xchg(new_value, addr);
    }
  }
714
802 // | narrowOop | x | x | hw-none |
803 //
804 // x means not allowed
805 // rt-comp means it must be checked at runtime whether the oop is compressed.
806 // hw-none means it is statically known the oop will not be compressed.
807 // hw-comp means it is statically known the oop will be compressed.
808
// store type reduction: matching value/address types need no conversion and
// pass the decorators through unchanged.
template <DecoratorSet decorators, typename T>
inline void store_reduce_types(T* addr, T value) {
  PreRuntimeDispatch::store<decorators>(addr, value);
}

// narrowOop* address with oop value: compressed-oop use is statically
// known, so both conversion decorators are hardwired in.
template <DecoratorSet decorators>
inline void store_reduce_types(narrowOop* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}

// narrowOop* address with an already-compressed value: same statically
// known compressed-oop decorators as above.
template <DecoratorSet decorators>
inline void store_reduce_types(narrowOop* addr, narrowOop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}

// HeapWord* address: whether the oop is compressed must be resolved at
// runtime, so only the conversion request is recorded.
template <DecoratorSet decorators>
inline void store_reduce_types(HeapWord* addr, oop value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  PreRuntimeDispatch::store<expanded_decorators>(addr, value);
}
833
// atomic_cmpxchg type reduction: matching types pass straight through.
template <DecoratorSet decorators, typename T>
inline T atomic_cmpxchg_reduce_types(T new_value, T* addr, T compare_value) {
  return PreRuntimeDispatch::atomic_cmpxchg<decorators>(new_value, addr, compare_value);
}

// narrowOop* address with oop operands: compressed-oop use is statically
// known.
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value, narrowOop* addr, oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}

// narrowOop* address with already-compressed operands: same statically
// known compressed-oop decorators.
template <DecoratorSet decorators>
inline narrowOop atomic_cmpxchg_reduce_types(narrowOop new_value, narrowOop* addr, narrowOop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators>
inline oop atomic_cmpxchg_reduce_types(oop new_value,
                                       HeapWord* addr,
                                       oop compare_value) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_cmpxchg<expanded_decorators>(new_value, addr, compare_value);
}
860
861 template <DecoratorSet decorators, typename T>
862 inline T atomic_xchg_reduce_types(T new_value, T* addr) {
863 const DecoratorSet expanded_decorators = decorators;
864 return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
865 }
866
// narrowOop* address with oop value: compressed-oop use is statically known.
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}

// narrowOop* address with an already-compressed value: same statically
// known compressed-oop decorators.
template <DecoratorSet decorators>
inline narrowOop atomic_xchg_reduce_types(narrowOop new_value, narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators>
inline oop atomic_xchg_reduce_types(oop new_value, HeapWord* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::atomic_xchg<expanded_decorators>(new_value, addr);
}
886
// load type reduction: matching types pass straight through.
template <DecoratorSet decorators, typename T>
inline T load_reduce_types(T* addr) {
  return PreRuntimeDispatch::load<decorators, T>(addr);
}

// narrowOop* address: compressed-oop use is statically known; the result
// type is selected by OopOrNarrowOop<T> (oop or narrowOop depending on T).
template <DecoratorSet decorators, typename T>
inline typename OopOrNarrowOop<T>::type load_reduce_types(narrowOop* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP |
                                           INTERNAL_RT_USE_COMPRESSED_OOPS;
  return PreRuntimeDispatch::load<expanded_decorators, typename OopOrNarrowOop<T>::type>(addr);
}

// HeapWord* address: compression must be resolved at runtime.
template <DecoratorSet decorators, typename T>
inline oop load_reduce_types(HeapWord* addr) {
  const DecoratorSet expanded_decorators = decorators | INTERNAL_CONVERT_COMPRESSED_OOP;
  return PreRuntimeDispatch::load<expanded_decorators, oop>(addr);
}
904
905 // Step 1: Set default decorators. This step remembers if a type was volatile
906 // and then sets the MO_VOLATILE decorator by default. Otherwise, a default
907 // memory ordering is set for the access, and the implied decorator rules
908 // are applied to select sensible defaults for decorators that have not been
909 // explicitly set. For example, default object referent strength is set to strong.
910 // This step also decays the types passed in (e.g. getting rid of CV qualifiers
// and references from the types). This step also performs some type verification
912 // that the passed in types make sense.
913
914 template <DecoratorSet decorators, typename T>
915 static void verify_types(){
916 // If this fails to compile, then you have sent in something that is
|