1930 * constant pool index in the instruction.
1931 */
// Shared handler for getfield/getstatic: resolve the field through the
// constant-pool cache, post the JVMTI field-access event if a watch is
// armed, then push the field value using the push form matching its type.
1932 CASE(_getfield):
1933 CASE(_getstatic):
1934 {
1935 u2 index;
1936 ConstantPoolCacheEntry* cache;
// The 2-byte cache index is stored in native byte order in the rewritten
// bytecode stream at pc+1/pc+2.
1937 index = Bytes::get_native_u2(pc+1);
1938
1939 // QQQ Need to make this as inlined as possible. Probably need to
1940 // split all the bytecode cases out so c++ compiler has a chance
1941 // for constant prop to fold everything possible away.
1942
1943 cache = cp->entry_at(index);
// First execution only: ask the runtime to resolve the field reference.
// resolve_from_cache can throw (hence handle_exception), and the cache
// entry must be re-fetched after the VM call.
1944 if (!cache->is_resolved((Bytecodes::Code)opcode)) {
1945 CALL_VM(InterpreterRuntime::resolve_from_cache(THREAD, (Bytecodes::Code)opcode),
1946 handle_exception);
1947 cache = cp->entry_at(index);
1948 }
1949
1950 #ifdef VM_JVMTI
1951 if (_jvmti_interp_events) {
1952 int *count_addr;
1953 oop obj;
1954 // Check to see if a field access watch has been set
1955 // before we take the time to call into the VM.
1956 count_addr = (int *)JvmtiExport::get_field_access_count_addr();
1957 if ( *count_addr > 0 ) {
// getstatic has no receiver on the operand stack; getfield's receiver
// oop is at top-of-stack.
1958 if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {
1959 obj = (oop)NULL;
1960 } else {
1961 obj = (oop) STACK_OBJECT(-1);
1962 VERIFY_OOP(obj);
1963 }
1964 CALL_VM(InterpreterRuntime::post_field_access(THREAD,
1965 obj,
1966 cache),
1967 handle_exception);
1968 }
1969 }
// NOTE(review): original lines 1970-2014 (the matching #endif plus the
// code that computes obj/tos_type/field_offset and the object/atos
// branch) are not visible in this chunk.
2015 } else if (tos_type == itos) {
2016 SET_STACK_INT(obj->int_field(field_offset), -1);
2017 } else if (tos_type == ltos) {
// Longs and doubles occupy two stack slots, hence the extra MORE_STACK(1).
2018 SET_STACK_LONG(obj->long_field(field_offset), 0);
2019 MORE_STACK(1);
2020 } else if (tos_type == btos) {
2021 SET_STACK_INT(obj->byte_field(field_offset), -1);
2022 } else if (tos_type == ctos) {
2023 SET_STACK_INT(obj->char_field(field_offset), -1);
2024 } else if (tos_type == stos) {
2025 SET_STACK_INT(obj->short_field(field_offset), -1);
2026 } else if (tos_type == ftos) {
2027 SET_STACK_FLOAT(obj->float_field(field_offset), -1);
2028 } else {
2029 SET_STACK_DOUBLE(obj->double_field(field_offset), 0);
2030 MORE_STACK(1);
2031 }
2032 }
2033
// getfield/getstatic are 3-byte instructions (opcode + 2-byte index).
2034 UPDATE_PC_AND_CONTINUE(3);
2035 }
2036
// Shared handler for putfield/putstatic: resolve the field through the
// constant-pool cache, then post the JVMTI field-modification event if a
// watch is armed before performing the store.
2037 CASE(_putfield):
2038 CASE(_putstatic):
2039 {
2040 u2 index = Bytes::get_native_u2(pc+1);
2041 ConstantPoolCacheEntry* cache = cp->entry_at(index);
// First execution only: resolve via the runtime; the call can throw, and
// the cache entry must be re-fetched afterwards.
2042 if (!cache->is_resolved((Bytecodes::Code)opcode)) {
2043 CALL_VM(InterpreterRuntime::resolve_from_cache(THREAD, (Bytecodes::Code)opcode),
2044 handle_exception);
2045 cache = cp->entry_at(index);
2046 }
2047
2048 #ifdef VM_JVMTI
2049 if (_jvmti_interp_events) {
2050 int *count_addr;
2051 oop obj;
2052 // Check to see if a field modification watch has been set
2053 // before we take the time to call into the VM.
2054 count_addr = (int *)JvmtiExport::get_field_modification_count_addr();
2055 if ( *count_addr > 0 ) {
// putstatic has no receiver on the operand stack.
2056 if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {
// NOTE(review): this handler continues past the end of this chunk; the
// remainder of the JVMTI block and the store itself are not visible here.
|
1930 * constant pool index in the instruction.
1931 */
// Shared handler for getfield/getstatic. After resolution the cache entry
// is tested with is_field_entry(): a true field read falls into the
// type-dispatched push below; otherwise the entry is treated as a method
// and dispatched like invokestatic/invokevirtual (see the else arm).
// NOTE(review): the method-dispatch arm is non-standard for these
// bytecodes — presumably a local modification; confirm against the
// project's design notes.
1932 CASE(_getfield):
1933 CASE(_getstatic):
1934 {
1935 u2 index;
1936 ConstantPoolCacheEntry* cache;
// The 2-byte cache index is in native byte order at pc+1/pc+2.
1937 index = Bytes::get_native_u2(pc+1);
1938
1939 // QQQ Need to make this as inlined as possible. Probably need to
1940 // split all the bytecode cases out so c++ compiler has a chance
1941 // for constant prop to fold everything possible away.
1942
1943 cache = cp->entry_at(index);
// First execution only: resolve via the runtime; the call can throw, and
// the cache entry must be re-fetched afterwards.
1944 if (!cache->is_resolved((Bytecodes::Code)opcode)) {
1945 CALL_VM(InterpreterRuntime::resolve_from_cache(THREAD, (Bytecodes::Code)opcode),
1946 handle_exception);
1947 cache = cp->entry_at(index);
1948 }
1949
1950 if (cache->is_field_entry()) {
1951 #ifdef VM_JVMTI
1952 if (_jvmti_interp_events) {
1953 int *count_addr;
1954 oop obj;
1955 // Check to see if a field access watch has been set
1956 // before we take the time to call into the VM.
1957 count_addr = (int *)JvmtiExport::get_field_access_count_addr();
1958 if ( *count_addr > 0 ) {
// getstatic has no receiver on the operand stack; getfield's receiver
// oop is at top-of-stack.
1959 if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {
1960 obj = (oop)NULL;
1961 } else {
1962 obj = (oop) STACK_OBJECT(-1);
1963 VERIFY_OOP(obj);
1964 }
1965 CALL_VM(InterpreterRuntime::post_field_access(THREAD,
1966 obj,
1967 cache),
1968 handle_exception);
1969 }
1970 }
// NOTE(review): original lines 1971-2015 (the matching #endif plus the
// code computing obj/tos_type/field_offset and the object/atos branch)
// are not visible in this chunk.
2016 } else if (tos_type == itos) {
2017 SET_STACK_INT(obj->int_field(field_offset), -1);
2018 } else if (tos_type == ltos) {
// Longs and doubles occupy two stack slots, hence the extra MORE_STACK(1).
2019 SET_STACK_LONG(obj->long_field(field_offset), 0);
2020 MORE_STACK(1);
2021 } else if (tos_type == btos) {
2022 SET_STACK_INT(obj->byte_field(field_offset), -1);
2023 } else if (tos_type == ctos) {
2024 SET_STACK_INT(obj->char_field(field_offset), -1);
2025 } else if (tos_type == stos) {
2026 SET_STACK_INT(obj->short_field(field_offset), -1);
2027 } else if (tos_type == ftos) {
2028 SET_STACK_FLOAT(obj->float_field(field_offset), -1);
2029 } else {
2030 SET_STACK_DOUBLE(obj->double_field(field_offset), 0);
2031 MORE_STACK(1);
2032 }
2033 }
2034
// Field path: 3-byte instruction, continue interpreting.
2035 UPDATE_PC_AND_CONTINUE(3);
// Not a field entry: dispatch the cached method instead.
2036 }else {
2037 // mostly copied from _invokevirtual and _invokestatic
2038 istate->set_msg(call_method);
2039 Method* callee;
// Static flavor: the resolved Method* is in f1. Virtual flavor: select
// through the receiver's vtable below using the f2 index.
2040 if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {
2041 callee = cache->f1_as_method();
2042
2043 // Profile call.
2044 BI_PROFILE_UPDATE_CALL();
2045 }else {
2046 // get receiver
2047 int parms = cache->parameter_size();
2048 // this works but needs a resourcemark and seems to create a vtable on every call:
2049 // Method* callee = rcvr->klass()->vtable()->method_at(cache->f2_as_index());
2050 //
2051 // this fails with an assert
2052 // InstanceKlass* rcvrKlass = InstanceKlass::cast(STACK_OBJECT(-parms)->klass());
2053 // but this works
2054 oop rcvr = STACK_OBJECT(-parms);
2055 VERIFY_OOP(rcvr);
2056 InstanceKlass* rcvrKlass = (InstanceKlass*)rcvr->klass();
2057 /*
2058 Executing this code in java.lang.String:
2059 public String(char value[]) {
2060 this.count = value.length;
2061 this.value = (char[])value.clone();
2062 }
2063 a find on rcvr->klass() reports:
2064 {type array char}{type array class}
2065 - klass: {other class}
2066
2067 but using InstanceKlass::cast(STACK_OBJECT(-parms)->klass()) causes in assertion failure
2068 because rcvr->klass()->is_instance_klass() == 0
2069 However it seems to have a vtable in the right location. Huh?
2070 Because vtables have the same offset for ArrayKlass and InstanceKlass.
2071 */
// Index f2 into the receiver class's vtable to select the target method.
2072 callee = (Method*) rcvrKlass->start_of_vtable()[ cache->f2_as_index()];
2073 // Profile virtual call.
2074 BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());
2075 }
2076 istate->set_callee(callee);
2077 istate->set_callee_entry_point(callee->from_interpreted_entry());
2078 #ifdef VM_JVMTI
// In interp-only (debugger) mode force the interpreted entry so JVMTI
// events keep firing in the callee.
2079 if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {
2080 istate->set_callee_entry_point(callee->interpreter_entry());
2081 }
2082 #endif /* VM_JVMTI */
// Advance past this 3-byte instruction when the call completes, then
// leave the interpreter loop to perform the call.
2083 istate->set_bcp_advance(3);
2084 UPDATE_PC_AND_RETURN(0); // I'll be back...
2085 }
2086 }
// Shared handler for putfield/putstatic: resolve the field through the
// constant-pool cache, then post the JVMTI field-modification event if a
// watch is armed before performing the store.
2087 CASE(_putfield):
2088 CASE(_putstatic):
2089 {
2090 u2 index = Bytes::get_native_u2(pc+1);
2091 ConstantPoolCacheEntry* cache = cp->entry_at(index);
// First execution only: resolve via the runtime; the call can throw, and
// the cache entry must be re-fetched afterwards.
2092 if (!cache->is_resolved((Bytecodes::Code)opcode)) {
2093 CALL_VM(InterpreterRuntime::resolve_from_cache(THREAD, (Bytecodes::Code)opcode),
2094 handle_exception);
2095 cache = cp->entry_at(index);
2096 }
2097
2098 #ifdef VM_JVMTI
2099 if (_jvmti_interp_events) {
2100 int *count_addr;
2101 oop obj;
2102 // Check to see if a field modification watch has been set
2103 // before we take the time to call into the VM.
2104 count_addr = (int *)JvmtiExport::get_field_modification_count_addr();
2105 if ( *count_addr > 0 ) {
// putstatic has no receiver on the operand stack.
2106 if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {
// NOTE(review): this handler continues past the end of this chunk; the
// remainder of the JVMTI block and the store itself are not visible here.