1104 // v0: value
1105 // r1: index
1106 // r3: array
1107 index_check(r3, r1); // prefer index in r1
1108 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
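     // r1 now holds index + header size (in 8-byte units), so the store below
     // addresses r3 + (r1 << 3) == array + base_offset_in_bytes(T_DOUBLE) + index * 8.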
1109 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1110 }
1111
1112 void TemplateTable::aastore() {
1113 Label is_null, ok_is_subtype, done;
1114 transition(vtos, vtos);
1115 // stack: ..., array, index, value
1116 __ ldr(r0, at_tos()); // value
1117 __ ldr(r2, at_tos_p1()); // index
1118 __ ldr(r3, at_tos_p2()); // array
1119
1120 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1121
1122 index_check(r3, r2); // kills r1
1123
1124 // DMS CHECK: what does the line below do?
1125 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
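     // r4 == index + header size (in oop-sized units), so element_address above
     // resolves to r3 + r4 * oopSize == array + base_offset_in_bytes(T_OBJECT) + index * oopSize.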
1126
1127 // do array store check - check for NULL value first
1128 __ cbz(r0, is_null);
1129
1130 Label is_flat_array;
1131 if (ValueArrayFlatten) {
1132 __ test_flattened_array_oop(r3, r8 /*temp*/, is_flat_array);
1133 }
1134
1135 // Move subklass into r1
1136 __ load_klass(r1, r0);
1137
1138 // Move superklass into r0
1139 __ load_klass(r0, r3);
1140 __ ldr(r0, Address(r0, ObjArrayKlass::element_klass_offset()));
1141 // Compress array + index*oopSize + header into a single register. Frees r2.
1142
1143 // Generate subtype check. Blows r2, r5
1144 // Superklass in r0. Subklass in r1.
1194 // Load value class
1195 __ load_klass(r1, r0);
1196 __ profile_typecheck(r2, r1, r0); // blows r2 and r0
1197
1198 // flat value array needs exact type match
1199 // i.e. "r1 == r0" (value subclass == array element superclass)
1200
1201 // Move element klass into r0
1202
1203 __ load_klass(r0, r3);
1204
1205 __ ldr(r0, Address(r0, ArrayKlass::element_klass_offset()));
1206 __ cmp(r0, r1);
1207 __ br(Assembler::EQ, is_type_ok);
1208
1209 __ profile_typecheck_failed(r2);
1210 __ b(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1211
1212 __ bind(is_type_ok);
1213
1214 // DMS CHECK: Reload from TOS to be safe, because profile_typecheck blows r2 and r0.
1215 // Is the reload really necessary?
1216 __ ldr(r1, at_tos()); // value
1217 __ mov(r2, r3); // array, ldr(r2, at_tos_p2());
1218 __ ldr(r3, at_tos_p1()); // index
1219 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), r1, r2, r3);
1220 }
1221
1222
1223 // Pop stack arguments
1224 __ bind(done);
1225 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1226 }
1227
1228 void TemplateTable::bastore()
1229 {
1230 transition(itos, vtos);
1231 __ pop_i(r1);
1232 __ pop_ptr(r3);
1233 // r0: value
1234 // r1: index
1235 // r3: array
2087 // assume branch is more often taken than not (loops use backward branches)
2088 Label not_taken;
2089 if (cc == equal)
2090 __ cbnz(r0, not_taken);
2091 else
2092 __ cbz(r0, not_taken);
2093 branch(false, false);
2094 __ bind(not_taken);
2095 __ profile_not_taken_branch(r0);
2096 }
2097
2098 void TemplateTable::if_acmp(Condition cc) {
2099 transition(atos, vtos);
2100 // assume branch is more often taken than not (loops use backward branches)
2101 Label taken, not_taken;
2102 __ pop_ptr(r1);
2103
2104 Register is_value_mask = rscratch1;
2105 __ mov(is_value_mask, markOopDesc::always_locked_pattern);
2106
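     // ACmpOnValues == 3: substitutability-based acmp. Identical references are
     // trivially equal. Otherwise, if either operand is null or not a value object,
     // or the two klasses differ, the operands are not equal; only when both are
     // value objects of the same klass is the runtime asked for substitutability.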
2107 if (EnableValhalla && ACmpOnValues == 3) {
2108 __ cmp(r1, r0);
2109 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2110
2111 // might be substitutable, test if either r0 or r1 is null
2112 __ andr(r2, r0, r1);
2113 __ cbz(r2, (cc == equal) ? not_taken : taken);
2114
2115 // and both are values?
2116 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2117 __ andr(r2, r2, is_value_mask);
2118 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2119 __ andr(r4, r4, is_value_mask);
2120 __ andr(r2, r2, r4);
2121 __ cmp(r2, is_value_mask);
2122 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2123
2124 // same value klass?
2125 __ load_metadata(r2, r1);
2126 __ load_metadata(r4, r0);
2127 __ cmp(r2, r4);
2128 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2129
2130 // We know both are the same type; let's test for substitutability...
2131 if (cc == equal) {
2132 invoke_is_substitutable(r0, r1, taken, not_taken);
2133 } else {
2134 invoke_is_substitutable(r0, r1, not_taken, taken);
2135 }
2136 __ stop("Not reachable");
2137 }
2138
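     // ACmpOnValues == 1: a value object never compares acmp-equal. If r1 is a
     // non-null value object, set its low bit so the reference compare below can
     // never report equality (oops are always at least 8-byte aligned).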
2139 if (EnableValhalla && ACmpOnValues == 1) {
2140 Label is_null;
2141 __ cbz(r1, is_null);
2142 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2143 __ andr(r2, r2, is_value_mask);
2144 __ cmp(r2, is_value_mask);
2145 __ cset(r2, Assembler::EQ);
2146 __ orr(r1, r1, r2);
2147 __ bind(is_null);
2148 }
2149
2150 __ cmpoop(r1, r0);
2151
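     // ACmpOnValues == 2: equal references still compare unequal when the operand
     // is a value object; re-test the mark word and invert the condition so the
     // final branch takes the not-equal outcome for values.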
2152 if (EnableValhalla && ACmpOnValues == 2) {
2153 __ br(Assembler::NE, (cc == not_equal) ? taken : not_taken);
2154 __ cbz(r1, (cc == equal) ? taken : not_taken);
2155 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2156 __ andr(r2, r2, is_value_mask);
2157 __ cmp(r2, is_value_mask);
2158 cc = (cc == equal) ? not_equal : equal;
2159 }
2160
2161 __ br(j_not(cc), not_taken);
2162 __ bind(taken);
2163 branch(false, false);
2164 __ bind(not_taken);
2165 __ profile_not_taken_branch(r0);
2166 }
2167
2168 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2169 Label& is_subst, Label& not_subst) {
2170
2171 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2172 // Registers are restored by call_VM; the answer is in r0, so branch to the matching outcome.
2173 __ cbz(r0, not_subst);
2174 __ b(is_subst);
2175 }
2176
2177
2178 void TemplateTable::ret() {
2179 transition(vtos, vtos);
2180 // We might be moving to a safepoint. The thread which calls
3330 const Address field(r0, r1);
3331
3332 // 8179954: We need to make sure that the code generated for
3333 // volatile accesses forms a sequentially-consistent set of
3334 // operations when combined with STLR and LDAR. Without a leading
3335 // membar it's possible for a simple Dekker test to fail if loads
3336 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3337 // the stores in one method and we interpret the loads in another.
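     // Informally, with both fields volatile and two threads running
     //   T1: x = 1; r1 = y;        T2: y = 1; r2 = x;
     // sequential consistency forbids r1 == r2 == 0, but STLR stores combined
     // with LDR;DMB loads still allow it; the leading full barrier below
     // restores the missing store->load ordering for the interpreted load.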
3338 if (! UseBarriersForVolatile) {
3339 Label notVolatile;
3340 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3341 __ membar(MacroAssembler::AnyAny);
3342 __ bind(notVolatile);
3343 }
3344
3345 // access field
3346 switch (bytecode()) {
3347 case Bytecodes::_fast_qgetfield:
3348 {
3349 Label isFlattened, isInitialized, Done;
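     // A q-typed (flattenable value) field is either stored flattened inside the
     // holder or held as a reference. The flattened case and the case where the
     // reference is still null (uninitialized, needs the default value instance)
     // both go to the runtime; an initialized reference is returned directly.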
3350 // DMS CHECK: We don't need to reload the flags multiple times, but we stay close to the original code
3351 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3352 __ test_field_is_flattened(r9, r8 /* temp */, isFlattened);
3353 // Non-flattened field case
3354 __ mov(r9, r0);
3355 __ load_heap_oop(r0, field);
3356 __ cbnz(r0, isInitialized);
3357 __ mov(r0, r9);
3358 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3359 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3360 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field), r0, r9);
3361 __ bind(isInitialized);
3362 __ verify_oop(r0);
3363 __ b(Done);
3364 __ bind(isFlattened);
3365 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3366 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3367 __ ldr(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
3368 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), r0, r9, r3);
3369 __ verify_oop(r0);
3370 __ bind(Done);
1104 // v0: value
1105 // r1: index
1106 // r3: array
1107 index_check(r3, r1); // prefer index in r1
1108 __ add(r1, r1, arrayOopDesc::base_offset_in_bytes(T_DOUBLE) >> 3);
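     // r1 now holds index + header size (in 8-byte units), so the store below
     // addresses r3 + (r1 << 3) == array + base_offset_in_bytes(T_DOUBLE) + index * 8.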
1109 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY, Address(r3, r1, Address::uxtw(3)), noreg /* dtos */, noreg, noreg);
1110 }
1111
1112 void TemplateTable::aastore() {
1113 Label is_null, ok_is_subtype, done;
1114 transition(vtos, vtos);
1115 // stack: ..., array, index, value
1116 __ ldr(r0, at_tos()); // value
1117 __ ldr(r2, at_tos_p1()); // index
1118 __ ldr(r3, at_tos_p2()); // array
1119
1120 Address element_address(r3, r4, Address::uxtw(LogBytesPerHeapOop));
1121
1122 index_check(r3, r2); // kills r1
1123
1124 // FIXME: Could we remove the line below?
1125 __ add(r4, r2, arrayOopDesc::base_offset_in_bytes(T_OBJECT) >> LogBytesPerHeapOop);
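     // r4 == index + header size (in oop-sized units), so element_address above
     // resolves to r3 + r4 * oopSize == array + base_offset_in_bytes(T_OBJECT) + index * oopSize.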
1126
1127 // do array store check - check for NULL value first
1128 __ cbz(r0, is_null);
1129
1130 Label is_flat_array;
1131 if (ValueArrayFlatten) {
1132 __ test_flattened_array_oop(r3, r8 /*temp*/, is_flat_array);
1133 }
1134
1135 // Move subklass into r1
1136 __ load_klass(r1, r0);
1137
1138 // Move superklass into r0
1139 __ load_klass(r0, r3);
1140 __ ldr(r0, Address(r0, ObjArrayKlass::element_klass_offset()));
1141 // Compress array + index*oopSize + header into a single register. Frees r2.
1142
1143 // Generate subtype check. Blows r2, r5
1144 // Superklass in r0. Subklass in r1.
1194 // Load value class
1195 __ load_klass(r1, r0);
1196 __ profile_typecheck(r2, r1, r0); // blows r2 and r0
1197
1198 // flat value array needs exact type match
1199 // i.e. "r1 == r0" (value subclass == array element superclass)
1200
1201 // Move element klass into r0
1202
1203 __ load_klass(r0, r3);
1204
1205 __ ldr(r0, Address(r0, ArrayKlass::element_klass_offset()));
1206 __ cmp(r0, r1);
1207 __ br(Assembler::EQ, is_type_ok);
1208
1209 __ profile_typecheck_failed(r2);
1210 __ b(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
1211
1212 __ bind(is_type_ok);
1213
1214 // Reload from TOS to be safe, because profile_typecheck blows r2 and r0.
1215 // FIXME: Is the reload really necessary?
1216 __ ldr(r1, at_tos()); // value
1217 __ mov(r2, r3); // array, ldr(r2, at_tos_p2());
1218 __ ldr(r3, at_tos_p1()); // index
1219 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::value_array_store), r1, r2, r3);
1220 }
1221
1222
1223 // Pop stack arguments
1224 __ bind(done);
1225 __ add(esp, esp, 3 * Interpreter::stackElementSize);
1226 }
1227
1228 void TemplateTable::bastore()
1229 {
1230 transition(itos, vtos);
1231 __ pop_i(r1);
1232 __ pop_ptr(r3);
1233 // r0: value
1234 // r1: index
1235 // r3: array
2087 // assume branch is more often taken than not (loops use backward branches)
2088 Label not_taken;
2089 if (cc == equal)
2090 __ cbnz(r0, not_taken);
2091 else
2092 __ cbz(r0, not_taken);
2093 branch(false, false);
2094 __ bind(not_taken);
2095 __ profile_not_taken_branch(r0);
2096 }
2097
2098 void TemplateTable::if_acmp(Condition cc) {
2099 transition(atos, vtos);
2100 // assume branch is more often taken than not (loops use backward branches)
2101 Label taken, not_taken;
2102 __ pop_ptr(r1);
2103
2104 Register is_value_mask = rscratch1;
2105 __ mov(is_value_mask, markOopDesc::always_locked_pattern);
2106
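     // With EnableValhalla, acmp compares by substitutability: identical references
     // are trivially equal; if either operand is null or not a value object, or the
     // two klasses differ, the operands are not equal; only when both are value
     // objects of the same klass is the runtime asked for substitutability.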
2107 if (EnableValhalla) {
2108 __ cmp(r1, r0);
2109 __ br(Assembler::EQ, (cc == equal) ? taken : not_taken);
2110
2111 // might be substitutable, test if either r0 or r1 is null
2112 __ andr(r2, r0, r1);
2113 __ cbz(r2, (cc == equal) ? not_taken : taken);
2114
2115 // and both are values?
2116 __ ldr(r2, Address(r1, oopDesc::mark_offset_in_bytes()));
2117 __ andr(r2, r2, is_value_mask);
2118 __ ldr(r4, Address(r0, oopDesc::mark_offset_in_bytes()));
2119 __ andr(r4, r4, is_value_mask);
2120 __ andr(r2, r2, r4);
2121 __ cmp(r2, is_value_mask);
2122 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2123
2124 // same value klass?
2125 __ load_metadata(r2, r1);
2126 __ load_metadata(r4, r0);
2127 __ cmp(r2, r4);
2128 __ br(Assembler::NE, (cc == equal) ? not_taken : taken);
2129
2130 // We know both are the same type; let's test for substitutability...
2131 if (cc == equal) {
2132 invoke_is_substitutable(r0, r1, taken, not_taken);
2133 } else {
2134 invoke_is_substitutable(r0, r1, not_taken, taken);
2135 }
2136 __ stop("Not reachable");
2137 }
2138
2139 __ cmpoop(r1, r0);
2140 __ br(j_not(cc), not_taken);
2141 __ bind(taken);
2142 branch(false, false);
2143 __ bind(not_taken);
2144 __ profile_not_taken_branch(r0);
2145 }
2146
2147 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2148 Label& is_subst, Label& not_subst) {
2149
2150 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2151 // Registers are restored by call_VM; the answer is in r0, so branch to the matching outcome.
2152 __ cbz(r0, not_subst);
2153 __ b(is_subst);
2154 }
2155
2156
2157 void TemplateTable::ret() {
2158 transition(vtos, vtos);
2159 // We might be moving to a safepoint. The thread which calls
3309 const Address field(r0, r1);
3310
3311 // 8179954: We need to make sure that the code generated for
3312 // volatile accesses forms a sequentially-consistent set of
3313 // operations when combined with STLR and LDAR. Without a leading
3314 // membar it's possible for a simple Dekker test to fail if loads
3315 // use LDR;DMB but stores use STLR. This can happen if C2 compiles
3316 // the stores in one method and we interpret the loads in another.
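     // Informally, with both fields volatile and two threads running
     //   T1: x = 1; r1 = y;        T2: y = 1; r2 = x;
     // sequential consistency forbids r1 == r2 == 0, but STLR stores combined
     // with LDR;DMB loads still allow it; the leading full barrier below
     // restores the missing store->load ordering for the interpreted load.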
3317 if (! UseBarriersForVolatile) {
3318 Label notVolatile;
3319 __ tbz(r3, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
3320 __ membar(MacroAssembler::AnyAny);
3321 __ bind(notVolatile);
3322 }
3323
3324 // access field
3325 switch (bytecode()) {
3326 case Bytecodes::_fast_qgetfield:
3327 {
3328 Label isFlattened, isInitialized, Done;
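     // A q-typed (flattenable value) field is either stored flattened inside the
     // holder or held as a reference. The flattened case and the case where the
     // reference is still null (uninitialized, needs the default value instance)
     // both go to the runtime; an initialized reference is returned directly.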
3329 // FIXME: We don't need to reload the flags multiple times, but we stay close to the x86 code
3330 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3331 __ test_field_is_flattened(r9, r8 /* temp */, isFlattened);
3332 // Non-flattened field case
3333 __ mov(r9, r0);
3334 __ load_heap_oop(r0, field);
3335 __ cbnz(r0, isInitialized);
3336 __ mov(r0, r9);
3337 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3338 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3339 __ call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::uninitialized_instance_value_field), r0, r9);
3340 __ bind(isInitialized);
3341 __ verify_oop(r0);
3342 __ b(Done);
3343 __ bind(isFlattened);
3344 __ ldrw(r9, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::flags_offset())));
3345 __ andw(r9, r9, ConstantPoolCacheEntry::field_index_mask);
3346 __ ldr(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f1_offset())));
3347 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_flattened_field), r0, r9, r3);
3348 __ verify_oop(r0);
3349 __ bind(Done);