1296 enum { // status codes returned by the arraycopy runtime helpers below
1297 ac_failed = -1, // arraycopy failed
1298 ac_ok = 0 // arraycopy succeeded
1299 };
1300
1301
1302 // Below length is the # elements copied.
1302 // Below length is the # elements copied. Copies 'length' oops from
// src_addr to dst_addr with the GC write barriers required around an
// oop-array store, returning ac_ok when the copy could be done without
// per-element type checks. T is the (possibly compressed) oop cell type.
1303 template <class T> int obj_arraycopy_work(oopDesc* src, T* src_addr,
1304 oopDesc* dst, T* dst_addr,
1305 int length) {
1306
1307 // For performance reasons, we assume we are using a card marking write
1308 // barrier. The assert will fail if this is not the case.
1309 // Note that we use the non-virtual inlineable variant of write_ref_array.
1310 BarrierSet* bs = Universe::heap()->barrier_set();
1311 assert(bs->has_write_ref_array_opt(), "Barrier set must have ref array opt");
1312 assert(bs->has_write_ref_array_pre_opt(), "For pre-barrier as well.");
1313
1314 #if INCLUDE_ALL_GCS
1315 if (UseShenandoahGC) {
1316 ShenandoahBarrierSet::barrier_set()->arraycopy_pre(src_addr, dst_addr, length); // Shenandoah-specific barrier over the src/dst element ranges, applied before the raw copy
1317 }
1318 #endif
1319
1320 if (src == dst) {
1321 // same object, no check
1322 bs->write_ref_array_pre(dst_addr, length);
1323 Copy::conjoint_oops_atomic(src_addr, dst_addr, length);
1324 bs->write_ref_array((HeapWord*)dst_addr, length);
1325 return ac_ok;
1326 } else {
1327 Klass* bound = ObjArrayKlass::cast(dst->klass())->element_klass(); // destination element type = the bound every stored element must satisfy
1328 Klass* stype = ObjArrayKlass::cast(src->klass())->element_klass(); // source element type
1329 if (stype == bound || stype->is_subtype_of(bound)) { // covariant case: every src element is assignable to dst
1330 // Elements are guaranteed to be subtypes, so no check necessary
1331 bs->write_ref_array_pre(dst_addr, length);
1332 Copy::conjoint_oops_atomic(src_addr, dst_addr, length);
1333 bs->write_ref_array((HeapWord*)dst_addr, length);
1334 return ac_ok;
1335 }
1336 } // incompatible element types: function continues past this excerpt (ac_failed path not visible here)
|
1296 enum { // status codes returned by the arraycopy runtime helpers below
1297 ac_failed = -1, // arraycopy failed
1298 ac_ok = 0 // arraycopy succeeded
1299 };
1300
1301
1302 // Below length is the # elements copied.
1302 // Below length is the # elements copied. Copies 'length' oops from
// src_addr to dst_addr with the GC write barriers required around an
// oop-array store, returning ac_ok when the copy could be done without
// per-element type checks. T is the (possibly compressed) oop cell type.
1303 template <class T> int obj_arraycopy_work(oopDesc* src, T* src_addr,
1304 oopDesc* dst, T* dst_addr,
1305 int length) {
1306
1307 // For performance reasons, we assume we are using a card marking write
1308 // barrier. The assert will fail if this is not the case.
1309 // Note that we use the non-virtual inlineable variant of write_ref_array.
1310 BarrierSet* bs = Universe::heap()->barrier_set();
1311 assert(bs->has_write_ref_array_opt(), "Barrier set must have ref array opt");
1312 assert(bs->has_write_ref_array_pre_opt(), "For pre-barrier as well.");
1313
1314 #if INCLUDE_ALL_GCS
1315 if (UseShenandoahGC) {
1316 ShenandoahBarrierSet::barrier_set()->arraycopy_barrier(src_addr, dst_addr, length); // NOTE(review): the earlier copy of this chunk calls arraycopy_pre() here — confirm which ShenandoahBarrierSet entry point is the intended revision
1317 }
1318 #endif
1319
1320 if (src == dst) {
1321 // same object, no check
1322 bs->write_ref_array_pre(dst_addr, length);
1323 Copy::conjoint_oops_atomic(src_addr, dst_addr, length);
1324 bs->write_ref_array((HeapWord*)dst_addr, length);
1325 return ac_ok;
1326 } else {
1327 Klass* bound = ObjArrayKlass::cast(dst->klass())->element_klass(); // destination element type = the bound every stored element must satisfy
1328 Klass* stype = ObjArrayKlass::cast(src->klass())->element_klass(); // source element type
1329 if (stype == bound || stype->is_subtype_of(bound)) { // covariant case: every src element is assignable to dst
1330 // Elements are guaranteed to be subtypes, so no check necessary
1331 bs->write_ref_array_pre(dst_addr, length);
1332 Copy::conjoint_oops_atomic(src_addr, dst_addr, length);
1333 bs->write_ref_array((HeapWord*)dst_addr, length);
1334 return ac_ok;
1335 }
1336 } // incompatible element types: function continues past this excerpt (ac_failed path not visible here)
|