1 /* 2 * Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "jvm.h"
#include "classfile/classFileParser.hpp"
#include "classfile/fieldLayoutBuilder.hpp"
#include "memory/resourceArea.hpp"
#include "oops/array.hpp"
#include "oops/fieldStreams.inline.hpp"
#include "oops/instanceMirrorKlass.hpp"
#include "oops/klass.inline.hpp"
#include "oops/valueKlass.inline.hpp"
#include "runtime/fieldDescriptor.inline.hpp"

// Constructor for blocks that do not represent a Java field:
// EMPTY, RESERVED, PADDING and INHERITED blocks carry no field index
// (INHERITED blocks with a field use the other constructor).
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(NULL),
  _prev_block(NULL),
  _value_klass(NULL),
  _kind(kind),
  _offset(-1),                 // -1 means "not placed in the layout yet"
  _alignment(1),
  _size(size),
  _field_index(-1),
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}


// Constructor for blocks that represent a Java field (REGULAR, FLATTENED,
// or a field INHERITED from a super class), identified by its index in the
// class' field array.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(NULL),
  _prev_block(NULL),
  _value_klass(NULL),
  _kind(kind),
  _offset(-1),                 // -1 means "not placed in the layout yet"
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}

// Returns true if a block of the given size and alignment could be stored
// in this block, taking into account the padding needed to reach the
// required alignment from this block's current offset.
bool LayoutRawBlock::fit(int size, int alignment) {
  int adjustment = 0;
  if ((_offset % alignment) != 0) {
    adjustment = alignment - (_offset % alignment);
  }
  return _size >= size + adjustment;
}

FieldGroup::FieldGroup(int contended_group) :
  _next(NULL),
  _primitive_fields(NULL),     // lists are lazily allocated on first field added
  _oop_fields(NULL),
  _flattened_fields(NULL),
  _contended_group(contended_group),  // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}

// Adds a primitive field to this group; for primitive types,
// the alignment requirement equals the field size.
void FieldGroup::add_primitive_field(AllFieldStream fs, BasicType type) {
  int size = type2aelembytes(type);
  LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
  if (_primitive_fields == NULL) {
    _primitive_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
  }
  _primitive_fields->append(block);
}

// Adds an oop field to this group and bumps the oop count used later
// when building oop maps.
void FieldGroup::add_oop_field(AllFieldStream fs) {
  int size = type2aelembytes(T_OBJECT);
  LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
  if (_oop_fields == NULL) {
    _oop_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
  }
  _oop_fields->append(block);
  _oop_count++;
}

// Adds a flattened (inlined) value-type field; size and alignment come
// from the field's ValueKlass.
void FieldGroup::add_flattened_field(AllFieldStream fs, ValueKlass* vk) {
  // _flattened_fields list might be merged with the _primitive_fields list in the future
  LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::FLATTENED, vk->get_exact_size_in_bytes(), vk->get_alignment(), false);
  block->set_value_klass(vk);
  if (_flattened_fields == NULL) {
    _flattened_fields = new(ResourceObj::RESOURCE_AREA, mtInternal) GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
  }
  _flattened_fields->append(block);
}

// Sorts field lists from biggest to smallest block (compare_size_inverted),
// so allocation can proceed largest-first.
void FieldGroup::sort_by_size() {
  if (_primitive_fields != NULL) {
    _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
  }
  if (_flattened_fields != NULL) {
    _flattened_fields->sort(LayoutRawBlock::compare_size_inverted);
  }
}

// NOTE(review): _start and _last are initialized from _blocks (NULL here);
// this assumes _blocks is declared before _start/_last in the class — confirm
// the member declaration order in fieldLayoutBuilder.hpp.
FieldLayout::FieldLayout(Array<u2>* fields, ConstantPool* cp) :
  _fields(fields),
  _cp(cp),
  _blocks(NULL),
  _start(_blocks),
  _last(_blocks) {}

// Creates the initial layout for static fields: a single unbounded EMPTY
// block, preceded (when known) by a RESERVED block covering the
// java.lang.Class instance header.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}

// Creates the initial layout for instance fields: either a fresh layout with
// only the object header reserved (no super class), or a reconstruction of
// the super class' layout with holes filled.
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
  if (super_klass == NULL) {
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    bool has_fields = reconstruct_layout(super_klass);
    fill_holes(super_klass);
    if ((UseEmptySlotsInSupers && !super_klass->has_contended_annotations()) || !has_fields) {
      _start = _blocks;  // Setting _start to _blocks instead of _last would allow subclasses
                         // to allocate fields in empty slots of their super classes
    } else {
      _start = _last;
    }
  }
}

// Returns the first block that represents an actual field
// (INHERITED, REGULAR or FLATTENED), or NULL if there is none.
LayoutRawBlock* FieldLayout::first_field_block() {
  LayoutRawBlock* block = _blocks;
  while (block != NULL
         && block->kind() != LayoutRawBlock::INHERITED
         && block->kind() != LayoutRawBlock::REGULAR
         && block->kind() != LayoutRawBlock::FLATTENED) {
    block = block->next_block();
  }
  return block;
}

// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;
  if (start == NULL) start = this->_start;
  // Memoization of the previous search: if the previous field had identical
  // size/alignment and no fitting slot was found, the search is skipped.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = NULL;
    LayoutRawBlock* candidate = NULL;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      // The scan walks backwards from the block before the trailing
      // unbounded EMPTY block down to (but excluding) "start".
      cursor = last_block()->prev_block();
      assert(cursor != NULL, "Sanity check");
      last_search_success = true;

      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          // best-fit: keep the smallest empty block that can hold the field
          if (candidate == NULL || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == NULL) {
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != NULL, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}

// Used for classes with hard coded field offsets, insert a field at the specified offset.
// The matching slot must be an EMPTY block large enough; an EMPTY adjustment block is
// created when the field does not start exactly at the slot's current offset.
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != NULL, "Sanity check");
  block->set_offset(offset);
  if (start == NULL) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != NULL) {
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        remove(slot);
      }
      // record the final offset into the FieldInfo so field lookup sees it
      FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}

// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
// Allocates all blocks of "list" contiguously (used to keep oop fields
// together so fewer oop map entries are needed).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == NULL) return;
  if (start == NULL) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = NULL;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    LayoutRawBlock* first = list->at(0);
    // scan backwards for an EMPTY block able to hold the whole set;
    // fall back to appending at the end if none is found before "start"
    candidate = last_block()->prev_block();
    while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != NULL, "Candidate must not be null");
    assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}

// Inserts "block" into the EMPTY "slot", creating an EMPTY adjustment block
// first when alignment padding is needed, and records the field's final
// offset into its FieldInfo.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  insert(slot, block);
  if (slot->size() == 0) {
    remove(slot);
  }
  FieldInfo::from_field_array(_fields, block->field_index())->set_offset(block->offset());
  return block;
}

// Rebuilds the layout of all super classes as a list of INHERITED blocks
// sorted by offset (holes are filled later by fill_holes()).
// Returns true if at least one instance field was found in the hierarchy.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  while (ik != NULL) {
    for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // only instance fields participate in the instance layout
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      LayoutRawBlock* block;
      if (type == T_VALUETYPE) {
        ValueKlass* vk = ValueKlass::cast(ik->get_value_field_klass(fs.index()));
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, vk->get_exact_size_in_bytes(),
                                   vk->get_alignment(), false);

      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == NULL ? NULL : InstanceKlass::cast(ik->super());
  }
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}

// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != NULL, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != NULL) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // hole between b and its successor: fill it with an EMPTY/PADDING block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == NULL, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations()) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }
  if (!UseEmptySlotsInSupers) {
    // Add an empty slot to align fields of the subclass on a heapOopSize boundary
    // in order to emulate the behavior of the previous algorithm
    int align = (b->offset() + b->size()) % heapOopSize;
    if (align != 0) {
      int sz = heapOopSize - align;
      LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::EMPTY, sz);
      p->set_offset(b->offset() + b->size());
      b->set_next_block(p);
      p->set_prev_block(b);
      b = p;
    }
  }
  // terminate with the unbounded EMPTY block expected by the allocators
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}

// Inserts "block" at the beginning of the EMPTY "slot", shrinking the slot
// by the inserted size. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != NULL) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  return block;
}

// Unlinks "block" from the layout list; the trailing block (_last)
// must never be removed.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != NULL, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    _blocks = block->next_block();
    if (_blocks != NULL) {
      _blocks->set_prev_block(NULL);
    }
  } else {
    assert(block->prev_block() != NULL, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}

// Prints the layout for debugging/tracing. For INHERITED blocks, the field
// name/signature are looked up by offset in the super class hierarchy.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != NULL, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        while (!found && ik != NULL) {
          for (AllFieldStream fs(ik->fields(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}

FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       Array<u2>* fields, bool is_contended, bool is_value_type, ClassLoaderData* class_loader_data,
                                       Handle protection_domain, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _fields(fields),
  _info(info),
  _root_group(NULL),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(NULL),
  _layout(NULL),
  _static_layout(NULL),
  _class_loader_data(class_loader_data),
  _protection_domain(protection_domain),
  _nonstatic_oopmap_count(0),
  _alignment(-1),                          // computed by inline_class_field_sorting()
  _first_field_offset(-1),
  _exact_size_in_bytes(-1),
  _has_nonstatic_fields(false),
  _is_contended(is_contended),
  _is_value_type(is_value_type),
  _has_flattening_information(is_value_type) {}

// Returns the FieldGroup for named contended group "g" (g > 0),
// creating and registering it on first use.
FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
  assert(g > 0, "must only be called for named contended groups");
  FieldGroup* fg = NULL;
  for (int i = 0; i < _contended_groups.length(); i++) {
    fg = _contended_groups.at(i);
    if (fg->contended_group() == g) return fg;
  }
  fg = new FieldGroup(g);
  _contended_groups.append(fg);
  return fg;
}

// Common setup: create the instance and static layouts and the default
// (root) and static field groups.
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_fields, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass);
  if (super_klass != NULL) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_fields, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}

// Field sorting for regular (non-inline) classes:
// - fields are sorted in static and non-static fields
// - non-static fields are also sorted according to their contention group
//   (support of the @Contended annotation)
// - @Contended annotation is
//   ignored for static fields
// - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
    FieldGroup* group = NULL;
    if (fs.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fs.is_contended()) {
        int g = fs.contended_group();
        if (g == 0) {
          // anonymous contended field: gets its own group
          // NOTE(review): FieldGroup's parameter is an int; passing "true"
          // yields contended_group() == 1, same value as named group 1 —
          // verify get_or_create_contended_group() cannot match this group.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != NULL, "invariant");
    BasicType type = Signature::basic_type(fs.signature());
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(fs, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
        if (group != _static_fields) _nonstatic_oopmap_count++;
        group->add_oop_field(fs);
        break;
      case T_VALUETYPE:
        if (group == _static_fields) {
          // static fields are never flattened
          group->add_oop_field(fs);
        } else {
          _has_flattening_information = true;
          // Flattening decision to be taken here
          // This code assumes all verification have been performed before
          // (field is a flattenable field, field's type has been loaded
          // and it is an inline klass
          Thread* THREAD = Thread::current();
          Klass* klass =
            SystemDictionary::resolve_flattenable_field_or_fail(&fs,
                                                                Handle(THREAD, _class_loader_data->class_loader()),
                                                                _protection_domain, true, THREAD);
          assert(klass != NULL, "Sanity check");
          ValueKlass* vk = ValueKlass::cast(klass);
          bool has_flattenable_size = (ValueFieldMaxFlatSize < 0)
                                      || (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize;
          // volatile fields are currently never flattened, this could change in the future
          bool flattened = !fs.access_flags().is_volatile() && has_flattenable_size;
          if (flattened) {
            group->add_flattened_field(fs, vk);
            _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
            fs.set_flattened(true);
          } else {
            _nonstatic_oopmap_count++;
            group->add_oop_field(fs);
          }
        }
        break;
      default:
        fatal("Something wrong?");
    }
  }
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}

/* Field sorting for inline classes:
 * - because inline classes are immutable, the @Contended annotation is ignored
 *   when computing their layout (with only read operation, there's no false
 *   sharing issue)
 * - this method also records the alignment of the field with the most
 *   constraining alignment, this value is then used as the alignment
 *   constraint when flattening this inline type into another container
 * - field flattening decisions are taken in this method (those decisions are
 *   currently only based in the size of the fields to be flattened, the size
 *   of the resulting instance is not considered)
 */
void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
  assert(_is_value_type, "Should only be used for inline classes");
  int alignment = 1;
  for (AllFieldStream fs(_fields, _constant_pool); !fs.done(); fs.next()) {
    FieldGroup* group = NULL;
    int field_alignment = 1;
    if (fs.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      group = _root_group;
    }
    assert(group != NULL, "invariant");
    BasicType type = Signature::basic_type(fs.signature());
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(fs, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
        if (group != _static_fields) {
          _nonstatic_oopmap_count++;
          field_alignment = type2aelembytes(type); // alignment == size for oops
        }
        group->add_oop_field(fs);
        break;
      case T_VALUETYPE: {
        if (group == _static_fields) {
          // static fields are never flattened
          group->add_oop_field(fs);
        } else {
          // Flattening decision to be taken here
          // This code assumes all verifications have been performed before
          // (field is a flattenable field, field's type has been loaded
          // and it is an inline klass
          Thread* THREAD = Thread::current();
          Klass* klass =
            SystemDictionary::resolve_flattenable_field_or_fail(&fs,
                                                                Handle(THREAD, _class_loader_data->class_loader()),
                                                                _protection_domain, true, CHECK);
          assert(klass != NULL, "Sanity check");
          ValueKlass* vk = ValueKlass::cast(klass);
          bool flattened = (ValueFieldMaxFlatSize < 0)
                           || (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize;
          if (flattened) {
            group->add_flattened_field(fs, vk);
            _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
            field_alignment = vk->get_alignment();
            fs.set_flattened(true);
          } else {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(T_OBJECT);
            group->add_oop_field(fs);
          }
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    // track the most constraining alignment among instance fields
    if (!fs.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _alignment = alignment;
  if (!_has_nonstatic_fields) {
    // There are a number of fixes required throughout the type system and JIT
    Exceptions::fthrow(THREAD_AND_LOCATION,
                       vmSymbols::java_lang_ClassFormatError(),
                       "Value Types do not support zero instance size yet");
    return;
  }
}

void
FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) { 750 if (ContendedPaddingWidth > 0) { 751 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth); 752 _layout->insert(slot, padding); 753 } 754 } 755 756 /* Computation of regular classes layout is an evolution of the previous default layout 757 * (FieldAllocationStyle 1): 758 * - flattened fields are allocated first (because they have potentially the 759 * least regular shapes, and are more likely to create empty slots between them, 760 * which can then be used to allocation primitive or oop fields). Allocation is 761 * performed from the biggest to the smallest flattened field. 762 * - then primitive fields (from the biggest to the smallest) 763 * - then oop fields are allocated contiguously (to reduce the number of oopmaps 764 * and reduce the work of the GC). 765 */ 766 void FieldLayoutBuilder::compute_regular_layout() { 767 bool need_tail_padding = false; 768 prologue(); 769 regular_field_sorting(); 770 if (_is_contended) { 771 _layout->set_start(_layout->last_block()); 772 // insertion is currently easy because the current strategy doesn't try to fill holes 773 // in super classes layouts => the _start block is by consequence the _last_block 774 insert_contended_padding(_layout->start()); 775 need_tail_padding = true; 776 } 777 _layout->add(_root_group->flattened_fields()); 778 _layout->add(_root_group->primitive_fields()); 779 _layout->add(_root_group->oop_fields()); 780 781 if (!_contended_groups.is_empty()) { 782 for (int i = 0; i < _contended_groups.length(); i++) { 783 FieldGroup* cg = _contended_groups.at(i); 784 LayoutRawBlock* start = _layout->last_block(); 785 insert_contended_padding(start); 786 _layout->add(_root_group->flattened_fields()); 787 _layout->add(cg->primitive_fields(), start); 788 _layout->add(cg->oop_fields(), start); 789 need_tail_padding = true; 790 } 791 } 792 793 if (need_tail_padding) { 794 
insert_contended_padding(_layout->last_block()); 795 } 796 _static_layout->add(_static_fields->flattened_fields()); 797 _static_layout->add_contiguously(_static_fields->oop_fields()); 798 _static_layout->add(_static_fields->primitive_fields()); 799 800 epilogue(); 801 } 802 803 /* Computation of inline classes has a slightly different strategy than for 804 * regular classes. Regular classes have their oop fields allocated at the end 805 * of the layout to increase GC performances. Unfortunately, this strategy 806 * increases the number of empty slots inside an instance. Because the purpose 807 * of inline classes is to be embedded into other containers, it is critical 808 * to keep their size as small as possible. For this reason, the allocation 809 * strategy is: 810 * - flattened fields are allocated first (because they have potentially the 811 * least regular shapes, and are more likely to create empty slots between them, 812 * which can then be used to allocation primitive or oop fields). Allocation is 813 * performed from the biggest to the smallest flattened field. 814 * - then oop fields are allocated contiguously (to reduce the number of oopmaps 815 * and reduce the work of the GC) 816 * - then primitive fields (from the biggest to the smallest) 817 */ 818 void FieldLayoutBuilder::compute_inline_class_layout(TRAPS) { 819 prologue(); 820 inline_class_field_sorting(CHECK); 821 // Inline types are not polymorphic, so they cannot inherit fields. 822 // By consequence, at this stage, the layout must be composed of a RESERVED 823 // block, followed by an EMPTY block. 
824 assert(_layout->start()->kind() == LayoutRawBlock::RESERVED, "Unexpected"); 825 assert(_layout->start()->next_block()->kind() == LayoutRawBlock::EMPTY, "Unexpected"); 826 LayoutRawBlock* first_empty = _layout->start()->next_block(); 827 if (first_empty->offset() % _alignment != 0) { 828 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _alignment - (first_empty->offset() % _alignment)); 829 _layout->insert(first_empty, padding); 830 _layout->set_start(padding->next_block()); 831 } 832 833 _layout->add(_root_group->flattened_fields()); 834 _layout->add(_root_group->oop_fields()); 835 _layout->add(_root_group->primitive_fields()); 836 837 LayoutRawBlock* first_field = _layout->first_field_block(); 838 if (first_field != NULL) { 839 _first_field_offset = _layout->first_field_block()->offset(); 840 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset(); 841 } else { 842 // special case for empty value types 843 _first_field_offset = _layout->blocks()->size(); 844 _exact_size_in_bytes = 0; 845 } 846 _exact_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset(); 847 848 _static_layout->add(_static_fields->flattened_fields()); 849 _static_layout->add_contiguously(_static_fields->oop_fields()); 850 _static_layout->add(_static_fields->primitive_fields()); 851 852 853 epilogue(); 854 } 855 856 // Compute layout of the java/lang/ref/Reference class according 857 // to the hard coded offsets of its fields 858 void FieldLayoutBuilder::compute_java_lang_ref_Reference_layout() { 859 prologue(); 860 regular_field_sorting(); 861 862 assert(_contended_groups.is_empty(), "java.lang.Reference has no @Contended annotations"); 863 assert(_root_group->primitive_fields() == NULL, "java.lang.Reference has no nonstatic primitive fields"); 864 int field_count = 0; 865 int offset = -1; 866 for (int i = 0; i < _root_group->oop_fields()->length(); i++) { 867 LayoutRawBlock* b = 
_root_group->oop_fields()->at(i); 868 FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index()); 869 if (fi->name(_constant_pool)->equals("referent")) { 870 offset = java_lang_ref_Reference::referent_offset; 871 } else if (fi->name(_constant_pool)->equals("queue")) { 872 offset = java_lang_ref_Reference::queue_offset; 873 } else if (fi->name(_constant_pool)->equals("next")) { 874 offset = java_lang_ref_Reference::next_offset; 875 } else if (fi->name(_constant_pool)->equals("discovered")) { 876 offset = java_lang_ref_Reference::discovered_offset; 877 } 878 assert(offset != -1, "Unknown field"); 879 _layout->add_field_at_offset(b, offset); 880 field_count++; 881 } 882 assert(field_count == 4, "Wrong number of fields in java.lang.ref.Reference"); 883 884 _static_layout->add_contiguously(this->_static_fields->oop_fields()); 885 _static_layout->add(this->_static_fields->primitive_fields()); 886 887 epilogue(); 888 } 889 890 // Compute layout of the boxing class according 891 // to the hard coded offsets of their fields 892 void FieldLayoutBuilder::compute_boxing_class_layout() { 893 prologue(); 894 regular_field_sorting(); 895 896 assert(_contended_groups.is_empty(), "Boxing classes have no @Contended annotations"); 897 assert(_root_group->oop_fields() == NULL, "Boxing classes have no nonstatic oops fields"); 898 int field_count = 0; 899 int offset = -1; 900 for (int i = 0; i < _root_group->primitive_fields()->length(); i++) { 901 LayoutRawBlock* b = _root_group->primitive_fields()->at(i); 902 FieldInfo* fi = FieldInfo::from_field_array(_fields, b->field_index()); 903 assert(fi->name(_constant_pool)->equals("value"), "Boxing classes have a single nonstatic field named 'value'"); 904 BasicType type = Signature::basic_type(fi->signature(_constant_pool)); 905 offset = java_lang_boxing_object::value_offset_in_bytes(type); 906 assert(offset != -1, "Unknown field"); 907 _layout->add_field_at_offset(b, offset); 908 field_count++; 909 } 910 assert(field_count == 1, 
"Wrong number of fields for a boxing class"); 911 912 _static_layout->add_contiguously(this->_static_fields->oop_fields()); 913 _static_layout->add(this->_static_fields->primitive_fields()); 914 915 epilogue(); 916 } 917 918 void FieldLayoutBuilder::add_flattened_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps, 919 ValueKlass* vklass, int offset) { 920 int diff = offset - vklass->first_field_offset(); 921 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps(); 922 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count(); 923 while (map < last_map) { 924 nonstatic_oop_maps->add(map->offset() + diff, map->count()); 925 map++; 926 } 927 } 928 929 void FieldLayoutBuilder::epilogue() { 930 // Computing oopmaps 931 int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count(); 932 int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count; 933 934 OopMapBlocksBuilder* nonstatic_oop_maps = 935 new OopMapBlocksBuilder(max_oop_map_count); 936 if (super_oop_map_count > 0) { 937 nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(), 938 _super_klass->nonstatic_oop_map_count()); 939 } 940 941 if (_root_group->oop_fields() != NULL) { 942 for (int i = 0; i < _root_group->oop_fields()->length(); i++) { 943 LayoutRawBlock* b = _root_group->oop_fields()->at(i); 944 nonstatic_oop_maps->add(b->offset(), 1); 945 } 946 } 947 948 GrowableArray<LayoutRawBlock*>* ff = _root_group->flattened_fields(); 949 if (ff != NULL) { 950 for (int i = 0; i < ff->length(); i++) { 951 LayoutRawBlock* f = ff->at(i); 952 ValueKlass* vk = f->value_klass(); 953 assert(vk != NULL, "Should have been initialized"); 954 if (vk->contains_oops()) { 955 add_flattened_field_oopmap(nonstatic_oop_maps, vk, f->offset()); 956 } 957 } 958 } 959 960 if (!_contended_groups.is_empty()) { 961 for (int i = 0; i < _contended_groups.length(); i++) { 962 FieldGroup* cg = _contended_groups.at(i); 963 if (cg->oop_count() > 
0) { 964 assert(cg->oop_fields() != NULL && cg->oop_fields()->at(0) != NULL, "oop_count > 0 but no oop fields found"); 965 nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count()); 966 } 967 } 968 } 969 970 nonstatic_oop_maps->compact(); 971 972 int instance_end = align_up(_layout->last_block()->offset(), wordSize); 973 int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize); 974 int static_fields_size = (static_fields_end - 975 InstanceMirrorKlass::offset_of_static_fields()) / wordSize; 976 int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize); 977 978 // Pass back information needed for InstanceKlass creation 979 980 _info->oop_map_blocks = nonstatic_oop_maps; 981 _info->_instance_size = align_object_size(instance_end / wordSize); 982 _info->_static_field_size = static_fields_size; 983 _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize; 984 _info->_has_nonstatic_fields = _has_nonstatic_fields; 985 986 if (PrintFieldLayout) { 987 ResourceMark rm; 988 tty->print_cr("Layout of class %s", _classname->as_C_string()); 989 tty->print_cr("Instance fields:"); 990 _layout->print(tty, false, _super_klass); 991 tty->print_cr("Static fields:"); 992 _static_layout->print(tty, true, NULL); 993 tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize); 994 if (_is_value_type) { 995 tty->print_cr("First field offset = %d", _first_field_offset); 996 tty->print_cr("Alignment = %d bytes", _alignment); 997 tty->print_cr("Exact size = %d bytes", _exact_size_in_bytes); 998 } 999 tty->print_cr("---"); 1000 } 1001 } 1002 1003 void FieldLayoutBuilder::build_layout(TRAPS) { 1004 if (_classname == vmSymbols::java_lang_ref_Reference()) { 1005 compute_java_lang_ref_Reference_layout(); 1006 } else if (_classname == vmSymbols::java_lang_Boolean() || 1007 _classname == vmSymbols::java_lang_Character() || 1008 _classname == 
vmSymbols::java_lang_Float() || 1009 _classname == vmSymbols::java_lang_Double() || 1010 _classname == vmSymbols::java_lang_Byte() || 1011 _classname == vmSymbols::java_lang_Short() || 1012 _classname == vmSymbols::java_lang_Integer() || 1013 _classname == vmSymbols::java_lang_Long()) { 1014 compute_boxing_class_layout(); 1015 } else if (_is_value_type) { 1016 compute_inline_class_layout(CHECK); 1017 } else { 1018 compute_regular_layout(); 1019 } 1020 }