src/hotspot/share/memory/metaspace.cpp

 959 
 960 size_t Metaspace::_first_chunk_word_size = 0;
 961 size_t Metaspace::_first_class_chunk_word_size = 0;
 962 
 963 size_t Metaspace::_commit_alignment = 0;
 964 size_t Metaspace::_reserve_alignment = 0;
 965 
 966 VirtualSpaceList* Metaspace::_space_list = NULL;
 967 VirtualSpaceList* Metaspace::_class_space_list = NULL;
 968 
 969 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
 970 ChunkManager* Metaspace::_chunk_manager_class = NULL;
 971 
 972 bool Metaspace::_initialized = false;
 973 
 974 #define VIRTUALSPACEMULTIPLIER 2
 975 
 976 #ifdef _LP64
 977 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
 978 
 979 void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
 980   assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
 981   // Figure out the narrow_klass_base and the narrow_klass_shift.  The
 982   // narrow_klass_base is the lower of the metaspace base and the cds base
 983   // (if cds is enabled).  The narrow_klass_shift depends on the distance
 984   // between the lower base and higher address.
 985   address lower_base;
 986   address higher_address;
 987 #if INCLUDE_CDS
 988   if (UseSharedSpaces) {
 989     higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
 990                           (address)(metaspace_base + compressed_class_space_size()));
 991     lower_base = MIN2(metaspace_base, cds_base);
 992   } else
 993 #endif
 994   {
 995     higher_address = metaspace_base + compressed_class_space_size();
 996     lower_base = metaspace_base;
 997 
 998     uint64_t klass_encoding_max = UnscaledClassSpaceMax << LogKlassAlignmentInBytes;
 999     // If compressed class space fits in lower 32G, we don't need a base.
1000     if (higher_address <= (address)klass_encoding_max) {
1001       lower_base = 0; // Effectively lower base is zero.
1002     }
1003   }
1004 
1005   CompressedKlassPointers::set_base(lower_base);
1006 
1007   // CDS uses LogKlassAlignmentInBytes for narrow_klass_shift. See
1008   // MetaspaceShared::initialize_dumptime_shared_and_meta_spaces() for
1009   // how the dump time narrow_klass_shift is set. Although CDS can also
1010   // work in zero-shift mode, it uses LogKlassAlignmentInBytes for the
1011   // klass shift to stay consistent with AOT, so archived java heap
1012   // objects can be used at the same time as AOT code.
1013   if (!UseSharedSpaces
1014       && (uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax) {
1015     CompressedKlassPointers::set_shift(0);
1016   } else {
1017     CompressedKlassPointers::set_shift(LogKlassAlignmentInBytes);
1018   }
1019   AOTLoader::set_narrow_klass_shift();
1020 }
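
For orientation, the base and shift chosen here are consumed by the compressed klass decode, which is essentially klass = base + (narrow_klass << shift). A minimal sketch of the modes this function selects between, assuming only that decode form (this is illustrative, not HotSpot's actual decode routine):

// Minimal sketch (not from metaspace.cpp): how a narrow klass value is turned back
// into a Klass* using the base/shift chosen by set_narrow_klass_base_and_shift().
#include <cstdint>

static inline void* decode_klass(uintptr_t base, uint32_t narrow_klass, int shift) {
  // base == 0, shift == 0: the 32-bit value is the address itself (everything below 4G).
  // base == 0, shift == 3: zero-based, scaled by klass alignment (everything below 32G).
  // otherwise: offset from the chosen lower_base, scaled by klass alignment.
  return (void*)(base + ((uintptr_t)narrow_klass << shift));
}
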
1021 
1022 #if INCLUDE_CDS
1023 // Return TRUE if the specified metaspace_base and cds_base are close enough
1024 // to work with compressed klass pointers.
1025 bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
1026   assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
1027   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
1028   address lower_base = MIN2((address)metaspace_base, cds_base);
1029   address higher_address = MAX2((address)(cds_base + MetaspaceShared::core_spaces_size()),
1030                                 (address)(metaspace_base + compressed_class_space_size()));
1031   return ((uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax);
1032 }
1033 #endif
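
A worked instance of this check may help; the addresses and sizes below are made up for illustration and are not taken from the webrev:

// Hypothetical layout: archive mapped at 32G with 256M of core spaces, and a 1G class
// space reserved directly behind it.  The overall span is 1.25G <= 4G, so the check passes.
#include <cstdint>
#include <algorithm>

int main() {
  const uint64_t G = 1024ULL * 1024 * 1024;
  uint64_t cds_base       = 32 * G;                        // hypothetical mapping address
  uint64_t cds_end        = cds_base + 256 * 1024 * 1024;  // hypothetical core_spaces_size()
  uint64_t metaspace_base = cds_end;                       // class space placed right after the archive
  uint64_t metaspace_end  = metaspace_base + 1 * G;        // hypothetical compressed_class_space_size()
  uint64_t lower_base     = std::min(metaspace_base, cds_base);
  uint64_t higher_address = std::max(cds_end, metaspace_end);
  const uint64_t UnscaledClassSpaceMax = 4 * G;
  return (higher_address - lower_base) <= UnscaledClassSpaceMax ? 0 : 1;  // 1.25G <= 4G
}
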
1034 
1035 // Try to allocate the metaspace at the requested addr.
1036 void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
1037   assert(!DumpSharedSpaces, "compressed klass space is allocated by MetaspaceShared class.");
1038   assert(using_class_space(), "called improperly");
1039   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
1040   assert(compressed_class_space_size() < KlassEncodingMetaspaceMax,
1041          "Metaspace size is too big");
1042   assert_is_aligned(requested_addr, _reserve_alignment);
1043   assert_is_aligned(cds_base, _reserve_alignment);
1044   assert_is_aligned(compressed_class_space_size(), _reserve_alignment);
1045 
1046   // Don't use large pages for the class space.
1047   bool large_pages = false;
1048 






1049 #if !(defined(AARCH64) || defined(AIX))
1050   ReservedSpace metaspace_rs = ReservedSpace(compressed_class_space_size(),
1051                                              _reserve_alignment,
1052                                              large_pages,
1053                                              requested_addr);
1054 #else // AARCH64
1055   ReservedSpace metaspace_rs;
1056 
1057   // Our compressed klass pointers may fit nicely into the lower 32
1058   // bits.
1059   if ((uint64_t)requested_addr + compressed_class_space_size() < 4*G) {
1060     metaspace_rs = ReservedSpace(compressed_class_space_size(),
1061                                  _reserve_alignment,
1062                                  large_pages,
1063                                  requested_addr);
1064   }
1065 
1066   if (! metaspace_rs.is_reserved()) {
1067     // Aarch64: Try to align metaspace so that we can decode a compressed
1068     // klass with a single MOVK instruction.  We can do this iff the
1069     // compressed class base is a multiple of 4G.
1070     // Aix: Search for a place where we can find memory. If we need to load
1071     // the base, 4G alignment is helpful, too.
1072     size_t increment = AARCH64_ONLY(4*)G;
1073     for (char *a = align_up(requested_addr, increment);
1074          a < (char*)(1024*G);
1075          a += increment) {
1076       if (a == (char *)(32*G)) {
1077         // Go faster from here on. Zero-based is no longer possible.
1078         increment = 4*G;
1079       }
1080 
1081 #if INCLUDE_CDS
1082       if (UseSharedSpaces
1083           && ! can_use_cds_with_metaspace_addr(a, cds_base)) {
1084         // We failed to find an aligned base that will reach.  Fall
1085         // back to using our requested addr.
1086         metaspace_rs = ReservedSpace(compressed_class_space_size(),
1087                                      _reserve_alignment,
1088                                      large_pages,
1089                                      requested_addr);
1090         break;
1091       }
1092 #endif
1093 
1094       metaspace_rs = ReservedSpace(compressed_class_space_size(),
1095                                    _reserve_alignment,
1096                                    large_pages,
1097                                    a);
1098       if (metaspace_rs.is_reserved())
1099         break;
1100     }
1101   }
1102 
1103 #endif // AARCH64
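
Why the loop above prefers a 4G-aligned base: the decoded offset (narrow_klass << shift) stays below 4G because the compressed class space is capped at a few gigabytes, so with a base whose low 32 bits are zero the addition cannot carry and reduces to patching the base's upper bits into the shifted value, which AArch64 can do with a single MOVK. A rough sketch of that bit-level argument, assuming a base below 2^48 (illustrative, not the HotSpot assembler code):

// Illustration only: with a 4G-aligned base and an offset below 4G, the bit ranges
// are disjoint, so "base + offset" can be formed by inserting base[47:32] into the
// shifted narrow klass value (one MOVK #imm16, LSL #32 on AArch64).
#include <cstdint>
#include <cassert>

static uint64_t decode_with_aligned_base(uint64_t base, uint32_t narrow_klass, int shift) {
  uint64_t offset = (uint64_t)narrow_klass << shift;  // offset into the class space, < 4G
  assert((base & 0xFFFFFFFFULL) == 0);                // base is a multiple of 4G
  assert(base < (uint64_t(1) << 48));                 // upper 16 bits fit one MOVK immediate
  assert((offset >> 32) == 0);                        // no carry into the base bits
  return base | offset;                               // OR == ADD when the bits are disjoint
}
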

1104 
1105   if (!metaspace_rs.is_reserved()) {
1106 #if INCLUDE_CDS
1107     if (UseSharedSpaces) {
1108       size_t increment = align_up(1*G, _reserve_alignment);
1109 
1110       // Keep trying to allocate the metaspace, increasing the requested_addr
1111       // by 1GB each time, until we reach an address that will no longer allow
1112       // use of CDS with compressed klass pointers.
1113       char *addr = requested_addr;
1114       while (!metaspace_rs.is_reserved() && (addr + increment > addr) &&
1115              can_use_cds_with_metaspace_addr(addr + increment, cds_base)) {
1116         addr = addr + increment;
1117         metaspace_rs = ReservedSpace(compressed_class_space_size(),
1118                                      _reserve_alignment, large_pages, addr);
1119       }
1120     }
1121 #endif
1122     // If no successful allocation then try to allocate the space anywhere.  If
1123     // that fails then OOM doom.  At this point we cannot try allocating the
1124     // metaspace as if UseCompressedClassPointers is off because too much
1125     // initialization has happened that depends on UseCompressedClassPointers.
1126     // So, UseCompressedClassPointers cannot be turned off at this point.
1127     if (!metaspace_rs.is_reserved()) {
1128       metaspace_rs = ReservedSpace(compressed_class_space_size(),
1129                                    _reserve_alignment, large_pages);
1130       if (!metaspace_rs.is_reserved()) {
1131         vm_exit_during_initialization(err_msg("Could not allocate metaspace: " SIZE_FORMAT " bytes",
1132                                               compressed_class_space_size()));
1133       }
1134     }
1135   }
1136 

1137   // If we got here then the metaspace got allocated.
1138   MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);
1139 
1140 #if INCLUDE_CDS
1141   // Verify that we can use shared spaces.  Otherwise, turn off CDS.
1142   if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
1143     FileMapInfo::stop_sharing_and_unmap(
1144         "Could not allocate metaspace at a compatible address");
1145   }
1146 #endif
1147   set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
1148                                   UseSharedSpaces ? (address)cds_base : 0);
1149 
1150   initialize_class_space(metaspace_rs);
1151 
1152   LogTarget(Trace, gc, metaspace) lt;
1153   if (lt.is_enabled()) {
1154     ResourceMark rm;
1155     LogStream ls(lt);
1156     print_compressed_class_space(&ls, requested_addr);
1157   }
1158 }
1159 
1160 void Metaspace::print_compressed_class_space(outputStream* st, const char* requested_addr) {
1161   st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
1162                p2i(CompressedKlassPointers::base()), CompressedKlassPointers::shift());
1163   if (_class_space_list != NULL) {
1164     address base = (address)_class_space_list->current_virtual_space()->bottom();
1165     st->print("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT,
1166                  compressed_class_space_size(), p2i(base));
1167     if (requested_addr != 0) {
1168       st->print(" Req Addr: " PTR_FORMAT, p2i(requested_addr));


1231   if (UseCompressedClassPointers) {
1232     if ((min_metaspace_sz + CompressedClassSpaceSize) >  MaxMetaspaceSize) {
1233       if (min_metaspace_sz >= MaxMetaspaceSize) {
1234         vm_exit_during_initialization("MaxMetaspaceSize is too small.");
1235       } else {
1236         FLAG_SET_ERGO(CompressedClassSpaceSize,
1237                       MaxMetaspaceSize - min_metaspace_sz);
1238       }
1239     }
1240   } else if (min_metaspace_sz >= MaxMetaspaceSize) {
1241     FLAG_SET_ERGO(InitialBootClassLoaderMetaspaceSize,
1242                   min_metaspace_sz);
1243   }
1244 
1245   set_compressed_class_space_size(CompressedClassSpaceSize);
1246 }
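
As a worked example with made-up numbers (not from the webrev): with MaxMetaspaceSize at 256M, a computed min_metaspace_sz of 40M, and the default CompressedClassSpaceSize of 1G, the sum exceeds the cap, so CompressedClassSpaceSize is ergonomically lowered to 256M - 40M = 216M; the VM only exits here if min_metaspace_sz alone already reaches MaxMetaspaceSize.
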
1247 
1248 void Metaspace::global_initialize() {
1249   MetaspaceGC::initialize();
1250 

1251 #if INCLUDE_CDS
1252   if (DumpSharedSpaces) {
1253     MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();

1254   } else if (UseSharedSpaces) {
1255     // If any of the archived spaces fails to map, UseSharedSpaces
1256     // is reset to false. Fall through to the
1257     // (!DumpSharedSpaces && !UseSharedSpaces) case to set up class
1258     // metaspace.
1259     MetaspaceShared::initialize_runtime_shared_and_meta_spaces();

1260   }
1261 
1262   if (DynamicDumpSharedSpaces && !UseSharedSpaces) {
1263     vm_exit_during_initialization("DynamicDumpSharedSpaces is unsupported when base CDS archive is not loaded", NULL);
1264   }
1265 
1266   if (!DumpSharedSpaces && !UseSharedSpaces)
1267 #endif // INCLUDE_CDS
1268   {
1269 #ifdef _LP64
1270     if (using_class_space()) {
1271       char* base = (char*)align_up(CompressedOops::end(), _reserve_alignment);
1272       allocate_metaspace_compressed_klass_ptrs(base, 0);
1273     }
1274 #endif // _LP64
1275   }

1276 
1277   // Initialize these before initializing the VirtualSpaceList
1278   _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
1279   _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
1280   // Make the first class chunk bigger than a medium chunk so it's not put
1281   // on the medium chunk list.  The next chunk will be small and progress
1282   // from there.  This size was calculated by running -version.
1283   _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
1284                                      (CompressedClassSpaceSize/BytesPerWord)*2);
1285   _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
1286   // Arbitrarily set the initial virtual space to a multiple
1287   // of the boot class loader size.
1288   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
1289   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
1290 
1291   // Initialize the list of virtual spaces.
1292   _space_list = new VirtualSpaceList(word_size);
1293   _chunk_manager_metadata = new ChunkManager(false/*metaspace*/);
1294 
1295   if (!_space_list->initialization_succeeded()) {

src/hotspot/share/memory/metaspace.cpp (new version; the old version is shown above)

 959 
 960 size_t Metaspace::_first_chunk_word_size = 0;
 961 size_t Metaspace::_first_class_chunk_word_size = 0;
 962 
 963 size_t Metaspace::_commit_alignment = 0;
 964 size_t Metaspace::_reserve_alignment = 0;
 965 
 966 VirtualSpaceList* Metaspace::_space_list = NULL;
 967 VirtualSpaceList* Metaspace::_class_space_list = NULL;
 968 
 969 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
 970 ChunkManager* Metaspace::_chunk_manager_class = NULL;
 971 
 972 bool Metaspace::_initialized = false;
 973 
 974 #define VIRTUALSPACEMULTIPLIER 2
 975 
 976 #ifdef _LP64
 977 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
 978 
 979 void Metaspace::set_narrow_klass_base_and_shift(ReservedSpace metaspace_rs, address cds_base) {
 980   assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
 981   // Figure out the narrow_klass_base and the narrow_klass_shift.  The
 982   // narrow_klass_base is the lower of the metaspace base and the cds base
 983   // (if cds is enabled).  The narrow_klass_shift depends on the distance
 984   // between the lower base and higher address.
 985   address lower_base = (address)metaspace_rs.base();
 986   address higher_address = (address)metaspace_rs.end();
 987   if (cds_base != NULL) {
 988     assert(UseSharedSpaces, "must be");
 989     lower_base = MIN2(lower_base, cds_base);
 990   } else {







 991     uint64_t klass_encoding_max = UnscaledClassSpaceMax << LogKlassAlignmentInBytes;
 992     // If compressed class space fits in lower 32G, we don't need a base.
 993     if (higher_address <= (address)klass_encoding_max) {
 994       lower_base = 0; // Effectively lower base is zero.
 995     }
 996   }
 997 
 998   CompressedKlassPointers::set_base(lower_base);
 999 
1000   // CDS uses LogKlassAlignmentInBytes for narrow_klass_shift. See
1001   // MetaspaceShared::initialize_dumptime_shared_and_meta_spaces() for
1002   // how the dump time narrow_klass_shift is set. Although CDS can also
1003   // work in zero-shift mode, it uses LogKlassAlignmentInBytes for the
1004   // klass shift to stay consistent with AOT, so archived java heap
1005   // objects can be used at the same time as AOT code.
1006   if (!UseSharedSpaces
1007       && (uint64_t)(higher_address - lower_base) <= UnscaledClassSpaceMax) {
1008     CompressedKlassPointers::set_shift(0);
1009   } else {
1010     CompressedKlassPointers::set_shift(LogKlassAlignmentInBytes);
1011   }
1012   AOTLoader::set_narrow_klass_shift();
1013 }
1014 













1015 // Try to allocate the metaspace at the requested addr.
1016 void Metaspace::allocate_metaspace_compressed_klass_ptrs(ReservedSpace metaspace_rs, char* requested_addr, address cds_base) {
1017   assert(!DumpSharedSpaces, "compressed klass space is allocated by MetaspaceShared class.");
1018   assert(using_class_space(), "called improperly");
1019   assert(UseCompressedClassPointers, "Only use with CompressedKlassPtrs");
1020   assert(compressed_class_space_size() < KlassEncodingMetaspaceMax,
1021          "Metaspace size is too big");
1022   assert_is_aligned(requested_addr, _reserve_alignment);
1023   assert_is_aligned(cds_base, _reserve_alignment);
1024   assert_is_aligned(compressed_class_space_size(), _reserve_alignment);
1025 
1026   // Don't use large pages for the class space.
1027   bool large_pages = false;
1028 
1029   if (metaspace_rs.is_reserved()) {
1030     // CDS should have already reserved the space.
1031     assert(requested_addr == NULL, "not used");
1032     assert(cds_base != NULL, "CDS should have already reserved the memory space");
1033   } else {
1034     assert(cds_base == NULL, "must be");
1035 #if !(defined(AARCH64) || defined(AIX))
1036   metaspace_rs = ReservedSpace(compressed_class_space_size(),
1037                                _reserve_alignment,
1038                                large_pages,
1039                                requested_addr);
1040 #else // AARCH64


1041   // Our compressed klass pointers may fit nicely into the lower 32
1042   // bits.
1043   if ((uint64_t)requested_addr + compressed_class_space_size() < 4*G) {
1044     metaspace_rs = ReservedSpace(compressed_class_space_size(),
1045                                  _reserve_alignment,
1046                                  large_pages,
1047                                  requested_addr);
1048   }
1049 
1050   if (! metaspace_rs.is_reserved()) {
1051     // Aarch64: Try to align metaspace so that we can decode a compressed
1052     // klass with a single MOVK instruction.  We can do this iff the
1053     // compressed class base is a multiple of 4G.
1054     // Aix: Search for a place where we can find memory. If we need to load
1055     // the base, 4G alignment is helpful, too.
1056     size_t increment = AARCH64_ONLY(4*)G;
1057     for (char *a = align_up(requested_addr, increment);
1058          a < (char*)(1024*G);
1059          a += increment) {
1060       if (a == (char *)(32*G)) {
1061         // Go faster from here on. Zero-based is no longer possible.
1062         increment = 4*G;
1063       }
1064 













1065       metaspace_rs = ReservedSpace(compressed_class_space_size(),
1066                                    _reserve_alignment,
1067                                    large_pages,
1068                                    a);
1069       if (metaspace_rs.is_reserved())
1070         break;
1071     }
1072   }

1073 #endif // AARCH64
1074   }
1075 
1076   if (!metaspace_rs.is_reserved()) {
1077     assert(cds_base == NULL, "CDS should have already reserved the memory space");















1078     // If no successful allocation then try to allocate the space anywhere.  If
1079     // that fails then OOM doom.  At this point we cannot try allocating the
1080     // metaspace as if UseCompressedClassPointers is off because too much
1081     // initialization has happened that depends on UseCompressedClassPointers.
1082     // So, UseCompressedClassPointers cannot be turned off at this point.

1083     metaspace_rs = ReservedSpace(compressed_class_space_size(),
1084                                  _reserve_alignment, large_pages);
1085     if (!metaspace_rs.is_reserved()) {
1086       vm_exit_during_initialization(err_msg("Could not allocate metaspace: " SIZE_FORMAT " bytes",
1087                                             compressed_class_space_size()));
1088     }
1089   }

1090 
1091   if (cds_base == NULL) {
1092     // If we got here then the metaspace got allocated.
1093     MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);






1094   }
1095 
1096   set_narrow_klass_base_and_shift(metaspace_rs, cds_base);

1097 
1098   initialize_class_space(metaspace_rs);
1099 
1100   LogTarget(Trace, gc, metaspace) lt;
1101   if (lt.is_enabled()) {
1102     ResourceMark rm;
1103     LogStream ls(lt);
1104     print_compressed_class_space(&ls, requested_addr);
1105   }
1106 }
1107 
1108 void Metaspace::print_compressed_class_space(outputStream* st, const char* requested_addr) {
1109   st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
1110                p2i(CompressedKlassPointers::base()), CompressedKlassPointers::shift());
1111   if (_class_space_list != NULL) {
1112     address base = (address)_class_space_list->current_virtual_space()->bottom();
1113     st->print("Compressed class space size: " SIZE_FORMAT " Address: " PTR_FORMAT,
1114                  compressed_class_space_size(), p2i(base));
1115     if (requested_addr != 0) {
1116       st->print(" Req Addr: " PTR_FORMAT, p2i(requested_addr));


1179   if (UseCompressedClassPointers) {
1180     if ((min_metaspace_sz + CompressedClassSpaceSize) >  MaxMetaspaceSize) {
1181       if (min_metaspace_sz >= MaxMetaspaceSize) {
1182         vm_exit_during_initialization("MaxMetaspaceSize is too small.");
1183       } else {
1184         FLAG_SET_ERGO(CompressedClassSpaceSize,
1185                       MaxMetaspaceSize - min_metaspace_sz);
1186       }
1187     }
1188   } else if (min_metaspace_sz >= MaxMetaspaceSize) {
1189     FLAG_SET_ERGO(InitialBootClassLoaderMetaspaceSize,
1190                   min_metaspace_sz);
1191   }
1192 
1193   set_compressed_class_space_size(CompressedClassSpaceSize);
1194 }
1195 
1196 void Metaspace::global_initialize() {
1197   MetaspaceGC::initialize();
1198 
1199   bool class_space_inited = false;
1200 #if INCLUDE_CDS
1201   if (DumpSharedSpaces) {
1202     MetaspaceShared::initialize_dumptime_shared_and_meta_spaces();
1203     class_space_inited = true;
1204   } else if (UseSharedSpaces) {
1205     // If any of the archived spaces fails to map, UseSharedSpaces
1206     // is reset to false.


1207     MetaspaceShared::initialize_runtime_shared_and_meta_spaces();
1208     class_space_inited = UseSharedSpaces;
1209   }
1210 
1211   if (DynamicDumpSharedSpaces && !UseSharedSpaces) {
1212     vm_exit_during_initialization("DynamicDumpSharedSpaces is unsupported when base CDS archive is not loaded", NULL);
1213   }


1214 #endif // INCLUDE_CDS
1215 
1216 #ifdef _LP64
1217   if (using_class_space() && !class_space_inited) {
1218     char* base = (char*)align_up(CompressedOops::end(), _reserve_alignment);
1219     ReservedSpace dummy;
1220     allocate_metaspace_compressed_klass_ptrs(dummy, base, 0);

1221   }
1222 #endif
1223 
1224   // Initialize these before initializing the VirtualSpaceList
1225   _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
1226   _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);
1227   // Make the first class chunk bigger than a medium chunk so it's not put
1228   // on the medium chunk list.  The next chunk will be small and progress
1229   // from there.  This size was calculated by running -version.
1230   _first_class_chunk_word_size = MIN2((size_t)MediumChunk*6,
1231                                      (CompressedClassSpaceSize/BytesPerWord)*2);
1232   _first_class_chunk_word_size = align_word_size_up(_first_class_chunk_word_size);
1233   // Arbitrarily set the initial virtual space to a multiple
1234   // of the boot class loader size.
1235   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
1236   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
1237 
1238   // Initialize the list of virtual spaces.
1239   _space_list = new VirtualSpaceList(word_size);
1240   _chunk_manager_metadata = new ChunkManager(false/*metaspace*/);
1241 
1242   if (!_space_list->initialization_succeeded()) {

