/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP
#define SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP

#include "runtime/atomic.inline.hpp"
#include "runtime/os.hpp"
#include "services/memTracker.hpp"

// Explicit C-heap memory management

void trace_heap_malloc(size_t size, const char* name, void *p);
void trace_heap_free(void *p);

#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP).
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // Sparc and X86 have atomic jlong (8 bytes) instructions
  julong value = Atomic::load((volatile jlong*)dest);
  value += add_value;
  Atomic::store((jlong)value, (volatile jlong*)dest);
#else
  // possible word-tearing during load/store
  *dest += add_value;
#endif
}
#endif

// allocate using malloc; will fail if no memory available
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
    const NativeCallStack& stack,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::malloc(size, flags, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "AllocateHeap", p);
#endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
  }
  return p;
}

#ifdef __GNUC__
__attribute__((always_inline))
#endif
inline char* AllocateHeap(size_t size, MEMFLAGS flags,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  return AllocateHeap(size, flags, CURRENT_PC, alloc_failmode);
}

#ifdef __GNUC__
__attribute__((always_inline))
#endif
inline char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag,
    AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM) {
  char* p = (char*) os::realloc(old, size, flag, CURRENT_PC);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "ReallocateHeap", p);
#endif
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
  }
  return p;
}

inline void FreeHeap(void* p) {
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_free(p);
#endif
  os::free(p);
}

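// Illustrative usage sketch (not part of the original header): classes whose
// instances live on the C heap derive from CHeapObj with a memory-type flag,
// so the operator new/delete overloads below route through AllocateHeap and
// FreeHeap and are visible to native memory tracking. The flag mtInternal and
// the class name are assumed here purely for illustration.
//
//   class MyTable : public CHeapObj<mtInternal> { /* ... */ };
//   MyTable* t = new MyTable();   // allocated via AllocateHeap(size, mtInternal, ...)
//   delete t;                     // released via FreeHeap(t)
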
template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size,
      const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new(size_t size) throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  void* p = (void*)AllocateHeap(size, F, stack,
      AllocFailStrategy::RETURN_NULL);
#ifdef ASSERT
  if (PrintMallocFree) trace_heap_malloc(size, "CHeapObj-new", p);
#endif
  return p;
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new (size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
      const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size)
  throw() {
  return CHeapObj<F>::operator new(size, CALLER_PC);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant, const NativeCallStack& stack) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, stack);
}

template <MEMFLAGS F> void* CHeapObj<F>::operator new [](size_t size,
  const std::nothrow_t& nothrow_constant) throw() {
  return CHeapObj<F>::operator new(size, nothrow_constant, CALLER_PC);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete(void* p){
  FreeHeap(p);
}

template <MEMFLAGS F> void CHeapObj<F>::operator delete [](void* p){
  FreeHeap(p);
}

template <class E, MEMFLAGS F>
size_t MmapArrayAllocator<E, F>::size_for(size_t length) {
  size_t size = length * sizeof(E);
  int alignment = os::vm_allocation_granularity();
  return align_size_up(size, alignment);
}

template <class E, MEMFLAGS F>
E* MmapArrayAllocator<E, F>::allocate(size_t length) {
  size_t size = size_for(length);
  int alignment = os::vm_allocation_granularity();

  char* addr = os::reserve_memory(size, NULL, alignment, F);
  if (addr == NULL) {
    vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
  }

  os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");

  return (E*)addr;
}

template <class E, MEMFLAGS F>
void MmapArrayAllocator<E, F>::free(E* addr, size_t length) {
  bool result = os::release_memory((char*)addr, size_for(length));
  assert(result, "Failed to release memory");
}

template <class E, MEMFLAGS F>
size_t MallocArrayAllocator<E, F>::size_for(size_t length) {
  return length * sizeof(E);
}

template <class E, MEMFLAGS F>
E* MallocArrayAllocator<E, F>::allocate(size_t length) {
  return (E*)AllocateHeap(size_for(length), F);
}

template<class E, MEMFLAGS F>
void MallocArrayAllocator<E, F>::free(E* addr, size_t /*length*/) {
  FreeHeap(addr);
}

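// Illustrative usage sketch (not part of the original header): ArrayAllocator
// below dispatches to MallocArrayAllocator when the array's byte size is under
// ArrayAllocatorMallocLimit and to MmapArrayAllocator otherwise. The element
// type int and the mtInternal flag are assumed here purely for illustration.
//
//   int* buf = ArrayAllocator<int, mtInternal>::allocate(100);
//   buf = ArrayAllocator<int, mtInternal>::reallocate(buf, 100, 200);
//   ArrayAllocator<int, mtInternal>::free(buf, 200);
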
template <class E, MEMFLAGS F>
bool ArrayAllocator<E, F>::should_use_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::size_for(length) < ArrayAllocatorMallocLimit;
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_malloc(size_t length) {
  return MallocArrayAllocator<E, F>::allocate(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate_mmap(size_t length) {
  return MmapArrayAllocator<E, F>::allocate(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::allocate(size_t length) {
  if (should_use_malloc(length)) {
    return allocate_malloc(length);
  }

  return allocate_mmap(length);
}

template <class E, MEMFLAGS F>
E* ArrayAllocator<E, F>::reallocate(E* old_addr, size_t old_length, size_t new_length) {
  E* new_addr = (new_length > 0)
      ? allocate(new_length)
      : NULL;

  if (new_addr != NULL && old_addr != NULL) {
    memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
  }

  if (old_addr != NULL) {
    free(old_addr, old_length);
  }

  return new_addr;
}

template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_malloc(E* addr, size_t length) {
  MallocArrayAllocator<E, F>::free(addr, length);
}

template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free_mmap(E* addr, size_t length) {
  MmapArrayAllocator<E, F>::free(addr, length);
}

template<class E, MEMFLAGS F>
void ArrayAllocator<E, F>::free(E* addr, size_t length) {
  if (addr != NULL) {
    if (should_use_malloc(length)) {
      free_malloc(addr, length);
    } else {
      free_mmap(addr, length);
    }
  }
}

#endif // SHARE_VM_MEMORY_ALLOCATION_INLINE_HPP