/*
 * Copyright (c) 1997, 2015, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2012, 2015 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "memory/resourceArea.hpp"
#include "nativeInst_ppc.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/handles.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/ostream.hpp"
#ifdef COMPILER1
#include "c1/c1_Runtime1.hpp"
#endif

// We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
// Work around a C++ compiler bug which changes 'this'
bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
  assert(!UseSIGTRAP, "precondition");
  if (*(int*)addr != 0 /*illtrap*/) return false;
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb == NULL || !cb->is_nmethod()) return false;
  nmethod *nm = (nmethod *)cb;
  // This method is not_entrant or zombie iff the illtrap instruction is
  // located at the verified entry point.
  return nm->verified_entry_point() == addr;
}

#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address.
  address addr = addr_at(0);
  if (addr == 0 || ((intptr_t)addr & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT

// Extract call destination from a NativeCall. The call might use a trampoline stub.
address NativeCall::destination() const {
  address addr = (address)this;
  address destination = Assembler::bxx_destination(addr);

  // Do we use a trampoline stub for this call?
  // Trampoline stubs are located behind the main code.
  if (destination > addr) {
    // Filter out recursive method invocation (call to verified/unverified entry point).
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);  // Else we get assertion if nmethod is zombie.
    assert(cb && cb->is_nmethod(), "sanity");
    nmethod *nm = (nmethod *)cb;
    if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
      // Yes we do, so get the destination from the trampoline stub.
      const address trampoline_stub_addr = destination;
      destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
    }
  }

  return destination;
}

// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (!ReoptimizeCallSequences || !a->is_within_range_of_b(dest, addr_call)) {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);
    dest = trampoline_stub_addr;
  }

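  // If a trampoline stub is used, its destination entry was already updated above.
  // The release barrier makes that store visible before the single, word-aligned
  // 'bl' instruction is overwritten below, so threads executing the call
  // concurrently branch either to the old or to the new target.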
  OrderAccess::release();
  a->bl(dest);

  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}

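// Return the address of the trampoline stub belonging to this call site, or NULL
// if it cannot be determined, in particular when the containing code blob provides
// no relocation information (e.g. while code is copied during CodeBuffer expansion).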
address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not a nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}

#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal("not a NativeCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT

address NativeMovConstReg::next_instruction_address() const {
#ifdef ASSERT
  CodeBlob* nm = CodeCache::find_blob(instruction_address());
  assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
#endif

  if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
    return addr_at(load_const_from_method_toc_instruction_size);
  } else {
    return addr_at(load_const_instruction_size);
  }
}

intptr_t NativeMovConstReg::data() const {
  address addr = addr_at(0);

  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(oopDesc::decode_heap_oop(no));
  } else {
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}

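// Patch the constant loaded by this instruction sequence: either directly in the
// instruction stream (load_const, calculate_address_from_global_toc, bl) or in the
// method's TOC entry the sequence loads from. Returns the address behind the
// patched sequence, which set_data() uses to bound its relocation scan.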
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      const int invalidated_range =
        MacroAssembler::patch_calculate_address_from_global_toc_at(addr, cb->content_begin(),
                                                                    (address)data);
      const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
      // FIXME:
      const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
      ICache::ppc64_flush_icache_bytes(start, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}

void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here");
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}

void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  if (MacroAssembler::get_narrow_oop(addr, cb->content_begin()) == (long)data) return;
  const int invalidated_range =
    MacroAssembler::patch_set_narrow_oop(addr, cb->content_begin(), (long)data);
  const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
  // FIXME:
  const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
  ICache::ppc64_flush_icache_bytes(start, range);
}

// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr);  // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
    }
  }
}
#endif // ASSERT

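// Make the nmethod non-entrant by patching the first instruction of its verified
// entry point: either a direct branch to 'dest', or a trap/illtrap from which the
// signal handler continues at 'dest' (see is_sigill_zombie_not_entrant_at() above
// for how the illtrap is recognized when !UseSIGTRAP).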
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}

#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
  a->b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jmp instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because caller uses invalidate_range after copying the remaining bytes.
  //OrderAccess::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // atomically store code over branch instruction
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0:  load the TOC (part 1)
//    4:  load the TOC (part 2)
//    8:  load the call target from the constant pool (part 1)
//  [12:  load the call target from the constant pool (part 2, optional)]
//   ..:  branch via CTR
//

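// Return the address of the 'ld' instruction with large offset that encodes the
// TOC offset of the call target; depending on the stub layout this is either the
// first instruction or the one following the two-instruction TOC load.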
address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {
    instruction_addr = addr_at(2 * BytesPerInstWord);
    assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
           "must be a ld with large offset (from the constant pool)");
  }
  return instruction_addr;
}

address NativeCallTrampolineStub::destination(nmethod *nm) const {
  CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
  address ctable = cb->content_begin();

  return *(address*)(ctable + destination_toc_offset());
}

int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}

void NativeCallTrampolineStub::set_destination(address new_destination) {
  CodeBlob* cb = CodeCache::find_blob(addr_at(0));
  address ctable = cb->content_begin();

  *(address*)(ctable + destination_toc_offset()) = new_destination;
}