diff --git a/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.hpp b/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.hpp index ce23213776c08..df25812a1ef64 100644 --- a/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.hpp +++ b/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.hpp @@ -70,7 +70,7 @@ friend class ArrayCopyStub; // See emit_static_call_stub for detail // CompiledDirectCall::to_interp_stub_size() (14) + CompiledDirectCall::to_trampoline_stub_size() (1 + 3 + address) _call_stub_size = 14 * NativeInstruction::instruction_size + - (NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size), + (NativeInstruction::instruction_size + NativeShortCall::trampoline_size), // See emit_exception_handler for detail // verify_not_null_oop + far_call + should_not_reach_here + invalidate_registers(DEBUG_ONLY) _exception_handler_size = DEBUG_ONLY(584) NOT_DEBUG(548), // or smaller diff --git a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp index d62d595e4bc5e..c481037ff1d78 100644 --- a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp +++ b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp @@ -50,6 +50,7 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline if (requests == nullptr) { return true; } + assert(!UseNewCode, "Not null?"); MacroAssembler masm(cb); @@ -60,7 +61,7 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline address stub = __ emit_trampoline_stub(offset, dest); assert(stub, "pre-allocated trampolines"); - address reloc_pc = cb->stubs()->end() - NativeCallTrampolineStub::instruction_size; + address reloc_pc = cb->stubs()->end() - NativeShortCall::trampoline_size; while (!it.is_empty()) { offset = *it.next(); address caller_pc = cb->insts()->start() + offset; diff --git a/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp b/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp index 73a7f1cb89f9b..1618647a3f0e3 100644 --- a/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp +++ b/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp @@ -33,7 +33,7 @@ public: void flush_bundle(bool start_new_bundle) {} - static constexpr bool supports_shared_stubs() { return true; } + static bool supports_shared_stubs() { return !UseNewCode; } void share_trampoline_for(address dest, int caller_offset); diff --git a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp index 15fb841f4e5cc..f1dcdd31ea5f3 100644 --- a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp +++ b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp @@ -849,6 +849,20 @@ void MacroAssembler::li(Register Rd, int64_t imm) { } } +void MacroAssembler::load_link(const address source, Register temp) { + assert(temp != noreg && temp != x0, "expecting a register"); + assert(temp == x5, "expecting a register"); + assert_cond(source != nullptr); + int64_t distance = source - pc(); + assert(is_simm32(distance), "Must be"); + //printf("Trampo auipc: %p\n", pc()); + Assembler::auipc(temp, (int32_t)distance + 0x800); + //printf("Trampo ld: %p -> source: %p\n", pc(), source); + Assembler::_ld(temp, temp, ((int32_t)distance << 20) >> 20); + //printf("Trampo jalr: %p\n", pc()); + Assembler::jalr(x1, temp, 0); +} + void MacroAssembler::jump_link(const address dest, Register temp) { assert_cond(dest != nullptr); int64_t distance = dest - pc(); @@ -898,7 +912,7 @@ void MacroAssembler::j(const address dest, Register temp) { } } -void MacroAssembler::j(const Address &adr, Register temp) { +void MacroAssembler::j(const Address &adr, Register temp) { switch 
(adr.getMode()) { case Address::literal: { relocate(adr.rspec(), [&] { @@ -3517,11 +3531,16 @@ address MacroAssembler::trampoline_call(Address entry) { // We need a trampoline if branches are far. if (!in_scratch_emit_size()) { - if (entry.rspec().type() == relocInfo::runtime_call_type) { + if (entry.rspec().type() == relocInfo::runtime_call_type && !UseNewCode) { assert(CodeBuffer::supports_shared_stubs(), "must support shared stubs"); code()->share_trampoline_for(entry.target(), offset()); } else { - address stub = emit_trampoline_stub(offset(), target); + address stub = nullptr; + if (UseNewCode) { + stub = emit_address_stub(offset(), target); + } else { + stub = emit_trampoline_stub(offset(), target); + } if (stub == nullptr) { postcond(pc() == badAddress); return nullptr; // CodeCache is full @@ -3537,7 +3556,11 @@ address MacroAssembler::trampoline_call(Address entry) { } #endif relocate(entry.rspec(), [&] { - jump_link(target, t0); + if (UseNewCode) { + load_link(target, t0); + } else { + jump_link(target, t0); + } }); postcond(pc() != badAddress); @@ -3594,6 +3617,38 @@ int MacroAssembler::ic_check(int end_alignment) { return uep_offset; } +address MacroAssembler::emit_address_stub(int insts_call_instruction_offset, address dest) { + address stub = start_a_stub(max_trampoline_stub_size()); + if (stub == nullptr) { + return nullptr; // CodeBuffer::expand failed + } + + // We are always 4-byte aligned here. + assert_alignment(pc()); + + // Make sure the address of destination 8-byte aligned. + align(wordSize, 0); + + RelocationHolder rh = trampoline_stub_Relocation::spec(code()->insts()->start() + + insts_call_instruction_offset); + const int stub_start_offset = offset(); + relocate(rh, [&] { + assert(offset() - stub_start_offset == 0, + "%ld - %ld == %ld : should be", (long)offset(), (long)stub_start_offset, (long)0); + assert(offset() % wordSize == 0, "bad alignment"); + //printf("Data trampo at %p with dest: %p\n", pc(), dest); + //fflush(stdout); + emit_int64((int64_t)dest); + emit_int64((int64_t)0xababababababababull); + }); + + const address stub_start_addr = addr_at(stub_start_offset); + assert(NativeFarCall::is_stub_address_at(stub_start_addr), "doesn't look like an address"); + end_a_stub(); + + return stub_start_addr; +} + // Emit a trampoline stub for a call to a target which is too far away. // // code sequences: @@ -3613,6 +3668,8 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset, return nullptr; // CodeBuffer::expand failed } + assert(!UseNewCode, "Bad"); + // We are always 4-byte aligned here. assert_alignment(pc()); @@ -3621,7 +3678,7 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset, // instructions code-section. // Make sure the address of destination 8-byte aligned after 3 instructions. 
- align(wordSize, NativeCallTrampolineStub::data_offset); + align(wordSize, NativeShortCall::trampoline_data_offset); RelocationHolder rh = trampoline_stub_Relocation::spec(code()->insts()->start() + insts_call_instruction_offset); @@ -3634,7 +3691,7 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset, ld(t0, target); // auipc + ld jr(t0); // jalr bind(target); - assert(offset() - stub_start_offset == NativeCallTrampolineStub::data_offset, + assert(offset() - stub_start_offset == NativeShortCall::trampoline_data_offset, "should be"); assert(offset() % wordSize == 0, "bad alignment"); emit_int64((int64_t)dest); @@ -3642,15 +3699,18 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset, const address stub_start_addr = addr_at(stub_start_offset); - assert(is_NativeCallTrampolineStub_at(stub_start_addr), "doesn't look like a trampoline"); - end_a_stub(); + return stub_start_addr; } int MacroAssembler::max_trampoline_stub_size() { // Max stub size: alignment nop, TrampolineStub. - return NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size; + if (UseNewCode) { + return 2 * wordSize; + } else { + return NativeInstruction::instruction_size + NativeShortCall::trampoline_size; + } } int MacroAssembler::static_call_stub_size() { diff --git a/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp b/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp index bd31ec6a60f67..c47429b65c941 100644 --- a/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp +++ b/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp @@ -436,8 +436,10 @@ class MacroAssembler: public Assembler { return false; } + address emit_address_stub(int insts_call_instruction_offset, address target); address emit_trampoline_stub(int insts_call_instruction_offset, address target); static int max_trampoline_stub_size(); + void emit_static_call_stub(); static int static_call_stub_size(); @@ -590,6 +592,7 @@ class MacroAssembler: public Assembler { void bgtz(Register Rs, const address dest); private: + void load_link(const address source, Register temp); void jump_link(const address dest, Register temp); void jump_link(const Address &adr, Register temp); public: diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp index 399de3a2805d2..28079df17319d 100644 --- a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp +++ b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp @@ -39,6 +39,8 @@ #include "c1/c1_Runtime1.hpp" #endif +//----------------------------------------------------------------------------- +// NativeInstruction Register NativeInstruction::extract_rs1(address instr) { assert_cond(instr != nullptr); return as_Register(Assembler::extract(Assembler::ld_instr(instr), 19, 15)); @@ -84,6 +86,10 @@ bool NativeInstruction::is_load_pc_relative_at(address instr) { check_load_pc_relative_data_dependency(instr); } +bool NativeInstruction::is_call_at(address addr) { + return NativeCall::is_at(addr); +} + bool NativeInstruction::is_movptr_at(address instr) { return is_lui_at(instr) && // Lui is_addi_at(instr + instruction_size) && // Addi @@ -120,50 +126,89 @@ bool NativeInstruction::is_li64_at(address instr) { check_li64_data_dependency(instr); } -void NativeCall::verify() { - assert(NativeCall::is_call_at((address)this), "unexpected code at call site"); -} +//----------------------------------------------------------------------------- +// NativeShortCall -address NativeCall::destination() const { - address addr = (address)this; +address 
NativeShortCall::destination() const { + address addr = addr_at(0); assert(NativeInstruction::is_jal_at(instruction_address()), "inst must be jal."); + address destination = MacroAssembler::target_addr_for_insn(instruction_address()); // Do we use a trampoline stub for this call? CodeBlob* cb = CodeCache::find_blob(addr); assert(cb && cb->is_nmethod(), "sanity"); nmethod *nm = (nmethod *)cb; - if (nm != nullptr && nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) { + if (nm != nullptr && nm->stub_contains(destination) && NativeShortCallTrampolineStub::is_at(destination)) { // Yes we do, so get the destination from the trampoline stub. const address trampoline_stub_addr = destination; - destination = nativeCallTrampolineStub_at(trampoline_stub_addr)->destination(); + destination = NativeShortCallTrampolineStub::at(trampoline_stub_addr)->destination(); } return destination; } + +address NativeShortCall::reloc_destination(address orig_address) { + address addr = addr_at(0); + if (NativeShortCall::is_at(addr)) { + NativeShortCall* call = NativeShortCall::at(addr); + if (call->has_trampoline()) { + return call->trampoline_destination(); + } + } + if (orig_address != nullptr) { + // the extracted address from the instructions in address orig_addr + address new_addr = MacroAssembler::pd_call_destination(orig_address); + // If call is branch to self, don't try to relocate it, just leave it + // as branch to self. This happens during code generation if the code + // buffer expands. It will be relocated to the trampoline above once + // code generation is complete. + new_addr = (new_addr == orig_address) ? addr : new_addr; + return new_addr; + } + return MacroAssembler::pd_call_destination(addr); +} + +void NativeShortCall::set_destination(address dest) { + assert(NativeShortCall::is_at(addr_at(0)), "unexpected code at call site"); + assert(is_jal(), "Should be jal instruction!"); + intptr_t offset = (intptr_t)(dest - instruction_address()); + assert((offset & 0x1) == 0, "bad alignment"); + assert(Assembler::is_simm21(offset), "encoding constraint"); + unsigned int insn = 0b1101111; // jal + address pInsn = (address)(&insn); + Assembler::patch(pInsn, 31, 31, (offset >> 20) & 0x1); + Assembler::patch(pInsn, 30, 21, (offset >> 1) & 0x3ff); + Assembler::patch(pInsn, 20, 20, (offset >> 11) & 0x1); + Assembler::patch(pInsn, 19, 12, (offset >> 12) & 0xff); + Assembler::patch(pInsn, 11, 7, ra->encoding()); // Rd must be x1, need ra + set_int_at(0, insn); +} -// Similar to replace_mt_safe, but just changes the destination. The -// important thing is that free-running threads are able to execute this -// call instruction at all times. -// -// Used in the runtime linkage of calls; see class CompiledIC. -// -// Add parameter assert_lock to switch off assertion -// during code generation, where no patching lock is needed. 
-void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) { +void NativeShortCall::verify() { + assert(NativeShortCall::is_at(addr_at(0)), + "unexpected code at call site: %p", addr_at(0)); +} + +void NativeShortCall::print() { + assert(NativeShortCall::is_at(addr_at(0)), "unexpected code at call site"); + tty->print_cr(PTR_FORMAT ": jal/auipc,ld,jalr x1, offset/reg", p2i(instruction_address())); +} + +bool NativeShortCall::set_destination_mt_safe(address dest, bool assert_lock) { assert(!assert_lock || (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) || CompiledICLocker::is_safe(addr_at(0)), "concurrent code patching"); address addr_call = addr_at(0); - assert(NativeCall::is_call_at(addr_call), "unexpected code at call site"); + assert(NativeCall::is_at(addr_call), "unexpected code at call site"); // Patch the constant in the call's trampoline stub. address trampoline_stub_addr = get_trampoline(); if (trampoline_stub_addr != nullptr) { - assert (!is_NativeCallTrampolineStub_at(dest), "chained trampolines"); - nativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest); + assert (!NativeShortCallTrampolineStub::is_at(dest), "chained trampolines"); + NativeShortCallTrampolineStub::at(trampoline_stub_addr)->set_destination(dest); } // Patch the call. @@ -175,28 +220,424 @@ void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) { } ICache::invalidate_range(addr_call, instruction_size); + return true; } -address NativeCall::get_trampoline() { +address NativeShortCall::get_trampoline() { address call_addr = addr_at(0); CodeBlob *code = CodeCache::find_blob(call_addr); assert(code != nullptr, "Could not find the containing code blob"); address jal_destination = MacroAssembler::pd_call_destination(call_addr); - if (code != nullptr && code->contains(jal_destination) && is_NativeCallTrampolineStub_at(jal_destination)) { + if (code != nullptr && code->contains(jal_destination) && NativeShortCallTrampolineStub::is_at(jal_destination)) { return jal_destination; } if (code != nullptr && code->is_nmethod()) { - return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code); + address ret = trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code); + return ret; + } + + return nullptr; +} + +bool NativeShortCall::has_trampoline() { + return NativeShortCall::get_trampoline() != nullptr; +} + +address NativeShortCall::trampoline_destination() { + return NativeShortCallTrampolineStub::at(get_trampoline())->destination(); +} + +NativeShortCall* NativeShortCall::at(address addr) { + assert_cond(addr != nullptr); + assert(NativeShortCall::is_at(addr), "unexpected code at call site: %p", addr); + NativeShortCall* call = (NativeShortCall*)(addr); + return call; +} + +bool NativeShortCall::is_at(address addr) { + if (is_jal_at(addr)) { + if (NativeInstruction::extract_rd(addr) == x1) { + return true; + } + } + return false; +} + +bool NativeShortCall::is_call_before(address return_address) { + return NativeShortCall::is_at(return_address - Assembler::instruction_size); +} + +void NativeShortCall::insert(address code_pos, address entry) { + Unimplemented(); +} + +void NativeShortCall::replace_mt_safe(address instr_addr, address code_buffer) { + Unimplemented(); +} + +//----------------------------------------------------------------------------- +// NativeShortCallTrampoline + +address NativeShortCallTrampolineStub::destination(nmethod *nm) const { + return ptr_at(NativeShortCall::trampoline_data_offset); +} + 
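// --- Editor's note, illustrative only (not part of the patch) -------------------------------
// The two accessors around this point index into the short-call trampoline emitted by
// emit_trampoline_stub(): three instructions followed by an 8-byte target word, so on RV64
// NativeShortCall::trampoline_data_offset == 12 and trampoline_size == 20:
//
//   0x00: auipc t0, hi20        ; t0 = pc + hi20
//   0x04: ld    t0, 12(t0)      ; load the 8-byte word stored after the jr
//   0x08: jr    t0
//   0x0c: .8byte <destination>  ; <-- trampoline_data_offset
//
// A minimal sketch of what ptr_at()/set_ptr_at() do with that word, assuming the HotSpot
// Bytes/OrderAccess helpers this file already uses (read_target/write_target are hypothetical
// mirrors, not new API):
//
//   address read_target(address stub)  { return (address)Bytes::get_native_u8(stub + 12); }
//   void    write_target(address stub, address d) {
//     Bytes::put_native_u8(stub + 12, (u8)d);   // single aligned 64-bit store
//     OrderAccess::release();                   // publish before callers are pointed at the stub
//   }
// ---------------------------------------------------------------------------------------------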
+void NativeShortCallTrampolineStub::set_destination(address new_destination) { + set_ptr_at(NativeShortCall::trampoline_data_offset, new_destination); + OrderAccess::release(); +} + +bool NativeShortCallTrampolineStub::is_at(address addr) { + // Ensure that the stub is exactly + // ld t0, L--->auipc + ld + // jr t0 + // L: + + // judge inst + register + imm + // 1). check the instructions: auipc + ld + jalr + // 2). check if auipc[11:7] == t0 and ld[11:7] == t0 and ld[19:15] == t0 && jr[19:15] == t0 + // 3). check if the offset in ld[31:20] equals the data_offset + assert_cond(addr != nullptr); + const int instr_size = NativeInstruction::instruction_size; + if (NativeInstruction::is_auipc_at(addr) && + NativeInstruction::is_ld_at(addr + instr_size) && + NativeInstruction::is_jalr_at(addr + 2 * instr_size) && + (NativeInstruction::extract_rd(addr) == x5) && + (NativeInstruction::extract_rd(addr + instr_size) == x5) && + (NativeInstruction::extract_rs1(addr + instr_size) == x5) && + (NativeInstruction::extract_rs1(addr + 2 * instr_size) == x5) && + (Assembler::extract(Assembler::ld_instr(addr + 4), 31, 20) == NativeShortCall::trampoline_data_offset)) { + return true; + } + return false; +} + +NativeShortCallTrampolineStub* NativeShortCallTrampolineStub::at(address addr) { + assert_cond(addr != nullptr); + assert(NativeShortCallTrampolineStub::is_at(addr), "no call trampoline found"); + return (NativeShortCallTrampolineStub*)addr; +} + +//----------------------------------------------------------------------------- +// NativeFarCall + +address NativeFarCall::destination() const { + address addr = addr_at(0); + assert(NativeFarCall::is_at(addr), "unexpected code at call site"); + + address destination = MacroAssembler::target_addr_for_insn(addr); + + //printf("%s| %p contains %p\n", __PRETTY_FUNCTION__, addr, destination); + //fflush(stdout); + + CodeBlob* cb = CodeCache::find_blob(addr); + assert(cb && cb->is_nmethod(), "sanity"); + nmethod *nm = (nmethod *)cb; + if (nm != nullptr && nm->stub_contains(destination) && is_stub_address_at(destination)) { + destination = stub_address_destination_at(destination); + } + + //printf("%s| %p contains %p\n", __PRETTY_FUNCTION__, addr, destination); + //fflush(stdout); + + return destination; +} + +address NativeFarCall::reloc_destination(address orig_address) { + address addr = addr_at(0); + if (NativeFarCall::is_at(addr)) { + NativeFarCall* call = NativeFarCall::at(addr); + if (call->has_address_stub()) { + return call->stub_address_destination(); + } + } + if (orig_address != nullptr) { + // the extracted address from the instructions in address orig_addr + address new_addr = MacroAssembler::pd_call_destination(orig_address); + // If call is branch to self, don't try to relocate it, just leave it + // as branch to self. This happens during code generation if the code + // buffer expands. It will be relocated to the trampoline above once + // code generation is complete. + new_addr = (new_addr == orig_address) ? 
addr : new_addr; + return new_addr; + } + return MacroAssembler::pd_call_destination(addr); +} + +void NativeFarCall::set_destination(address dest) { + address addr = addr_at(0); + assert(NativeFarCall::is_at(addr), "unexpected code at call site"); + address old_dest = MacroAssembler::target_addr_for_insn(addr); + //printf("%s| %p contains %p\n", __PRETTY_FUNCTION__, addr, old_dest); + //fflush(stdout); + MacroAssembler::pd_patch_instruction_size(addr, dest); + old_dest = MacroAssembler::target_addr_for_insn(addr); + //printf("%s| %p contains %p\n", __PRETTY_FUNCTION__, addr, old_dest); + //fflush(stdout); +} + +void NativeFarCall::verify() { + assert(NativeFarCall::is_at(addr_at(0)), "unexpected code at call site"); +} + +void NativeFarCall::print() { + assert(NativeFarCall::is_at(addr_at(0)), "unexpected code at call site"); + tty->print_cr(PTR_FORMAT ": auipc,ld,jalr x1, offset/reg, ", p2i(addr_at(0))); +} + +bool NativeFarCall::set_destination_mt_safe(address dest, bool assert_lock) { + assert(NativeFarCall::is_at(addr_at(0)), "unexpected code at call site"); + assert(!assert_lock || + (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) || + CompiledICLocker::is_safe(addr_at(0)), + "concurrent code patching"); + + address addr_call = addr_at(0); + assert(NativeFarCall::is_at(addr_call), "unexpected code at call site"); + + address stub_addr = stub_address(); + + if (stub_addr != nullptr) { + assert (!is_stub_address_at(dest), "chained trampolines"); + set_stub_address_destination_at(stub_addr, dest); + // Patch the call. + set_destination(stub_addr); + + ICache::invalidate_range(addr_call, instruction_size); + return true; + } + + //printf("%s| NO PATCH, NO TRAMP call %p contains %p | -> %p\n", __PRETTY_FUNCTION__, addr_call, stub_addr, dest); + //fflush(stdout); + return false; +} + +void NativeFarCall::set_stub_address_destination_at(address dest, address value) { + assert_cond(dest != nullptr); + assert_cond(value != nullptr); + + const int instr_size = NativeInstruction::instruction_size; + address old_dest = (address)get_data64_at(dest); + address trail = (address)get_data64_at(dest + wordSize); + //printf("%s| %p contains %p | %p\n", __PRETTY_FUNCTION__, dest, old_dest, trail); + //fflush(stdout); + assert(trail == (void*)0xababababababababull, "BAD"); + + // -- + set_data64_at(dest, (uint64_t)value); + // -- + + old_dest = (address)get_data64_at(dest); + trail = (address)get_data64_at(dest + wordSize); + //printf("%s| %p contains %p | %p\n", __PRETTY_FUNCTION__, dest, old_dest, trail); + //fflush(stdout); + assert(trail == (void*)0xababababababababull, "BAD"); + + OrderAccess::release(); +} + +bool NativeFarCall::is_stub_address_at(address src) { + assert_cond(src != nullptr); + const int instr_size = NativeInstruction::instruction_size; + address dest = (address)get_data64_at(src); + address trail = (address)get_data64_at(src + wordSize); + //printf("%s| %p contains %p | %p\n", __PRETTY_FUNCTION__, src, dest, trail); + //fflush(stdout); + // -- + if (trail == (void*)0xababababababababull) { + return true; + } + return false; +} + +address NativeFarCall::stub_address_destination_at(address src) { + assert_cond(src != nullptr); + const int instr_size = NativeInstruction::instruction_size; + // -- + address dest = (address)get_data64_at(src); + // -- + address trail = (address)get_data64_at(src + wordSize); + //printf("%s| %p contains %p | %p\n", __PRETTY_FUNCTION__, src, dest, trail); + //fflush(stdout); + assert(trail == (void*)0xababababababababull, "BAD"); 
+ // -- + return dest; +} + +address NativeFarCall::stub_address() { + address call_addr = addr_at(0); + + CodeBlob *code = CodeCache::find_blob(call_addr); + assert(code != nullptr, "Could not find the containing code blob"); + + address dest = MacroAssembler::pd_call_destination(call_addr); + if (code != nullptr && code->contains(dest) && is_stub_address_at(dest)) { + //printf("%s| Call: %p to stub address: %p\n", + //__PRETTY_FUNCTION__, call_addr, dest); + //fflush(stdout); + return dest; + } + + if (code != nullptr && code->is_nmethod()) { + address ret = trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code); + //printf("%s| Call reloc adr: %p to stub address: %p\n", + //__PRETTY_FUNCTION__, call_addr, ret); + //fflush(stdout); + return ret; } + //printf("%s| CALL NO STOB: %p to stub address: %p\n", + //__PRETTY_FUNCTION__, call_addr, dest); + return nullptr; } -// Inserts a native call instruction at a given pc -void NativeCall::insert(address code_pos, address entry) { Unimplemented(); } +bool NativeFarCall::has_address_stub() { + return stub_address() != nullptr; +} + +address NativeFarCall::stub_address_destination() { + return stub_address_destination_at(stub_address()); +} + +NativeFarCall* NativeFarCall::at(address addr) { + assert_cond(addr != nullptr); + assert(NativeFarCall::is_at(addr), "unexpected code at call site: %p", addr); + NativeFarCall* call = (NativeFarCall*)(addr); + return call; +} + +bool NativeFarCall::is_at(address addr) { + assert_cond(addr != nullptr); + const int instr_size = NativeInstruction::instruction_size; + if (NativeInstruction::is_auipc_at(addr) && + NativeInstruction::is_ld_at(addr + instr_size) && + NativeInstruction::is_jalr_at(addr + 2 * instr_size) && + (NativeInstruction::extract_rd(addr) == x5) && + (NativeInstruction::extract_rd(addr + instr_size) == x5) && + (NativeInstruction::extract_rs1(addr + instr_size) == x5) && + (NativeInstruction::extract_rs1(addr + 2 * instr_size) == x5) && + (NativeInstruction::extract_rd(addr + 2 * instr_size) == x1)) { + return true; + } + return false; +} + +bool NativeFarCall::is_call_before(address return_address) { + return NativeFarCall::is_at(return_address - return_address_offset); +} + +void NativeFarCall::insert(address code_pos, address entry) { + Unimplemented(); +} + +void NativeFarCall::replace_mt_safe(address instr_addr, address code_buffer) { + assert(NativeFarCall::is_at((address)instr_addr), "unexpected code at call site"); + Unimplemented(); +} + +//----------------------------------------------------------------------------- +// NativeCall + +address NativeCall::instruction_address() const { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->instruction_address(); + } else { + return NativeShortCall::at(addr_at(0))->instruction_address(); + } +} + +address NativeCall::next_instruction_address() const { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->next_instruction_address(); + } else { + return NativeShortCall::at(addr_at(0))->next_instruction_address(); + } +} + +address NativeCall::return_address() const { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->return_address(); + } else { + return NativeShortCall::at(addr_at(0))->return_address(); + } +} + +address NativeCall::destination() const { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->destination(); + } else { + return 
NativeShortCall::at(addr_at(0))->destination(); + } +} + +address NativeCall::reloc_destination(address orig_address) { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->reloc_destination(orig_address); + } else { + return NativeShortCall::at(addr_at(0))->reloc_destination(orig_address); + } +} + +void NativeCall::set_destination(address dest) { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + NativeFarCall::at(addr_at(0))->set_destination(dest); + } else { + NativeShortCall::at(addr_at(0))->set_destination(dest); + } +} + +void NativeCall::verify() { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + NativeFarCall::at(addr_at(0))->verify();; + } else { + NativeShortCall::at(addr_at(0))->verify(); + } +} + +void NativeCall::print() { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + NativeFarCall::at(addr_at(0))->print();; + } else { + NativeShortCall::at(addr_at(0))->print(); + } +} + +bool NativeCall::set_destination_mt_safe(address dest, bool assert_lock) { + if (UseNewCode && NativeFarCall::is_at(addr_at(0))) { + return NativeFarCall::at(addr_at(0))->set_destination_mt_safe(dest, assert_lock); + } else { + return NativeShortCall::at(addr_at(0))->set_destination_mt_safe(dest, assert_lock); + } +} + +bool NativeCall::is_call_before(address return_address) { + if (UseNewCode) { + return NativeFarCall::is_call_before(return_address) || + NativeShortCall::is_call_before(return_address); + } else { + return NativeShortCall::is_call_before(return_address); + } +} + +void NativeCall::insert(address code_pos, address entry) { + if (UseNewCode && NativeFarCall::is_at(code_pos)) { + NativeFarCall::insert(code_pos, entry); + } else { + NativeShortCall::insert(code_pos, entry); + } +} + +void NativeCall::replace_mt_safe(address instr_addr, address code_buffer) { + if (UseNewCode && NativeFarCall::is_at(instr_addr)) { + NativeFarCall::replace_mt_safe(instr_addr, code_buffer); + } else { + NativeShortCall::replace_mt_safe(instr_addr, code_buffer); + } +} //------------------------------------------------------------------- @@ -329,7 +770,7 @@ address NativeGeneralJump::jump_destination() const { //------------------------------------------------------------------- bool NativeInstruction::is_safepoint_poll() { - return is_lwu_to_zr(address(this)); + return is_lwu_to_zr(addr_at(0)); } bool NativeInstruction::is_lwu_to_zr(address instr) { @@ -410,14 +851,6 @@ void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) ShouldNotCallThis(); } -address NativeCallTrampolineStub::destination(nmethod *nm) const { - return ptr_at(data_offset); -} - -void NativeCallTrampolineStub::set_destination(address new_destination) { - set_ptr_at(data_offset, new_destination); - OrderAccess::release(); -} uint32_t NativeMembar::get_kind() { uint32_t insn = uint_at(0); diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.hpp b/src/hotspot/cpu/riscv/nativeInst_riscv.hpp index 48bbb2b3b181d..76b06dec9326b 100644 --- a/src/hotspot/cpu/riscv/nativeInst_riscv.hpp +++ b/src/hotspot/cpu/riscv/nativeInst_riscv.hpp @@ -210,15 +210,8 @@ class NativeInstruction { static bool is_li64_at(address instr); static bool is_pc_relative_at(address branch); static bool is_load_pc_relative_at(address branch); - - static bool is_call_at(address instr) { - if (is_jal_at(instr) || is_jalr_at(instr)) { - return true; - } - return false; - } + static bool is_call_at(address addr); static bool is_lwu_to_zr(address instr); - inline bool is_nop() const; inline 
bool is_jump_or_nop(); bool is_safepoint_poll(); @@ -226,20 +219,20 @@ class NativeInstruction { bool is_stop(); protected: - address addr_at(int offset) const { return address(this) + offset; } - - jint int_at(int offset) const { return (jint)Bytes::get_native_u4(addr_at(offset)); } - juint uint_at(int offset) const { return Bytes::get_native_u4(addr_at(offset)); } - - address ptr_at(int offset) const { return (address)Bytes::get_native_u8(addr_at(offset)); } + address addr_at(int offset) const { return address(this) + offset; } + jint int_at(int offset) const { return (jint) Bytes::get_native_u4(addr_at(offset)); } + juint uint_at(int offset) const { return Bytes::get_native_u4(addr_at(offset)); } + address ptr_at(int offset) const { return (address) Bytes::get_native_u8(addr_at(offset)); } + oop oop_at(int offset) const { return cast_to_oop(Bytes::get_native_u8(addr_at(offset))); } - oop oop_at (int offset) const { return cast_to_oop(Bytes::get_native_u8(addr_at(offset))); } + void set_int_at(int offset, jint i) { Bytes::put_native_u4(addr_at(offset), i); } + void set_uint_at(int offset, jint i) { Bytes::put_native_u4(addr_at(offset), i); } + void set_ptr_at(int offset, address ptr) { Bytes::put_native_u8(addr_at(offset), (u8)ptr); } + void set_oop_at(int offset, oop o) { Bytes::put_native_u8(addr_at(offset), cast_from_oop(o)); } - void set_int_at(int offset, jint i) { Bytes::put_native_u4(addr_at(offset), i); } - void set_uint_at(int offset, jint i) { Bytes::put_native_u4(addr_at(offset), i); } - void set_ptr_at (int offset, address ptr) { Bytes::put_native_u8(addr_at(offset), (u8)ptr); } - void set_oop_at (int offset, oop o) { Bytes::put_native_u8(addr_at(offset), cast_from_oop(o)); } + static void set_data64_at(address dest, uint64_t data) { Bytes::put_native_u8(dest, (u8)data); } + static uint64_t get_data64_at(address src) { return Bytes::get_native_u8(src); } public: @@ -254,94 +247,152 @@ class NativeInstruction { } }; -inline NativeInstruction* nativeInstruction_at(address addr) { +NativeInstruction* nativeInstruction_at(address addr) { return (NativeInstruction*)addr; } -// The natural type of an RISCV instruction is uint32_t -inline NativeInstruction* nativeInstruction_at(uint32_t *addr) { - return (NativeInstruction*)addr; -} - -inline NativeCall* nativeCall_at(address addr); -// The NativeCall is an abstraction for accessing/manipulating native -// call instructions (used to manipulate inline caches, primitive & -// DSO calls, etc.). 
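// --- Editor's note, illustrative only (not part of the patch) -------------------------------
// From here the header splits the old NativeCall into two concrete call shapes plus a
// dispatching facade. Assuming RV64 and the register checks in nativeInst_riscv.cpp above:
//
//   NativeShortCall  (4 bytes, reach +/-1 MiB, may bounce via the 20-byte trampoline stub):
//     jal   x1, offset
//
//   NativeFarCall    (12 bytes, full 64-bit reach, emitted on the UseNewCode path):
//     auipc t0, hi20
//     ld    t0, lo12(t0)          ; load the real target from the stub laid down by emit_address_stub()
//     jalr  x1, t0, 0
//
// The NativeCall methods defined in the .cpp simply probe which shape is present and forward to
// it, which is why MachCallStaticJavaNode::ret_addr_offset() in riscv.ad grows from 1 to 3
// instructions when UseNewCode is set.
// ---------------------------------------------------------------------------------------------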
+NativeCall* nativeCall_at(address addr); +NativeCall* nativeCall_before(address return_address); -class NativeCall: public NativeInstruction { +class NativeShortCall: private NativeInstruction { public: enum RISCV_specific_constants { - instruction_size = 4, - instruction_offset = 0, - displacement_offset = 0, - return_address_offset = 4 + trampoline_size = 3 * NativeInstruction::instruction_size + wordSize, // auipc + ld + jr + target address + trampoline_data_offset = 3 * NativeInstruction::instruction_size // auipc + ld + jr }; - address instruction_address() const { return addr_at(instruction_offset); } - address next_instruction_address() const { return addr_at(return_address_offset); } - address return_address() const { return addr_at(return_address_offset); } - address destination() const; + // Creation + inline friend NativeCall* nativeCall_at(address addr); + inline friend NativeCall* nativeCall_before(address return_address); - void set_destination(address dest) { - assert(is_jal(), "Should be jal instruction!"); - intptr_t offset = (intptr_t)(dest - instruction_address()); - assert((offset & 0x1) == 0, "bad alignment"); - assert(Assembler::is_simm21(offset), "encoding constraint"); - unsigned int insn = 0b1101111; // jal - address pInsn = (address)(&insn); - Assembler::patch(pInsn, 31, 31, (offset >> 20) & 0x1); - Assembler::patch(pInsn, 30, 21, (offset >> 1) & 0x3ff); - Assembler::patch(pInsn, 20, 20, (offset >> 11) & 0x1); - Assembler::patch(pInsn, 19, 12, (offset >> 12) & 0xff); - Assembler::patch(pInsn, 11, 7, ra->encoding()); // Rd must be x1, need ra - set_int_at(displacement_offset, insn); - } + address instruction_address() const { return addr_at(0); } + address next_instruction_address() const { return addr_at(NativeInstruction::instruction_size); } + address return_address() const { return addr_at(NativeInstruction::instruction_size); } + address destination() const; + address reloc_destination(address orig_address); + void set_destination(address dest); void verify_alignment() {} // do nothing on riscv void verify(); void print(); + bool set_destination_mt_safe(address dest, bool assert_lock = true); + + private: + address get_trampoline(); + bool has_trampoline(); + address trampoline_destination(); + public: + + static NativeShortCall* at(address addr); + static bool is_at(address addr); + static bool is_call_before(address return_address); + static void insert(address code_pos, address entry); + static void replace_mt_safe(address instr_addr, address code_buffer); +}; + +class NativeShortCallTrampolineStub : public NativeInstruction { + private: + friend NativeShortCall; + + address destination(nmethod *nm = nullptr) const; + void set_destination(address new_destination); + ptrdiff_t destination_offset() const; + + static bool is_at(address addr); + static NativeShortCallTrampolineStub* at(address addr); +}; + +class NativeFarCall: public NativeInstruction { + public: // Creation inline friend NativeCall* nativeCall_at(address addr); inline friend NativeCall* nativeCall_before(address return_address); - static bool is_call_before(address return_address) { - return is_call_at(return_address - NativeCall::return_address_offset); - } + enum RISCV_specific_constants { + instruction_size = 3 * NativeInstruction::instruction_size, // ld auipc jalr + return_address_offset = 3 * NativeInstruction::instruction_size, // ld auipc jalr + }; - // MT-safe patching of a call instruction. 
- static void insert(address code_pos, address entry); + address instruction_address() const { return addr_at(0); } + address next_instruction_address() const { return addr_at(return_address_offset); } + address return_address() const { return addr_at(return_address_offset); } + address destination() const; + address reloc_destination(address orig_address); + + void set_destination(address dest); + void verify_alignment() {} // do nothing on riscv + void verify(); + void print(); + + bool set_destination_mt_safe(address dest, bool assert_lock = true); + + private: + address stub_address(); + address stub_address_destination(); + bool has_address_stub(); + + static void set_stub_address_destination_at(address dest, address value); + static address stub_address_destination_at(address src); + public: + static bool is_stub_address_at(address src); + static NativeFarCall* at(address addr); + static bool is_at(address addr); + static bool is_call_before(address return_address); + static void insert(address code_pos, address entry); static void replace_mt_safe(address instr_addr, address code_buffer); +}; - // Similar to replace_mt_safe, but just changes the destination. The - // important thing is that free-running threads are able to execute - // this call instruction at all times. If the call is an immediate BL - // instruction we can simply rely on atomicity of 32-bit writes to - // make sure other threads will see no intermediate states. +// The NativeCall is an abstraction for accessing/manipulating native +// call instructions (used to manipulate inline caches, primitive & +// DSO calls, etc.). +class NativeCall: private NativeInstruction { + public: + enum { + instruction_size = 3 * Assembler::instruction_size, + }; - // We cannot rely on locks here, since the free-running threads must run at - // full speed. - // - // Used in the runtime linkage of calls; see class CompiledIC. - // (Cf. 4506997 and 4479829, where threads witnessed garbage displacements.) + // Creation + inline friend NativeCall* nativeCall_at(address addr); + inline friend NativeCall* nativeCall_before(address return_address); - // The parameter assert_lock disables the assertion during code generation. 
- void set_destination_mt_safe(address dest, bool assert_lock = true); + address instruction_address() const; + address next_instruction_address() const; + address return_address() const; + address destination() const; + address reloc_destination(address orig_address); - address get_trampoline(); + void set_destination(address dest); + void verify_alignment() {} // do nothing on riscv + void verify(); + void print(); + + bool set_destination_mt_safe(address dest, bool assert_lock = true); + + static bool is_at(address addr) { return NativeShortCall::is_at(addr) || + NativeFarCall::is_at(addr); } + static bool is_call_before(address return_address); + static void insert(address code_pos, address entry); + static void replace_mt_safe(address instr_addr, address code_buffer); }; inline NativeCall* nativeCall_at(address addr) { assert_cond(addr != nullptr); - NativeCall* call = (NativeCall*)(addr - NativeCall::instruction_offset); + //printf("nativeCall_at: %p\n", addr); + NativeCall* call = (NativeCall*)(addr); DEBUG_ONLY(call->verify()); return call; } inline NativeCall* nativeCall_before(address return_address) { assert_cond(return_address != nullptr); - NativeCall* call = (NativeCall*)(return_address - NativeCall::return_address_offset); + NativeCall* call = nullptr; + if (NativeFarCall::is_call_before(return_address)) { + call = (NativeCall*)(return_address - NativeFarCall::return_address_offset); + } else { + call = (NativeCall*)(return_address - NativeInstruction::instruction_size); + } DEBUG_ONLY(call->verify()); return call; } @@ -518,51 +569,6 @@ inline bool NativeInstruction::is_jump_or_nop() { return is_nop() || is_jump(); } -// Call trampoline stubs. -class NativeCallTrampolineStub : public NativeInstruction { - public: - - enum RISCV_specific_constants { - // Refer to function emit_trampoline_stub. - instruction_size = 3 * NativeInstruction::instruction_size + wordSize, // auipc + ld + jr + target address - data_offset = 3 * NativeInstruction::instruction_size, // auipc + ld + jr - }; - - address destination(nmethod *nm = nullptr) const; - void set_destination(address new_destination); - ptrdiff_t destination_offset() const; -}; - -inline bool is_NativeCallTrampolineStub_at(address addr) { - // Ensure that the stub is exactly - // ld t0, L--->auipc + ld - // jr t0 - // L: - - // judge inst + register + imm - // 1). check the instructions: auipc + ld + jalr - // 2). check if auipc[11:7] == t0 and ld[11:7] == t0 and ld[19:15] == t0 && jr[19:15] == t0 - // 3). 
check if the offset in ld[31:20] equals the data_offset - assert_cond(addr != nullptr); - const int instr_size = NativeInstruction::instruction_size; - if (NativeInstruction::is_auipc_at(addr) && - NativeInstruction::is_ld_at(addr + instr_size) && - NativeInstruction::is_jalr_at(addr + 2 * instr_size) && - (NativeInstruction::extract_rd(addr) == x5) && - (NativeInstruction::extract_rd(addr + instr_size) == x5) && - (NativeInstruction::extract_rs1(addr + instr_size) == x5) && - (NativeInstruction::extract_rs1(addr + 2 * instr_size) == x5) && - (Assembler::extract(Assembler::ld_instr(addr + 4), 31, 20) == NativeCallTrampolineStub::data_offset)) { - return true; - } - return false; -} - -inline NativeCallTrampolineStub* nativeCallTrampolineStub_at(address addr) { - assert_cond(addr != nullptr); - assert(is_NativeCallTrampolineStub_at(addr), "no call trampoline found"); - return (NativeCallTrampolineStub*)addr; -} class NativeMembar : public NativeInstruction { public: diff --git a/src/hotspot/cpu/riscv/relocInfo_riscv.cpp b/src/hotspot/cpu/riscv/relocInfo_riscv.cpp index 0e50d0b0796ca..c24347f5e1a1c 100644 --- a/src/hotspot/cpu/riscv/relocInfo_riscv.cpp +++ b/src/hotspot/cpu/riscv/relocInfo_riscv.cpp @@ -60,12 +60,10 @@ void Relocation::pd_set_data_value(address x, intptr_t o, bool verify_only) { address Relocation::pd_call_destination(address orig_addr) { assert(is_call(), "should be an address instruction here"); - if (NativeCall::is_call_at(addr())) { - address trampoline = nativeCall_at(addr())->get_trampoline(); - if (trampoline != nullptr) { - return nativeCallTrampolineStub_at(trampoline)->destination(); - } + if (NativeCall::is_at(addr())) { + return nativeCall_at(addr())->reloc_destination(orig_addr); } + // Non call reloc if (orig_addr != nullptr) { // the extracted address from the instructions in address orig_addr address new_addr = MacroAssembler::pd_call_destination(orig_addr); @@ -81,20 +79,24 @@ address Relocation::pd_call_destination(address orig_addr) { void Relocation::pd_set_call_destination(address x) { assert(is_call(), "should be an address instruction here"); - if (NativeCall::is_call_at(addr())) { - address trampoline = nativeCall_at(addr())->get_trampoline(); - if (trampoline != nullptr) { - nativeCall_at(addr())->set_destination_mt_safe(x, /* assert_lock */false); + if (NativeCall::is_at(addr())) { + NativeCall* nc = nativeCall_at(addr()); + if (nc->set_destination_mt_safe(x, false)) { return; } } - MacroAssembler::pd_patch_instruction_size(addr(), x); address pd_call = pd_call_destination(addr()); + // printf("%s| No tramp patching of %p contains %p\n", __PRETTY_FUNCTION__, addr(), pd_call); + // fflush(stdout); + MacroAssembler::pd_patch_instruction_size(addr(), x); + pd_call = pd_call_destination(addr()); + // printf("%s| No tramp patching of %p contains TO %p\n", __PRETTY_FUNCTION__, addr(), pd_call); + // fflush(stdout); assert(pd_call == x, "fail in reloc"); } address* Relocation::pd_address_in_code() { - assert(NativeCall::is_load_pc_relative_at(addr()), "Not the expected instruction sequence!"); + assert(NativeInstruction::is_load_pc_relative_at(addr()), "Not the expected instruction sequence!"); return (address*)(MacroAssembler::target_addr_for_insn(addr())); } diff --git a/src/hotspot/cpu/riscv/riscv.ad b/src/hotspot/cpu/riscv/riscv.ad index 2c69486a0e595..a9a079dc817b0 100644 --- a/src/hotspot/cpu/riscv/riscv.ad +++ b/src/hotspot/cpu/riscv/riscv.ad @@ -1225,12 +1225,17 @@ bool needs_acquiring_load_reserved(const Node *n) int 
MachCallStaticJavaNode::ret_addr_offset() { - // jal - return 1 * NativeInstruction::instruction_size; + if (UseNewCode) { + return 3 * NativeInstruction::instruction_size; + } + return 1 * NativeInstruction::instruction_size; // jal } int MachCallDynamicJavaNode::ret_addr_offset() { + if (UseNewCode) { + return 9 * NativeInstruction::instruction_size; // movptr, auipc + ld + jal + } return 7 * NativeInstruction::instruction_size; // movptr, jal } @@ -1248,6 +1253,9 @@ int MachCallRuntimeNode::ret_addr_offset() { // jalr(t0) -> jalr CodeBlob *cb = CodeCache::find_blob(_entry_point); if (cb != nullptr) { + if (UseNewCode) { + return 3 * NativeInstruction::instruction_size; + } return 1 * NativeInstruction::instruction_size; } else { return 11 * NativeInstruction::instruction_size; @@ -2350,6 +2358,10 @@ encode %{ // The NOP here is purely to ensure that eliding a call to // JVM_EnsureMaterializedForStackWalk doesn't change the code size. __ nop(); + if (UseNewCode) { + __ nop(); + __ nop(); + } __ block_comment("call JVM_EnsureMaterializedForStackWalk (elided)"); } else { int method_index = resolved_method_index(masm); diff --git a/src/hotspot/os/windows/os_windows.cpp b/src/hotspot/os/windows/os_windows.cpp index ddc1b1c335d0f..779e29afefca0 100644 --- a/src/hotspot/os/windows/os_windows.cpp +++ b/src/hotspot/os/windows/os_windows.cpp @@ -2173,14 +2173,7 @@ size_t os::lasterror(char* buf, size_t len) { buf, (DWORD)len, nullptr); - if (n > 3) { - // Drop final '.', CR, LF - if (buf[n - 1] == '\n') n--; - if (buf[n - 1] == '\r') n--; - if (buf[n - 1] == '.') n--; - buf[n] = '\0'; - } - return n; + return format_message_fixup(buf, n); } if (errno != 0) { diff --git a/src/hotspot/share/code/nmethod.cpp b/src/hotspot/share/code/nmethod.cpp index 51ba872b3ac66..4c92285b0a67a 100644 --- a/src/hotspot/share/code/nmethod.cpp +++ b/src/hotspot/share/code/nmethod.cpp @@ -1192,6 +1192,9 @@ nmethod* nmethod::new_nmethod(const methodHandle& method, } // Do verification and logging outside CodeCache_lock. if (nm != nullptr) { +// if (UseNewCode && PrintNMethods) { +// nm->print_nmethod(true); +// } // Safepoints in nmethod::verify aren't allowed because nm hasn't been installed yet. DEBUG_ONLY(nm->verify();) nm->log_new_nmethod(); @@ -2887,8 +2890,9 @@ void nmethod::verify_interrupt_point(address call_site, bool is_inline_cache) { HandleMark hm(Thread::current()); - PcDesc* pd = pc_desc_at(nativeCall_at(call_site)->return_address()); - assert(pd != nullptr, "PcDesc must exist"); + address check = nativeCall_at(call_site)->return_address(); + PcDesc* pd = pc_desc_at(check); + assert(pd != nullptr, "PcDesc must exist: call site: %p return: %p", call_site, check); for (ScopeDesc* sd = new ScopeDesc(this, pd); !sd->is_top(); sd = sd->sender()) { sd->verify(); diff --git a/src/hotspot/share/runtime/globals.hpp b/src/hotspot/share/runtime/globals.hpp index 575d9a3de36fc..d8a8cfca58f0d 100644 --- a/src/hotspot/share/runtime/globals.hpp +++ b/src/hotspot/share/runtime/globals.hpp @@ -1698,7 +1698,7 @@ const int ObjectAlignmentInBytes = 8; \ /* Flags used for temporary code during development */ \ \ - product(bool, UseNewCode, false, DIAGNOSTIC, \ + product(bool, UseNewCode, true, DIAGNOSTIC, \ "Testing Only: Use the new version while testing") \ \ product(bool, UseNewCode2, false, DIAGNOSTIC, \
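// --- Editor's note, illustrative only (not part of the patch) -------------------------------
// Net effect of the UseNewCode path: a far call is retargeted by rewriting the 8-byte word in
// its address stub rather than by re-encoding instruction bytes. Conceptually, the steps inside
// NativeFarCall::set_destination_mt_safe(dest) added above are (using only operations the patch
// itself performs):
//
//   address stub = stub_address();                  // located via the trampoline_stub relocation
//   set_stub_address_destination_at(stub, dest);    // Bytes::put_native_u8 + OrderAccess::release()
//   set_destination(stub);                          // re-point the auipc/ld pair at the stub
//   ICache::invalidate_range(addr_at(0), instruction_size);
//
// The stub emitted by emit_address_stub() is 2 * wordSize: the destination word followed by a
// 0xabababababababab guard word that the asserts use to catch mis-addressed patches, which is
// also why max_trampoline_stub_size() returns 2 * wordSize in this mode. The globals.hpp hunk
// flips the diagnostic UseNewCode flag (documented as "Testing Only" under "Flags used for
// temporary code during development") to true so the new path is exercised by default.
// ---------------------------------------------------------------------------------------------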