diff --git a/src/hotspot/cpu/riscv/c1_CodeStubs_riscv.cpp b/src/hotspot/cpu/riscv/c1_CodeStubs_riscv.cpp
index 1b913df49e0d0..b7e1b7863efdb 100644
--- a/src/hotspot/cpu/riscv/c1_CodeStubs_riscv.cpp
+++ b/src/hotspot/cpu/riscv/c1_CodeStubs_riscv.cpp
@@ -318,7 +318,7 @@ void ArrayCopyStub::emit_code(LIR_Assembler* ce) {
   }
   Address resolve(SharedRuntime::get_resolve_static_call_stub(),
                   relocInfo::static_call_type);
-  address call = __ patchable_far_call(resolve);
+  address call = __ reloc_call(resolve);
   if (call == nullptr) {
     ce->bailout("trampoline stub overflow");
     return;
diff --git a/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.cpp b/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.cpp
index 909f6d7597a77..798679185d3a2 100644
--- a/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.cpp
+++ b/src/hotspot/cpu/riscv/c1_LIRAssembler_riscv.cpp
@@ -1346,7 +1346,7 @@ void LIR_Assembler::align_call(LIR_Code code) {
 }
 
 void LIR_Assembler::call(LIR_OpJavaCall* op, relocInfo::relocType rtype) {
-  address call = __ patchable_far_call(Address(op->addr(), rtype));
+  address call = __ reloc_call(Address(op->addr(), rtype));
   if (call == nullptr) {
     bailout("trampoline stub overflow");
     return;
diff --git a/src/hotspot/cpu/riscv/c2_MacroAssembler_riscv.cpp b/src/hotspot/cpu/riscv/c2_MacroAssembler_riscv.cpp
index a80ff93997999..8479b001ead87 100644
--- a/src/hotspot/cpu/riscv/c2_MacroAssembler_riscv.cpp
+++ b/src/hotspot/cpu/riscv/c2_MacroAssembler_riscv.cpp
@@ -1040,7 +1040,7 @@ void C2_MacroAssembler::string_indexof(Register haystack, Register needle,
     stub = RuntimeAddress(StubRoutines::riscv::string_indexof_linear_uu());
     assert(stub.target() != nullptr, "string_indexof_linear_uu stub has not been generated");
   }
-  address call = patchable_far_call(stub);
+  address call = reloc_call(stub);
   if (call == nullptr) {
     DEBUG_ONLY(reset_labels(LINEARSEARCH, DONE, NOMATCH));
     ciEnv::current()->record_failure("CodeCache is full");
@@ -1478,7 +1478,7 @@ void C2_MacroAssembler::string_compare(Register str1, Register str2,
       ShouldNotReachHere();
   }
   assert(stub.target() != nullptr, "compare_long_string stub has not been generated");
-  address call = patchable_far_call(stub);
+  address call = reloc_call(stub);
   if (call == nullptr) {
     DEBUG_ONLY(reset_labels(DONE, SHORT_LOOP, SHORT_STRING, SHORT_LAST, SHORT_LOOP_TAIL, SHORT_LAST2, SHORT_LAST_INIT, SHORT_LOOP_START));
     ciEnv::current()->record_failure("CodeCache is full");
diff --git a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
index ec0fb7152f480..e772959ed0845 100644
--- a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
+++ b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
@@ -55,7 +55,7 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline
   MacroAssembler masm(cb);
 
   auto emit = [&](address dest, const CodeBuffer::Offsets &offsets) {
-    assert(cb->stubs()->remaining() >= MacroAssembler::max_patchable_far_call_stub_size(), "pre-allocated trampolines");
+    assert(cb->stubs()->remaining() >= MacroAssembler::max_reloc_call_stub_size(), "pre-allocated trampolines");
     LinkedListIterator<int> it(offsets.head());
     int offset = *it.next();
     address stub = __ emit_trampoline_stub(offset, dest);
@@ -71,7 +71,7 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline
   };
 
   assert(requests->number_of_entries() >= 1, "at least one");
-  const int total_requested_size = MacroAssembler::max_patchable_far_call_stub_size() * requests->number_of_entries();
+  const int total_requested_size = MacroAssembler::max_reloc_call_stub_size() * requests->number_of_entries();
   if (cb->stubs()->maybe_expand_to_ensure_remaining(total_requested_size) && cb->blob() == nullptr) {
     return false;
   }
diff --git a/src/hotspot/cpu/riscv/compiledIC_riscv.cpp b/src/hotspot/cpu/riscv/compiledIC_riscv.cpp
index bcbba85e26b3e..4bbea8f356f0b 100644
--- a/src/hotspot/cpu/riscv/compiledIC_riscv.cpp
+++ b/src/hotspot/cpu/riscv/compiledIC_riscv.cpp
@@ -71,7 +71,7 @@ int CompiledDirectCall::to_interp_stub_size() {
 int CompiledDirectCall::to_trampoline_stub_size() {
   // We count instructions and an additional alignment nop.
   // Trampoline stubs are always word aligned.
-  return MacroAssembler::max_patchable_far_call_stub_size();
+  return MacroAssembler::max_reloc_call_stub_size();
 }
 
 // Relocation entries for call stub, compiled java to interpreter.
diff --git a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
index b8cc7833e9498..fe7ecd7741817 100644
--- a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
+++ b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
@@ -4228,7 +4228,7 @@ address MacroAssembler::ic_call(address entry, jint method_index) {
   IncompressibleRegion ir(this);  // relocations
   movptr(t1, (address)Universe::non_oop_word(), t0);
   assert_cond(entry != nullptr);
-  return patchable_far_call(Address(entry, rh));
+  return reloc_call(Address(entry, rh));
 }
 
 int MacroAssembler::ic_check_size() {
@@ -4274,7 +4274,7 @@ int MacroAssembler::ic_check(int end_alignment) {
 }
 
 address MacroAssembler::emit_address_stub(int insts_call_instruction_offset, address dest) {
-  address stub = start_a_stub(max_patchable_far_call_stub_size());
+  address stub = start_a_stub(max_reloc_call_stub_size());
   if (stub == nullptr) {
     return nullptr; // CodeBuffer::expand failed
   }
@@ -4315,7 +4315,7 @@ address MacroAssembler::emit_address_stub(int insts_call_instruction_offset, add
 
 address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset, address dest) {
   // Max stub size: alignment nop, TrampolineStub.
-  address stub = start_a_stub(max_patchable_far_call_stub_size());
+  address stub = start_a_stub(max_reloc_call_stub_size());
   if (stub == nullptr) {
     return nullptr; // CodeBuffer::expand failed
   }
@@ -4356,7 +4356,7 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
   return stub_start_addr;
 }
-int MacroAssembler::max_patchable_far_call_stub_size() {
+int MacroAssembler::max_reloc_call_stub_size() {
   // Max stub size: alignment nop, TrampolineStub.
   if (UseTrampolines) {
     return instruction_size + MacroAssembler::NativeShortCall::trampoline_size;
   }
@@ -5080,7 +5080,7 @@ address MacroAssembler::zero_words(Register ptr, Register cnt) {
   RuntimeAddress zero_blocks(StubRoutines::riscv::zero_blocks());
   assert(zero_blocks.target() != nullptr, "zero_blocks stub has not been generated");
   if (StubRoutines::riscv::complete()) {
-    address tpc = patchable_far_call(zero_blocks);
+    address tpc = reloc_call(zero_blocks);
     if (tpc == nullptr) {
       DEBUG_ONLY(reset_labels(around));
       postcond(pc() == badAddress);
diff --git a/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp b/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp
index 25df49bc2c585..7845c393a6f1a 100644
--- a/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp
+++ b/src/hotspot/cpu/riscv/macroAssembler_riscv.hpp
@@ -466,7 +466,7 @@ class MacroAssembler: public Assembler {
 
   address emit_address_stub(int insts_call_instruction_offset, address target);
   address emit_trampoline_stub(int insts_call_instruction_offset, address target);
-  static int max_patchable_far_call_stub_size();
+  static int max_reloc_call_stub_size();
 
   void emit_static_call_stub();
   static int static_call_stub_size();
@@ -1211,7 +1211,7 @@ class MacroAssembler: public Assembler {
   //   This form of call is most suitable for targets at fixed addresses, which
   //   will never be patched.
   //
-  // - patchable far call:
+  // - reloc call:
   //   This is only available in C1/C2-generated code (nmethod).
   //
   //   [Main code section]
@@ -1225,7 +1225,7 @@ class MacroAssembler: public Assembler {
   //   To change the destination we simply atomically store the new
   //   address in the stub section.
   //
-  // - trampoline call (old patchable far call / -XX:+UseTrampolines):
+  // - trampoline call (old reloc call / -XX:+UseTrampolines):
   //   This is only available in C1/C2-generated code (nmethod). It is a combination
   //   of a direct call, which is used if the destination of a call is in range,
   //   and a register-indirect call. It has the advantages of reaching anywhere in
@@ -1264,7 +1264,7 @@ class MacroAssembler: public Assembler {
   // --
 
   // Emit a direct call if the entry address will always be in range,
-  // otherwise a patchable far call.
+  // otherwise a reloc call.
   // Supported entry.rspec():
   // - relocInfo::runtime_call_type
   // - relocInfo::opt_virtual_call_type
@@ -1272,7 +1272,7 @@ class MacroAssembler: public Assembler {
   // - relocInfo::virtual_call_type
   //
   // Return: the call PC or null if CodeCache is full.
-  address patchable_far_call(Address entry) {
+  address reloc_call(Address entry) {
     return UseTrampolines ? trampoline_call(entry) : load_and_call(entry);
   }
 private:
diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
index eaf30030ffd26..f0357f1cd3029 100644
--- a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
+++ b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
@@ -49,7 +49,7 @@ bool NativeInstruction::is_call_at(address addr) {
 //-----------------------------------------------------------------------------
 // NativeShortCallTrampoline
 //
-// Implements the trampoline part of patchable far call - trampoline call.
+// Implements the trampoline part of reloc call - trampoline call.
 
 class NativeShortCall;
 
@@ -110,7 +110,7 @@ NativeShortCallTrampolineStub* NativeShortCallTrampolineStub::at(address addr) {
 //-----------------------------------------------------------------------------
 // NativeShortCall
 //
-// Implements the trampoline call, a short call with a trampoline, version of patchable far call.
+// Implements the trampoline call, a short call with a trampoline, version of reloc call.
 // Enabled by setting the experimental UseTrampolines to true.
 
 class NativeShortCall: private NativeInstruction {
@@ -302,7 +302,7 @@ bool NativeShortCall::is_call_before(address return_address) {
 //-----------------------------------------------------------------------------
 // NativeFarCall
 //
-// Implements direct far calling loading an address from the stub section version of patchable far call.
+// Implements direct far calling loading an address from the stub section version of reloc call.
 // This is the default (experimental flag UseTrampolines, default false).
 
 class NativeFarCall: public NativeInstruction {
diff --git a/src/hotspot/cpu/riscv/riscv.ad b/src/hotspot/cpu/riscv/riscv.ad
index 85ffb1c2c2899..b354d88d0fa63 100644
--- a/src/hotspot/cpu/riscv/riscv.ad
+++ b/src/hotspot/cpu/riscv/riscv.ad
@@ -2413,7 +2413,7 @@ encode %{
     assert_cond(addr != nullptr);
     if (!_method) {
       // A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap.
-      call = __ patchable_far_call(Address(addr, relocInfo::runtime_call_type));
+      call = __ reloc_call(Address(addr, relocInfo::runtime_call_type));
      if (call == nullptr) {
        ciEnv::current()->record_failure("CodeCache is full");
        return;
@@ -2431,7 +2431,7 @@ encode %{
      int method_index = resolved_method_index(masm);
      RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
                                                  : static_call_Relocation::spec(method_index);
-      call = __ patchable_far_call(Address(addr, rspec));
+      call = __ reloc_call(Address(addr, rspec));
      if (call == nullptr) {
        ciEnv::current()->record_failure("CodeCache is full");
        return;
@@ -2483,7 +2483,7 @@ encode %{
    address entry = (address)$meth$$method;
    CodeBlob *cb = CodeCache::find_blob(entry);
    if (cb != nullptr) {
-      address call = __ patchable_far_call(Address(entry, relocInfo::runtime_call_type));
+      address call = __ reloc_call(Address(entry, relocInfo::runtime_call_type));
      if (call == nullptr) {
        ciEnv::current()->record_failure("CodeCache is full");
        return;
@@ -10100,7 +10100,7 @@ instruct partialSubtypeCheckConstSuper(iRegP_R14 sub, iRegP_R10 super_reg, immP
                                                        $tmpR11$$Register, $tmpR12$$Register, $tmpR13$$Register,
                                                        $tmpR16$$Register, super_klass_slot);
     } else {
-      address call = __ patchable_far_call(RuntimeAddress(StubRoutines::lookup_secondary_supers_table_stub(super_klass_slot)));
+      address call = __ reloc_call(RuntimeAddress(StubRoutines::lookup_secondary_supers_table_stub(super_klass_slot)));
       success = (call != nullptr);
     }
     if (!success) {
diff --git a/src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp b/src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp
index d749e9b278f3b..da9a74ea1ec41 100644
--- a/src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp
+++ b/src/hotspot/cpu/riscv/sharedRuntime_riscv.cpp
@@ -1005,7 +1005,7 @@ static void gen_continuation_enter(MacroAssembler* masm,
     // Make sure the call is patchable
     __ align(NativeInstruction::instruction_size);
 
-    const address tr_call = __ patchable_far_call(resolve);
+    const address tr_call = __ reloc_call(resolve);
     if (tr_call == nullptr) {
       fatal("CodeCache is full at gen_continuation_enter");
     }
@@ -1037,7 +1037,7 @@ static void gen_continuation_enter(MacroAssembler* masm,
   // Make sure the call is patchable
   __ align(NativeInstruction::instruction_size);
 
-  const address tr_call = __ patchable_far_call(resolve);
+  const address tr_call = __ reloc_call(resolve);
   if (tr_call == nullptr) {
     fatal("CodeCache is full at gen_continuation_enter");
   }
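For context, not part of the patch: the comment block touched in macroAssembler_riscv.hpp describes the default (non-trampoline) reloc call as an auipc/ld/jalr sequence that loads its destination from a 64-bit slot in the stub section, so re-binding the call is a single atomic store to that slot rather than a rewrite of any instruction. Below is a minimal C++ sketch of that one idea; all names are hypothetical and only illustrate the mechanism (the real logic lives in NativeFarCall in nativeInst_riscv.cpp).

    #include <atomic>
    #include <cstdint>

    // Stand-in for the stub-section slot that the auipc/ld/jalr sequence
    // in the main code section reads its call target from.
    struct RelocCallSlot {
      std::atomic<uint64_t> destination;

      // Re-binding the call is one atomic store; the call instructions
      // themselves are never patched, so a thread executing the call
      // concurrently sees either the old or the new target, never a torn one.
      void set_destination(uint64_t dest) {
        destination.store(dest, std::memory_order_release);
      }

      // What the ld in the call sequence conceptually does.
      uint64_t get_destination() const {
        return destination.load(std::memory_order_acquire);
      }
    };

This is why the rename in this patch is purely mechanical: both the trampoline form (-XX:+UseTrampolines) and the load-from-stub form are relocated calls patched via the stub section, and reloc_call dispatches between them without changing either scheme.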