diff --git a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
index 967c8710411b5..f14dda0f81295 100644
--- a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
@@ -5340,7 +5340,41 @@ bool MacroAssembler::set_klass_decode_mode(address base, int shift, const size_t
   return _klass_decode_mode != KlassDecodeNone;
 }
 
+static Register pick_different_tmp(Register dst, Register src) {
+  auto tmps = RegSet::of(r0, r1, r2) - RegSet::of(src, dst);
+  return *tmps.begin();
+}
+
+void MacroAssembler::encode_klass_not_null_for_aot(Register dst, Register src) {
+  // we have to load the klass base from the AOT constants area but
+  // not the shift because it is not allowed to change
+  int shift = CompressedKlassPointers::shift();
+  assert(shift >= 0 && shift < 4, "unexpected compressed klass shift!");
+  if (dst != src) {
+    // we can load the base into dst, subtract it from the src and shift down
+    lea(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
+    ldr(dst, dst);
+    sub(dst, src, dst);
+    lsr(dst, dst, shift);
+  } else {
+    // we need an extra register in order to load the coop base
+    Register tmp = pick_different_tmp(dst, src);
+    RegSet regs = RegSet::of(tmp);
+    push(regs, sp);
+    lea(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
+    ldr(tmp, tmp);
+    sub(dst, src, tmp);
+    lsr(dst, dst, shift);
+    pop(regs, sp);
+  }
+}
+
 void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
+  if (AOTCodeCache::is_on_for_dump()) {
+    encode_klass_not_null_for_aot(dst, src);
+    return;
+  }
+
   switch (klass_decode_mode()) {
   case KlassDecodeZero:
     if (CompressedKlassPointers::shift() != 0) {
@@ -5377,9 +5411,36 @@ void MacroAssembler::encode_klass_not_null(Register r) {
   encode_klass_not_null(r, r);
 }
 
+void MacroAssembler::decode_klass_not_null_for_aot(Register dst, Register src) {
+  // we have to load the klass base from the AOT constants area but
+  // not the shift because it is not allowed to change
+  int shift = CompressedKlassPointers::shift();
+  assert(shift >= 0 && shift < 4, "unexpected compressed klass shift!");
+  if (dst != src) {
+    // we can load the base into dst then add the offset with a suitable shift
+    lea(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
+    ldr(dst, dst);
+    add(dst, dst, src, LSL, shift);
+  } else {
+    // we need an extra register in order to load the coop base
+    Register tmp = pick_different_tmp(dst, src);
+    RegSet regs = RegSet::of(tmp);
+    push(regs, sp);
+    lea(tmp, ExternalAddress(CompressedKlassPointers::base_addr()));
+    ldr(tmp, tmp);
+    add(dst, tmp, src, LSL, shift);
+    pop(regs, sp);
+  }
+}
+
 void MacroAssembler::decode_klass_not_null(Register dst, Register src) {
   assert (UseCompressedClassPointers, "should only be used for compressed headers");
 
+  if (AOTCodeCache::is_on_for_dump()) {
+    decode_klass_not_null_for_aot(dst, src);
+    return;
+  }
+
   switch (klass_decode_mode()) {
   case KlassDecodeZero:
     if (CompressedKlassPointers::shift() != 0) {
@@ -6654,7 +6715,7 @@ void MacroAssembler::get_thread(Register dst) {
   protect_return_address();
   push(saved_regs, sp);
 
-  mov(lr, CAST_FROM_FN_PTR(address, JavaThread::aarch64_get_thread_helper));
+  mov(lr, ExternalAddress(CAST_FROM_FN_PTR(address, JavaThread::aarch64_get_thread_helper)));
   blr(lr);
   if (dst != c_rarg0) {
     mov(dst, c_rarg0);
diff --git a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp
index 17ee72a00c0e0..32506c49cfa42 100644
---
a/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp +++ b/src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp @@ -935,6 +935,8 @@ class MacroAssembler: public Assembler { void set_narrow_oop(Register dst, jobject obj); + void decode_klass_not_null_for_aot(Register dst, Register src); + void encode_klass_not_null_for_aot(Register dst, Register src); void encode_klass_not_null(Register r); void decode_klass_not_null(Register r); void encode_klass_not_null(Register dst, Register src); diff --git a/src/hotspot/cpu/aarch64/runtime_aarch64.cpp b/src/hotspot/cpu/aarch64/runtime_aarch64.cpp index 2361d584f4252..3fcb0e70b5714 100644 --- a/src/hotspot/cpu/aarch64/runtime_aarch64.cpp +++ b/src/hotspot/cpu/aarch64/runtime_aarch64.cpp @@ -26,6 +26,7 @@ #ifdef COMPILER2 #include "asm/macroAssembler.hpp" #include "asm/macroAssembler.inline.hpp" +#include "code/aotCodeCache.hpp" #include "code/vmreg.hpp" #include "interpreter/interpreter.hpp" #include "opto/runtime.hpp" @@ -60,10 +61,15 @@ class SimpleRuntimeFrame { //------------------------------generate_uncommon_trap_blob-------------------- UncommonTrapBlob* OptoRuntime::generate_uncommon_trap_blob() { + const char* name = OptoRuntime::stub_name(OptoStubId::uncommon_trap_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)OptoStubId::uncommon_trap_id, name); + if (blob != nullptr) { + return blob->as_uncommon_trap_blob(); + } + // Allocate space for the code ResourceMark rm; // Setup code generation tools - const char* name = OptoRuntime::stub_name(OptoStubId::uncommon_trap_id); CodeBuffer buffer(name, 2048, 1024); if (buffer.blob() == nullptr) { return nullptr; @@ -246,8 +252,10 @@ UncommonTrapBlob* OptoRuntime::generate_uncommon_trap_blob() { // Make sure all code is generated masm->flush(); - return UncommonTrapBlob::create(&buffer, oop_maps, - SimpleRuntimeFrame::framesize >> 1); + UncommonTrapBlob *ut_blob = UncommonTrapBlob::create(&buffer, oop_maps, + SimpleRuntimeFrame::framesize >> 1); + AOTCodeCache::store_code_blob(*ut_blob, AOTCodeEntry::C2Blob, (uint)OptoStubId::uncommon_trap_id, name); + return ut_blob; } //------------------------------generate_exception_blob--------------------------- @@ -283,10 +291,15 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() { assert(SimpleRuntimeFrame::framesize % 4 == 0, "sp not 16-byte aligned"); + const char* name = OptoRuntime::stub_name(OptoStubId::exception_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)OptoStubId::exception_id, name); + if (blob != nullptr) { + return blob->as_exception_blob(); + } + // Allocate space for the code ResourceMark rm; // Setup code generation tools - const char* name = OptoRuntime::stub_name(OptoStubId::exception_id); CodeBuffer buffer(name, 2048, 1024); if (buffer.blob() == nullptr) { return nullptr; @@ -384,7 +397,9 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() { masm->flush(); // Set exception blob - return ExceptionBlob::create(&buffer, oop_maps, SimpleRuntimeFrame::framesize >> 1); + ExceptionBlob* ex_blob = ExceptionBlob::create(&buffer, oop_maps, SimpleRuntimeFrame::framesize >> 1); + AOTCodeCache::store_code_blob(*ex_blob, AOTCodeEntry::C2Blob, (uint)OptoStubId::exception_id, name); + return ex_blob; } #endif // COMPILER2 diff --git a/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp b/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp index 0c3dfabc93e88..ed296f60e2ddd 100644 --- a/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp +++ 
b/src/hotspot/cpu/aarch64/sharedRuntime_aarch64.cpp @@ -26,6 +26,7 @@ #include "asm/macroAssembler.hpp" #include "asm/macroAssembler.inline.hpp" +#include "code/aotCodeCache.hpp" #include "code/codeCache.hpp" #include "code/compiledIC.hpp" #include "code/debugInfoRec.hpp" @@ -2184,6 +2185,12 @@ void SharedRuntime::generate_deopt_blob() { } #endif const char* name = SharedRuntime::stub_name(SharedStubId::deopt_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)SharedStubId::deopt_id, name); + if (blob != nullptr) { + _deopt_blob = blob->as_deoptimization_blob(); + return; + } + CodeBuffer buffer(name, 2048+pad, 1024); MacroAssembler* masm = new MacroAssembler(&buffer); int frame_size_in_words; @@ -2547,6 +2554,8 @@ void SharedRuntime::generate_deopt_blob() { _deopt_blob->set_implicit_exception_uncommon_trap_offset(implicit_exception_uncommon_trap_offset); } #endif + + AOTCodeCache::store_code_blob(*_deopt_blob, AOTCodeEntry::SharedBlob, (uint)SharedStubId::deopt_id, name); } // Number of stack slots between incoming argument block and the start of @@ -2575,12 +2584,16 @@ VMReg SharedRuntime::thread_register() { SafepointBlob* SharedRuntime::generate_handler_blob(SharedStubId id, address call_ptr) { assert(is_polling_page_id(id), "expected a polling page stub id"); + // Allocate space for the code. Setup code generation tools. + const char* name = SharedRuntime::stub_name(id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_safepoint_blob(); + } + ResourceMark rm; OopMapSet *oop_maps = new OopMapSet(); OopMap* map; - - // Allocate space for the code. Setup code generation tools. - const char* name = SharedRuntime::stub_name(id); CodeBuffer buffer(name, 2048, 1024); MacroAssembler* masm = new MacroAssembler(&buffer); @@ -2689,7 +2702,10 @@ SafepointBlob* SharedRuntime::generate_handler_blob(SharedStubId id, address cal masm->flush(); // Fill-out other meta info - return SafepointBlob::create(&buffer, oop_maps, frame_size_in_words); + SafepointBlob* sp_blob = SafepointBlob::create(&buffer, oop_maps, frame_size_in_words); + + AOTCodeCache::store_code_blob(*sp_blob, AOTCodeEntry::SharedBlob, (uint)id, name); + return sp_blob; } // @@ -2704,10 +2720,14 @@ RuntimeStub* SharedRuntime::generate_resolve_blob(SharedStubId id, address desti assert (StubRoutines::forward_exception_entry() != nullptr, "must be generated before"); assert(is_resolve_id(id), "expected a resolve stub id"); + const char* name = SharedRuntime::stub_name(id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_runtime_stub(); + } + // allocate space for the code ResourceMark rm; - - const char* name = SharedRuntime::stub_name(id); CodeBuffer buffer(name, 1000, 512); MacroAssembler* masm = new MacroAssembler(&buffer); @@ -2780,7 +2800,10 @@ RuntimeStub* SharedRuntime::generate_resolve_blob(SharedStubId id, address desti // return the blob // frame_size_words or bytes?? 
- return RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, true); + RuntimeStub* rs_blob = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, true); + + AOTCodeCache::store_code_blob(*rs_blob, AOTCodeEntry::SharedBlob, (uint)id, name); + return rs_blob; } // Continuation point for throwing of implicit exceptions that are @@ -2820,10 +2843,15 @@ RuntimeStub* SharedRuntime::generate_throw_exception(SharedStubId id, address ru int insts_size = 512; int locs_size = 64; - ResourceMark rm; const char* timer_msg = "SharedRuntime generate_throw_exception"; TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime)); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_runtime_stub(); + } + + ResourceMark rm; CodeBuffer code(name, insts_size, locs_size); OopMapSet* oop_maps = new OopMapSet(); MacroAssembler* masm = new MacroAssembler(&code); @@ -2850,7 +2878,7 @@ RuntimeStub* SharedRuntime::generate_throw_exception(SharedStubId id, address ru __ mov(c_rarg0, rthread); BLOCK_COMMENT("call runtime_entry"); - __ mov(rscratch1, runtime_entry); + __ lea(rscratch1, RuntimeAddress(runtime_entry)); __ blr(rscratch1); // Generate oop map @@ -2883,6 +2911,8 @@ RuntimeStub* SharedRuntime::generate_throw_exception(SharedStubId id, address ru frame_complete, (framesize >> (LogBytesPerWord - LogBytesPerInt)), oop_maps, false); + AOTCodeCache::store_code_blob(*stub, AOTCodeEntry::SharedBlob, (uint)id, name); + return stub; } diff --git a/src/hotspot/cpu/x86/macroAssembler_x86.cpp b/src/hotspot/cpu/x86/macroAssembler_x86.cpp index 35e461b601f0f..30f06533c390d 100644 --- a/src/hotspot/cpu/x86/macroAssembler_x86.cpp +++ b/src/hotspot/cpu/x86/macroAssembler_x86.cpp @@ -5404,7 +5404,11 @@ void MacroAssembler::decode_heap_oop_not_null(Register dst, Register src) { void MacroAssembler::encode_klass_not_null(Register r, Register tmp) { assert_different_registers(r, tmp); if (CompressedKlassPointers::base() != nullptr) { - mov64(tmp, (int64_t)CompressedKlassPointers::base()); + if (AOTCodeCache::is_on_for_dump()) { + movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr())); + } else { + mov64(tmp, (int64_t)CompressedKlassPointers::base()); + } subq(r, tmp); } if (CompressedKlassPointers::shift() != 0) { @@ -5436,7 +5440,11 @@ void MacroAssembler::decode_klass_not_null(Register r, Register tmp) { shlq(r, CompressedKlassPointers::shift()); } if (CompressedKlassPointers::base() != nullptr) { - mov64(tmp, (int64_t)CompressedKlassPointers::base()); + if (AOTCodeCache::is_on_for_dump()) { + movptr(tmp, ExternalAddress(CompressedKlassPointers::base_addr())); + } else { + mov64(tmp, (int64_t)CompressedKlassPointers::base()); + } addq(r, tmp); } } diff --git a/src/hotspot/cpu/x86/runtime_x86_64.cpp b/src/hotspot/cpu/x86/runtime_x86_64.cpp index 027a523b33d72..5865bec2e3918 100644 --- a/src/hotspot/cpu/x86/runtime_x86_64.cpp +++ b/src/hotspot/cpu/x86/runtime_x86_64.cpp @@ -25,6 +25,7 @@ #ifdef COMPILER2 #include "asm/macroAssembler.hpp" #include "asm/macroAssembler.inline.hpp" +#include "code/aotCodeCache.hpp" #include "code/vmreg.hpp" #include "interpreter/interpreter.hpp" #include "opto/runtime.hpp" @@ -56,10 +57,15 @@ class SimpleRuntimeFrame { //------------------------------generate_uncommon_trap_blob-------------------- UncommonTrapBlob* OptoRuntime::generate_uncommon_trap_blob() { + const char* name = 
OptoRuntime::stub_name(OptoStubId::uncommon_trap_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)OptoStubId::uncommon_trap_id, name); + if (blob != nullptr) { + return blob->as_uncommon_trap_blob(); + } + // Allocate space for the code ResourceMark rm; // Setup code generation tools - const char* name = OptoRuntime::stub_name(OptoStubId::uncommon_trap_id); CodeBuffer buffer(name, 2048, 1024); if (buffer.blob() == nullptr) { return nullptr; @@ -228,8 +234,10 @@ UncommonTrapBlob* OptoRuntime::generate_uncommon_trap_blob() { // Make sure all code is generated masm->flush(); - return UncommonTrapBlob::create(&buffer, oop_maps, - SimpleRuntimeFrame::framesize >> 1); + UncommonTrapBlob *ut_blob = UncommonTrapBlob::create(&buffer, oop_maps, + SimpleRuntimeFrame::framesize >> 1); + AOTCodeCache::store_code_blob(*ut_blob, AOTCodeEntry::C2Blob, (uint)OptoStubId::uncommon_trap_id, name); + return ut_blob; } //------------------------------generate_exception_blob--------------------------- @@ -265,10 +273,15 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() { assert(SimpleRuntimeFrame::framesize % 4 == 0, "sp not 16-byte aligned"); + const char* name = OptoRuntime::stub_name(OptoStubId::exception_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)OptoStubId::exception_id, name); + if (blob != nullptr) { + return blob->as_exception_blob(); + } + // Allocate space for the code ResourceMark rm; // Setup code generation tools - const char* name = OptoRuntime::stub_name(OptoStubId::exception_id); CodeBuffer buffer(name, 2048, 1024); if (buffer.blob() == nullptr) { return nullptr; @@ -363,6 +376,8 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() { masm->flush(); // Set exception blob - return ExceptionBlob::create(&buffer, oop_maps, SimpleRuntimeFrame::framesize >> 1); + ExceptionBlob* ex_blob = ExceptionBlob::create(&buffer, oop_maps, SimpleRuntimeFrame::framesize >> 1); + AOTCodeCache::store_code_blob(*ex_blob, AOTCodeEntry::C2Blob, (uint)OptoStubId::exception_id, name); + return ex_blob; } #endif // COMPILER2 diff --git a/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp b/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp index 7811d59d12d11..f26d8243ddca9 100644 --- a/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp +++ b/src/hotspot/cpu/x86/sharedRuntime_x86_64.cpp @@ -27,6 +27,7 @@ #endif #include "asm/macroAssembler.hpp" #include "asm/macroAssembler.inline.hpp" +#include "code/aotCodeCache.hpp" #include "code/compiledIC.hpp" #include "code/debugInfoRec.hpp" #include "code/nativeInst.hpp" @@ -2600,6 +2601,12 @@ void SharedRuntime::generate_deopt_blob() { } #endif const char* name = SharedRuntime::stub_name(SharedStubId::deopt_id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)SharedStubId::deopt_id, name); + if (blob != nullptr) { + _deopt_blob = blob->as_deoptimization_blob(); + return; + } + CodeBuffer buffer(name, 2560+pad, 1024); MacroAssembler* masm = new MacroAssembler(&buffer); int frame_size_in_words; @@ -2951,6 +2958,8 @@ void SharedRuntime::generate_deopt_blob() { _deopt_blob->set_implicit_exception_uncommon_trap_offset(implicit_exception_uncommon_trap_offset); } #endif + + AOTCodeCache::store_code_blob(*_deopt_blob, AOTCodeEntry::SharedBlob, (uint)SharedStubId::deopt_id, name); } //------------------------------generate_handler_blob------ @@ -2963,12 +2972,16 @@ SafepointBlob* SharedRuntime::generate_handler_blob(SharedStubId id, address cal "must be generated before"); 
assert(is_polling_page_id(id), "expected a polling page stub id"); + // Allocate space for the code. Setup code generation tools. + const char* name = SharedRuntime::stub_name(id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_safepoint_blob(); + } + ResourceMark rm; OopMapSet *oop_maps = new OopMapSet(); OopMap* map; - - // Allocate space for the code. Setup code generation tools. - const char* name = SharedRuntime::stub_name(id); CodeBuffer buffer(name, 2548, 1024); MacroAssembler* masm = new MacroAssembler(&buffer); @@ -3128,7 +3141,10 @@ SafepointBlob* SharedRuntime::generate_handler_blob(SharedStubId id, address cal masm->flush(); // Fill-out other meta info - return SafepointBlob::create(&buffer, oop_maps, frame_size_in_words); + SafepointBlob* sp_blob = SafepointBlob::create(&buffer, oop_maps, frame_size_in_words); + + AOTCodeCache::store_code_blob(*sp_blob, AOTCodeEntry::SharedBlob, (uint)id, name); + return sp_blob; } // @@ -3143,10 +3159,14 @@ RuntimeStub* SharedRuntime::generate_resolve_blob(SharedStubId id, address desti assert (StubRoutines::forward_exception_entry() != nullptr, "must be generated before"); assert(is_resolve_id(id), "expected a resolve stub id"); + const char* name = SharedRuntime::stub_name(id); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_runtime_stub(); + } + // allocate space for the code ResourceMark rm; - - const char* name = SharedRuntime::stub_name(id); CodeBuffer buffer(name, 1552, 512); MacroAssembler* masm = new MacroAssembler(&buffer); @@ -3215,7 +3235,10 @@ RuntimeStub* SharedRuntime::generate_resolve_blob(SharedStubId id, address desti // return the blob // frame_size_words or bytes?? 
- return RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, true); + RuntimeStub* rs_blob = RuntimeStub::new_runtime_stub(name, &buffer, frame_complete, frame_size_in_words, oop_maps, true); + + AOTCodeCache::store_code_blob(*rs_blob, AOTCodeEntry::SharedBlob, (uint)id, name); + return rs_blob; } // Continuation point for throwing of implicit exceptions that are @@ -3253,10 +3276,15 @@ RuntimeStub* SharedRuntime::generate_throw_exception(SharedStubId id, address ru int insts_size = 512; int locs_size = 64; - ResourceMark rm; const char* timer_msg = "SharedRuntime generate_throw_exception"; TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime)); + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::SharedBlob, (uint)id, name); + if (blob != nullptr) { + return blob->as_runtime_stub(); + } + + ResourceMark rm; CodeBuffer code(name, insts_size, locs_size); OopMapSet* oop_maps = new OopMapSet(); MacroAssembler* masm = new MacroAssembler(&code); @@ -3314,6 +3342,8 @@ RuntimeStub* SharedRuntime::generate_throw_exception(SharedStubId id, address ru frame_complete, (framesize >> (LogBytesPerWord - LogBytesPerInt)), oop_maps, false); + AOTCodeCache::store_code_blob(*stub, AOTCodeEntry::SharedBlob, (uint)id, name); + return stub; } diff --git a/src/hotspot/cpu/x86/x86_64.ad b/src/hotspot/cpu/x86/x86_64.ad index 25cee7a3094cd..0f11aa061ddc6 100644 --- a/src/hotspot/cpu/x86/x86_64.ad +++ b/src/hotspot/cpu/x86/x86_64.ad @@ -1859,8 +1859,7 @@ encode %{ %} enc_class Java_To_Runtime(method meth) %{ - // No relocation needed - __ mov64(r10, (int64_t) $meth$$method); + __ lea(r10, RuntimeAddress((address)$meth$$method)); __ call(r10); __ post_call_nop(); %} diff --git a/src/hotspot/share/asm/codeBuffer.cpp b/src/hotspot/share/asm/codeBuffer.cpp index be7a39380d922..ca25cf56be085 100644 --- a/src/hotspot/share/asm/codeBuffer.cpp +++ b/src/hotspot/share/asm/codeBuffer.cpp @@ -23,6 +23,7 @@ */ #include "asm/codeBuffer.hpp" +#include "code/aotCodeCache.hpp" #include "code/compiledIC.hpp" #include "code/oopRecorder.inline.hpp" #include "compiler/disassembler.hpp" @@ -1087,104 +1088,10 @@ void CodeBuffer::print_on(outputStream* st) { } } -// ----- CHeapString ----------------------------------------------------------- - -class CHeapString : public CHeapObj { - public: - CHeapString(const char* str) : _string(os::strdup(str)) {} - ~CHeapString() { - os::free((void*)_string); - _string = nullptr; - } - const char* string() const { return _string; } - - private: - const char* _string; -}; - -// ----- AsmRemarkCollection --------------------------------------------------- - -class AsmRemarkCollection : public CHeapObj { - public: - AsmRemarkCollection() : _ref_cnt(1), _remarks(nullptr), _next(nullptr) {} - ~AsmRemarkCollection() { - assert(is_empty(), "Must 'clear()' before deleting!"); - assert(_ref_cnt == 0, "No uses must remain when deleting!"); - } - AsmRemarkCollection* reuse() { - precond(_ref_cnt > 0); - return _ref_cnt++, this; - } - - const char* insert(uint offset, const char* remark); - const char* lookup(uint offset) const; - const char* next(uint offset) const; - - bool is_empty() const { return _remarks == nullptr; } - uint clear(); - - private: - struct Cell : CHeapString { - Cell(const char* remark, uint offset) : - CHeapString(remark), offset(offset), prev(nullptr), next(nullptr) {} - void push_back(Cell* cell) { - Cell* head = this; - Cell* tail = prev; - tail->next = cell; - cell->next = head; - cell->prev = tail; - prev = cell; - } - uint 
offset; - Cell* prev; - Cell* next; - }; - uint _ref_cnt; - Cell* _remarks; - // Using a 'mutable' iteration pointer to allow 'const' on lookup/next (that - // does not change the state of the list per se), supportig a simplistic - // iteration scheme. - mutable Cell* _next; -}; - -// ----- DbgStringCollection --------------------------------------------------- - -class DbgStringCollection : public CHeapObj { - public: - DbgStringCollection() : _ref_cnt(1), _strings(nullptr) {} - ~DbgStringCollection() { - assert(is_empty(), "Must 'clear()' before deleting!"); - assert(_ref_cnt == 0, "No uses must remain when deleting!"); - } - DbgStringCollection* reuse() { - precond(_ref_cnt > 0); - return _ref_cnt++, this; - } - - const char* insert(const char* str); - const char* lookup(const char* str) const; - - bool is_empty() const { return _strings == nullptr; } - uint clear(); - - private: - struct Cell : CHeapString { - Cell(const char* dbgstr) : - CHeapString(dbgstr), prev(nullptr), next(nullptr) {} - void push_back(Cell* cell) { - Cell* head = this; - Cell* tail = prev; - tail->next = cell; - cell->next = head; - cell->prev = tail; - prev = cell; - } - Cell* prev; - Cell* next; - }; - uint _ref_cnt; - Cell* _strings; -}; +CHeapString::~CHeapString() { + os::free((void*)_string); + _string = nullptr; +} // ----- AsmRemarks ------------------------------------------------------------ // @@ -1210,13 +1117,13 @@ bool AsmRemarks::is_empty() const { } void AsmRemarks::share(const AsmRemarks &src) { - precond(is_empty()); + precond(_remarks == nullptr || is_empty()); clear(); _remarks = src._remarks->reuse(); } void AsmRemarks::clear() { - if (_remarks->clear() == 0) { + if (_remarks != nullptr && _remarks->clear() == 0) { delete _remarks; } _remarks = nullptr; @@ -1262,13 +1169,13 @@ bool DbgStrings::is_empty() const { } void DbgStrings::share(const DbgStrings &src) { - precond(is_empty()); + precond(_strings == nullptr || is_empty()); clear(); _strings = src._strings->reuse(); } void DbgStrings::clear() { - if (_strings->clear() == 0) { + if (_strings != nullptr && _strings->clear() == 0) { delete _strings; } _strings = nullptr; diff --git a/src/hotspot/share/asm/codeBuffer.hpp b/src/hotspot/share/asm/codeBuffer.hpp index 95cd4b7f912af..e6dac484649e6 100644 --- a/src/hotspot/share/asm/codeBuffer.hpp +++ b/src/hotspot/share/asm/codeBuffer.hpp @@ -28,6 +28,7 @@ #include "code/oopRecorder.hpp" #include "code/relocInfo.hpp" #include "compiler/compiler_globals.hpp" +#include "runtime/os.hpp" #include "utilities/align.hpp" #include "utilities/debug.hpp" #include "utilities/growableArray.hpp" @@ -291,8 +292,129 @@ class CodeSection { #ifndef PRODUCT -class AsmRemarkCollection; -class DbgStringCollection; +// ----- CHeapString ----------------------------------------------------------- + +class CHeapString : public CHeapObj { + public: + CHeapString(const char* str) : _string(os::strdup(str)) {} + ~CHeapString(); + const char* string() const { return _string; } + + private: + const char* _string; +}; + +// ----- AsmRemarkCollection --------------------------------------------------- + +class AsmRemarkCollection : public CHeapObj { + public: + AsmRemarkCollection() : _ref_cnt(1), _remarks(nullptr), _next(nullptr) {} + ~AsmRemarkCollection() { + assert(is_empty(), "Must 'clear()' before deleting!"); + assert(_ref_cnt == 0, "No uses must remain when deleting!"); + } + AsmRemarkCollection* reuse() { + precond(_ref_cnt > 0); + return _ref_cnt++, this; + } + + const char* insert(uint offset, const char* remark); 
+ const char* lookup(uint offset) const; + const char* next(uint offset) const; + + bool is_empty() const { return _remarks == nullptr; } + uint clear(); + + template + bool iterate(Function function) const { // lambda enabled API + if (_remarks != nullptr) { + Cell* tmp = _remarks; + do { + if(!function(tmp->offset, tmp->string())) { + return false; + } + tmp = tmp->next; + } while (tmp != _remarks); + } + return true; + } + + private: + struct Cell : CHeapString { + Cell(const char* remark, uint offset) : + CHeapString(remark), offset(offset), prev(nullptr), next(nullptr) {} + void push_back(Cell* cell) { + Cell* head = this; + Cell* tail = prev; + tail->next = cell; + cell->next = head; + cell->prev = tail; + prev = cell; + } + uint offset; + Cell* prev; + Cell* next; + }; + uint _ref_cnt; + Cell* _remarks; + // Using a 'mutable' iteration pointer to allow 'const' on lookup/next (that + // does not change the state of the list per se), supportig a simplistic + // iteration scheme. + mutable Cell* _next; +}; + +// ----- DbgStringCollection --------------------------------------------------- + +class DbgStringCollection : public CHeapObj { + public: + DbgStringCollection() : _ref_cnt(1), _strings(nullptr) {} + ~DbgStringCollection() { + assert(is_empty(), "Must 'clear()' before deleting!"); + assert(_ref_cnt == 0, "No uses must remain when deleting!"); + } + DbgStringCollection* reuse() { + precond(_ref_cnt > 0); + return _ref_cnt++, this; + } + + const char* insert(const char* str); + const char* lookup(const char* str) const; + + bool is_empty() const { return _strings == nullptr; } + uint clear(); + + template + bool iterate(Function function) const { // lambda enabled API + if (_strings != nullptr) { + Cell* tmp = _strings; + do { + if (!function(tmp->string())) { + return false; + } + tmp = tmp->next; + } while (tmp != _strings); + } + return true; + } + + private: + struct Cell : CHeapString { + Cell(const char* dbgstr) : + CHeapString(dbgstr), prev(nullptr), next(nullptr) {} + void push_back(Cell* cell) { + Cell* head = this; + Cell* tail = prev; + tail->next = cell; + cell->next = head; + cell->prev = tail; + prev = cell; + } + Cell* prev; + Cell* next; + }; + uint _ref_cnt; + Cell* _strings; +}; // The assumption made here is that most code remarks (or comments) added to // the generated assembly code are unique, i.e. there is very little gain in @@ -315,6 +437,9 @@ class AsmRemarks { // For testing purposes only. const AsmRemarkCollection* ref() const { return _remarks; } + template + inline bool iterate(Function function) const { return _remarks->iterate(function); } + private: AsmRemarkCollection* _remarks; }; @@ -337,6 +462,9 @@ class DbgStrings { // For testing purposes only. 
const DbgStringCollection* ref() const { return _strings; } + template + bool iterate(Function function) const { return _strings->iterate(function); } + private: DbgStringCollection* _strings; }; diff --git a/src/hotspot/share/c1/c1_Runtime1.cpp b/src/hotspot/share/c1/c1_Runtime1.cpp index 0f87a90a417a7..4bf056fe312ca 100644 --- a/src/hotspot/share/c1/c1_Runtime1.cpp +++ b/src/hotspot/share/c1/c1_Runtime1.cpp @@ -31,6 +31,7 @@ #include "classfile/javaClasses.inline.hpp" #include "classfile/vmClasses.hpp" #include "classfile/vmSymbols.hpp" +#include "code/aotCodeCache.hpp" #include "code/codeBlob.hpp" #include "code/compiledIC.hpp" #include "code/scopeDesc.hpp" @@ -198,6 +199,13 @@ class C1StubIdStubAssemblerCodeGenClosure: public StubAssemblerCodeGenClosure { }; CodeBlob* Runtime1::generate_blob(BufferBlob* buffer_blob, C1StubId id, const char* name, bool expect_oop_map, StubAssemblerCodeGenClosure* cl) { + if ((int)id >= 0) { + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C1Blob, (uint)id, name, 0, nullptr); + if (blob != nullptr) { + return blob; + } + } + ResourceMark rm; // create code buffer for code storage CodeBuffer code(buffer_blob); @@ -231,6 +239,9 @@ CodeBlob* Runtime1::generate_blob(BufferBlob* buffer_blob, C1StubId id, const ch oop_maps, must_gc_arguments, false /* alloc_fail_is_fatal */ ); + if (blob != nullptr && (int)id >= 0) { + AOTCodeCache::store_code_blob(*blob, AOTCodeEntry::C1Blob, (uint)id, name, 0, nullptr); + } return blob; } @@ -265,7 +276,13 @@ bool Runtime1::initialize(BufferBlob* blob) { initialize_pd(); // generate stubs int limit = (int)C1StubId::NUM_STUBIDS; - for (int id = 0; id < limit; id++) { + for (int id = 0; id <= (int)C1StubId::forward_exception_id; id++) { + if (!generate_blob_for(blob, (C1StubId) id)) { + return false; + } + } + AOTCodeCache::init_early_c1_table(); + for (int id = (int)C1StubId::forward_exception_id+1; id < limit; id++) { if (!generate_blob_for(blob, (C1StubId) id)) { return false; } diff --git a/src/hotspot/share/c1/c1_Runtime1.hpp b/src/hotspot/share/c1/c1_Runtime1.hpp index c09de00ce553c..9912b6b515e17 100644 --- a/src/hotspot/share/c1/c1_Runtime1.hpp +++ b/src/hotspot/share/c1/c1_Runtime1.hpp @@ -54,6 +54,7 @@ enum class C1StubId :int { class Runtime1: public AllStatic { friend class ArrayCopyStub; + friend class AOTCodeAddressTable; public: // statistics diff --git a/src/hotspot/share/cds/archiveBuilder.cpp b/src/hotspot/share/cds/archiveBuilder.cpp index 549b7b3ba6a7d..a544b4921d481 100644 --- a/src/hotspot/share/cds/archiveBuilder.cpp +++ b/src/hotspot/share/cds/archiveBuilder.cpp @@ -536,7 +536,7 @@ ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref ref->msotype() == MetaspaceObj::MethodCountersType) { return set_to_null; } else if (ref->msotype() == MetaspaceObj::AdapterHandlerEntryType) { - if (AOTCodeCache::is_dumping_adapters()) { + if (AOTCodeCache::is_dumping_adapter()) { AdapterHandlerEntry* entry = (AdapterHandlerEntry*)ref->obj(); return AdapterHandlerLibrary::is_abstract_method_adapter(entry) ? 
set_to_null : make_a_copy; } else { diff --git a/src/hotspot/share/cds/cdsConfig.cpp b/src/hotspot/share/cds/cdsConfig.cpp index 463bfe3a98df3..7dbace07b4923 100644 --- a/src/hotspot/share/cds/cdsConfig.cpp +++ b/src/hotspot/share/cds/cdsConfig.cpp @@ -405,6 +405,9 @@ void CDSConfig::check_aot_flags() { if (FLAG_IS_DEFAULT(AOTCache) && AOTAdapterCaching) { log_debug(aot,codecache,init)("AOTCache is not specified - AOTAdapterCaching is ignored"); } + if (FLAG_IS_DEFAULT(AOTCache) && AOTStubCaching) { + log_debug(aot,codecache,init)("AOTCache is not specified - AOTStubCaching is ignored"); + } if (FLAG_IS_DEFAULT(AOTCache) && FLAG_IS_DEFAULT(AOTConfiguration) && FLAG_IS_DEFAULT(AOTMode)) { // AOTCache/AOTConfiguration/AOTMode not used. diff --git a/src/hotspot/share/cds/cds_globals.hpp b/src/hotspot/share/cds/cds_globals.hpp index b5657a73ef132..61edf593091ab 100644 --- a/src/hotspot/share/cds/cds_globals.hpp +++ b/src/hotspot/share/cds/cds_globals.hpp @@ -134,6 +134,9 @@ product(bool, AOTAdapterCaching, false, DIAGNOSTIC, \ "Enable saving and restoring i2c2i adapters in AOT cache") \ \ + product(bool, AOTStubCaching, false, DIAGNOSTIC, \ + "Enable saving and restoring stubs and code blobs in AOT cache") \ + \ product(uint, AOTCodeMaxSize, 10*M, DIAGNOSTIC, \ "Buffer size in bytes for AOT code caching") \ range(1*M, max_jint) \ diff --git a/src/hotspot/share/cds/metaspaceShared.cpp b/src/hotspot/share/cds/metaspaceShared.cpp index 6ea81c0543766..45a41e9f679ae 100644 --- a/src/hotspot/share/cds/metaspaceShared.cpp +++ b/src/hotspot/share/cds/metaspaceShared.cpp @@ -612,7 +612,7 @@ char* VM_PopulateDumpSharedSpace::dump_read_only_tables(AOTClassLocationConfig*& // Write lambform lines into archive LambdaFormInvokers::dump_static_archive_invokers(); - if (AOTCodeCache::is_dumping_adapters()) { + if (AOTCodeCache::is_dumping_adapter()) { AdapterHandlerLibrary::dump_aot_adapter_table(); } diff --git a/src/hotspot/share/code/aotCodeCache.cpp b/src/hotspot/share/code/aotCodeCache.cpp index ff9f82f548803..ebd428193750a 100644 --- a/src/hotspot/share/code/aotCodeCache.cpp +++ b/src/hotspot/share/code/aotCodeCache.cpp @@ -22,6 +22,7 @@ * */ + #include "asm/macroAssembler.hpp" #include "cds/aotCacheAccess.hpp" #include "cds/cds_globals.hpp" @@ -34,6 +35,7 @@ #include "gc/shared/gcConfig.hpp" #include "logging/logStream.hpp" #include "memory/memoryReserver.hpp" +#include "runtime/deoptimization.hpp" #include "runtime/flags/flagSetting.hpp" #include "runtime/globals_extension.hpp" #include "runtime/java.hpp" @@ -42,6 +44,9 @@ #include "runtime/sharedRuntime.hpp" #include "runtime/stubRoutines.hpp" #include "utilities/copy.hpp" +#ifdef COMPILER1 +#include "c1/c1_Runtime1.hpp" +#endif #ifdef COMPILER2 #include "opto/runtime.hpp" #endif @@ -58,12 +63,19 @@ #include #include +const char* aot_code_entry_kind_name[] = { +#define DECL_KIND_STRING(kind) XSTR(kind), + DO_AOTCODEENTRY_KIND(DECL_KIND_STRING) +#undef DECL_KIND_STRING +}; + static void report_load_failure() { if (AbortVMOnAOTCodeFailure) { vm_exit_during_initialization("Unable to use AOT Code Cache.", nullptr); } log_info(aot, codecache, init)("Unable to use AOT Code Cache."); AOTAdapterCaching = false; + AOTStubCaching = false; } static void report_store_failure() { @@ -73,16 +85,41 @@ static void report_store_failure() { } log_info(aot, codecache, exit)("Unable to create AOT Code Cache."); AOTAdapterCaching = false; + AOTStubCaching = false; } -bool AOTCodeCache::is_dumping_adapters() { +bool AOTCodeCache::is_dumping_adapter() { return 
AOTAdapterCaching && is_on_for_dump(); } -bool AOTCodeCache::is_using_adapters() { +bool AOTCodeCache::is_using_adapter() { return AOTAdapterCaching && is_on_for_use(); } +bool AOTCodeCache::is_dumping_stub() { + return AOTStubCaching && is_on_for_dump(); +} + +bool AOTCodeCache::is_using_stub() { + return AOTStubCaching && is_on_for_use(); +} + +static uint32_t encode_id(AOTCodeEntry::Kind kind, int id) { + assert(AOTCodeEntry::is_valid_entry_kind(kind), "invalid AOTCodeEntry kind %d", (int)kind); + // There can be a conflict of id between an Adapter and *Blob, but that should not cause any functional issue + // becasue both id and kind are used to find an entry, and that combination should be unique + if (kind == AOTCodeEntry::Adapter) { + return id; + } else if (kind == AOTCodeEntry::SharedBlob) { + return id; + } else if (kind == AOTCodeEntry::C1Blob) { + return (int)SharedStubId::NUM_STUBIDS + id; + } else { + // kind must be AOTCodeEntry::C2Blob + return (int)SharedStubId::NUM_STUBIDS + COMPILER1_PRESENT((int)C1StubId::NUM_STUBIDS) + id; + } +} + static uint _max_aot_code_size = 0; uint AOTCodeCache::max_aot_code_size() { return _max_aot_code_size; @@ -104,15 +141,17 @@ void AOTCodeCache::initialize() { bool is_using = false; if (CDSConfig::is_dumping_final_static_archive() && CDSConfig::is_dumping_aot_linked_classes()) { FLAG_SET_ERGO_IF_DEFAULT(AOTAdapterCaching, true); + FLAG_SET_ERGO_IF_DEFAULT(AOTStubCaching, true); is_dumping = true; } else if (CDSConfig::is_using_archive() && CDSConfig::is_using_aot_linked_classes()) { FLAG_SET_ERGO_IF_DEFAULT(AOTAdapterCaching, true); + FLAG_SET_ERGO_IF_DEFAULT(AOTStubCaching, true); is_using = true; } else { log_info(aot, codecache, init)("AOT Code Cache is not used: AOT Class Linking is not used."); return; // nothing to do } - if (!AOTAdapterCaching) { + if (!AOTAdapterCaching && !AOTStubCaching) { return; // AOT code caching disabled on command line } _max_aot_code_size = AOTCodeMaxSize; @@ -150,9 +189,11 @@ void AOTCodeCache::init2() { close(); report_load_failure(); } + // initialize the table of external routines so we can save // generated code blobs that reference them init_extrs_table(); + init_early_stubs_table(); } AOTCodeCache* AOTCodeCache::_cache = nullptr; @@ -225,7 +266,9 @@ AOTCodeCache::AOTCodeCache(bool is_dumping, bool is_using) : } log_info (aot, codecache, init)("Loaded %u AOT code entries from AOT Code Cache", _load_header->entries_count()); log_debug(aot, codecache, init)(" Adapters: total=%u", _load_header->adapters_count()); - log_debug(aot, codecache, init)(" All Blobs: total=%u", _load_header->blobs_count()); + log_debug(aot, codecache, init)(" Shared Blobs: total=%u", _load_header->shared_blobs_count()); + log_debug(aot, codecache, init)(" C1 Blobs: total=%u", _load_header->C1_blobs_count()); + log_debug(aot, codecache, init)(" C2 Blobs: total=%u", _load_header->C2_blobs_count()); log_debug(aot, codecache, init)(" AOT code cache size: %u bytes", _load_header->cache_size()); // Read strings @@ -248,6 +291,13 @@ void AOTCodeCache::init_extrs_table() { } } +void AOTCodeCache::init_early_stubs_table() { + AOTCodeAddressTable* table = addr_table(); + if (table != nullptr) { + table->init_early_stubs(); + } +} + void AOTCodeCache::init_shared_blobs_table() { AOTCodeAddressTable* table = addr_table(); if (table != nullptr) { @@ -255,6 +305,13 @@ void AOTCodeCache::init_shared_blobs_table() { } } +void AOTCodeCache::init_early_c1_table() { + AOTCodeAddressTable* table = addr_table(); + if (table != nullptr) { + 
+    table->init_early_c1();
+  }
+}
+
 AOTCodeCache::~AOTCodeCache() {
   if (_closing) {
     return; // Already closed
@@ -305,6 +362,7 @@ void AOTCodeCache::Config::record() {
     _flags |= restrictContendedPadding;
   }
   _compressedOopShift = CompressedOops::shift();
+  _compressedOopBase = CompressedOops::base();
   _compressedKlassShift = CompressedKlassPointers::shift();
   _contendedPaddingWidth = ContendedPaddingWidth;
   _objectAlignment = ObjectAlignmentInBytes;
@@ -357,7 +415,7 @@ bool AOTCodeCache::Config::verify() const {
     return false;
   }
   if (_compressedOopShift != (uint)CompressedOops::shift()) {
-    log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with CompressedOops::shift() = %d vs current %d", _compressedOopShift, CompressedOops::shift());
+    log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with different CompressedOops::shift(): %d vs current %d", _compressedOopShift, CompressedOops::shift());
     return false;
   }
   if (_compressedKlassShift != (uint)CompressedKlassPointers::shift()) {
@@ -372,6 +430,13 @@
     log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with ObjectAlignmentInBytes = %d vs current %d", _objectAlignment, ObjectAlignmentInBytes);
     return false;
   }
+
+  // This should be the last check as it only disables AOTStubCaching
+  if ((_compressedOopBase == nullptr || CompressedOops::base() == nullptr) && (_compressedOopBase != CompressedOops::base())) {
+    log_debug(aot, codecache, init)("AOTStubCaching is disabled: incompatible CompressedOops::base(): %p vs current %p", _compressedOopBase, CompressedOops::base());
+    AOTStubCaching = false;
+  }
+
   return true;
 }
 
@@ -539,6 +604,31 @@ AOTCodeEntry* AOTCodeCache::find_entry(AOTCodeEntry::Kind kind, uint id) {
       if (check_entry(kind, id, entry)) {
         return entry; // Found
       }
+      // Linear search around to handle id collision
+      for (int i = mid - 1; i >= l; i--) { // search back
+        ix = i * 2;
+        is = _search_entries[ix];
+        if (is != id) {
+          break;
+        }
+        index = _search_entries[ix + 1];
+        AOTCodeEntry* entry = &(_load_entries[index]);
+        if (check_entry(kind, id, entry)) {
+          return entry; // Found
+        }
+      }
+      for (int i = mid + 1; i <= h; i++) { // search forward
+        ix = i * 2;
+        is = _search_entries[ix];
+        if (is != id) {
+          break;
+        }
+        index = _search_entries[ix + 1];
+        AOTCodeEntry* entry = &(_load_entries[index]);
+        if (check_entry(kind, id, entry)) {
+          return entry; // Found
+        }
+      }
       break; // Not found match
     } else if (is < id) {
       l = mid + 1;
@@ -595,7 +685,9 @@ bool AOTCodeCache::finish_write() {
   AOTCodeEntry* entries_address = _store_entries; // Pointer to latest entry
   uint adapters_count = 0;
-  uint blobs_count = 0;
+  uint shared_blobs_count = 0;
+  uint C1_blobs_count = 0;
+  uint C2_blobs_count = 0;
   uint max_size = 0;
   // AOTCodeEntry entries were allocated in reverse in store buffer.
   // Process them in reverse order to cache first code first.
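// --- Annotation (editorial, not part of the patch) ---------------------------------------------
// The encode_id()/find_entry() changes above give each AOTCodeEntry kind its own id range so that
// shared, C1 and C2 blobs can share one sorted search table, while Adapter entries keep their raw
// ids and may therefore collide with a SharedBlob id; find_entry() resolves such collisions by
// probing the neighbouring slots with the same id until the kind also matches. A standalone
// sketch of the packing, with the HotSpot enum sizes replaced by illustrative constants
// (kNumSharedStubIds and kNumC1StubIds are placeholders, not names from this patch):
//
//   enum class Kind { None, Adapter, SharedBlob, C1Blob, C2Blob };
//   constexpr unsigned kNumSharedStubIds = 64;  // stands in for (int)SharedStubId::NUM_STUBIDS
//   constexpr unsigned kNumC1StubIds     = 32;  // stands in for (int)C1StubId::NUM_STUBIDS
//
//   constexpr unsigned encode_id_sketch(Kind kind, unsigned id) {
//     // Adapter and SharedBlob keep their raw ids (the kind breaks ties on lookup);
//     // C1 and C2 blob ids are shifted past the ranges of the kinds before them.
//     return kind == Kind::C1Blob ? kNumSharedStubIds + id
//          : kind == Kind::C2Blob ? kNumSharedStubIds + kNumC1StubIds + id
//          : id;
//   }
// ------------------------------------------------------------------------------------------------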
@@ -619,8 +711,12 @@ bool AOTCodeCache::finish_write() { AOTCodeEntry::Kind kind = entries_address[i].kind(); if (kind == AOTCodeEntry::Adapter) { adapters_count++; - } else if (kind == AOTCodeEntry::Blob) { - blobs_count++; + } else if (kind == AOTCodeEntry::SharedBlob) { + shared_blobs_count++; + } else if (kind == AOTCodeEntry::C1Blob) { + C1_blobs_count++; + } else if (kind == AOTCodeEntry::C2Blob) { + C2_blobs_count++; } } if (entries_count == 0) { @@ -652,14 +748,17 @@ bool AOTCodeCache::finish_write() { assert(size <= total_size, "%d > %d", size , total_size); log_debug(aot, codecache, exit)(" Adapters: total=%u", adapters_count); - log_debug(aot, codecache, exit)(" All Blobs: total=%u", blobs_count); + log_debug(aot, codecache, exit)(" Shared Blobs: total=%d", shared_blobs_count); + log_debug(aot, codecache, exit)(" C1 Blobs: total=%d", C1_blobs_count); + log_debug(aot, codecache, exit)(" C2 Blobs: total=%d", C2_blobs_count); log_debug(aot, codecache, exit)(" AOT code cache size: %u bytes, max entry's size: %u bytes", size, max_size); // Finalize header AOTCodeCache::Header* header = (AOTCodeCache::Header*)start; header->init(size, (uint)strings_count, strings_offset, entries_count, new_entries_offset, - adapters_count, blobs_count); + adapters_count, shared_blobs_count, + C1_blobs_count, C2_blobs_count); log_info(aot, codecache, exit)("Wrote %d AOT code entries to AOT Code Cache", entries_count); } @@ -675,10 +774,13 @@ bool AOTCodeCache::store_code_blob(CodeBlob& blob, AOTCodeEntry::Kind entry_kind } assert(AOTCodeEntry::is_valid_entry_kind(entry_kind), "invalid entry_kind %d", entry_kind); - if ((entry_kind == AOTCodeEntry::Adapter) && !AOTAdapterCaching) { + if (AOTCodeEntry::is_adapter(entry_kind) && !is_dumping_adapter()) { + return false; + } + if (AOTCodeEntry::is_blob(entry_kind) && !is_dumping_stub()) { return false; } - log_debug(aot, codecache, stubs)("Writing blob '%s' to AOT Code Cache", name); + log_debug(aot, codecache, stubs)("Writing blob '%s' (id=%u, kind=%s) to AOT Code Cache", name, id, aot_code_entry_kind_name[entry_kind]); #ifdef ASSERT LogStreamHandle(Trace, aot, codecache, stubs) log; @@ -728,6 +830,16 @@ bool AOTCodeCache::store_code_blob(CodeBlob& blob, AOTCodeEntry::Kind entry_kind has_oop_maps = true; } +#ifndef PRODUCT + // Write asm remarks + if (!cache->write_asm_remarks(blob)) { + return false; + } + if (!cache->write_dbg_strings(blob)) { + return false; + } +#endif /* PRODUCT */ + if (!cache->write_relocations(blob)) { return false; } @@ -745,10 +857,10 @@ bool AOTCodeCache::store_code_blob(CodeBlob& blob, AOTCodeEntry::Kind entry_kind } } uint entry_size = cache->_write_position - entry_position; - AOTCodeEntry* entry = new(cache) AOTCodeEntry(entry_kind, id, + AOTCodeEntry* entry = new(cache) AOTCodeEntry(entry_kind, encode_id(entry_kind, id), entry_position, entry_size, name_offset, name_size, blob_offset, has_oop_maps, blob.content_begin()); - log_debug(aot, codecache, stubs)("Wrote code blob '%s(id=%d)' to AOT Code Cache", name, id); + log_debug(aot, codecache, stubs)("Wrote code blob '%s' (id=%u, kind=%s) to AOT Code Cache", name, id, aot_code_entry_kind_name[entry_kind]); return true; } @@ -759,17 +871,23 @@ CodeBlob* AOTCodeCache::load_code_blob(AOTCodeEntry::Kind entry_kind, uint id, c } assert(AOTCodeEntry::is_valid_entry_kind(entry_kind), "invalid entry_kind %d", entry_kind); - if ((entry_kind == AOTCodeEntry::Adapter) && !AOTAdapterCaching) { + if (AOTCodeEntry::is_adapter(entry_kind) && !is_using_adapter()) { + return nullptr; + } + if 
(AOTCodeEntry::is_blob(entry_kind) && !is_using_stub()) { return nullptr; } - log_debug(aot, codecache, stubs)("Reading blob '%s' from AOT Code Cache", name); + log_debug(aot, codecache, stubs)("Reading blob '%s' (id=%u, kind=%s) from AOT Code Cache", name, id, aot_code_entry_kind_name[entry_kind]); - AOTCodeEntry* entry = cache->find_entry(entry_kind, id); + AOTCodeEntry* entry = cache->find_entry(entry_kind, encode_id(entry_kind, id)); if (entry == nullptr) { return nullptr; } AOTCodeReader reader(cache, entry); - return reader.compile_code_blob(name, entry_offset_count, entry_offsets); + CodeBlob* blob = reader.compile_code_blob(name, entry_offset_count, entry_offsets); + + log_debug(aot, codecache, stubs)("Read blob '%s' (id=%u, kind=%s) from AOT Code Cache", name, id, aot_code_entry_kind_name[entry_kind]); + return blob; } CodeBlob* AOTCodeReader::compile_code_blob(const char* name, int entry_offset_count, int* entry_offsets) { @@ -802,7 +920,22 @@ CodeBlob* AOTCodeReader::compile_code_blob(const char* name, int entry_offset_co oop_maps = read_oop_map_set(); } - CodeBlob* code_blob = CodeBlob::create(archived_blob, stored_name, reloc_data, oop_maps); +#ifndef PRODUCT + AsmRemarks asm_remarks; + read_asm_remarks(asm_remarks); + DbgStrings dbg_strings; + read_dbg_strings(dbg_strings); +#endif // PRODUCT + + CodeBlob* code_blob = CodeBlob::create(archived_blob, + stored_name, + reloc_data, + oop_maps +#ifndef PRODUCT + , asm_remarks + , dbg_strings +#endif + ); if (code_blob == nullptr) { // no space left in CodeCache return nullptr; } @@ -824,7 +957,6 @@ CodeBlob* AOTCodeReader::compile_code_blob(const char* name, int entry_offset_co entry_offsets[i] = off; } - log_debug(aot, codecache, stubs)("Read blob '%s' from AOT Code Cache", name); #ifdef ASSERT LogStreamHandle(Trace, aot, codecache, stubs) log; if (log.is_enabled()) { @@ -869,6 +1001,8 @@ bool AOTCodeCache::write_relocations(CodeBlob& code_blob) { break; case relocInfo::section_word_type: break; + case relocInfo::post_call_nop_type: + break; default: fatal("relocation %d unimplemented", (int)iter.type()); break; @@ -942,6 +1076,8 @@ void AOTCodeReader::fix_relocations(CodeBlob* code_blob) { r->fix_relocation_after_aot_load(aot_code_entry()->dumptime_content_start_addr(), code_blob->content_begin()); break; } + case relocInfo::post_call_nop_type: + break; default: fatal("relocation %d unimplemented", (int)iter.type()); break; @@ -977,6 +1113,88 @@ ImmutableOopMapSet* AOTCodeReader::read_oop_map_set() { return oopmaps; } +#ifndef PRODUCT +bool AOTCodeCache::write_asm_remarks(CodeBlob& cb) { + // Write asm remarks + uint* count_ptr = (uint *)reserve_bytes(sizeof(uint)); + if (count_ptr == nullptr) { + return false; + } + uint count = 0; + bool result = cb.asm_remarks().iterate([&] (uint offset, const char* str) -> bool { + log_trace(aot, codecache, stubs)("asm remark offset=%d, str='%s'", offset, str); + uint n = write_bytes(&offset, sizeof(uint)); + if (n != sizeof(uint)) { + return false; + } + const char* cstr = add_C_string(str); + int id = _table->id_for_C_string((address)cstr); + assert(id != -1, "asm remark string '%s' not found in AOTCodeAddressTable", str); + n = write_bytes(&id, sizeof(int)); + if (n != sizeof(int)) { + return false; + } + count += 1; + return true; + }); + *count_ptr = count; + return result; +} + +void AOTCodeReader::read_asm_remarks(AsmRemarks& asm_remarks) { + // Read asm remarks + uint offset = read_position(); + uint count = *(uint *)addr(offset); + offset += sizeof(uint); + for (uint i = 0; i < 
count; i++) { + uint remark_offset = *(uint *)addr(offset); + offset += sizeof(uint); + int remark_string_id = *(uint *)addr(offset); + offset += sizeof(int); + const char* remark = (const char*)_cache->address_for_C_string(remark_string_id); + asm_remarks.insert(remark_offset, remark); + } + set_read_position(offset); +} + +bool AOTCodeCache::write_dbg_strings(CodeBlob& cb) { + // Write dbg strings + uint* count_ptr = (uint *)reserve_bytes(sizeof(uint)); + if (count_ptr == nullptr) { + return false; + } + uint count = 0; + bool result = cb.dbg_strings().iterate([&] (const char* str) -> bool { + log_trace(aot, codecache, stubs)("dbg string=%s", str); + const char* cstr = add_C_string(str); + int id = _table->id_for_C_string((address)cstr); + assert(id != -1, "db string '%s' not found in AOTCodeAddressTable", str); + uint n = write_bytes(&id, sizeof(int)); + if (n != sizeof(int)) { + return false; + } + count += 1; + return true; + }); + *count_ptr = count; + return result; +} + +void AOTCodeReader::read_dbg_strings(DbgStrings& dbg_strings) { + // Read dbg strings + uint offset = read_position(); + uint count = *(uint *)addr(offset); + offset += sizeof(uint); + for (uint i = 0; i < count; i++) { + int string_id = *(uint *)addr(offset); + offset += sizeof(int); + const char* str = (const char*)_cache->address_for_C_string(string_id); + dbg_strings.insert(str); + } + set_read_position(offset); +} +#endif // PRODUCT + //======================= AOTCodeAddressTable =============== // address table ids for generated routines, external addresses and C @@ -986,17 +1204,20 @@ ImmutableOopMapSet* AOTCodeReader::read_oop_map_set() { // [_blobs_base, _blobs_base + _blobs_max -1], // ... // [_c_str_base, _c_str_base + _c_str_max -1], -#define _extrs_max 13 -#define _blobs_max 10 -#define _all_max 23 -#define _extrs_base 0 -#define _blobs_base (_extrs_base + _extrs_max) -#define _blobs_end (_blobs_base + _blobs_max) +#define _extrs_max 100 +#define _stubs_max 3 -#if (_blobs_end > _all_max) -#error AOTCodeAddress table ranges need adjusting -#endif +#define _shared_blobs_max 20 +#define _C1_blobs_max 10 +#define _blobs_max (_shared_blobs_max+_C1_blobs_max) +#define _all_max (_extrs_max+_stubs_max+_blobs_max) + +#define _extrs_base 0 +#define _stubs_base (_extrs_base + _extrs_max) +#define _shared_blobs_base (_stubs_base + _stubs_max) +#define _C1_blobs_base (_shared_blobs_base + _shared_blobs_max) +#define _blobs_end (_shared_blobs_base + _blobs_max) #define SET_ADDRESS(type, addr) \ { \ @@ -1008,16 +1229,106 @@ static bool initializing_extrs = false; void AOTCodeAddressTable::init_extrs() { if (_extrs_complete || initializing_extrs) return; // Done already + + assert(_blobs_end <= _all_max, "AOTCodeAddress table ranges need adjusting"); + initializing_extrs = true; _extrs_addr = NEW_C_HEAP_ARRAY(address, _extrs_max, mtCode); _extrs_length = 0; - // Recored addresses of VM runtime methods + // Record addresses of VM runtime methods SET_ADDRESS(_extrs, SharedRuntime::fixup_callers_callsite); SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method); SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method_abstract); SET_ADDRESS(_extrs, SharedRuntime::handle_wrong_method_ic_miss); +#if defined(AARCH64) + SET_ADDRESS(_extrs, JavaThread::aarch64_get_thread_helper); +#endif + { + // Required by Shared blobs + SET_ADDRESS(_extrs, Deoptimization::fetch_unroll_info); + SET_ADDRESS(_extrs, Deoptimization::unpack_frames); + SET_ADDRESS(_extrs, SafepointSynchronize::handle_polling_page_exception); + SET_ADDRESS(_extrs, 
SharedRuntime::resolve_opt_virtual_call_C); + SET_ADDRESS(_extrs, SharedRuntime::resolve_virtual_call_C); + SET_ADDRESS(_extrs, SharedRuntime::resolve_static_call_C); + SET_ADDRESS(_extrs, SharedRuntime::throw_delayed_StackOverflowError); + SET_ADDRESS(_extrs, SharedRuntime::throw_AbstractMethodError); + SET_ADDRESS(_extrs, SharedRuntime::throw_IncompatibleClassChangeError); + SET_ADDRESS(_extrs, SharedRuntime::throw_NullPointerException_at_call); + } + +#ifdef COMPILER1 + { + // Required by C1 blobs + SET_ADDRESS(_extrs, static_cast(SharedRuntime::dtrace_object_alloc)); + SET_ADDRESS(_extrs, SharedRuntime::exception_handler_for_return_address); + SET_ADDRESS(_extrs, SharedRuntime::register_finalizer); + SET_ADDRESS(_extrs, Runtime1::is_instance_of); + SET_ADDRESS(_extrs, Runtime1::exception_handler_for_pc); + SET_ADDRESS(_extrs, Runtime1::check_abort_on_vm_exception); + SET_ADDRESS(_extrs, Runtime1::new_instance); + SET_ADDRESS(_extrs, Runtime1::counter_overflow); + SET_ADDRESS(_extrs, Runtime1::new_type_array); + SET_ADDRESS(_extrs, Runtime1::new_object_array); + SET_ADDRESS(_extrs, Runtime1::new_multi_array); + SET_ADDRESS(_extrs, Runtime1::throw_range_check_exception); + SET_ADDRESS(_extrs, Runtime1::throw_index_exception); + SET_ADDRESS(_extrs, Runtime1::throw_div0_exception); + SET_ADDRESS(_extrs, Runtime1::throw_null_pointer_exception); + SET_ADDRESS(_extrs, Runtime1::throw_array_store_exception); + SET_ADDRESS(_extrs, Runtime1::throw_class_cast_exception); + SET_ADDRESS(_extrs, Runtime1::throw_incompatible_class_change_error); + SET_ADDRESS(_extrs, Runtime1::is_instance_of); + SET_ADDRESS(_extrs, Runtime1::monitorenter); + SET_ADDRESS(_extrs, Runtime1::monitorexit); + SET_ADDRESS(_extrs, Runtime1::deoptimize); + SET_ADDRESS(_extrs, Runtime1::access_field_patching); + SET_ADDRESS(_extrs, Runtime1::move_klass_patching); + SET_ADDRESS(_extrs, Runtime1::move_mirror_patching); + SET_ADDRESS(_extrs, Runtime1::move_appendix_patching); + SET_ADDRESS(_extrs, Runtime1::predicate_failed_trap); + SET_ADDRESS(_extrs, Runtime1::unimplemented_entry); + SET_ADDRESS(_extrs, Thread::current); + SET_ADDRESS(_extrs, CompressedKlassPointers::base_addr()); +#ifndef PRODUCT + SET_ADDRESS(_extrs, os::breakpoint); +#endif + } +#endif + +#ifdef COMPILER2 + { + // Required by C2 blobs + SET_ADDRESS(_extrs, Deoptimization::uncommon_trap); + SET_ADDRESS(_extrs, OptoRuntime::handle_exception_C); + SET_ADDRESS(_extrs, OptoRuntime::new_instance_C); + SET_ADDRESS(_extrs, OptoRuntime::new_array_C); + SET_ADDRESS(_extrs, OptoRuntime::new_array_nozero_C); + SET_ADDRESS(_extrs, OptoRuntime::multianewarray2_C); + SET_ADDRESS(_extrs, OptoRuntime::multianewarray3_C); + SET_ADDRESS(_extrs, OptoRuntime::multianewarray4_C); + SET_ADDRESS(_extrs, OptoRuntime::multianewarray5_C); + SET_ADDRESS(_extrs, OptoRuntime::multianewarrayN_C); +#if INCLUDE_JVMTI + SET_ADDRESS(_extrs, SharedRuntime::notify_jvmti_vthread_start); + SET_ADDRESS(_extrs, SharedRuntime::notify_jvmti_vthread_end); + SET_ADDRESS(_extrs, SharedRuntime::notify_jvmti_vthread_mount); + SET_ADDRESS(_extrs, SharedRuntime::notify_jvmti_vthread_unmount); +#endif + SET_ADDRESS(_extrs, OptoRuntime::complete_monitor_locking_C); + SET_ADDRESS(_extrs, OptoRuntime::monitor_notify_C); + SET_ADDRESS(_extrs, OptoRuntime::monitor_notifyAll_C); + SET_ADDRESS(_extrs, OptoRuntime::rethrow_C); + SET_ADDRESS(_extrs, OptoRuntime::slow_arraycopy_C); + SET_ADDRESS(_extrs, OptoRuntime::register_finalizer_C); +#if defined(AARCH64) + SET_ADDRESS(_extrs, 
JavaThread::verify_cross_modify_fence_failure); +#endif // AARCH64 + } +#endif // COMPILER2 + #if INCLUDE_G1GC SET_ADDRESS(_extrs, G1BarrierSetRuntime::write_ref_field_post_entry); SET_ADDRESS(_extrs, G1BarrierSetRuntime::write_ref_field_pre_entry); @@ -1033,9 +1344,6 @@ void AOTCodeAddressTable::init_extrs() { SET_ADDRESS(_extrs, &ZPointerLoadShift); #endif #endif -#ifdef COMPILER2 - SET_ADDRESS(_extrs, OptoRuntime::handle_exception_C); -#endif #ifndef ZERO #if defined(AMD64) || defined(AARCH64) || defined(RISCV64) SET_ADDRESS(_extrs, MacroAssembler::debug64); @@ -1046,32 +1354,88 @@ void AOTCodeAddressTable::init_extrs() { log_debug(aot, codecache, init)("External addresses recorded"); } +static bool initializing_early_stubs = false; + +void AOTCodeAddressTable::init_early_stubs() { + if (_complete || initializing_early_stubs) return; // Done already + initializing_early_stubs = true; + _stubs_addr = NEW_C_HEAP_ARRAY(address, _stubs_max, mtCode); + _stubs_length = 0; + SET_ADDRESS(_stubs, StubRoutines::forward_exception_entry()); + + { + // Required by C1 blobs +#if defined(AMD64) && !defined(ZERO) + SET_ADDRESS(_stubs, StubRoutines::x86::double_sign_flip()); + SET_ADDRESS(_stubs, StubRoutines::x86::d2l_fixup()); +#endif // AMD64 + } + + _early_stubs_complete = true; + log_info(aot, codecache, init)("Early stubs recorded"); +} + static bool initializing_shared_blobs = false; void AOTCodeAddressTable::init_shared_blobs() { if (_complete || initializing_shared_blobs) return; // Done already initializing_shared_blobs = true; - _blobs_addr = NEW_C_HEAP_ARRAY(address, _blobs_max, mtCode); - - _blobs_length = 0; // for shared blobs - - // Recored addresses of generated code blobs - SET_ADDRESS(_blobs, SharedRuntime::get_handle_wrong_method_stub()); - SET_ADDRESS(_blobs, SharedRuntime::get_ic_miss_stub()); + address* blobs_addr = NEW_C_HEAP_ARRAY(address, _blobs_max, mtCode); + _shared_blobs_addr = blobs_addr; + _C1_blobs_addr = _shared_blobs_addr + _shared_blobs_max; + _shared_blobs_length = _C1_blobs_length = 0; + + // clear the address table + memset(blobs_addr, 0, sizeof(address)* _blobs_max); + + // Record addresses of generated code blobs + SET_ADDRESS(_shared_blobs, SharedRuntime::get_handle_wrong_method_stub()); + SET_ADDRESS(_shared_blobs, SharedRuntime::get_ic_miss_stub()); + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->unpack()); + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->unpack_with_exception()); + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->unpack_with_reexecution()); + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->unpack_with_exception_in_tls()); +#if INCLUDE_JVMCI + if (EnableJVMCI) { + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->uncommon_trap()); + SET_ADDRESS(_shared_blobs, SharedRuntime::deopt_blob()->implicit_exception_uncommon_trap()); + } +#endif _shared_blobs_complete = true; log_debug(aot, codecache, init)("Early shared blobs recorded"); _complete = true; } +void AOTCodeAddressTable::init_early_c1() { +#ifdef COMPILER1 + // Runtime1 Blobs + for (int i = 0; i <= (int)C1StubId::forward_exception_id; i++) { + C1StubId id = (C1StubId)i; + if (Runtime1::blob_for(id) == nullptr) { + log_info(aot, codecache, init)("C1 blob %s is missing", Runtime1::name_for(id)); + continue; + } + if (Runtime1::entry_for(id) == nullptr) { + log_info(aot, codecache, init)("C1 blob %s is missing entry", Runtime1::name_for(id)); + continue; + } + address entry = Runtime1::entry_for(id); + SET_ADDRESS(_C1_blobs, entry); + } +#endif // 
COMPILER1 + assert(_C1_blobs_length <= _C1_blobs_max, "increase _C1_blobs_max to %d", _C1_blobs_length); + _early_c1_complete = true; +} + #undef SET_ADDRESS AOTCodeAddressTable::~AOTCodeAddressTable() { if (_extrs_addr != nullptr) { FREE_C_HEAP_ARRAY(address, _extrs_addr); } - if (_blobs_addr != nullptr) { - FREE_C_HEAP_ARRAY(address, _blobs_addr); + if (_shared_blobs_addr != nullptr) { + FREE_C_HEAP_ARRAY(address, _shared_blobs_addr); } } @@ -1081,7 +1445,7 @@ AOTCodeAddressTable::~AOTCodeAddressTable() { #define MAX_STR_COUNT 500 #endif #define _c_str_max MAX_STR_COUNT -#define _c_str_base _all_max +static const int _c_str_base = _all_max; static const char* _C_strings_in[MAX_STR_COUNT] = {nullptr}; // Incoming strings static const char* _C_strings[MAX_STR_COUNT] = {nullptr}; // Our duplicates @@ -1237,8 +1601,14 @@ address AOTCodeAddressTable::address_for_id(int idx) { if (/* id >= _extrs_base && */ id < _extrs_length) { return _extrs_addr[id - _extrs_base]; } - if (id >= _blobs_base && id < _blobs_base + _blobs_length) { - return _blobs_addr[id - _blobs_base]; + if (id >= _stubs_base && id < _stubs_base + _stubs_length) { + return _stubs_addr[id - _stubs_base]; + } + if (id >= _shared_blobs_base && id < _shared_blobs_base + _shared_blobs_length) { + return _shared_blobs_addr[id - _shared_blobs_base]; + } + if (id >= _C1_blobs_base && id < _C1_blobs_base + _C1_blobs_length) { + return _C1_blobs_addr[id - _C1_blobs_base]; } if (id >= _c_str_base && id < (_c_str_base + (uint)_C_strings_count)) { return address_for_C_string(id - _c_str_base); @@ -1262,18 +1632,23 @@ int AOTCodeAddressTable::id_for_address(address addr, RelocIterator reloc, CodeB } if (StubRoutines::contains(addr)) { // Search in stubs - StubCodeDesc* desc = StubCodeDesc::desc_for(addr); - if (desc == nullptr) { - desc = StubCodeDesc::desc_for(addr + frame::pc_return_offset); + id = search_address(addr, _stubs_addr, _stubs_length); + if (id < 0) { + StubCodeDesc* desc = StubCodeDesc::desc_for(addr); + if (desc == nullptr) { + desc = StubCodeDesc::desc_for(addr + frame::pc_return_offset); + } + const char* sub_name = (desc != nullptr) ? desc->name() : ""; + fatal("Address " INTPTR_FORMAT " for Stub:%s is missing in AOT Code Cache addresses table", p2i(addr), sub_name); + } else { + return id + _stubs_base; } - const char* sub_name = (desc != nullptr) ? 
desc->name() : ""; - fatal("Address " INTPTR_FORMAT " for Stub:%s is missing in AOT Code Cache addresses table", p2i(addr), sub_name); } else { CodeBlob* cb = CodeCache::find_blob(addr); if (cb != nullptr) { // Search in code blobs - int id_base = _blobs_base; - id = search_address(addr, _blobs_addr, _blobs_length); + int id_base = _shared_blobs_base; + id = search_address(addr, _shared_blobs_addr, _blobs_max); if (id < 0) { fatal("Address " INTPTR_FORMAT " for Blob:%s is missing in AOT Code Cache addresses table", p2i(addr), cb->name()); } else { @@ -1338,4 +1713,3 @@ void AOTCodeCache::print_on(outputStream* st) { st->print_cr("failed to map code cache"); } } - diff --git a/src/hotspot/share/code/aotCodeCache.hpp b/src/hotspot/share/code/aotCodeCache.hpp index d5c938e3b8b12..0956a37d3b955 100644 --- a/src/hotspot/share/code/aotCodeCache.hpp +++ b/src/hotspot/share/code/aotCodeCache.hpp @@ -38,17 +38,27 @@ class AOTCodeCache; class AdapterBlob; class ExceptionBlob; class ImmutableOopMapSet; +class AsmRemarks; +class DbgStrings; enum class vmIntrinsicID : int; enum CompLevel : signed char; +#define DO_AOTCODEENTRY_KIND(Fn) \ + Fn(None) \ + Fn(Adapter) \ + Fn(SharedBlob) \ + Fn(C1Blob) \ + Fn(C2Blob) \ + // Descriptor of AOT Code Cache's entry class AOTCodeEntry { public: - enum Kind { - None = 0, - Adapter = 1, - Blob = 2 + enum Kind : s1 { +#define DECL_KIND_ENUM(kind) kind, + DO_AOTCODEENTRY_KIND(DECL_KIND_ENUM) +#undef DECL_KIND_ENUM + Kind_count }; private: @@ -100,34 +110,49 @@ class AOTCodeEntry { bool has_oop_maps() const { return _has_oop_maps; } address dumptime_content_start_addr() const { return _dumptime_content_start_addr; } - static bool is_valid_entry_kind(Kind kind) { return kind == Adapter || kind == Blob; } + static bool is_valid_entry_kind(Kind kind) { return kind > None && kind < Kind_count; } + static bool is_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob; } + static bool is_adapter(Kind kind) { return kind == Adapter; } }; // Addresses of stubs, blobs and runtime finctions called from compiled code. class AOTCodeAddressTable : public CHeapObj { private: address* _extrs_addr; - address* _blobs_addr; + address* _stubs_addr; + address* _shared_blobs_addr; + address* _C1_blobs_addr; uint _extrs_length; - uint _blobs_length; + uint _stubs_length; + uint _shared_blobs_length; + uint _C1_blobs_length; bool _extrs_complete; + bool _early_stubs_complete; bool _shared_blobs_complete; + bool _early_c1_complete; bool _complete; public: AOTCodeAddressTable() : _extrs_addr(nullptr), - _blobs_addr(nullptr), + _shared_blobs_addr(nullptr), + _C1_blobs_addr(nullptr), _extrs_length(0), - _blobs_length(0), + _stubs_length(0), + _shared_blobs_length(0), + _C1_blobs_length(0), _extrs_complete(false), + _early_stubs_complete(false), _shared_blobs_complete(false), + _early_c1_complete(false), _complete(false) { } ~AOTCodeAddressTable(); void init_extrs(); + void init_early_stubs(); void init_shared_blobs(); + void init_early_c1(); const char* add_C_string(const char* str); int id_for_C_string(address str); address address_for_C_string(int idx); @@ -140,6 +165,7 @@ class AOTCodeCache : public CHeapObj { // Classes used to describe AOT code cache. 
protected: class Config { + address _compressedOopBase; uint _compressedOopShift; uint _compressedKlassShift; uint _contendedPaddingWidth; @@ -175,14 +201,17 @@ class AOTCodeCache : public CHeapObj { uint _entries_count; // number of recorded entries uint _entries_offset; // offset of AOTCodeEntry array describing entries uint _adapters_count; - uint _blobs_count; + uint _shared_blobs_count; + uint _C1_blobs_count; + uint _C2_blobs_count; Config _config; public: void init(uint cache_size, uint strings_count, uint strings_offset, uint entries_count, uint entries_offset, - uint adapters_count, uint blobs_count) { + uint adapters_count, uint shared_blobs_count, + uint C1_blobs_count, uint C2_blobs_count) { _version = AOT_CODE_VERSION; _cache_size = cache_size; _strings_count = strings_count; @@ -190,8 +219,9 @@ class AOTCodeCache : public CHeapObj { _entries_count = entries_count; _entries_offset = entries_offset; _adapters_count = adapters_count; - _blobs_count = blobs_count; - + _shared_blobs_count = shared_blobs_count; + _C1_blobs_count = C1_blobs_count; + _C2_blobs_count = C2_blobs_count; _config.record(); } @@ -202,7 +232,9 @@ class AOTCodeCache : public CHeapObj { uint entries_count() const { return _entries_count; } uint entries_offset() const { return _entries_offset; } uint adapters_count() const { return _adapters_count; } - uint blobs_count() const { return _blobs_count; } + uint shared_blobs_count() const { return _shared_blobs_count; } + uint C1_blobs_count() const { return _C1_blobs_count; } + uint C2_blobs_count() const { return _C2_blobs_count; } bool verify_config(uint load_size) const; bool verify_vm_config() const { // Called after Universe initialized @@ -267,8 +299,11 @@ class AOTCodeCache : public CHeapObj { int store_strings(); static void init_extrs_table() NOT_CDS_RETURN; + static void init_early_stubs_table() NOT_CDS_RETURN; static void init_shared_blobs_table() NOT_CDS_RETURN; + static void init_early_c1_table() NOT_CDS_RETURN; + address address_for_C_string(int idx) const { return _table->address_for_C_string(idx); } address address_for_id(int id) const { return _table->address_for_id(id); } bool for_use() const { return _for_use && !_failed; } @@ -288,17 +323,21 @@ class AOTCodeCache : public CHeapObj { bool write_relocations(CodeBlob& code_blob); bool write_oop_map_set(CodeBlob& cb); +#ifndef PRODUCT + bool write_asm_remarks(CodeBlob& cb); + bool write_dbg_strings(CodeBlob& cb); +#endif // PRODUCT static bool store_code_blob(CodeBlob& blob, AOTCodeEntry::Kind entry_kind, uint id, const char* name, - int entry_offset_count, - int* entry_offsets) NOT_CDS_RETURN_(false); + int entry_offset_count = 0, + int* entry_offsets = nullptr) NOT_CDS_RETURN_(false); static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind, uint id, const char* name, - int entry_offset_count, - int* entry_offsets) NOT_CDS_RETURN_(nullptr); + int entry_offset_count = 0, + int* entry_offsets = nullptr) NOT_CDS_RETURN_(nullptr); static uint store_entries_cnt() { if (is_on_for_dump()) { @@ -328,8 +367,11 @@ class AOTCodeCache : public CHeapObj { static bool is_on_for_use() { return is_on() && _cache->for_use(); } static bool is_on_for_dump() { return is_on() && _cache->for_dump(); } - static bool is_dumping_adapters() NOT_CDS_RETURN_(false); - static bool is_using_adapters() NOT_CDS_RETURN_(false); + static bool is_dumping_adapter() NOT_CDS_RETURN_(false); + static bool is_using_adapter() NOT_CDS_RETURN_(false); + + static bool is_dumping_stub() NOT_CDS_RETURN_(false); + static bool is_using_stub() 
NOT_CDS_RETURN_(false); static const char* add_C_string(const char* str) NOT_CDS_RETURN_(str); @@ -361,5 +403,10 @@ class AOTCodeReader { ImmutableOopMapSet* read_oop_map_set(); void fix_relocations(CodeBlob* code_blob); +#ifndef PRODUCT + void read_asm_remarks(AsmRemarks& asm_remarks); + void read_dbg_strings(DbgStrings& dbg_strings); +#endif // PRODUCT }; -#endif // SHARE_CODE_AOTCODECACH_HPP + +#endif // SHARE_CODE_AOTCODECACHE_HPP diff --git a/src/hotspot/share/code/codeBlob.cpp b/src/hotspot/share/code/codeBlob.cpp index 1512f498b37cb..da0408d1270c7 100644 --- a/src/hotspot/share/code/codeBlob.cpp +++ b/src/hotspot/share/code/codeBlob.cpp @@ -264,7 +264,11 @@ void CodeBlob::post_restore() { vptr(_kind)->post_restore(this); } -CodeBlob* CodeBlob::restore(address code_cache_buffer, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps) { +CodeBlob* CodeBlob::restore(address code_cache_buffer, + const char* name, + address archived_reloc_data, + ImmutableOopMapSet* archived_oop_maps) +{ copy_to(code_cache_buffer); CodeBlob* code_blob = (CodeBlob*)code_cache_buffer; code_blob->set_name(name); @@ -273,7 +277,16 @@ CodeBlob* CodeBlob::restore(address code_cache_buffer, const char* name, address return code_blob; } -CodeBlob* CodeBlob::create(CodeBlob* archived_blob, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps) { +CodeBlob* CodeBlob::create(CodeBlob* archived_blob, + const char* name, + address archived_reloc_data, + ImmutableOopMapSet* archived_oop_maps +#ifndef PRODUCT + , AsmRemarks& archived_asm_remarks + , DbgStrings& archived_dbg_strings +#endif // PRODUCT + ) +{ ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock CodeCache::gc_on_allocation(); @@ -284,7 +297,17 @@ CodeBlob* CodeBlob::create(CodeBlob* archived_blob, const char* name, address ar MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag); address code_cache_buffer = (address)CodeCache::allocate(size, CodeBlobType::NonNMethod); if (code_cache_buffer != nullptr) { - blob = archived_blob->restore(code_cache_buffer, name, archived_reloc_data, archived_oop_maps); + blob = archived_blob->restore(code_cache_buffer, + name, + archived_reloc_data, + archived_oop_maps); +#ifndef PRODUCT + blob->use_remarks(archived_asm_remarks); + archived_asm_remarks.clear(); + blob->use_strings(archived_dbg_strings); + archived_dbg_strings.clear(); +#endif // PRODUCT + assert(blob != nullptr, "sanity check"); // Flush the code block ICache::invalidate_range(blob->code_begin(), blob->code_size()); diff --git a/src/hotspot/share/code/codeBlob.hpp b/src/hotspot/share/code/codeBlob.hpp index 37f83824987be..f813752e01e8a 100644 --- a/src/hotspot/share/code/codeBlob.hpp +++ b/src/hotspot/share/code/codeBlob.hpp @@ -99,6 +99,9 @@ class RuntimeStub; // for as_runtime_stub() class JavaFrameAnchor; // for UpcallStub::jfa_for_frame class AdapterBlob; class ExceptionBlob; +class DeoptimizationBlob; +class SafepointBlob; +class UncommonTrapBlob; class CodeBlob { friend class VMStructs; @@ -205,8 +208,12 @@ class CodeBlob { nmethod* as_nmethod() const { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; } CodeBlob* as_codeblob() const { return (CodeBlob*) this; } AdapterBlob* as_adapter_blob() const { assert(is_adapter_blob(), "must be adapter blob"); return (AdapterBlob*) this; } + ExceptionBlob* as_exception_blob() const { assert(is_exception_stub(), "must be exception stub"); return (ExceptionBlob*) this; } + DeoptimizationBlob* 
as_deoptimization_blob() const { assert(is_deoptimization_stub(), "must be deopt stub"); return (DeoptimizationBlob*) this; } + SafepointBlob* as_safepoint_blob() const { assert(is_safepoint_stub(), "must be safepoint stub"); return (SafepointBlob*) this; } UpcallStub* as_upcall_stub() const { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; } RuntimeStub* as_runtime_stub() const { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; } + UncommonTrapBlob* as_uncommon_trap_blob() const { assert(is_uncommon_trap_stub(), "must be uncommon trap stub"); return (UncommonTrapBlob*) this; } // Boundaries address header_begin() const { return (address) this; } @@ -308,7 +315,15 @@ class CodeBlob { // methods to restore a blob from AOT code cache into the CodeCache void post_restore(); CodeBlob* restore(address code_cache_buffer, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps); - static CodeBlob* create(CodeBlob* archived_blob, const char* name, address archived_reloc_data, ImmutableOopMapSet* archived_oop_maps); + static CodeBlob* create(CodeBlob* archived_blob, + const char* name, + address archived_reloc_data, + ImmutableOopMapSet* archived_oop_maps +#ifndef PRODUCT + , AsmRemarks& archived_asm_remarks + , DbgStrings& archived_dbg_strings +#endif // PRODUCT + ); }; //---------------------------------------------------------------------------------------------------- diff --git a/src/hotspot/share/oops/compressedKlass.hpp b/src/hotspot/share/oops/compressedKlass.hpp index 4ce644d9cef69..a07e758be7bce 100644 --- a/src/hotspot/share/oops/compressedKlass.hpp +++ b/src/hotspot/share/oops/compressedKlass.hpp @@ -213,6 +213,7 @@ class CompressedKlassPointers : public AllStatic { // Can only be used after initialization static address base() { check_init(_base); return _base; } + static address base_addr() { return (address)&_base; } static int shift() { check_init(_shift); return _shift; } static address klass_range_start() { return _klass_range_start; } diff --git a/src/hotspot/share/oops/method.cpp b/src/hotspot/share/oops/method.cpp index ce7d77a38ccc2..7496fe7c4e68e 100644 --- a/src/hotspot/share/oops/method.cpp +++ b/src/hotspot/share/oops/method.cpp @@ -407,7 +407,7 @@ void Method::metaspace_pointers_do(MetaspaceClosure* it) { void Method::remove_unshareable_info() { unlink_method(); - if (AOTCodeCache::is_dumping_adapters() && _adapter != nullptr) { + if (AOTCodeCache::is_dumping_adapter() && _adapter != nullptr) { _adapter->remove_unshareable_info(); } JFR_ONLY(REMOVE_METHOD_ID(this);) @@ -1146,7 +1146,7 @@ void Method::unlink_code() { void Method::unlink_method() { assert(CDSConfig::is_dumping_archive(), "sanity"); _code = nullptr; - if (!AOTCodeCache::is_dumping_adapters() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) { + if (!AOTCodeCache::is_dumping_adapter() || AdapterHandlerLibrary::is_abstract_method_adapter(_adapter)) { _adapter = nullptr; } _i2i_entry = nullptr; diff --git a/src/hotspot/share/opto/compile.cpp b/src/hotspot/share/opto/compile.cpp index 10846a326262a..64d21a59655d6 100644 --- a/src/hotspot/share/opto/compile.cpp +++ b/src/hotspot/share/opto/compile.cpp @@ -26,6 +26,7 @@ #include "asm/macroAssembler.inline.hpp" #include "ci/ciReplay.hpp" #include "classfile/javaClasses.hpp" +#include "code/aotCodeCache.hpp" #include "code/exceptionHandlerTable.hpp" #include "code/nmethod.hpp" #include "compiler/compilationFailureInfo.hpp" @@ -630,6 +631,7 @@ Compile::Compile(ciEnv* 
ci_env, ciMethod* target, int osr_bci, _ilt(nullptr), _stub_function(nullptr), _stub_name(nullptr), + _stub_id(-1), _stub_entry_point(nullptr), _max_node_limit(MaxNodeLimit), _post_loop_opts_phase(false), @@ -899,6 +901,7 @@ Compile::Compile(ciEnv* ci_env, TypeFunc_generator generator, address stub_function, const char* stub_name, + int stub_id, int is_fancy_jump, bool pass_tls, bool return_pc, @@ -910,6 +913,7 @@ Compile::Compile(ciEnv* ci_env, _entry_bci(InvocationEntryBci), _stub_function(stub_function), _stub_name(stub_name), + _stub_id(stub_id), _stub_entry_point(nullptr), _max_node_limit(MaxNodeLimit), _post_loop_opts_phase(false), @@ -961,6 +965,16 @@ Compile::Compile(ciEnv* ci_env, _allowed_reasons(0) { C = this; + // try to reuse an existing stub + { + CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, _stub_id, stub_name); + if (blob != nullptr) { + RuntimeStub* rs = blob->as_runtime_stub(); + _stub_entry_point = rs->entry_point(); + return; + } + } + TraceTime t1(nullptr, &_t_totalCompilation, CITime, false); TraceTime t2(nullptr, &_t_stubCompilation, CITime, false); diff --git a/src/hotspot/share/opto/compile.hpp b/src/hotspot/share/opto/compile.hpp index 6d43948964e90..f67d283961154 100644 --- a/src/hotspot/share/opto/compile.hpp +++ b/src/hotspot/share/opto/compile.hpp @@ -308,6 +308,7 @@ class Compile : public Phase { InlineTree* _ilt; // Ditto (temporary). address _stub_function; // VM entry for stub being compiled, or null const char* _stub_name; // Name of stub or adapter being compiled, or null + int _stub_id; // unique id for stub or -1 address _stub_entry_point; // Compile code entry for generated stub, or null // Control of this compilation. @@ -570,6 +571,7 @@ class Compile : public Phase { InlineTree* ilt() const { return _ilt; } address stub_function() const { return _stub_function; } const char* stub_name() const { return _stub_name; } + int stub_id() const { return _stub_id; } address stub_entry_point() const { return _stub_entry_point; } void set_stub_entry_point(address z) { _stub_entry_point = z; } @@ -1140,7 +1142,7 @@ class Compile : public Phase { // convention. 
Compile(ciEnv* ci_env, const TypeFunc *(*gen)(), address stub_function, const char *stub_name, - int is_fancy_jump, bool pass_tls, + int stub_id, int is_fancy_jump, bool pass_tls, bool return_pc, DirectiveSet* directive); ~Compile(); diff --git a/src/hotspot/share/opto/output.cpp b/src/hotspot/share/opto/output.cpp index 1cd6ebabc4bf7..930c355fa624b 100644 --- a/src/hotspot/share/opto/output.cpp +++ b/src/hotspot/share/opto/output.cpp @@ -23,6 +23,7 @@ */ #include "asm/assembler.inline.hpp" +#include "code/aotCodeCache.hpp" #include "code/compiledIC.hpp" #include "code/debugInfo.hpp" #include "code/debugInfoRec.hpp" @@ -3487,6 +3488,7 @@ void PhaseOutput::install_stub(const char* stub_name) { } else { assert(rs->is_runtime_stub(), "sanity check"); C->set_stub_entry_point(rs->entry_point()); + AOTCodeCache::store_code_blob(*rs, AOTCodeEntry::C2Blob, C->stub_id(), stub_name); } } } diff --git a/src/hotspot/share/opto/runtime.cpp b/src/hotspot/share/opto/runtime.cpp index fcb0ac38ace68..58ffa84a8dca6 100644 --- a/src/hotspot/share/opto/runtime.cpp +++ b/src/hotspot/share/opto/runtime.cpp @@ -141,6 +141,7 @@ static bool check_compiled_frame(JavaThread* thread) { #define C2_STUB_TYPEFUNC(name) name ## _Type #define C2_STUB_C_FUNC(name) CAST_FROM_FN_PTR(address, name ## _C) #define C2_STUB_NAME(name) stub_name(OptoStubId::name ## _id) +#define C2_STUB_ID(name) OptoStubId::name ## _id // Almost all the C functions targeted from the generated stubs are // implemented locally to OptoRuntime with names that can be generated @@ -153,27 +154,29 @@ static bool check_compiled_frame(JavaThread* thread) { #define GEN_C2_STUB(name, fancy_jump, pass_tls, pass_retpc ) \ C2_STUB_FIELD_NAME(name) = \ - generate_stub(env, \ + generate_stub(env, \ C2_STUB_TYPEFUNC(name), \ C2_STUB_C_FUNC(name), \ C2_STUB_NAME(name), \ - fancy_jump, \ - pass_tls, \ - pass_retpc); \ + (int)C2_STUB_ID(name), \ + fancy_jump, \ + pass_tls, \ + pass_retpc); \ if (C2_STUB_FIELD_NAME(name) == nullptr) { return false; } \ #define C2_JVMTI_STUB_C_FUNC(name) CAST_FROM_FN_PTR(address, SharedRuntime::name) #define GEN_C2_JVMTI_STUB(name) \ - STUB_FIELD_NAME(name) = \ - generate_stub(env, \ - notify_jvmti_vthread_Type, \ + STUB_FIELD_NAME(name) = \ + generate_stub(env, \ + notify_jvmti_vthread_Type, \ C2_JVMTI_STUB_C_FUNC(name), \ C2_STUB_NAME(name), \ - 0, \ - true, \ - false); \ - if (STUB_FIELD_NAME(name) == nullptr) { return false; } \ + (int)C2_STUB_ID(name), \ + 0, \ + true, \ + false); \ + if (STUB_FIELD_NAME(name) == nullptr) { return false; } \ bool OptoRuntime::generate(ciEnv* env) { @@ -277,15 +280,15 @@ const TypeFunc* OptoRuntime::_dtrace_object_alloc_Type = nullptr; // Helper method to do generation of RunTimeStub's address OptoRuntime::generate_stub(ciEnv* env, TypeFunc_generator gen, address C_function, - const char *name, int is_fancy_jump, - bool pass_tls, + const char *name, int stub_id, + int is_fancy_jump, bool pass_tls, bool return_pc) { // Matching the default directive, we currently have no method to match. 
DirectiveSet* directive = DirectivesStack::getDefaultDirective(CompileBroker::compiler(CompLevel_full_optimization)); CompilationMemoryStatisticMark cmsm(directive); ResourceMark rm; - Compile C(env, gen, C_function, name, is_fancy_jump, pass_tls, return_pc, directive); + Compile C(env, gen, C_function, name, stub_id, is_fancy_jump, pass_tls, return_pc, directive); DirectivesStack::release(directive); return C.stub_entry_point(); } diff --git a/src/hotspot/share/opto/runtime.hpp b/src/hotspot/share/opto/runtime.hpp index 633cced00909c..c35508397583d 100644 --- a/src/hotspot/share/opto/runtime.hpp +++ b/src/hotspot/share/opto/runtime.hpp @@ -217,7 +217,7 @@ class OptoRuntime : public AllStatic { static const char *_stub_names[]; // define stubs - static address generate_stub(ciEnv* ci_env, TypeFunc_generator gen, address C_function, const char* name, int is_fancy_jump, bool pass_tls, bool return_pc); + static address generate_stub(ciEnv* ci_env, TypeFunc_generator gen, address C_function, const char* name, int stub_id, int is_fancy_jump, bool pass_tls, bool return_pc); // // Implementation of runtime methods diff --git a/src/hotspot/share/runtime/sharedRuntime.cpp b/src/hotspot/share/runtime/sharedRuntime.cpp index 733e8a78d903e..7bfb9c6290976 100644 --- a/src/hotspot/share/runtime/sharedRuntime.cpp +++ b/src/hotspot/share/runtime/sharedRuntime.cpp @@ -2456,7 +2456,7 @@ AdapterHandlerEntry* AdapterHandlerLibrary::lookup(int total_args_passed, BasicT #if INCLUDE_CDS // if we are building the archive then the archived adapter table is // not valid and we need to use the ones added to the runtime table - if (!AOTCodeCache::is_dumping_adapters()) { + if (!AOTCodeCache::is_dumping_adapter()) { // Search archived table first. It is read-only table so can be searched without lock entry = _aot_adapter_handler_table.lookup(fp, fp->compute_hash(), 0 /* unused */); if (entry != nullptr) { @@ -2838,7 +2838,7 @@ bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob, // and we're some non descript Java thread. 
return false; } - if (!is_transient && AOTCodeCache::is_dumping_adapters()) { + if (!is_transient && AOTCodeCache::is_dumping_adapter()) { // try to save generated code const char* name = AdapterHandlerLibrary::name(handler->fingerprint()); const uint32_t id = AdapterHandlerLibrary::id(handler->fingerprint()); @@ -2850,7 +2850,7 @@ bool AdapterHandlerLibrary::generate_adapter_code(AdapterBlob*& adapter_blob, entry_offset[2] = handler->get_c2i_unverified_entry() - i2c_entry; entry_offset[3] = handler->get_c2i_no_clinit_check_entry() - i2c_entry; bool success = AOTCodeCache::store_code_blob(*adapter_blob, AOTCodeEntry::Adapter, id, name, AdapterHandlerEntry::ENTRIES_COUNT, entry_offset); - assert(success || !AOTCodeCache::is_dumping_adapters(), "caching of adapter must be disabled"); + assert(success || !AOTCodeCache::is_dumping_adapter(), "caching of adapter must be disabled"); } handler->relocate(adapter_blob->content_begin()); #ifndef PRODUCT @@ -2961,7 +2961,7 @@ void AdapterHandlerEntry::link() { // Generate code only if AOTCodeCache is not available, or // caching adapters is disabled, or we fail to link // the AdapterHandlerEntry to its code in the AOTCodeCache - if (AOTCodeCache::is_using_adapters()) { + if (AOTCodeCache::is_using_adapter()) { adapter_blob = AdapterHandlerLibrary::link_aot_adapter_handler(this); if (adapter_blob == nullptr) { log_warning(cds)("Failed to link AdapterHandlerEntry (fp=%s) to its code in the AOT code cache", _fingerprint->as_basic_args_string()); diff --git a/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeCompressedOopsTest.java b/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeCompressedOopsTest.java new file mode 100644 index 0000000000000..a65db6956c40f --- /dev/null +++ b/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeCompressedOopsTest.java @@ -0,0 +1,211 @@ +/* + * Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. 
+ * + */ + +/** + * @test + * @summary Sanity test of AOT Code Cache with compressed oops configurations + * @requires vm.cds + * @requires vm.cds.supports.aot.class.linking + * @requires vm.flagless + * @requires !vm.jvmci.enabled + * @library /test/lib /test/setup_aot + * @build AOTCodeCompressedOopsTest JavacBenchApp + * @run driver jdk.test.lib.helpers.ClassFileInstaller -jar app.jar + * JavacBenchApp + * JavacBenchApp$ClassFile + * JavacBenchApp$FileManager + * JavacBenchApp$SourceFile + * @run driver AOTCodeCompressedOopsTest + */ + +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import jdk.test.lib.cds.CDSAppTester; +import jdk.test.lib.process.OutputAnalyzer; + +public class AOTCodeCompressedOopsTest { + public static void main(String... args) throws Exception { + { + Tester t = new Tester(); + t.setHeapConfig(Tester.RunMode.ASSEMBLY, true, true); + t.runAOTAssemblyWorkflow(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, true); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, false); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, false, false); + t.productionRun(); + } + { + Tester t = new Tester(); + t.setHeapConfig(Tester.RunMode.ASSEMBLY, true, false); + t.runAOTAssemblyWorkflow(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, true); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, false); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, false, false); + t.productionRun(); + } + { + Tester t = new Tester(); + t.setHeapConfig(Tester.RunMode.ASSEMBLY, false, false); + t.runAOTAssemblyWorkflow(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, true); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, true, false); + t.productionRun(); + t.setHeapConfig(Tester.RunMode.PRODUCTION, false, false); + t.productionRun(); + } + } + static class Tester extends CDSAppTester { + boolean zeroBaseInAsmPhase, zeroBaseInProdPhase; + boolean zeroShiftInAsmPhase, zeroShiftInProdPhase; + + public Tester() { + super("AOTCodeCompressedOopsTest"); + } + + public void setHeapConfig(RunMode runMode, boolean isBaseZero, boolean isShiftZero) { + if (runMode == RunMode.ASSEMBLY) { + zeroBaseInAsmPhase = isBaseZero; + zeroShiftInAsmPhase = isShiftZero; + } else if (runMode == RunMode.PRODUCTION) { + zeroBaseInProdPhase = isBaseZero; + zeroShiftInProdPhase = isShiftZero; + } + } + + @Override + public String classpath(RunMode runMode) { + return "app.jar"; + } + + List<String> getVMArgsForHeapConfig(boolean isBaseZero, boolean isShiftZero) { + List<String> list = new ArrayList<String>(); + // Note the VM options used are best-effort to get the desired base and shift, + // but it is OS dependent, so we may not get the desired configuration. 
+ if (isBaseZero && isShiftZero) { + list.add("-Xmx128m"); // Set max heap < 4G; + } else if (isBaseZero && !isShiftZero) { + list.add("-Xmx6g"); // Set max heap > 4G for shift to be non-zero + } else if (!isBaseZero && !isShiftZero) { + list.add("-Xmx6g"); + list.add("-XX:HeapBaseMinAddress=32g"); + } + return list; + } + + @Override + public String[] vmArgs(RunMode runMode) { + switch (runMode) { + case RunMode.ASSEMBLY: { + List<String> args = getVMArgsForHeapConfig(zeroBaseInAsmPhase, zeroShiftInAsmPhase); + args.addAll(List.of("-XX:+UnlockDiagnosticVMOptions", + "-Xlog:cds=info", + "-Xlog:aot+codecache+init=debug", + "-Xlog:aot+codecache+exit=debug")); + return args.toArray(new String[0]); + } + case RunMode.PRODUCTION: { + List<String> args = getVMArgsForHeapConfig(zeroBaseInProdPhase, zeroShiftInProdPhase); + args.addAll(List.of("-XX:+UnlockDiagnosticVMOptions", + "-Xlog:cds=info", // we need this to parse CompressedOops settings + "-Xlog:aot+codecache+init=debug", + "-Xlog:aot+codecache+exit=debug")); + return args.toArray(new String[0]); + } + } + return new String[] {}; + } + + @Override + public String[] appCommandLine(RunMode runMode) { + return new String[] { + "JavacBenchApp", "10" + }; + } + + @Override + public void checkExecution(OutputAnalyzer out, RunMode runMode) throws Exception { + if (runMode == RunMode.PRODUCTION) { + int aotCacheShift = -1, currentShift = -1; + long aotCacheBase = -1, currentBase = -1; + List<String> list = out.asLines(); + /* We tried to have CompressedOops settings as per the test requirement, + * but it ultimately depends on OS and is not guaranteed that we have got the desired settings. + * So we parse the log output from the production run to get the real settings. + * + * Parse the following Xlog:cds output to get the values of CompressedOops::base and CompressedOops::shift + * used during the AOTCache assembly and production run: + * + * [0.022s][info][cds] CDS archive was created with max heap size = 1024M, and the following configuration: + * [0.022s][info][cds] narrow_klass_base at mapping start address, narrow_klass_pointer_bits = 32, narrow_klass_shift = 0 + * [0.022s][info][cds] narrow_oop_mode = 1, narrow_oop_base = 0x0000000000000000, narrow_oop_shift = 3 + * [0.022s][info][cds] The current max heap size = 31744M, G1HeapRegion::GrainBytes = 16777216 + * [0.022s][info][cds] narrow_klass_base = 0x000007fc00000000, narrow_klass_pointer_bits = 32, narrow_klass_shift = 0 + * [0.022s][info][cds] narrow_oop_mode = 3, narrow_oop_base = 0x0000000300000000, narrow_oop_shift = 3 + * [0.022s][info][cds] heap range = [0x0000000301000000 - 0x0000000ac1000000] + */ + Pattern p = Pattern.compile("narrow_oop_base = 0x(\\d+), narrow_oop_shift = (\\d)"); + for (int i = 0; i < list.size(); i++) { + String line = list.get(i); + if (line.indexOf("CDS archive was created with max heap size") != -1) { + // Parse AOT Cache CompressedOops settings + line = list.get(i+2); + Matcher m = p.matcher(line); + if (!m.find()) { + throw new RuntimeException("Pattern \"" + p + "\" not found in the output"); + } + aotCacheBase = Long.valueOf(m.group(1), 16); + aotCacheShift = Integer.valueOf(m.group(2)); + // Parse current CompressedOops settings + line = list.get(i+5); + m = p.matcher(line); + if (!m.find()) { + throw new RuntimeException("Pattern \"" + p + "\" not found in the output"); + } + currentBase = Long.valueOf(m.group(1), 16); + currentShift = Integer.valueOf(m.group(2)); + break; + } + } + if (aotCacheShift == -1 || currentShift == -1 || aotCacheBase == -1 || currentBase == -1) { + 
throw new RuntimeException("Failed to find CompressedOops settings"); + } + if (aotCacheShift != currentShift) { + out.shouldContain("AOT Code Cache disabled: it was created with different CompressedOops::shift()"); + } else if (aotCacheBase != currentBase) { + out.shouldContain("AOTStubCaching is disabled: incompatible CompressedOops::base()"); + } else { + out.shouldMatch("Read \\d+ entries table at offset \\d+ from AOT Code Cache"); + } + } + } + } +} diff --git a/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeFlags.java b/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeFlags.java index 7532db8a1e846..b639855843eca 100644 --- a/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeFlags.java +++ b/test/hotspot/jtreg/runtime/cds/appcds/aotCode/AOTCodeFlags.java @@ -42,21 +42,45 @@ * @run driver AOTCodeFlags */ +import java.util.ArrayList; +import java.util.List; + import jdk.test.lib.cds.CDSAppTester; import jdk.test.lib.process.OutputAnalyzer; public class AOTCodeFlags { - public static int flag_sign = 0; public static void main(String... args) throws Exception { Tester t = new Tester(); - for (int i = 0; i < 2; i++) { - flag_sign = i; + for (int mode = 0; mode < 3; mode++) { + t.setTestMode(mode); t.run(new String[] {"AOT"}); } } static class Tester extends CDSAppTester { + private int testMode; + public Tester() { - super("AOTCodeFlags" + flag_sign); + super("AOTCodeFlags"); + testMode = 0; + } + + boolean isAdapterCachingOn() { + return (testMode & 0x1) != 0; + } + + boolean isStubCachingOn() { + return (testMode & 0x2) != 0; + } + + public void setTestMode(int mode) { + testMode = mode; + } + + public List<String> getVMArgsForTestMode() { + List<String> list = new ArrayList<String>(); + list.add(isAdapterCachingOn() ? "-XX:+AOTAdapterCaching" : "-XX:-AOTAdapterCaching"); + list.add(isStubCachingOn() ? "-XX:+AOTStubCaching" : "-XX:-AOTStubCaching"); + return list; } @Override @@ -68,13 +92,13 @@ public String classpath(RunMode runMode) { public String[] vmArgs(RunMode runMode) { switch (runMode) { case RunMode.ASSEMBLY: - case RunMode.PRODUCTION: - return new String[] { - "-XX:+UnlockDiagnosticVMOptions", - "-XX:" + (flag_sign == 0 ? 
"-" : "+") + "AOTAdapterCaching", - "-Xlog:aot+codecache+init=debug", - "-Xlog:aot+codecache+exit=debug", - }; + case RunMode.PRODUCTION: { + List<String> args = getVMArgsForTestMode(); + args.addAll(List.of("-XX:+UnlockDiagnosticVMOptions", + "-Xlog:aot+codecache+init=debug", + "-Xlog:aot+codecache+exit=debug")); + return args.toArray(new String[0]); + } } return new String[] {}; } @@ -88,23 +112,56 @@ public String[] appCommandLine(RunMode runMode) { @Override public void checkExecution(OutputAnalyzer out, RunMode runMode) throws Exception { - if (flag_sign == 0) { + if (!isAdapterCachingOn() && !isStubCachingOn()) { // this is equivalent to completely disable AOT code cache switch (runMode) { case RunMode.ASSEMBLY: case RunMode.PRODUCTION: - out.shouldNotContain("Adapters: total"); + out.shouldNotMatch("Adapters:\\s+total"); + out.shouldNotMatch("Shared Blobs:\\s+total"); + out.shouldNotMatch("C1 Blobs:\\s+total"); + out.shouldNotMatch("C2 Blobs:\\s+total"); break; } - } else { - switch (runMode) { - case RunMode.ASSEMBLY: - case RunMode.PRODUCTION: - out.shouldContain("Adapters: total"); - break; + if (isAdapterCachingOn()) { + switch (runMode) { + case RunMode.ASSEMBLY: + case RunMode.PRODUCTION: + // AOTAdapterCaching is on, non-zero adapters should be stored/loaded + out.shouldMatch("Adapters:\\s+total=[1-9][0-9]+"); + break; + } + } else { + switch (runMode) { + case RunMode.ASSEMBLY: + case RunMode.PRODUCTION: + // AOTAdapterCaching is off, no adapters should be stored/loaded + out.shouldMatch("Adapters:\\s+total=0"); + break; + } + } + if (isStubCachingOn()) { + switch (runMode) { + case RunMode.ASSEMBLY: + case RunMode.PRODUCTION: + // AOTStubCaching is on, non-zero stubs should be stored/loaded + out.shouldMatch("Shared Blobs:\\s+total=[1-9][0-9]+"); + out.shouldMatch("C1 Blobs:\\s+total=[1-9][0-9]+"); + out.shouldMatch("C2 Blobs:\\s+total=[1-9][0-9]+"); + break; + } + } else { + switch (runMode) { + case RunMode.ASSEMBLY: + case RunMode.PRODUCTION: + // AOTStubCaching is off, no stubs should be stored/loaded + out.shouldMatch("Shared Blobs:\\s+total=0"); + out.shouldMatch("C1 Blobs:\\s+total=0"); + out.shouldMatch("C2 Blobs:\\s+total=0"); + break; + } } } } - } } diff --git a/test/lib/jdk/test/lib/cds/CDSAppTester.java b/test/lib/jdk/test/lib/cds/CDSAppTester.java index 6ebf73f5f0898..0a21e340cc6d7 100644 --- a/test/lib/jdk/test/lib/cds/CDSAppTester.java +++ b/test/lib/jdk/test/lib/cds/CDSAppTester.java @@ -347,7 +347,7 @@ private OutputAnalyzer dumpDynamicArchive() throws Exception { return executeAndCheck(cmdLine, runMode, dynamicArchiveFile, dynamicArchiveFileLog); } - private OutputAnalyzer productionRun() throws Exception { + public OutputAnalyzer productionRun() throws Exception { return productionRun(null, null); } @@ -431,4 +431,11 @@ public void runAOTWorkflow() throws Exception { createAOTCache(); productionRun(); } + + // See JEP 483; stop at the assembly run; do not execute production run + public void runAOTAssemblyWorkflow() throws Exception { + this.workflow = Workflow.AOT; + recordAOTConfiguration(); + createAOTCache(); + } }