8331253: 16 bits is not enough for nmethod::_skipped_instructions_size field
Reviewed-by: dlong, thartmann
parent 8771015d7e
commit 3383ad6397
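The change below removes the 16-bit nmethod::_skipped_instructions_size field and the InlineSkippedInstructionsCounter bookkeeping around the C2 ZGC barrier stubs. Instead, CodeBuffer records the instructions-section size before stubs are generated (_main_code_size), nmethod derives an int-sized _inline_insts_size at initialization, and ciMethod::inline_instructions_size() reads that value. Illustrative sketch of the underlying limitation (standalone C++, not HotSpot code; the 70000-byte value is made up):

#include <cassert>
#include <cstdint>

// Standalone illustration (not HotSpot code): a 16-bit field silently
// truncates once the tracked byte count exceeds 65535, which is why the
// patch replaces the uint16_t _skipped_instructions_size field with an
// int-sized _inline_insts_size computed once at nmethod initialization.
int main() {
  int code_bytes = 70000;                              // hypothetical value > 2^16 - 1
  std::uint16_t as_u16 = static_cast<std::uint16_t>(code_bytes);
  assert(as_u16 != code_bytes);                        // 70000 wraps to 4464
  int as_int = code_bytes;                             // the replacement field is a plain int
  assert(as_int == code_bytes);                        // no truncation
  return 0;
}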
@@ -1141,7 +1141,6 @@ public:
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZLoadBarrierStubC2");
 
   // Stub entry
@@ -1160,7 +1159,6 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZStoreBarrierStubC2");
 
   // Stub entry
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021, 2023, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2021, 2024, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2021, 2023 SAP SE. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -887,7 +887,6 @@ class ZSetupArguments {
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   __ block_comment("generate_c2_load_barrier_stub (zgc) {");
 
   __ bind(*stub->entry());
@@ -911,7 +910,6 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   __ block_comment("ZStoreBarrierStubC2");
 
   // Stub entry
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2018, 2024, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -1213,7 +1213,6 @@ public:
 #define __ masm->
 
 void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, ZLoadBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZLoadBarrierStubC2");
 
   // Stub entry
@@ -1233,7 +1232,6 @@ void ZBarrierSetAssembler::generate_c2_load_barrier_stub(MacroAssembler* masm, Z
 }
 
 void ZBarrierSetAssembler::generate_c2_store_barrier_stub(MacroAssembler* masm, ZStoreBarrierStubC2* stub) const {
-  Assembler::InlineSkippedInstructionsCounter skipped_counter(masm);
   BLOCK_COMMENT("ZStoreBarrierStubC2");
 
   // Stub entry
@@ -1012,6 +1012,8 @@ void CodeBuffer::log_section_sizes(const char* name) {
 }
 
 bool CodeBuffer::finalize_stubs() {
+  // Record size of code before we generate stubs in instructions section
+  _main_code_size = _insts.size();
   if (_finalize_stubs && !pd_finalize_stubs()) {
     // stub allocation failure
     return false;
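The hunk above records the instructions-section size in finalize_stubs() before any stubs are emitted; the header changes that follow declare the field and its accessor. A minimal standalone sketch of that idea (illustrative names, not the real CodeBuffer API):

#include <cassert>
#include <vector>

// Sketch: remember how many instruction bytes exist before trailing stubs are
// appended, so consumers can later separate main code from stub code.
struct TinyBuffer {
  std::vector<unsigned char> insts;
  int main_code_size = 0;

  void finalize_stubs(const std::vector<unsigned char>& stubs) {
    main_code_size = static_cast<int>(insts.size());        // record size first
    insts.insert(insts.end(), stubs.begin(), stubs.end());  // then emit stubs
  }
};

int main() {
  TinyBuffer cb;
  cb.insts.assign(120, 0x90);                                // 120 bytes of main code
  cb.finalize_stubs(std::vector<unsigned char>(40, 0xCC));   // 40 bytes of stubs
  assert(cb.main_code_size == 120);
  assert(static_cast<int>(cb.insts.size()) == 160);
  return 0;
}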
@@ -427,6 +427,9 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
   address      _total_start;    // first address of combined memory buffer
   csize_t      _total_size;     // size in bytes of combined memory buffer
 
+  // Size of code without stubs generated at the end of instructions section
+  csize_t      _main_code_size;
+
   OopRecorder* _oop_recorder;
 
   OopRecorder  _default_oop_recorder;  // override with initialize_oop_recorder
@@ -457,6 +460,7 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
     _oop_recorder    = nullptr;
     _overflow_arena  = nullptr;
     _last_insn       = nullptr;
+    _main_code_size  = 0;
     _finalize_stubs  = false;
     _shared_stub_to_interp_requests = nullptr;
     _shared_trampoline_requests = nullptr;
@@ -630,6 +634,9 @@ class CodeBuffer: public StackObj DEBUG_ONLY(COMMA private Scrubber) {
   // number of bytes remaining in the insts section
   csize_t insts_remaining() const { return _insts.remaining(); }
 
+  // size of code without stubs in instruction section
+  csize_t main_code_size() const { return _main_code_size; }
+
   // is a given address in the insts section? (2nd version is end-inclusive)
   bool insts_contains(address pc) const { return _insts.contains(pc); }
   bool insts_contains2(address pc) const { return _insts.contains2(pc); }
@@ -1126,13 +1126,14 @@ int ciMethod::code_size_for_inlining() {
 // not highly relevant to an inlined method. So we use the more
 // specific accessor nmethod::insts_size.
 // Also some instructions inside the code are excluded from inline
-// heuristic (e.g. post call nop instructions; see InlineSkippedInstructionsCounter)
+// heuristic (e.g. post call nop instructions and GC barriers;
+// see InlineSkippedInstructionsCounter).
 int ciMethod::inline_instructions_size() {
   if (_inline_instructions_size == -1) {
     GUARDED_VM_ENTRY(
       nmethod* code = get_Method()->code();
       if (code != nullptr && (code->comp_level() == CompLevel_full_optimization)) {
-        int isize = code->insts_end() - code->verified_entry_point() - code->skipped_instructions_size();
+        int isize = code->inline_insts_size();
         _inline_instructions_size = isize > 0 ? isize : 0;
       } else {
         _inline_instructions_size = 0;
@@ -131,6 +131,7 @@ struct java_nmethod_stats_struct {
   uint relocation_size;
   uint consts_size;
   uint insts_size;
+  uint inline_insts_size;
   uint stub_size;
   uint oops_size;
   uint metadata_size;
@@ -151,6 +152,7 @@ struct java_nmethod_stats_struct {
     relocation_size += nm->relocation_size();
     consts_size += nm->consts_size();
     insts_size += nm->insts_size();
+    inline_insts_size += nm->inline_insts_size();
     stub_size += nm->stub_size();
     oops_size += nm->oops_size();
     metadata_size += nm->metadata_size();
@@ -185,6 +187,9 @@ struct java_nmethod_stats_struct {
     if (insts_size != 0) {
       tty->print_cr(" main code = %u (%f%%)", insts_size, (insts_size * 100.0f)/total_nm_size);
     }
+    if (inline_insts_size != 0) {
+      tty->print_cr(" inline code = %u (%f%%)", inline_insts_size, (inline_insts_size * 100.0f)/total_nm_size);
+    }
     if (stub_size != 0) {
       tty->print_cr(" stub code = %u (%f%%)", stub_size, (stub_size * 100.0f)/total_nm_size);
     }
@@ -1253,7 +1258,15 @@ void nmethod::init_defaults(CodeBuffer *code_buffer, CodeOffsets* offsets) {
 
   CHECKED_CAST(_entry_offset, uint16_t, (offsets->value(CodeOffsets::Entry)));
   CHECKED_CAST(_verified_entry_offset, uint16_t, (offsets->value(CodeOffsets::Verified_Entry)));
-  CHECKED_CAST(_skipped_instructions_size, uint16_t, (code_buffer->total_skipped_instructions_size()));
+
+  int size = code_buffer->main_code_size();
+  assert(size >= 0, "should be initialized");
+  // Use instructions section size if it is 0 (e.g. native wrapper)
+  if (size == 0) size = code_size(); // requires _stub_offset to be set
+  assert(size <= code_size(), "incorrect size: %d > %d", size, code_size());
+  _inline_insts_size = size - _verified_entry_offset
+                     - code_buffer->total_skipped_instructions_size();
+  assert(_inline_insts_size >= 0, "sanity");
 }
 
 // Post initialization
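The init_defaults() hunk above computes the new field as the code size measured before stubs, minus the prologue up to the verified entry point, minus bytes explicitly marked as skipped (e.g. post-call nops). A worked example with made-up numbers:

#include <cassert>

// Illustration of the arithmetic in the hunk above; all values hypothetical.
int main() {
  int main_code_size        = 512; // hypothetical CodeBuffer::main_code_size()
  int verified_entry_offset = 32;  // hypothetical verified entry offset
  int skipped_instructions  = 16;  // hypothetical total_skipped_instructions_size()

  int inline_insts_size = main_code_size - verified_entry_offset - skipped_instructions;
  assert(inline_insts_size == 464);
  return 0;
}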
@@ -218,6 +218,8 @@ class nmethod : public CodeBlob {
 
   // _consts_offset == _content_offset because SECT_CONSTS is first in code buffer
 
+  int _inline_insts_size;
+
   int _stub_offset;
 
   // Offsets for different stubs section parts
@@ -232,7 +234,6 @@ class nmethod : public CodeBlob {
   int16_t _unwind_handler_offset;
   // Number of arguments passed on the stack
   uint16_t _num_stack_arg_slots;
-  uint16_t _skipped_instructions_size;
 
   // Offsets in mutable data section
   // _oops_offset == _data_offset, offset where embedded oop table begins (inside data)
@@ -589,7 +590,7 @@ public:
   int oops_count() const { assert(oops_size() % oopSize == 0, ""); return (oops_size() / oopSize) + 1; }
   int metadata_count() const { assert(metadata_size() % wordSize == 0, ""); return (metadata_size() / wordSize) + 1; }
 
-  int skipped_instructions_size () const { return _skipped_instructions_size; }
+  int inline_insts_size() const { return _inline_insts_size; }
   int total_size() const;
 
   // Containment
@@ -1324,8 +1324,9 @@ CodeBuffer* PhaseOutput::init_buffer() {
 
   int pad_req = NativeCall::instruction_size;
 
+  // GC barrier stubs are generated in code section
   BarrierSetC2* bs = BarrierSet::barrier_set()->barrier_set_c2();
-  stub_req += bs->estimate_stub_size();
+  code_req += bs->estimate_stub_size();
 
   // nmethod and CodeBuffer count stubs & constants as part of method's code.
   // class HandlerImpl is platform-specific and defined in the *.ad files.
@@ -1334,9 +1335,9 @@ CodeBuffer* PhaseOutput::init_buffer() {
   stub_req += MAX_stubs_size; // ensure per-stub margin
   code_req += MAX_inst_size;  // ensure per-instruction margin
 
-  if (StressCodeBuffers)
+  if (StressCodeBuffers) {
     code_req = const_req = stub_req = exception_handler_req = deopt_handler_req = 0x10; // force expansion
-
+  }
   int total_req =
     const_req +
     code_req +
@@ -1345,9 +1346,10 @@ CodeBuffer* PhaseOutput::init_buffer() {
     exception_handler_req +
     deopt_handler_req; // deopt handler
 
-  if (C->has_method_handle_invokes())
+  if (C->has_method_handle_invokes()) {
     total_req += deopt_handler_req; // deopt MH handler
-
+    stub_req += deopt_handler_req;
+  }
   CodeBuffer* cb = code_buffer();
   cb->initialize(total_req, _buf_sizes._reloc);
 
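The PhaseOutput hunks above charge the GC barrier stub estimate to the code request (those stubs are emitted in the instructions section) and additionally reserve the method-handle deopt handler in the stub request. A rough sketch of that accounting with hypothetical numbers, mirroring the moved lines:

#include <cassert>

// Sketch only; every value below is made up for illustration.
int main() {
  int const_req = 64, code_req = 1024, stub_req = 256, pad_req = 16;
  int exception_handler_req = 32, deopt_handler_req = 32;
  int barrier_stub_estimate = 128;
  bool has_method_handle_invokes = true;

  code_req += barrier_stub_estimate;          // was added to stub_req before the patch
  int total_req = const_req + code_req + pad_req + stub_req +
                  exception_handler_req + deopt_handler_req;
  if (has_method_handle_invokes) {
    total_req += deopt_handler_req;           // deopt MH handler
    stub_req  += deopt_handler_req;           // now also reserved in the stub budget
  }
  assert(total_req == 64 + 1152 + 16 + 256 + 32 + 32 + 32);
  assert(stub_req == 288);
  return 0;
}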