Merge
commit f64d1c0216
@@ -7666,7 +7666,7 @@ RegisterOrConstant MacroAssembler::delayed_value_impl(intptr_t* delayed_value_ad
 
 #ifdef ASSERT
   Label L;
-  testl(tmp, tmp);
+  testptr(tmp, tmp);
   jccb(Assembler::notZero, L);
   hlt();
   bind(L);
@@ -196,6 +196,9 @@ void InterpreterMacroAssembler::get_cache_index_at_bcp(Register reg, int bcp_off
   } else {
     assert(EnableInvokeDynamic, "giant index used only for EnableInvokeDynamic");
     movl(reg, Address(rsi, bcp_offset));
+    // Check if the secondary index definition is still ~x, otherwise
+    // we have to change the following assembler code to calculate the
+    // plain index.
     assert(constantPoolCacheOopDesc::decode_secondary_index(~123) == 123, "else change next line");
     notl(reg);  // convert to plain index
   }
@@ -185,12 +185,30 @@ void InterpreterMacroAssembler::get_unsigned_2_byte_index_at_bcp(
 }
 
 
+void InterpreterMacroAssembler::get_cache_index_at_bcp(Register index,
+                                                       int bcp_offset,
+                                                       bool giant_index) {
+  assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
+  if (!giant_index) {
+    load_unsigned_short(index, Address(r13, bcp_offset));
+  } else {
+    assert(EnableInvokeDynamic, "giant index used only for EnableInvokeDynamic");
+    movl(index, Address(r13, bcp_offset));
+    // Check if the secondary index definition is still ~x, otherwise
+    // we have to change the following assembler code to calculate the
+    // plain index.
+    assert(constantPoolCacheOopDesc::decode_secondary_index(~123) == 123, "else change next line");
+    notl(index);  // convert to plain index
+  }
+}
+
+
 void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache,
                                                            Register index,
-                                                           int bcp_offset) {
-  assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
+                                                           int bcp_offset,
+                                                           bool giant_index) {
   assert(cache != index, "must use different registers");
-  load_unsigned_short(index, Address(r13, bcp_offset));
+  get_cache_index_at_bcp(index, bcp_offset, giant_index);
   movptr(cache, Address(rbp, frame::interpreter_frame_cache_offset * wordSize));
   assert(sizeof(ConstantPoolCacheEntry) == 4 * wordSize, "adjust code below");
   // convert from field index to ConstantPoolCacheEntry index
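Note: the "giant index" handled above is the 4-byte secondary constant-pool-cache index used by invokedynamic; it is stored as the one's complement (~x) of the plain index so a single NOT instruction (the notl above) recovers it. A minimal standalone sketch of that encoding — the helper names here are illustrative, not the HotSpot API:

    #include <cassert>
    #include <cstdint>

    // Encode/decode a secondary cp-cache index as its one's complement.
    static inline int32_t encode_secondary_index(int32_t plain)   { return ~plain; }
    static inline int32_t decode_secondary_index(int32_t encoded) { return ~encoded; }

    int main() {
      // Mirrors the guard asserted in the hunks above:
      // decode_secondary_index(~123) == 123, "else change next line".
      assert(decode_secondary_index(~123) == 123);
      assert(decode_secondary_index(encode_secondary_index(456)) == 456);
      return 0;
    }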
@@ -200,10 +218,10 @@ void InterpreterMacroAssembler::get_cache_and_index_at_bcp(Register cache,
 
 void InterpreterMacroAssembler::get_cache_entry_pointer_at_bcp(Register cache,
                                                                Register tmp,
-                                                               int bcp_offset) {
-  assert(bcp_offset > 0, "bcp is still pointing to start of bytecode");
+                                                               int bcp_offset,
+                                                               bool giant_index) {
   assert(cache != tmp, "must use different register");
-  load_unsigned_short(tmp, Address(r13, bcp_offset));
+  get_cache_index_at_bcp(tmp, bcp_offset, giant_index);
   assert(sizeof(ConstantPoolCacheEntry) == 4 * wordSize, "adjust code below");
   // convert from field index to ConstantPoolCacheEntry index
   // and from word offset to byte offset
@@ -1236,7 +1254,8 @@ void InterpreterMacroAssembler::profile_final_call(Register mdp) {
 
 void InterpreterMacroAssembler::profile_virtual_call(Register receiver,
                                                      Register mdp,
-                                                     Register reg2) {
+                                                     Register reg2,
+                                                     bool receiver_can_be_null) {
   if (ProfileInterpreter) {
     Label profile_continue;
 
@@ -1246,8 +1265,15 @@ void InterpreterMacroAssembler::profile_virtual_call(Register receiver,
     // We are making a call. Increment the count.
     increment_mdp_data_at(mdp, in_bytes(CounterData::count_offset()));
 
+    Label skip_receiver_profile;
+    if (receiver_can_be_null) {
+      testptr(receiver, receiver);
+      jcc(Assembler::zero, skip_receiver_profile);
+    }
+
     // Record the receiver type.
     record_klass_in_profile(receiver, mdp, reg2);
+    bind(skip_receiver_profile);
 
     // The method data pointer needs to be updated to reflect the new target.
     update_mdp_by_constant(mdp,
@@ -95,9 +95,10 @@ class InterpreterMacroAssembler: public MacroAssembler {
 
   void get_unsigned_2_byte_index_at_bcp(Register reg, int bcp_offset);
   void get_cache_and_index_at_bcp(Register cache, Register index,
-                                  int bcp_offset);
+                                  int bcp_offset, bool giant_index = false);
   void get_cache_entry_pointer_at_bcp(Register cache, Register tmp,
-                                      int bcp_offset);
+                                      int bcp_offset, bool giant_index = false);
+  void get_cache_index_at_bcp(Register index, int bcp_offset, bool giant_index = false);
 
 
   void pop_ptr(Register r = rax);
@@ -236,7 +237,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
   void profile_call(Register mdp);
   void profile_final_call(Register mdp);
   void profile_virtual_call(Register receiver, Register mdp,
-                            Register scratch2);
+                            Register scratch2,
+                            bool receiver_can_be_null = false);
   void profile_ret(Register return_bci, Register mdp);
   void profile_null_seen(Register mdp);
   void profile_typecheck(Register mdp, Register klass, Register scratch);
@@ -277,12 +277,11 @@ address InterpreterGenerator::generate_abstract_entry(void) {
   address entry_point = __ pc();
 
   // abstract method entry
-  // remove return address. Not really needed, since exception
-  // handling throws away expression stack
-  __ pop(rbx);
 
-  // adjust stack to what a normal return would do
-  __ mov(rsp, r13);
+  // pop return address, reset last_sp to NULL
+  __ empty_expression_stack();
+  __ restore_bcp();      // rsi must be correct for exception handler (was destroyed)
+  __ restore_locals();   // make sure locals pointer is correct as well (was destroyed)
 
   // throw exception
   __ call_VM(noreg, CAST_FROM_FN_PTR(address,
@@ -300,7 +299,10 @@ address InterpreterGenerator::generate_method_handle_entry(void) {
   if (!EnableMethodHandles) {
     return generate_abstract_entry();
   }
-  return generate_abstract_entry(); //6815692//
+
+  address entry_point = MethodHandles::generate_method_handle_interpreter_entry(_masm);
+
+  return entry_point;
 }
 
 
@@ -448,7 +448,7 @@ void MethodHandles::generate_method_handle_stub(MacroAssembler* _masm, MethodHan
                          rbx_index, Address::times_ptr,
                          base + vtableEntry::method_offset_in_bytes());
   Register rbx_method = rbx_temp;
-  __ movl(rbx_method, vtable_entry_addr);
+  __ movptr(rbx_method, vtable_entry_addr);
 
   __ verify_oop(rbx_method);
   __ jmp(rbx_method_fie);
@@ -2935,6 +2935,16 @@ class StubGenerator: public StubCodeGenerator {
 
     // arraycopy stubs used by compilers
     generate_arraycopy_stubs();
+
+    // generic method handle stubs
+    if (EnableMethodHandles && SystemDictionary::MethodHandle_klass() != NULL) {
+      for (MethodHandles::EntryKind ek = MethodHandles::_EK_FIRST;
+           ek < MethodHandles::_EK_LIMIT;
+           ek = MethodHandles::EntryKind(1 + (int)ek)) {
+        StubCodeMark mark(this, "MethodHandle", MethodHandles::entry_name(ek));
+        MethodHandles::generate_method_handle_stub(_masm, ek);
+      }
+    }
   }
 
  public:
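Note: the loop added above walks every MethodHandles::EntryKind by converting through int, since C++ enums have no increment operator. A minimal standalone version of the same idiom, with placeholder enum values rather than the real entry kinds:

    // Placeholder entry kinds; the real list lives in MethodHandles.
    enum EntryKind { EK_FIRST = 0, EK_A = 0, EK_B, EK_C, EK_LIMIT };

    static void generate_stub(EntryKind /*ek*/) { /* emit code for one kind */ }

    static void generate_all_method_handle_stubs() {
      // Step the enum by round-tripping through int, exactly as the hunk does.
      for (EntryKind ek = EK_FIRST; ek < EK_LIMIT; ek = EntryKind(1 + (int)ek)) {
        generate_stub(ek);
      }
    }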
@@ -100,21 +100,26 @@ address TemplateInterpreterGenerator::generate_ClassCastException_handler() {
   return entry;
 }
 
-// Arguments are: required type in rarg1, failing object (or NULL) in rarg2
+// Arguments are: required type at TOS+8, failing object (or NULL) at TOS+4.
 address TemplateInterpreterGenerator::generate_WrongMethodType_handler() {
   address entry = __ pc();
 
   __ pop(c_rarg2);  // failing object is at TOS
   __ pop(c_rarg1);  // required type is at TOS+8
 
-  // expression stack must be empty before entering the VM if an
-  // exception happened
+  __ verify_oop(c_rarg1);
+  __ verify_oop(c_rarg2);
+
+  // Various method handle types use interpreter registers as temps.
+  __ restore_bcp();
+  __ restore_locals();
+
+  // Expression stack must be empty before entering the VM for an exception.
   __ empty_expression_stack();
 
   __ call_VM(noreg,
              CAST_FROM_FN_PTR(address,
-                              InterpreterRuntime::
-                              throw_WrongMethodTypeException),
+                              InterpreterRuntime::throw_WrongMethodTypeException),
              // pass required type, failing object (or NULL)
              c_rarg1, c_rarg2);
   return entry;
@@ -182,15 +187,29 @@ address TemplateInterpreterGenerator::generate_return_entry_for(TosState state,
   __ restore_bcp();
   __ restore_locals();
 
-  __ get_cache_and_index_at_bcp(rbx, rcx, 1);
+  Label L_got_cache, L_giant_index;
+  if (EnableInvokeDynamic) {
+    __ cmpb(Address(r13, 0), Bytecodes::_invokedynamic);
+    __ jcc(Assembler::equal, L_giant_index);
+  }
+  __ get_cache_and_index_at_bcp(rbx, rcx, 1, false);
+  __ bind(L_got_cache);
   __ movl(rbx, Address(rbx, rcx,
-                       Address::times_8,
+                       Address::times_ptr,
                        in_bytes(constantPoolCacheOopDesc::base_offset()) +
                        3 * wordSize));
   __ andl(rbx, 0xFF);
   if (TaggedStackInterpreter) __ shll(rbx, 1);  // 2 slots per parameter.
   __ lea(rsp, Address(rsp, rbx, Address::times_8));
   __ dispatch_next(state, step);
+
+  // out of the main line of code...
+  if (EnableInvokeDynamic) {
+    __ bind(L_giant_index);
+    __ get_cache_and_index_at_bcp(rbx, rcx, 1, true);
+    __ jmp(L_got_cache);
+  }
+
   return entry;
 }
 
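Note: the return entry above now peeks at the byte under the bcp to pick the index width — invokedynamic sites carry a 4-byte one's-complement index, every other invoke a 2-byte one. A sketch of that decision in plain C++ (opcode 0xba is invokedynamic; the helper name and layout assumptions are illustrative):

    #include <cstdint>
    #include <cstring>

    static const uint8_t BC_INVOKEDYNAMIC = 0xba;  // JVM opcode 186

    // Read the cp-cache index that follows the current bytecode.
    static uint32_t cache_index_at_bcp(const uint8_t* bcp, int offset) {
      if (bcp[0] == BC_INVOKEDYNAMIC) {
        uint32_t giant;                  // 4-byte "giant" index, stored as ~plain
        std::memcpy(&giant, bcp + offset, sizeof(giant));
        return ~giant;                   // same conversion the notl performs
      }
      uint16_t small;                    // ordinary 2-byte unsigned index
      std::memcpy(&small, bcp + offset, sizeof(small));
      return small;
    }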
@@ -3146,7 +3146,6 @@ void TemplateTable::invokedynamic(int byte_no) {
     __ profile_call(rsi);
   }
 
-  Label handle_unlinked_site;
   __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
   __ null_check(rcx);
   __ prepare_to_jump_from_interpreted();
@@ -203,18 +203,15 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bytecode, Register bc,
     __ jcc(Assembler::notEqual, fast_patch);
     __ get_method(scratch);
     // Let breakpoint table handling rewrite to quicker bytecode
-    __ call_VM(noreg,
-               CAST_FROM_FN_PTR(address,
-                                InterpreterRuntime::set_original_bytecode_at),
-               scratch, r13, bc);
+    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
 #ifndef ASSERT
     __ jmpb(patch_done);
-    __ bind(fast_patch);
-  }
 #else
     __ jmp(patch_done);
+#endif
     __ bind(fast_patch);
   }
+#ifdef ASSERT
     Label okay;
     __ load_unsigned_byte(scratch, at_bcp(0));
     __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
@@ -2054,26 +2051,28 @@ void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
   }
 }
 
-void TemplateTable::resolve_cache_and_index(int byte_no,
-                                            Register Rcache,
-                                            Register index) {
+void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
+  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);
 
   const Register temp = rbx;
   assert_different_registers(Rcache, index, temp);
 
   const int shift_count = (1 + byte_no) * BitsPerByte;
   Label resolved;
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
-  __ movl(temp, Address(Rcache,
-                        index, Address::times_8,
-                        constantPoolCacheOopDesc::base_offset() +
-                        ConstantPoolCacheEntry::indices_offset()));
-  __ shrl(temp, shift_count);
-  // have we resolved this bytecode?
-  __ andl(temp, 0xFF);
-  __ cmpl(temp, (int) bytecode());
-  __ jcc(Assembler::equal, resolved);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
+  if (is_invokedynamic) {
+    // we are resolved if the f1 field contains a non-null CallSite object
+    __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD);
+    __ jcc(Assembler::notEqual, resolved);
+  } else {
+    __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
+    __ shrl(temp, shift_count);
+    // have we resolved this bytecode?
+    __ andl(temp, 0xFF);
+    __ cmpl(temp, (int) bytecode());
+    __ jcc(Assembler::equal, resolved);
+  }
 
   // resolve first time through
   address entry;
@@ -2090,6 +2089,9 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
   case Bytecodes::_invokeinterface:
     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
     break;
+  case Bytecodes::_invokedynamic:
+    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
+    break;
   default:
     ShouldNotReachHere();
     break;
@@ -2098,7 +2100,7 @@ void TemplateTable::resolve_cache_and_index(int byte_no,
   __ call_VM(noreg, entry, temp);
 
   // Update registers with resolved info
-  __ get_cache_and_index_at_bcp(Rcache, index, 1);
+  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
   __ bind(resolved);
 }
 
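Note: resolve_cache_and_index now has two notions of "already resolved". A small data-level model of both tests, with stand-in types for the cp-cache entry layout (not the real HotSpot structures):

    #include <cstdint>

    struct CacheEntryModel {
      const void* f1;       // invokedynamic: resolved CallSite oop, or null
      uint32_t    indices;  // other invokes: resolver bytecodes packed per byte
    };

    static bool is_resolved(const CacheEntryModel& e, int byte_no,
                            uint8_t bytecode, bool is_invokedynamic) {
      if (is_invokedynamic) {
        // the cmpptr test: resolved once a non-null CallSite sits in f1
        return e.f1 != nullptr;
      }
      // shift_count = (1 + byte_no) * BitsPerByte, exactly as in the hunk
      const int shift_count = (1 + byte_no) * 8;
      return ((e.indices >> shift_count) & 0xFF) == bytecode;
    }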
@@ -2832,15 +2834,14 @@ void TemplateTable::count_calls(Register method, Register temp) {
   ShouldNotReachHere();
 }
 
-void TemplateTable::prepare_invoke(Register method,
-                                   Register index,
-                                   int byte_no,
-                                   Bytecodes::Code code) {
+void TemplateTable::prepare_invoke(Register method, Register index, int byte_no) {
   // determine flags
+  Bytecodes::Code code = bytecode();
   const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
+  const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
   const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
   const bool is_invokespecial    = code == Bytecodes::_invokespecial;
-  const bool load_receiver       = code != Bytecodes::_invokestatic;
+  const bool load_receiver       = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);
   const bool receiver_null_check = is_invokespecial;
   const bool save_flags          = is_invokeinterface || is_invokevirtual;
   // setup registers & access constant pool cache
@@ -2858,9 +2859,13 @@ void TemplateTable::prepare_invoke(Register method,
     __ movl(recv, flags);
     __ andl(recv, 0xFF);
     if (TaggedStackInterpreter) __ shll(recv, 1);  // index*2
-    __ movptr(recv, Address(rsp, recv, Address::times_8,
-                            -Interpreter::expr_offset_in_bytes(1)));
-    __ verify_oop(recv);
+    Address recv_addr(rsp, recv, Address::times_8, -Interpreter::expr_offset_in_bytes(1));
+    if (is_invokedynamic) {
+      __ lea(recv, recv_addr);
+    } else {
+      __ movptr(recv, recv_addr);
+      __ verify_oop(recv);
+    }
   }
 
   // do null check if needed
@@ -2878,10 +2883,14 @@ void TemplateTable::prepare_invoke(Register method,
   ConstantPoolCacheEntry::verify_tosBits();
   // load return address
   {
-    ExternalAddress return_5((address)Interpreter::return_5_addrs_by_index_table());
-    ExternalAddress return_3((address)Interpreter::return_3_addrs_by_index_table());
-    __ lea(rscratch1, (is_invokeinterface ? return_5 : return_3));
-    __ movptr(flags, Address(rscratch1, flags, Address::times_8));
+    address table_addr;
+    if (is_invokeinterface || is_invokedynamic)
+      table_addr = (address)Interpreter::return_5_addrs_by_index_table();
+    else
+      table_addr = (address)Interpreter::return_3_addrs_by_index_table();
+    ExternalAddress table(table_addr);
+    __ lea(rscratch1, table);
+    __ movptr(flags, Address(rscratch1, flags, Address::times_ptr));
   }
 
   // push return address
@@ -2947,7 +2956,7 @@ void TemplateTable::invokevirtual_helper(Register index,
 
 void TemplateTable::invokevirtual(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
 
   // rbx: index
   // rcx: receiver
@@ -2959,7 +2968,7 @@ void TemplateTable::invokevirtual(int byte_no) {
 
 void TemplateTable::invokespecial(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
@@ -2969,7 +2978,7 @@ void TemplateTable::invokespecial(int byte_no) {
 
 void TemplateTable::invokestatic(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rbx, noreg, byte_no, bytecode());
+  prepare_invoke(rbx, noreg, byte_no);
   // do the call
   __ verify_oop(rbx);
   __ profile_call(rax);
@@ -2983,7 +2992,7 @@ void TemplateTable::fast_invokevfinal(int byte_no) {
 
 void TemplateTable::invokeinterface(int byte_no) {
   transition(vtos, vtos);
-  prepare_invoke(rax, rbx, byte_no, bytecode());
+  prepare_invoke(rax, rbx, byte_no);
 
   // rax: Interface
   // rbx: index
@@ -3072,7 +3081,24 @@ void TemplateTable::invokedynamic(int byte_no) {
     return;
   }
 
-  __ stop("invokedynamic NYI");//6815692//
+  prepare_invoke(rax, rbx, byte_no);
+
+  // rax: CallSite object (f1)
+  // rbx: unused (f2)
+  // rcx: receiver address
+  // rdx: flags (unused)
+
+  if (ProfileInterpreter) {
+    Label L;
+    // %%% should make a type profile for any invokedynamic that takes a ref argument
+    // profile this call
+    __ profile_call(r13);
+  }
+
+  __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
+  __ null_check(rcx);
+  __ prepare_to_jump_from_interpreted();
+  __ jump_to_method_handle_entry(rcx, rdx);
 }
 
 
@@ -22,8 +22,7 @@
  *
  */
 
-  static void prepare_invoke(Register method, Register index, int byte_no,
-                             Bytecodes::Code code);
+  static void prepare_invoke(Register method, Register index, int byte_no);
   static void invokevirtual_helper(Register index, Register recv,
                                    Register flags);
   static void volatile_barrier(Assembler::Membar_mask_bits order_constraint);
@@ -2511,23 +2511,12 @@ void ClassFileParser::java_dyn_MethodHandle_fix_pre(constantPoolHandle cp,
       fac_ptr->nonstatic_byte_count -= 1;
       (*fields_ptr)->ushort_at_put(i + instanceKlass::signature_index_offset,
                                    word_sig_index);
-      if (wordSize == jintSize) {
-        fac_ptr->nonstatic_word_count += 1;
-      } else {
-        fac_ptr->nonstatic_double_count += 1;
-      }
+      fac_ptr->nonstatic_word_count += 1;
 
-      FieldAllocationType atype = (FieldAllocationType) (*fields_ptr)->ushort_at(i+4);
+      FieldAllocationType atype = (FieldAllocationType) (*fields_ptr)->ushort_at(i + instanceKlass::low_offset);
       assert(atype == NONSTATIC_BYTE, "");
       FieldAllocationType new_atype = NONSTATIC_WORD;
-      if (wordSize > jintSize) {
-        if (Universe::field_type_should_be_aligned(T_LONG)) {
-          atype = NONSTATIC_ALIGNED_DOUBLE;
-        } else {
-          atype = NONSTATIC_DOUBLE;
-        }
-      }
-      (*fields_ptr)->ushort_at_put(i+4, new_atype);
+      (*fields_ptr)->ushort_at_put(i + instanceKlass::low_offset, new_atype);
 
       found_vmentry = true;
       break;
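Note: the field bookkeeping above replaces the magic slots i+4/i+5 with named instanceKlass::low_offset/high_offset slots holding the two 16-bit halves of a field offset. A sketch of the split and reassembly; the extract_* helpers are modeled here to match their use in the hunks, not quoted from HotSpot:

    #include <cassert>
    #include <cstdint>

    static inline uint16_t extract_low_short_from_int(int32_t x)  { return (uint16_t)(x & 0xffff); }
    static inline uint16_t extract_high_short_from_int(int32_t x) { return (uint16_t)((x >> 16) & 0xffff); }
    static inline int32_t  combine_shorts(uint16_t hi, uint16_t lo) {
      return (int32_t)(((uint32_t)hi << 16) | lo);
    }

    int main() {
      const int32_t real_offset = 0x00012344;
      // The two short_at_put slots round-trip the full 32-bit offset.
      assert(combine_shorts(extract_high_short_from_int(real_offset),
                            extract_low_short_from_int(real_offset)) == real_offset);
      return 0;
    }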
@@ -3085,7 +3074,7 @@ instanceKlassHandle ClassFileParser::parseClassFile(symbolHandle name,
   int len = fields->length();
   for (int i = 0; i < len; i += instanceKlass::next_offset) {
     int real_offset;
-    FieldAllocationType atype = (FieldAllocationType) fields->ushort_at(i+4);
+    FieldAllocationType atype = (FieldAllocationType) fields->ushort_at(i + instanceKlass::low_offset);
     switch (atype) {
       case STATIC_OOP:
         real_offset = next_static_oop_offset;
@@ -3173,8 +3162,8 @@ instanceKlassHandle ClassFileParser::parseClassFile(symbolHandle name,
       default:
         ShouldNotReachHere();
     }
-    fields->short_at_put(i+4, extract_low_short_from_int(real_offset) );
-    fields->short_at_put(i+5, extract_high_short_from_int(real_offset) );
+    fields->short_at_put(i + instanceKlass::low_offset,  extract_low_short_from_int(real_offset));
+    fields->short_at_put(i + instanceKlass::high_offset, extract_high_short_from_int(real_offset));
   }
 
   // Size of instances
@@ -414,9 +414,8 @@ int nmethod::total_size() const {
 }
 
 const char* nmethod::compile_kind() const {
-  if (method() == NULL)    return "unloaded";
-  if (is_native_method())  return "c2n";
   if (is_osr_method())     return "osr";
+  if (method() != NULL && is_native_method())  return "c2n";
   return NULL;
 }
 
@@ -1127,6 +1126,9 @@ void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
   }
   flags.state = unloaded;
 
+  // Log the unloading.
+  log_state_change();
+
   // The methodOop is gone at this point
   assert(_method == NULL, "Tautology");
 
@@ -1137,8 +1139,6 @@ void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
 
 void nmethod::invalidate_osr_method() {
   assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod");
-  if (_entry_bci != InvalidOSREntryBci)
-    inc_decompile_count();
   // Remove from list of active nmethods
   if (method() != NULL)
     instanceKlass::cast(method()->method_holder())->remove_osr_nmethod(this);
@@ -1146,59 +1146,63 @@ void nmethod::invalidate_osr_method() {
   _entry_bci = InvalidOSREntryBci;
 }
 
-void nmethod::log_state_change(int state) const {
+void nmethod::log_state_change() const {
   if (LogCompilation) {
     if (xtty != NULL) {
       ttyLocker ttyl;  // keep the following output all in one block
-      xtty->begin_elem("make_not_entrant %sthread='" UINTX_FORMAT "'",
-                       (state == zombie ? "zombie='1' " : ""),
-                       os::current_thread_id());
+      if (flags.state == unloaded) {
+        xtty->begin_elem("make_unloaded thread='" UINTX_FORMAT "'",
+                         os::current_thread_id());
+      } else {
+        xtty->begin_elem("make_not_entrant thread='" UINTX_FORMAT "'%s",
+                         os::current_thread_id(),
+                         (flags.state == zombie ? " zombie='1'" : ""));
+      }
       log_identity(xtty);
       xtty->stamp();
       xtty->end_elem();
     }
   }
-  if (PrintCompilation) {
-    print_on(tty, state == zombie ? "made zombie " : "made not entrant ");
+  if (PrintCompilation && flags.state != unloaded) {
+    print_on(tty, flags.state == zombie ? "made zombie " : "made not entrant ");
    tty->cr();
   }
 }
 
 // Common functionality for both make_not_entrant and make_zombie
-void nmethod::make_not_entrant_or_zombie(int state) {
+bool nmethod::make_not_entrant_or_zombie(int state) {
   assert(state == zombie || state == not_entrant, "must be zombie or not_entrant");
 
-  // Code for an on-stack-replacement nmethod is removed when a class gets unloaded.
-  // They never become zombie/non-entrant, so the nmethod sweeper will never remove
-  // them. Instead the entry_bci is set to InvalidOSREntryBci, so the osr nmethod
-  // will never be used anymore. That the nmethods only gets removed when class unloading
-  // happens, make life much simpler, since the nmethods are not just going to disappear
-  // out of the blue.
-  if (is_osr_method()) {
-    if (osr_entry_bci() != InvalidOSREntryBci) {
-      // only log this once
-      log_state_change(state);
-    }
-    invalidate_osr_method();
-    return;
+  // If the method is already zombie there is nothing to do
+  if (is_zombie()) {
+    return false;
   }
 
-  // If the method is already zombie or set to the state we want, nothing to do
-  if (is_zombie() || (state == not_entrant && is_not_entrant())) {
-    return;
-  }
-
-  log_state_change(state);
-
   // Make sure the nmethod is not flushed in case of a safepoint in code below.
   nmethodLocker nml(this);
 
   {
+    // invalidate osr nmethod before acquiring the patching lock since
+    // they both acquire leaf locks and we don't want a deadlock.
+    // This logic is equivalent to the logic below for patching the
+    // verified entry point of regular methods.
+    if (is_osr_method()) {
+      // this effectively makes the osr nmethod not entrant
+      invalidate_osr_method();
+    }
+
     // Enter critical section. Does not block for safepoint.
     MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
+
+    if (flags.state == state) {
+      // another thread already performed this transition so nothing
+      // to do, but return false to indicate this.
+      return false;
+    }
+
     // The caller can be calling the method statically or through an inline
     // cache call.
-    if (!is_not_entrant()) {
+    if (!is_osr_method() && !is_not_entrant()) {
       NativeJump::patch_verified_entry(entry_point(), verified_entry_point(),
                                        SharedRuntime::get_handle_wrong_method_stub());
       assert (NativeJump::instruction_size == nmethod::_zombie_instruction_size, "");
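Note: the rewritten make_not_entrant_or_zombie turns the state change into a race-safe transition — the "did another thread beat us?" test and the state write both happen under the patching lock, and only the winning thread returns true. A stripped-down model of that pattern in portable C++, with std::mutex standing in for the VM's Patching_lock:

    #include <mutex>

    enum State { in_use, not_entrant, zombie, unloaded };

    struct NMethodModel {
      State      state = in_use;
      std::mutex patching_lock;  // stand-in for Patching_lock

      bool make_not_entrant_or_zombie(State target) {
        if (state == zombie) return false;      // already terminal, nothing to do
        std::lock_guard<std::mutex> guard(patching_lock);
        if (state == target) return false;      // another thread won the race
        state = target;                         // we perform the transition
        return true;                            // caller may now log it exactly once
      }
    };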
@@ -1217,6 +1221,10 @@ void nmethod::make_not_entrant_or_zombie(int state) {
 
     // Change state
     flags.state = state;
+
+    // Log the transition once
+    log_state_change();
+
   } // leave critical region under Patching_lock
 
   if (state == not_entrant) {
@@ -1240,7 +1248,6 @@ void nmethod::make_not_entrant_or_zombie(int state) {
   // It's a true state change, so mark the method as decompiled.
   inc_decompile_count();
 
-
   // zombie only - if a JVMTI agent has enabled the CompiledMethodUnload event
   // and it hasn't already been reported for this nmethod then report it now.
   // (the event may have been reported earilier if the GC marked it for unloading).
@@ -1268,7 +1275,7 @@ void nmethod::make_not_entrant_or_zombie(int state) {
 
   // Check whether method got unloaded at a safepoint before this,
   // if so we can skip the flushing steps below
-  if (method() == NULL) return;
+  if (method() == NULL) return true;
 
   // Remove nmethod from method.
   // We need to check if both the _code and _from_compiled_code_entry_point
@@ -1282,6 +1289,8 @@ void nmethod::make_not_entrant_or_zombie(int state) {
     HandleMark hm;
     method()->clear_code();
   }
+
+  return true;
 }
 
 
@@ -252,7 +252,9 @@ class nmethod : public CodeBlob {
   void* operator new(size_t size, int nmethod_size);
 
   const char* reloc_string_for(u_char* begin, u_char* end);
-  void make_not_entrant_or_zombie(int state);
+  // Returns true if this thread changed the state of the nmethod or
+  // false if another thread performed the transition.
+  bool make_not_entrant_or_zombie(int state);
   void inc_decompile_count();
 
   // used to check that writes to nmFlags are done consistently.
@@ -375,10 +377,12 @@ class nmethod : public CodeBlob {
   bool is_zombie() const   { return flags.state == zombie; }
   bool is_unloaded() const { return flags.state == unloaded; }
 
-  // Make the nmethod non entrant. The nmethod will continue to be alive.
-  // It is used when an uncommon trap happens.
-  void make_not_entrant() { make_not_entrant_or_zombie(not_entrant); }
-  void make_zombie()      { make_not_entrant_or_zombie(zombie); }
+  // Make the nmethod non entrant. The nmethod will continue to be
+  // alive. It is used when an uncommon trap happens. Returns true
+  // if this thread changed the state of the nmethod or false if
+  // another thread performed the transition.
+  bool make_not_entrant() { return make_not_entrant_or_zombie(not_entrant); }
+  bool make_zombie()      { return make_not_entrant_or_zombie(zombie); }
 
   // used by jvmti to track if the unload event has been reported
   bool unload_reported() { return _unload_reported; }
@@ -563,7 +567,7 @@ class nmethod : public CodeBlob {
   // Logging
   void log_identity(xmlStream* log) const;
   void log_new_nmethod() const;
-  void log_state_change(int state) const;
+  void log_state_change() const;
 
   // Prints a comment for one native instruction (reloc info, pc desc)
   void print_code_comment_on(outputStream* st, int column, address begin, address end);