# ruby/lib/ruby_vm/mjit/insn_compiler.rb

module RubyVM::MJIT
  class InsnCompiler
    # @param cb [CodeBlock]
    # @param ocb [CodeBlock]
    # @param exit_compiler [RubyVM::MJIT::ExitCompiler]
    def initialize(cb, ocb, exit_compiler)
      @ocb = ocb
      @exit_compiler = exit_compiler
      @invariants = Invariants.new(cb, ocb, exit_compiler)
      @gc_refs = [] # TODO: GC offsets?
      # freeze # workaround a binding.irb issue. TODO: resurrect this
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param insn `RubyVM::MJIT::Instruction`
    def compile(jit, ctx, asm, insn)
      asm.incr_counter(:mjit_insns_count)
      asm.comment("Insn: #{insn.name}")

      # 30/101 insns are supported so far
      case insn.name
      when :nop then nop(jit, ctx, asm)
      # getlocal
      # setlocal
      # getblockparam
      # setblockparam
      # getblockparamproxy
      # getspecial
      # setspecial
      when :getinstancevariable then getinstancevariable(jit, ctx, asm)
      when :setinstancevariable then setinstancevariable(jit, ctx, asm)
      # getclassvariable
      # setclassvariable
      # opt_getconstant_path
      # getconstant
      # setconstant
      # getglobal
      # setglobal
      when :putnil then putnil(jit, ctx, asm)
      when :putself then putself(jit, ctx, asm)
      when :putobject then putobject(jit, ctx, asm)
      # putspecialobject
      # putstring
      # concatstrings
      # anytostring
      # toregexp
      # intern
      # newarray
      # newarraykwsplat
      # duparray
      # duphash
      # expandarray
      # concatarray
      # splatarray
      # newhash
      # newrange
      when :pop then pop(jit, ctx, asm)
      when :dup then dup(jit, ctx, asm)
      # dupn
      # swap
      # opt_reverse
      # topn
      # setn
      # adjuststack
      # defined
      # checkmatch
      # checkkeyword
      # checktype
      # defineclass
      # definemethod
      # definesmethod
      # send
      when :opt_send_without_block then opt_send_without_block(jit, ctx, asm)
      # objtostring
      # opt_str_freeze
      when :opt_nil_p then opt_nil_p(jit, ctx, asm)
      # opt_str_uminus
      # opt_newarray_max
      # opt_newarray_min
      # invokesuper
      # invokeblock
      when :leave then leave(jit, ctx, asm)
      # throw
      when :jump then jump(jit, ctx, asm)
      # branchif
      when :branchunless then branchunless(jit, ctx, asm)
      # branchnil
      # once
      # opt_case_dispatch
      when :opt_plus then opt_plus(jit, ctx, asm)
      when :opt_minus then opt_minus(jit, ctx, asm)
      when :opt_mult then opt_mult(jit, ctx, asm)
      when :opt_div then opt_div(jit, ctx, asm)
      # opt_mod
      # opt_eq
      # opt_neq
      when :opt_lt then opt_lt(jit, ctx, asm)
      # opt_le
      # opt_gt
      # opt_ge
      when :opt_ltlt then opt_ltlt(jit, ctx, asm)
      # opt_and
      # opt_or
      when :opt_aref then opt_aref(jit, ctx, asm)
      # opt_aset
      # opt_aset_with
      # opt_aref_with
      when :opt_length then opt_length(jit, ctx, asm)
      when :opt_size then opt_size(jit, ctx, asm)
      when :opt_empty_p then opt_empty_p(jit, ctx, asm)
      when :opt_succ then opt_succ(jit, ctx, asm)
      when :opt_not then opt_not(jit, ctx, asm)
      when :opt_regexpmatch2 then opt_regexpmatch2(jit, ctx, asm)
      # invokebuiltin
      # opt_invokebuiltin_delegate
      # opt_invokebuiltin_delegate_leave
      when :getlocal_WC_0 then getlocal_WC_0(jit, ctx, asm)
      when :getlocal_WC_1 then getlocal_WC_1(jit, ctx, asm)
      # setlocal_WC_0
      # setlocal_WC_1
      when :putobject_INT2FIX_0_ then putobject_INT2FIX_0_(jit, ctx, asm)
      when :putobject_INT2FIX_1_ then putobject_INT2FIX_1_(jit, ctx, asm)
      else CantCompile
      end
    end

    private

    #
    # Insns
    #

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def nop(jit, ctx, asm)
      # Do nothing
      KeepCompiling
    end

    # getlocal
    # setlocal
    # getblockparam
    # setblockparam
    # getblockparamproxy
    # getspecial
    # setspecial

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getinstancevariable(jit, ctx, asm)
      # Specialize on a compile-time receiver, and split a block for chain guards
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      id = jit.operand(0)
      comptime_obj = jit.peek_at_self

      jit_getivar(jit, ctx, asm, comptime_obj, id)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setinstancevariable(jit, ctx, asm)
      id = jit.operand(0)
      ivc = jit.operand(1)

      # rb_vm_setinstancevariable could raise exceptions
      jit_prepare_routine_call(jit, ctx, asm)

      val_opnd = ctx.stack_pop
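      # C arguments are passed in the SysV registers rdi, rsi, rdx, rcx, r8,
      # which the moves below fill with (iseq, self, id, val, ivc).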
      asm.comment('rb_vm_setinstancevariable')
      asm.mov(:rdi, jit.iseq.to_i)
      asm.mov(:rsi, [CFP, C.rb_control_frame_t.offsetof(:self)])
      asm.mov(:rdx, id)
      asm.mov(:rcx, val_opnd)
      asm.mov(:r8, ivc)
      asm.call(C.rb_vm_setinstancevariable)

      KeepCompiling
    end

    # getclassvariable
    # setclassvariable
    # opt_getconstant_path
    # getconstant
    # setconstant
    # getglobal
    # setglobal

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putnil(jit, ctx, asm)
      putobject(jit, ctx, asm, val: Qnil)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putself(jit, ctx, asm)
      stack_top = ctx.stack_push
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject(jit, ctx, asm, val: jit.operand(0))
      # Push it to the stack
      stack_top = ctx.stack_push
      if asm.imm32?(val)
        asm.mov(stack_top, val)
      else # 64-bit immediates can't be directly written to memory
        asm.mov(:rax, val)
        asm.mov(stack_top, :rax)
      end
      # TODO: GC offsets?

      KeepCompiling
    end

    # putspecialobject
    # putstring
    # concatstrings
    # anytostring
    # toregexp
    # intern
    # newarray
    # newarraykwsplat
    # duparray
    # duphash
    # expandarray
    # concatarray
    # splatarray
    # newhash
    # newrange

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def pop(jit, ctx, asm)
      ctx.stack_pop
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def dup(jit, ctx, asm)
      val1 = ctx.stack_opnd(0)
      val2 = ctx.stack_push
      asm.mov(:rax, val1)
      asm.mov(val2, :rax)
      KeepCompiling
    end

    # dupn
    # swap
    # opt_reverse
    # topn
    # setn
    # adjuststack
    # defined
    # checkmatch
    # checkkeyword
    # checktype
    # defineclass
    # definemethod
    # definesmethod
    # send

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_send_without_block(jit, ctx, asm)
      cd = C.rb_call_data.new(jit.operand(0))
      jit_call_method(jit, ctx, asm, cd)
    end

    # objtostring
    # opt_str_freeze

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_nil_p(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # opt_str_uminus
    # opt_newarray_max
    # opt_newarray_min
    # invokesuper
    # invokeblock

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def leave(jit, ctx, asm)
      assert_equal(ctx.stack_size, 1)

      jit_check_ints(jit, ctx, asm)

      asm.comment('pop stack frame')
      asm.lea(:rax, [CFP, C.rb_control_frame_t.size])
      asm.mov(CFP, :rax)
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)

      # Return a value (for compile_leave_exit)
      ret_opnd = ctx.stack_pop
      asm.mov(:rax, ret_opnd)

      # Set caller's SP and push a value to its stack (for JIT)
      asm.mov(SP, [CFP, C.rb_control_frame_t.offsetof(:sp)]) # Note: SP is in the position after popping a receiver and arguments
      asm.mov([SP], :rax)
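      # Note: CFP already points to the caller here, so jit_return below is read
      # from the frame we just popped, at CFP - rb_control_frame_t.size.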
      # Jump to cfp->jit_return
      asm.jmp([CFP, -C.rb_control_frame_t.size + C.rb_control_frame_t.offsetof(:jit_return)])

      EndBlock
    end

    # throw

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jump(jit, ctx, asm)
      # Check for interrupts, but only on backward branches that may create loops
      jump_offset = jit.operand(0)
      if jump_offset < 0
        jit_check_ints(jit, ctx, asm)
      end

      pc = jit.pc + C.VALUE.size * (jit.insn.len + jump_offset)
      stub_next_block(jit.iseq, pc, ctx, asm)
      EndBlock
    end

    # branchif

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def branchunless(jit, ctx, asm)
      # Check for interrupts, but only on backward branches that may create loops
      jump_offset = jit.operand(0)
      if jump_offset < 0
        jit_check_ints(jit, ctx, asm)
      end

      # TODO: skip check for known truthy
      # This `test` sets ZF only for Qnil and Qfalse, which let jz jump.
      val = ctx.stack_pop
      asm.test(val, ~Qnil)
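      # Assuming the usual CRuby encoding (Qfalse == 0, Qnil a single-bit immediate),
      # val & ~Qnil is zero exactly for Qfalse and Qnil, so ZF here means "falsy".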

      # Set stubs
      branch_stub = BranchStub.new(
        iseq: jit.iseq,
        shape: Default,
        target0: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * (jit.insn.len + jump_offset)), # branch target
        target1: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * jit.insn.len), # fallthrough
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.target1.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, false)
        @ocb.write(ocb_asm)
      end

      # Jump to target0 on jz
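      # The stub is re-assembled as targets get compiled: Default emits both jumps,
      # while Next0/Next1 mean target0/target1 is laid out right after this block,
      # so only the opposite jump is needed.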
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment("branchunless #{branch_stub.shape}")
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.jz(branch_stub.target0.address)
            branch_asm.jmp(branch_stub.target1.address)
          in Next0
            branch_asm.jnz(branch_stub.target1.address)
          in Next1
            branch_asm.jz(branch_stub.target0.address)
          end
        end
      end
      branch_stub.compile.call(asm)

      EndBlock
    end

    # branchnil
    # once
    # opt_case_dispatch

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_plus(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless @invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_PLUS)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)
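
        # Fixnum VALUEs are (n << 1) | 1, so (2a+1) - 1 + (2b+1) == 2(a+b) + 1:
        # clear one tag bit, add, and the result is already tagged; jo catches overflow.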
        asm.mov(:rax, recv_opnd)
        asm.sub(:rax, 1) # untag
        asm.mov(:rcx, obj_opnd)
        asm.add(:rax, :rcx)
        asm.jo(side_exit)

        dst_opnd = ctx.stack_push
        asm.mov(dst_opnd, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_minus(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless @invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_MINUS)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.mov(:rax, recv_opnd)
        asm.mov(:rcx, obj_opnd)
        asm.sub(:rax, :rcx)
        asm.jo(side_exit)
        asm.add(:rax, 1) # re-tag

        dst_opnd = ctx.stack_push
        asm.mov(dst_opnd, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_mult(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_div(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # opt_mod
    # opt_eq
    # opt_neq

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_lt(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless @invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_LT)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)
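
        # Fixnum tagging preserves order ((2a+1) < (2b+1) iff a < b), so the tagged
        # VALUEs can be compared directly and cmovl selects Qtrue/Qfalse branchlessly.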
        asm.mov(:rax, obj_opnd)
        asm.cmp(recv_opnd, :rax)
        asm.mov(:rax, Qfalse)
        asm.mov(:rcx, Qtrue)
        asm.cmovl(:rax, :rcx)

        dst_opnd = ctx.stack_push
        asm.mov(dst_opnd, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # opt_le
    # opt_gt
    # opt_ge

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_ltlt(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # opt_and
    # opt_or

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_aref(jit, ctx, asm)
      cd = C.rb_call_data.new(jit.operand(0))
      argc = C.vm_ci_argc(cd.ci)

      if argc != 1
        asm.incr_counter(:optaref_argc_not_one)
        return CantCompile
      end

      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      side_exit = side_exit(jit, ctx)

      if comptime_recv.class == Array && fixnum?(comptime_obj)
        asm.incr_counter(:optaref_array)
        CantCompile
      elsif comptime_recv.class == Hash
        unless @invariants.assume_bop_not_redefined(jit, C.HASH_REDEFINED_OP_FLAG, C.BOP_AREF)
          return CantCompile
        end

        recv_opnd = ctx.stack_opnd(1)

        # Guard that the receiver is a Hash
        not_hash_exit = counted_exit(side_exit, :optaref_not_hash)
        if jit_guard_known_class(jit, ctx, asm, comptime_recv.class, recv_opnd, comptime_recv, not_hash_exit) == CantCompile
          return CantCompile
        end

        # Prepare to call rb_hash_aref(). It might call #hash on the key.
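        # PC/SP need to be written back first (jit_prepare_routine_call below) since
        # #hash may re-enter Ruby, allocate, or raise and expects a consistent frame.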
        jit_prepare_routine_call(jit, ctx, asm)

        asm.comment('call rb_hash_aref')
        key_opnd = ctx.stack_opnd(0)
        recv_opnd = ctx.stack_opnd(1)
        asm.mov(:rdi, recv_opnd)
        asm.mov(:rsi, key_opnd)
        asm.call(C.rb_hash_aref)

        # Pop the key and the receiver
        ctx.stack_pop(2)

        stack_ret = ctx.stack_push
        asm.mov(stack_ret, :rax)

        # Let guard chains share the same successor
        jump_to_next_insn(jit, ctx, asm)
        EndBlock
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # opt_aset
    # opt_aset_with
    # opt_aref_with

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_length(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_size(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_empty_p(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_succ(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_not(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_regexpmatch2(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # invokebuiltin
    # opt_invokebuiltin_delegate
    # opt_invokebuiltin_delegate_leave

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getlocal_WC_0(jit, ctx, asm)
      # Get operands
      idx = jit.operand(0)
      level = 0

      # Get EP
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:ep)])

      # Get a local variable
      asm.mov(:rax, [:rax, -idx * C.VALUE.size])

      # Push it to the stack
      stack_top = ctx.stack_push
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getlocal_WC_1(jit, ctx, asm)
      # Get operands
      idx = jit.operand(0)
      level = 1

      # Get EP
      ep_reg = jit_get_ep(asm, level)

      # Get a local variable
      asm.mov(:rax, [ep_reg, -idx * C.VALUE.size])

      # Push it to the stack
      stack_top = ctx.stack_push
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    # setlocal_WC_0
    # setlocal_WC_1

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_0_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(0))
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_1_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(1))
    end

    #
    # Helpers
    #

    # @param asm [RubyVM::MJIT::Assembler]
    def guard_object_is_heap(asm, object_opnd, side_exit)
      asm.comment('guard object is heap')

      # Test that the object is not an immediate
      asm.test(object_opnd, C.RUBY_IMMEDIATE_MASK)
      asm.jnz(side_exit)

      # Test that the object is not false
      asm.cmp(object_opnd, Qfalse)
      asm.je(side_exit)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_chain_guard(opcode, jit, ctx, asm, side_exit, limit: 10)
      case opcode
      when :je, :jne, :jnz
        # ok
      else
        raise ArgumentError, "jit_chain_guard: unexpected opcode #{opcode.inspect}"
      end
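
      # On guard failure, jump to a stub that recompiles this PC with chain_depth + 1,
      # chaining up to `limit` specialized blocks before falling back to the side exit.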
      if ctx.chain_depth < limit
        deeper = ctx.dup
        deeper.chain_depth += 1

        branch_stub = BranchStub.new(
          iseq: jit.iseq,
          shape: Default,
          target0: BranchTarget.new(ctx: deeper, pc: jit.pc),
        )
        branch_stub.target0.address = Assembler.new.then do |ocb_asm|
          @exit_compiler.compile_branch_stub(deeper, ocb_asm, branch_stub, true)
          @ocb.write(ocb_asm)
        end
        branch_stub.compile = proc do |branch_asm|
          branch_asm.comment('jit_chain_guard')
          branch_asm.stub(branch_stub) do
            case branch_stub.shape
            in Default
              branch_asm.public_send(opcode, branch_stub.target0.address)
            end
          end
        end
        branch_stub.compile.call(asm)
      else
        asm.public_send(opcode, side_exit)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_guard_known_class(jit, ctx, asm, known_klass, obj_opnd, comptime_obj, side_exit, limit: 5)
      if known_klass == NilClass
        asm.incr_counter(:send_guard_nil)
        return CantCompile
      elsif known_klass == TrueClass
        asm.incr_counter(:send_guard_true)
        return CantCompile
      elsif known_klass == FalseClass
        asm.incr_counter(:send_guard_false)
        return CantCompile
      elsif known_klass == Integer
        asm.incr_counter(:send_guard_integer)
        return CantCompile
      elsif known_klass == Symbol
        asm.incr_counter(:send_guard_symbol)
        return CantCompile
      elsif known_klass == Float
        asm.incr_counter(:send_guard_float)
        return CantCompile
      elsif known_klass.singleton_class?
        asm.comment('guard known object with singleton class')
        asm.mov(:rax, C.to_value(comptime_obj))
        asm.cmp(obj_opnd, :rax)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      else
        # If obj_opnd isn't already a register, load it.
        if obj_opnd.is_a?(Array)
          asm.mov(:rax, obj_opnd)
          obj_opnd = :rax
        end

        # Check that the receiver is a heap object
        # Note: if we get here, the class doesn't have immediate instances.
        asm.comment('guard not immediate')
        asm.test(obj_opnd, C.RUBY_IMMEDIATE_MASK)
        jit_chain_guard(:jnz, jit, ctx, asm, side_exit, limit:)
        asm.cmp(obj_opnd, Qfalse)
        jit_chain_guard(:je, jit, ctx, asm, side_exit, limit:)

        # Bail if receiver class is different from known_klass
        klass_opnd = [obj_opnd, C.RBasic.offsetof(:klass)]
        asm.comment('guard known class')
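        # to_value also records known_klass in @gc_refs so the class object embedded
        # in this machine code is kept alive (relocating it is still a TODO).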
        asm.mov(:rcx, to_value(known_klass))
        asm.cmp(klass_opnd, :rcx)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_prepare_routine_call(jit, ctx, asm)
      jit_save_pc(jit, asm)
      jit_save_sp(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_save_pc(jit, asm)
      next_pc = jit.pc + jit.insn.len * C.VALUE.size # Use the next one for backtrace and side exits
      asm.comment('save PC to CFP')
      asm.mov(:rax, next_pc)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_save_sp(jit, ctx, asm)
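      # ctx.sp_offset tracks stack pushes/pops that haven't been flushed to cfp->sp;
      # write it back before anything that may inspect the VM stack, GC, or raise.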
      if ctx.sp_offset != 0
        asm.comment('save SP to CFP')
        asm.lea(SP, ctx.sp_opnd(0))
        asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
        ctx.sp_offset = 0
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jump_to_next_insn(jit, ctx, asm)
      reset_depth = ctx.dup
      reset_depth.chain_depth = 0

      next_pc = jit.pc + jit.insn.len * C.VALUE.size
      stub_next_block(jit.iseq, next_pc, reset_depth, asm, comment: 'jump_to_next_insn')
    end

    # rb_vm_check_ints
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_check_ints(jit, ctx, asm)
      asm.comment('RUBY_VM_CHECK_INTS(ec)')
      asm.mov(:eax, [EC, C.rb_execution_context_t.offsetof(:interrupt_flag)])
      asm.test(:eax, :eax)
      asm.jnz(side_exit(jit, ctx))
    end

    # vm_get_ep
    # @param asm [RubyVM::MJIT::Assembler]
    # @param level [Integer]
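    # Example: a block reading a local of its enclosing method passes level 1,
    # following ep[VM_ENV_DATA_INDEX_SPECVAL] & ~0x03 once to reach the outer EP.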
    def jit_get_ep(asm, level)
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:ep)])
      level.times do
        # GET_PREV_EP: ep[VM_ENV_DATA_INDEX_SPECVAL] & ~0x03
        asm.mov(:rax, [:rax, C.VALUE.size * C.VM_ENV_DATA_INDEX_SPECVAL])
        asm.and(:rax, ~0x03)
      end
      return :rax
    end

    # vm_getivar
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_getivar(jit, ctx, asm, comptime_obj, ivar_id, obj_opnd = nil)
      side_exit = side_exit(jit, ctx)
      starting_ctx = ctx.dup # copy for jit_chain_guard

      # Guard not special const
      if C.SPECIAL_CONST_P(comptime_obj)
        asm.incr_counter(:getivar_special_const)
        return CantCompile
      end

      if obj_opnd.nil? # getivar
        asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
      else # attr_reader
        asm.mov(:rax, obj_opnd)
      end
      guard_object_is_heap(asm, :rax, counted_exit(side_exit, :getivar_not_heap))

      case C.BUILTIN_TYPE(comptime_obj)
      when C.T_OBJECT
        # This is the only supported case for now (ROBJECT_IVPTR)
      else
        asm.incr_counter(:getivar_not_t_object)
        return CantCompile
      end

      shape_id = C.rb_shape_get_shape_id(comptime_obj)
      if shape_id == C.OBJ_TOO_COMPLEX_SHAPE_ID
        asm.incr_counter(:getivar_too_complex)
        return CantCompile
      end
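      # Guarding the shape ID pins down the object's ivar layout, so the index
      # resolved at compile time below stays valid at run time.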
      asm.comment('guard shape')
      asm.cmp(DwordPtr[:rax, C.rb_shape_id_offset], shape_id)
      jit_chain_guard(:jne, jit, starting_ctx, asm, counted_exit(side_exit, :getivar_megamorphic))

      index = C.rb_shape_get_iv_index(shape_id, ivar_id)
      if index
        asm.comment('ROBJECT_IVPTR')
        if C.FL_TEST_RAW(comptime_obj, C.ROBJECT_EMBED)
          # Access embedded array
          asm.mov(:rax, [:rax, C.RObject.offsetof(:as, :ary) + (index * C.VALUE.size)])
        else
          # Pull out an ivar table on heap
          asm.mov(:rax, [:rax, C.RObject.offsetof(:as, :heap, :ivptr)])
          # Read the table
          asm.mov(:rax, [:rax, index * C.VALUE.size])
        end
        val_opnd = :rax
      else
        val_opnd = Qnil
      end

      if obj_opnd
        ctx.stack_pop # pop receiver for attr_reader
      end
      stack_opnd = ctx.stack_push
      asm.mov(stack_opnd, val_opnd)

      # Let guard chains share the same successor
      jump_to_next_insn(jit, ctx, asm)
      EndBlock
    end

    # vm_call_method (vm_sendish -> vm_call_general -> vm_call_method)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
    def jit_call_method(jit, ctx, asm, cd)
      ci = cd.ci
      argc = C.vm_ci_argc(ci)
      mid = C.vm_ci_mid(ci)
      flags = C.vm_ci_flag(ci)

      # Specialize on a compile-time receiver, and split a block for chain guards
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      # Generate a side exit
      side_exit = side_exit(jit, ctx)

      # Calculate a receiver index
      if flags & C.VM_CALL_KW_SPLAT != 0
        # recv_index calculation may not work for this
        asm.incr_counter(:send_kw_splat)
        return CantCompile
      end
      recv_index = argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1)

      # Get a compile-time receiver and its class
      comptime_recv = jit.peek_at_stack(recv_index)
      comptime_recv_klass = C.rb_class_of(comptime_recv)

      # Guard the receiver class (part of vm_search_method_fastpath)
      recv_opnd = ctx.stack_opnd(recv_index)
      megamorphic_exit = counted_exit(side_exit, :send_klass_megamorphic)
      if jit_guard_known_class(jit, ctx, asm, comptime_recv_klass, recv_opnd, comptime_recv, megamorphic_exit) == CantCompile
        return CantCompile
      end

      # Do method lookup (vm_cc_cme(cc) != NULL)
      cme = C.rb_callable_method_entry(comptime_recv_klass, mid)
      if cme.nil?
        asm.incr_counter(:send_missing_cme)
        return CantCompile # We don't support vm_call_method_name
      end

      # The main check of vm_call_method before vm_call_method_each_type
      case C.METHOD_ENTRY_VISI(cme)
      when C.METHOD_VISI_PUBLIC
        # You can always call public methods
      when C.METHOD_VISI_PRIVATE
        # Allow only callsites without a receiver
        if flags & C.VM_CALL_FCALL == 0
          asm.incr_counter(:send_private)
          return CantCompile
        end
      when C.METHOD_VISI_PROTECTED
        asm.incr_counter(:send_protected)
        return CantCompile # TODO: support this
      else
        # TODO: Change them to a constant and use case-in instead
        raise 'unreachable'
      end

      # Invalidate on redefinition (part of vm_search_method_fastpath)
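      # Recording this assumption lets Invariants invalidate the compiled block if
      # `mid` is redefined later, instead of re-checking the method entry at run time.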
      @invariants.assume_method_lookup_stable(jit, cme)

      jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme, comptime_recv, recv_opnd)
    end

    # vm_call_method_each_type
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme, comptime_recv, recv_opnd)
      case cme.def.type
      when C.VM_METHOD_TYPE_ISEQ
        jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
      # when C.VM_METHOD_TYPE_NOTIMPLEMENTED
      when C.VM_METHOD_TYPE_CFUNC
        asm.incr_counter(:send_cfunc)
        return CantCompile
      when C.VM_METHOD_TYPE_ATTRSET
        asm.incr_counter(:send_attrset)
        return CantCompile
      when C.VM_METHOD_TYPE_IVAR
        jit_call_ivar(jit, ctx, asm, ci, cme, flags, argc, comptime_recv, recv_opnd)
      # when C.VM_METHOD_TYPE_MISSING
      when C.VM_METHOD_TYPE_BMETHOD
        asm.incr_counter(:send_bmethod)
        return CantCompile
      when C.VM_METHOD_TYPE_ALIAS
        asm.incr_counter(:send_alias)
        return CantCompile
      when C.VM_METHOD_TYPE_OPTIMIZED
        asm.incr_counter(:send_optimized)
        return CantCompile
      # when C.VM_METHOD_TYPE_UNDEF
      when C.VM_METHOD_TYPE_ZSUPER
        asm.incr_counter(:send_zsuper)
        return CantCompile
      when C.VM_METHOD_TYPE_REFINED
        asm.incr_counter(:send_refined)
        return CantCompile
      else
        asm.incr_counter(:send_not_implemented_type)
        return CantCompile
      end
    end

    # vm_call_iseq_setup
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
      iseq = def_iseq_ptr(cme.def)
      opt_pc = jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
      if opt_pc == CantCompile
        # We hit some unsupported path of vm_callee_setup_arg
        return CantCompile
      end

      if flags & C.VM_CALL_TAILCALL != 0
        # We don't support vm_call_iseq_setup_tailcall
        asm.incr_counter(:send_tailcall)
        return CantCompile
      end
      jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
    end

    # vm_call_iseq_setup_normal (vm_call_iseq_setup_2 -> vm_call_iseq_setup_normal)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
      # Save caller SP and PC before pushing a callee frame for backtrace and side exits
      asm.comment('save SP to caller CFP')
      sp_index = ctx.sp_offset - 1 - argc - ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1) # Pop receiver and arguments for side exits
      asm.lea(:rax, [SP, C.VALUE.size * sp_index])
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], :rax)

      asm.comment('save PC to caller CFP')
      next_pc = jit.pc + jit.insn.len * C.VALUE.size # Use the next one for backtrace and side exits
      asm.mov(:rax, next_pc)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax)

      frame_type = C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL
      jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, iseq, frame_type, next_pc)
    end

    # vm_call_ivar
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_ivar(jit, ctx, asm, ci, cme, flags, argc, comptime_recv, recv_opnd)
      if flags & C.VM_CALL_ARGS_SPLAT != 0
        asm.incr_counter(:send_ivar_splat)
        return CantCompile
      end
      if argc != 0
        asm.incr_counter(:send_ivar_arity)
        return CantCompile
      end
      if flags & C.VM_CALL_OPT_SEND != 0
        asm.incr_counter(:send_ivar_opt_send)
        return CantCompile
      end

      ivar_id = cme.def.body.attr.id

      if flags & C.VM_CALL_ARGS_BLOCKARG != 0
        asm.incr_counter(:send_ivar_blockarg)
        return CantCompile
      end

      jit_getivar(jit, ctx, asm, comptime_recv, ivar_id, recv_opnd)
    end

    # vm_push_frame
    #
    # Frame structure:
    # | args | locals | cme/cref | block_handler/prev EP | frame type (EP here) | stack bottom (SP here)
    #
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, iseq, frame_type, next_pc)
      # CHECK_VM_STACK_OVERFLOW0: next_cfp <= sp + (local_size + stack_max)
      asm.comment('stack overflow check')
      local_size = iseq.body.local_table_size - iseq.body.param.size
      asm.lea(:rax, ctx.sp_opnd(C.rb_control_frame_t.size + C.VALUE.size * (local_size + iseq.body.stack_max)))
      asm.cmp(CFP, :rax)
      asm.jbe(counted_exit(side_exit(jit, ctx), :send_stackoverflow))

      local_size.times do |i|
        asm.comment('set local variables') if i == 0
        local_index = ctx.sp_offset + i
        asm.mov([SP, C.VALUE.size * local_index], Qnil)
      end

      # This moves SP register. Don't side-exit after this.
      asm.comment('move SP register to callee stack')
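      # +3 covers the cme/cref, specval (block handler / prev EP), and frame type
      # slots laid out above the locals (see the frame structure diagram above).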
      sp_offset = ctx.sp_offset + local_size + 3
      asm.add(SP, C.VALUE.size * sp_offset)

      asm.comment('set cme')
      asm.mov(:rax, cme.to_i)
      asm.mov([SP, C.VALUE.size * -3], :rax)

      asm.comment('set specval')
      asm.mov([SP, C.VALUE.size * -2], C.VM_BLOCK_HANDLER_NONE)

      asm.comment('set frame type')
      asm.mov([SP, C.VALUE.size * -1], frame_type)

      asm.comment('move CFP register to callee CFP')
      asm.sub(CFP, C.rb_control_frame_t.size)

      # Not setting PC since JIT code will do that as needed
      asm.comment('set SP to callee CFP')
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
      asm.comment('set ISEQ to callee CFP')
      asm.mov(:rax, iseq.to_i)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:iseq)], :rax)
      asm.comment('set self to callee CFP')
      self_index = -(1 + argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1) + local_size + 3)
      asm.mov(:rax, [SP, C.VALUE.size * self_index])
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:self)], :rax)
      asm.comment('set EP to callee CFP')
      asm.lea(:rax, [SP, C.VALUE.size * -1])
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:ep)], :rax)
      asm.comment('set block_code to callee CFP')
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:block_code)], 0)
      asm.comment('set BP to callee CFP')
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:__bp__)], SP) # TODO: get rid of this!!

      # Stub cfp->jit_return
      return_ctx = ctx.dup
      return_ctx.stack_size -= argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1) # Pop args
      return_ctx.sp_offset = 1 # SP is in the position after popping a receiver and arguments
      branch_stub = BranchStub.new(
        iseq: jit.iseq,
        shape: Default,
        target0: BranchTarget.new(ctx: return_ctx, pc: next_pc),
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(return_ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment('set jit_return to callee CFP')
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.mov(:rax, branch_stub.target0.address)
            branch_asm.mov([CFP, C.rb_control_frame_t.offsetof(:jit_return)], :rax)
          end
        end
      end
      branch_stub.compile.call(asm)

      asm.comment('set callee CFP to ec->cfp')
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], CFP)

      # Jump to a stub for the callee ISEQ
      callee_ctx = Context.new
      stub_next_block(iseq, iseq.body.iseq_encoded.to_i, callee_ctx, asm)
      EndBlock
    end

    # vm_callee_setup_arg: Set up args and return opt_pc (or CantCompile)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
      if flags & C.VM_CALL_KW_SPLAT == 0
        if C.rb_simple_iseq_p(iseq)
          if jit_caller_setup_arg(jit, ctx, asm, flags) == CantCompile
            return CantCompile
          end
          if jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags) == CantCompile
            return CantCompile
          end

          if C.vm_ci_argc(ci) != iseq.body.param.lead_num
            # argument_arity_error
            return CantCompile
          end

          return 0
        else
          # We don't support the remaining `else if`s yet.
          return CantCompile
        end
      end

      # We don't support setup_parameters_complex
      return CantCompile
    end

    # CALLER_SETUP_ARG: Return CantCompile if not supported
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_caller_setup_arg(jit, ctx, asm, flags)
      if flags & C.VM_CALL_ARGS_SPLAT != 0
        # We don't support vm_caller_setup_arg_splat
        asm.incr_counter(:send_args_splat)
        return CantCompile
      end
      if flags & (C.VM_CALL_KWARG | C.VM_CALL_KW_SPLAT) != 0
        # We don't support keyword args either
        asm.incr_counter(:send_kwarg)
        return CantCompile
      end
    end

    # CALLER_REMOVE_EMPTY_KW_SPLAT: Return CantCompile if not supported
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags)
      if (flags & C.VM_CALL_KW_SPLAT) > 0
        # We don't support removing the last Hash argument
        asm.incr_counter(:send_kw_splat)
        return CantCompile
      end
    end

    def assert_equal(left, right)
      if left != right
        raise "'#{left.inspect}' was not '#{right.inspect}'"
      end
    end

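    # Fixnums are immediates tagged with the low bit: e.g. the Integer 3 has
    # VALUE 0b111 (3 << 1 | 1), while heap object pointers have the low bit 0.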
    def fixnum?(obj)
      flag = C.RUBY_FIXNUM_FLAG
      (C.to_value(obj) & flag) == flag
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
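    # Used when an insn wants to peek at run-time values: end the current block
    # with a stub at the same PC, so compilation resumes once the interpreter
    # actually reaches this insn and jit.at_current_insn? holds.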
    def defer_compilation(jit, ctx, asm)
      # Make a stub to compile the current insn
      stub_next_block(jit.iseq, jit.pc, ctx, asm, comment: 'defer_compilation')
    end

    def stub_next_block(iseq, pc, ctx, asm, comment: 'stub_next_block')
      branch_stub = BranchStub.new(
        iseq:,
        shape: Default,
        target0: BranchTarget.new(ctx:, pc:),
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment(comment)
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.jmp(branch_stub.target0.address)
          in Next0
            # Just write the block without a jump
          end
        end
      end
      branch_stub.compile.call(asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    def side_exit(jit, ctx)
      if side_exit = jit.side_exits[jit.pc]
        return side_exit
      end
      asm = Assembler.new
      @exit_compiler.compile_side_exit(jit, ctx, asm)
      jit.side_exits[jit.pc] = @ocb.write(asm)
    end

    def counted_exit(side_exit, name)
      asm = Assembler.new
      asm.incr_counter(name)
      asm.jmp(side_exit)
      @ocb.write(asm)
    end

    def def_iseq_ptr(cme_def)
      C.rb_iseq_check(cme_def.body.iseq.iseqptr)
    end

    def to_value(obj)
      @gc_refs << obj
      C.to_value(obj)
    end
  end
end