ruby/lib/ruby_vm/mjit/insn_compiler.rb

module RubyVM::MJIT
  class InsnCompiler
    # @param ocb [CodeBlock]
    # @param exit_compiler [RubyVM::MJIT::ExitCompiler]
    def initialize(ocb, exit_compiler)
      @ocb = ocb
      @exit_compiler = exit_compiler
      @invariants = Invariants.new(ocb, exit_compiler)
      # freeze # workaround a binding.irb issue. TODO: resurrect this
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param insn `RubyVM::MJIT::Instruction`
    def compile(jit, ctx, asm, insn)
      asm.incr_counter(:mjit_insns_count)
      asm.comment("Insn: #{insn.name}")

      # 11/101
      case insn.name
      # nop
      # getlocal
      # setlocal
      # getblockparam
      # setblockparam
      # getblockparamproxy
      # getspecial
      # setspecial
      # getinstancevariable
      # setinstancevariable
      # getclassvariable
      # setclassvariable
      # opt_getconstant_path
      # getconstant
      # setconstant
      # getglobal
      # setglobal
      when :putnil then putnil(jit, ctx, asm)
      when :putself then putself(jit, ctx, asm)
      when :putobject then putobject(jit, ctx, asm)
      # putspecialobject
      # putstring
      # concatstrings
      # anytostring
      # toregexp
      # intern
      # newarray
      # newarraykwsplat
      # duparray
      # duphash
      # expandarray
      # concatarray
      # splatarray
      # newhash
      # newrange
      # pop
      # dup
      # dupn
      # swap
      # opt_reverse
      # topn
      # setn
      # adjuststack
      # defined
      # checkmatch
      # checkkeyword
      # checktype
      # defineclass
      # definemethod
      # definesmethod
      # send
      when :opt_send_without_block then opt_send_without_block(jit, ctx, asm)
      # objtostring
      # opt_str_freeze
      # opt_nil_p
      # opt_str_uminus
      # opt_newarray_max
      # opt_newarray_min
      # invokesuper
      # invokeblock
      when :leave then leave(jit, ctx, asm)
      # throw
      # jump
      # branchif
      when :branchunless then branchunless(jit, ctx, asm)
      # branchnil
      # once
      # opt_case_dispatch
      # opt_plus
      when :opt_minus then opt_minus(jit, ctx, asm)
      # opt_mult
      # opt_div
      # opt_mod
      # opt_eq
      # opt_neq
      when :opt_lt then opt_lt(jit, ctx, asm)
      # opt_le
      # opt_gt
      # opt_ge
      # opt_ltlt
      # opt_and
      # opt_or
      # opt_aref
      # opt_aset
      # opt_aset_with
      # opt_aref_with
      # opt_length
      # opt_size
      # opt_empty_p
      # opt_succ
      # opt_not
      # opt_regexpmatch2
      # invokebuiltin
      # opt_invokebuiltin_delegate
      # opt_invokebuiltin_delegate_leave
      when :getlocal_WC_0 then getlocal_WC_0(jit, ctx, asm)
      # setlocal_WC_0
      # setlocal_WC_1
      when :putobject_INT2FIX_0_ then putobject_INT2FIX_0_(jit, ctx, asm)
      when :putobject_INT2FIX_1_ then putobject_INT2FIX_1_(jit, ctx, asm)
      else CantCompile
      end
    end

    private

    #
    # Insns
    #

    # nop
    # getlocal
    # setlocal
    # getblockparam
    # setblockparam
    # getblockparamproxy
    # getspecial
    # setspecial
    # getinstancevariable
    # setinstancevariable
    # getclassvariable
    # setclassvariable
    # opt_getconstant_path
    # getconstant
    # setconstant
    # getglobal
    # setglobal

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putnil(jit, ctx, asm)
      raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
      asm.mov([SP, C.VALUE.size * ctx.stack_size], Qnil)
      ctx.stack_push(1)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putself(jit, ctx, asm)
      raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
      asm.mov([SP, C.VALUE.size * ctx.stack_size], :rax)
      ctx.stack_push(1)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject(jit, ctx, asm, val: jit.operand(0))
      # Push it to the stack
      # TODO: GC offsets
      raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
      if asm.imm32?(val)
        asm.mov([SP, C.VALUE.size * ctx.stack_size], val)
      else # 64-bit immediates can't be directly written to memory
        asm.mov(:rax, val)
        asm.mov([SP, C.VALUE.size * ctx.stack_size], :rax)
      end
      ctx.stack_push(1)
      KeepCompiling
    end

    # putspecialobject
    # putstring
    # concatstrings
    # anytostring
    # toregexp
    # intern
    # newarray
    # newarraykwsplat
    # duparray
    # duphash
    # expandarray
    # concatarray
    # splatarray
    # newhash
    # newrange
    # pop
    # dup
    # dupn
    # swap
    # opt_reverse
    # topn
    # setn
    # adjuststack
    # defined
    # checkmatch
    # checkkeyword
    # checktype
    # defineclass
    # definemethod
    # definesmethod
    # send

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_send_without_block(jit, ctx, asm)
      cd = C.rb_call_data.new(jit.operand(0))
      compile_send_general(jit, ctx, asm, cd)
    end

    # objtostring
    # opt_str_freeze
    # opt_nil_p
    # opt_str_uminus
    # opt_newarray_max
    # opt_newarray_min
    # invokesuper
    # invokeblock

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def leave(jit, ctx, asm)
      assert_eq!(ctx.stack_size, 1)

      compile_check_ints(jit, ctx, asm)

      asm.comment('pop stack frame')
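      # Note (added for clarity, not in the original source): control frames in
      # CRuby grow toward lower addresses, so the caller's rb_control_frame_t
      # sits at CFP + sizeof(rb_control_frame_t); the lea/mov below restore it.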
      asm.lea(:rax, [CFP, C.rb_control_frame_t.size])
      asm.mov(CFP, :rax)
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)

      # Return a value (for compile_leave_exit)
      asm.mov(:rax, [SP])

      # Set caller's SP and push a value to its stack (for JIT)
      asm.mov(SP, [CFP, C.rb_control_frame_t.offsetof(:sp)])
      asm.mov([SP], :rax)

      # Jump to cfp->jit_return
      asm.jmp([CFP, -C.rb_control_frame_t.size + C.rb_control_frame_t.offsetof(:jit_return)])

      EndBlock
    end

    # throw
    # jump
    # branchif

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def branchunless(jit, ctx, asm)
      # TODO: check ints for backward branches
      # TODO: skip check for known truthy

      # This `test` sets ZF only for Qnil and Qfalse, which lets jz jump.
      asm.test([SP, C.VALUE.size * (ctx.stack_size - 1)], ~Qnil)
      ctx.stack_pop(1)

      # Set stubs
      # TODO: reuse already-compiled blocks jumped from different blocks
      branch_stub = BranchStub.new(
        iseq: jit.iseq,
        ctx: ctx.dup,
        branch_target_pc: jit.pc + (jit.insn.len + jit.operand(0)) * C.VALUE.size,
        fallthrough_pc: jit.pc + jit.insn.len * C.VALUE.size,
      )
      branch_stub.branch_target_addr = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(jit, ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.fallthrough_addr = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(jit, ctx, ocb_asm, branch_stub, false)
        @ocb.write(ocb_asm)
      end

      # Prepare codegen for all cases
      branch_stub.branch_target_next = proc do |branch_asm|
        branch_asm.stub(branch_stub) do
          branch_asm.comment('branch_target_next')
          branch_asm.jnz(branch_stub.fallthrough_addr)
        end
      end
      branch_stub.fallthrough_next = proc do |branch_asm|
        branch_asm.stub(branch_stub) do
          branch_asm.comment('fallthrough_next')
          branch_asm.jz(branch_stub.branch_target_addr)
        end
      end
      branch_stub.neither_next = proc do |branch_asm|
        branch_asm.stub(branch_stub) do
          branch_asm.comment('neither_next')
          branch_asm.jz(branch_stub.branch_target_addr)
          branch_asm.jmp(branch_stub.fallthrough_addr)
        end
      end

      # Just jump to stubs
      branch_stub.neither_next.call(asm)
      EndBlock
    end

    # branchnil
    # once
    # opt_case_dispatch
    # opt_plus

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_minus(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        unless @invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_MINUS)
          return CantCompile
        end

        raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
        recv_index = ctx.stack_size - 2
        obj_index = ctx.stack_size - 1

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test([SP, C.VALUE.size * recv_index], C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit(jit, ctx))

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test([SP, C.VALUE.size * obj_index], C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit(jit, ctx))

        asm.mov(:rax, [SP, C.VALUE.size * recv_index])
        asm.mov(:rcx, [SP, C.VALUE.size * obj_index])
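        # Note (added for clarity, not in the original source): a Fixnum is
        # encoded as (n << 1) | 1, so (2a+1) - (2b+1) == 2(a-b) drops the tag
        # bit; the `add 1` below re-tags the result, and `jo` side-exits on
        # overflow, where a Bignum would be needed.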
        asm.sub(:rax, :rcx)
        asm.jo(side_exit(jit, ctx))
        asm.add(:rax, 1)
        asm.mov([SP, C.VALUE.size * recv_index], :rax)

        ctx.stack_pop(1)
        KeepCompiling
      else
        CantCompile # TODO: delegate to send
      end
    end

    # opt_mult
    # opt_div
    # opt_mod
    # opt_eq
    # opt_neq

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_lt(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        unless @invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_LT)
          return CantCompile
        end

        raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
        recv_index = ctx.stack_size - 2
        obj_index = ctx.stack_size - 1

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test([SP, C.VALUE.size * recv_index], C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit(jit, ctx))

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test([SP, C.VALUE.size * obj_index], C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit(jit, ctx))

        asm.mov(:rax, [SP, C.VALUE.size * obj_index])
        asm.cmp([SP, C.VALUE.size * recv_index], :rax)
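        # Note (added for clarity, not in the original source): a branchless
        # boolean. cmovl copies Qtrue into rax only when recv < obj; comparing
        # the tagged Fixnums directly preserves their ordering because both
        # operands carry the same tag bit.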
        asm.mov(:rax, Qfalse)
        asm.mov(:rcx, Qtrue)
        asm.cmovl(:rax, :rcx)
        asm.mov([SP, C.VALUE.size * recv_index], :rax)

        ctx.stack_pop(1)
        KeepCompiling
      else
        CantCompile # TODO: delegate to send
      end
    end
2022-12-28 13:50:24 -08:00
# opt_le
# opt_gt
# opt_ge
# opt_ltlt
# opt_and
# opt_or
# opt_aref
# opt_aset
# opt_aset_with
# opt_aref_with
# opt_length
# opt_size
# opt_empty_p
# opt_succ
# opt_not
# opt_regexpmatch2
# invokebuiltin
# opt_invokebuiltin_delegate
# opt_invokebuiltin_delegate_leave
# @param jit [RubyVM::MJIT::JITState]
# @param ctx [RubyVM::MJIT::Context]
2022-12-30 22:16:07 -08:00
# @param asm [RubyVM::MJIT::Assembler]
2022-12-28 13:50:24 -08:00
def getlocal_WC_0(jit, ctx, asm)
# Get operands
idx = jit.operand(0)
level = 0
# Get EP
asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:ep)])
# Get a local variable
asm.mov(:rax, [:rax, -idx * C.VALUE.size])
# Push it to the stack
asm.mov([SP, C.VALUE.size * ctx.stack_size], :rax)
2022-12-31 13:41:32 -08:00
ctx.stack_push(1)
2022-12-28 13:50:24 -08:00
KeepCompiling
end

    # getlocal_WC_1
    # setlocal_WC_0
    # setlocal_WC_1

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_0_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(0))
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_1_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(1))
    end

    #
    # Helpers
    #

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def compile_check_ints(jit, ctx, asm)
      asm.comment('RUBY_VM_CHECK_INTS(ec)')
      asm.mov(:eax, [EC, C.rb_execution_context_t.offsetof(:interrupt_flag)])
      asm.test(:eax, :eax)
      asm.jnz(side_exit(jit, ctx))
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
    def compile_send_general(jit, ctx, asm, cd)
      ci = cd.ci
      argc = C.vm_ci_argc(ci)
      mid = C.vm_ci_mid(ci)
      flags = C.vm_ci_flag(ci)

      if flags & C.VM_CALL_KW_SPLAT != 0
        return CantCompile
      end

      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
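      # Note (added for clarity, not in the original source): with
      # VM_CALL_ARGS_BLOCKARG a block argument is pushed above the positional
      # arguments, so the receiver sits one extra slot below the stack top.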
      recv_depth = argc + ((flags & C.VM_CALL_ARGS_BLOCKARG == 0) ? 0 : 1)
      recv_index = ctx.stack_size - 1 - recv_depth

      comptime_recv = jit.peek_at_stack(recv_depth)
      comptime_recv_klass = C.rb_class_of(comptime_recv)

      # Guard known class
      if comptime_recv_klass.singleton_class?
        asm.comment('guard known object with singleton class')
        asm.mov(:rax, C.to_value(comptime_recv))
        asm.cmp([SP, C.VALUE.size * recv_index], :rax)
        asm.jne(side_exit(jit, ctx))
      else
        return CantCompile
      end

      # Do method lookup
      cme = C.rb_callable_method_entry(comptime_recv_klass, mid)
      if cme.nil?
        return CantCompile
      end

      case C.METHOD_ENTRY_VISI(cme)
      when C.METHOD_VISI_PUBLIC
        # You can always call public methods
      when C.METHOD_VISI_PRIVATE
        if flags & C.VM_CALL_FCALL == 0
          # VM_CALL_FCALL: Callsites without a receiver or with an explicit `self` receiver
          return CantCompile
        end
      when C.METHOD_VISI_PROTECTED
        return CantCompile # TODO: support this
      else
        raise 'cmes should always have a visibility'
      end

      # TODO: assume_method_lookup_stable
      if flags & C.VM_CALL_ARGS_SPLAT != 0 && cme.def.type != C.VM_METHOD_TYPE_ISEQ
        return CantCompile
      end

      case cme.def.type
      when C.VM_METHOD_TYPE_ISEQ
        iseq = def_iseq_ptr(cme.def)
        frame_type = C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL
        compile_send_iseq(jit, ctx, asm, iseq, ci, frame_type, cme, flags, argc)
      else
        return CantCompile
      end
    end

    def compile_send_iseq(jit, ctx, asm, iseq, ci, frame_type, cme, flags, argc)
      # TODO: check a bunch of CantCompile cases
      compile_check_ints(jit, ctx, asm)
      # TODO: stack overflow check
      # TODO: more flag checks

      # Pop arguments and a receiver for the current caller frame
      raise 'sp_offset != stack_size' if ctx.sp_offset != ctx.stack_size # TODO: handle this
      sp_index = ctx.stack_size - argc - 1 # arguments and receiver

      asm.comment('save SP to caller CFP')
      asm.lea(:rax, [SP, C.VALUE.size * sp_index]) # scale the slot index to bytes
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], :rax)
      # TODO: do something about ctx.sp_index

      asm.comment('save PC to CFP')
      next_pc = jit.pc + jit.insn.len * C.VALUE.size
      asm.mov(:rax, next_pc)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax) # cfp->pc = rax

      # TODO: push cme, specval, frame type
      # TODO: push callee control frame

      asm.comment('switch to new CFP')
      asm.lea(:rax, [CFP, -C.rb_control_frame_t.size])
      asm.mov(CFP, :rax)
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)

      asm.comment('save SP to callee CFP')
      num_locals = 0 # TODO
      sp_offset = C.VALUE.size * (3 + num_locals + ctx.stack_size)
      asm.add(SP, sp_offset)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)

      asm.comment('save ISEQ to callee CFP')
      asm.mov(:rax, iseq.to_i)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:iseq)], :rax)

      asm.comment('save EP to callee CFP')
      asm.lea(:rax, [SP, -C.VALUE.size])
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:ep)], :rax)

      asm.comment('set frame type')
      asm.mov([SP, C.VALUE.size * -1], C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL)

      asm.comment('set specval')
      asm.mov([SP, C.VALUE.size * -2], C.VM_BLOCK_HANDLER_NONE)
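      # Note (added for clarity, not in the original source): this mirrors
      # CRuby's VM_ENV layout. EP was set to SP - 1 VALUE above, so ep[0]
      # holds the frame-type flags and ep[-1] holds specval (no block handler
      # here); the cme slot at ep[-2] is still a TODO in this version.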

      # Stub the return destination from the callee
      # TODO: set up return ctx correctly
      jit_return_stub = BlockStub.new(iseq: jit.iseq, pc: next_pc, ctx: ctx.dup)
      jit_return = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_block_stub(ctx, ocb_asm, jit_return_stub)
        @ocb.write(ocb_asm)
      end
      jit_return_stub.change_block = proc do |jump_asm, new_addr|
        jump_asm.comment('update cfp->jit_return')
        jump_asm.stub(jit_return_stub) do
          jump_asm.mov(:rax, new_addr)
          jump_asm.mov([CFP, C.rb_control_frame_t.offsetof(:jit_return)], :rax)
        end
      end
      jit_return_stub.change_block.call(asm, jit_return)

      callee_ctx = Context.new
      compile_block_stub(iseq, iseq.body.iseq_encoded.to_i, callee_ctx, asm)
      EndBlock
    end

    def assert_eq!(left, right)
      if left != right
        raise "'#{left.inspect}' was not '#{right.inspect}'"
      end
    end

    def fixnum?(obj)
      flag = C.RUBY_FIXNUM_FLAG
      (C.to_value(obj) & flag) == flag
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def defer_compilation(jit, ctx, asm)
      # Make a stub to compile the current insn
      compile_block_stub(jit.iseq, jit.pc, ctx, asm, comment: 'defer_compilation: block stub')
    end

    def compile_block_stub(iseq, pc, ctx, asm, comment: 'block stub')
      block_stub = BlockStub.new(iseq:, pc:, ctx: ctx.dup)
      stub_hit = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_block_stub(ctx, ocb_asm, block_stub)
        @ocb.write(ocb_asm)
      end
      block_stub.change_block = proc do |jump_asm, new_addr|
        jump_asm.comment(comment)
        jump_asm.stub(block_stub) do
          jump_asm.jmp(new_addr)
        end
      end
      block_stub.change_block.call(asm, stub_hit)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    def side_exit(jit, ctx)
      if (side_exit = jit.side_exits[jit.pc])
        return side_exit
      end
      asm = Assembler.new
      @exit_compiler.compile_side_exit(jit, ctx, asm)
      jit.side_exits[jit.pc] = @ocb.write(asm)
    end

    def def_iseq_ptr(cme_def)
      C.rb_iseq_check(cme_def.body.iseq.iseqptr)
    end
  end
end