module RubyVM::MJIT
  class InsnCompiler
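    # @param cb [CodeBlock]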
    # @param ocb [CodeBlock]
    # @param exit_compiler [RubyVM::MJIT::ExitCompiler]
    def initialize(cb, ocb, exit_compiler)
      @ocb = ocb
      @exit_compiler = exit_compiler
      @gc_refs = [] # TODO: GC offsets?

      @full_cfunc_return = Assembler.new.then do |asm|
        @exit_compiler.compile_full_cfunc_return(asm)
        @ocb.write(asm)
      end

      # freeze # workaround a binding.irb issue. TODO: resurrect this
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param insn `RubyVM::MJIT::Instruction`
    def compile(jit, ctx, asm, insn)
      asm.incr_counter(:mjit_insns_count)
      asm.comment("Insn: #{insn.name}")

      # 48/101: 48 of the 101 insns listed below are compiled; the rest return CantCompile
      case insn.name
      when :nop then nop(jit, ctx, asm)
      when :getlocal then getlocal(jit, ctx, asm)
      when :setlocal then setlocal(jit, ctx, asm)
      # getblockparam
      # setblockparam
      # getblockparamproxy
      # getspecial
      # setspecial
      when :getinstancevariable then getinstancevariable(jit, ctx, asm)
      when :setinstancevariable then setinstancevariable(jit, ctx, asm)
      # getclassvariable
      # setclassvariable
      when :opt_getconstant_path then opt_getconstant_path(jit, ctx, asm)
      # getconstant
      # setconstant
      # getglobal
      # setglobal
      when :putnil then putnil(jit, ctx, asm)
      when :putself then putself(jit, ctx, asm)
      when :putobject then putobject(jit, ctx, asm)
      # putspecialobject
      # putstring
      # concatstrings
      # anytostring
      # toregexp
      # intern
      # newarray
      # newarraykwsplat
      when :duparray then duparray(jit, ctx, asm)
      # duphash
      when :expandarray then expandarray(jit, ctx, asm)
      # concatarray
      # splatarray
      # newhash
      # newrange
      when :pop then pop(jit, ctx, asm)
      when :dup then dup(jit, ctx, asm)
      when :dupn then dupn(jit, ctx, asm)
      # swap
      # opt_reverse
      # topn
      when :setn then setn(jit, ctx, asm)
      # adjuststack
      # defined
      # checkmatch
      # checkkeyword
      # checktype
      # defineclass
      # definemethod
      # definesmethod
      # send
      when :opt_send_without_block then opt_send_without_block(jit, ctx, asm)
      # objtostring
      # opt_str_freeze
      when :opt_nil_p then opt_nil_p(jit, ctx, asm)
      # opt_str_uminus
      # opt_newarray_max
      # opt_newarray_min
      # invokesuper
      # invokeblock
      when :leave then leave(jit, ctx, asm)
      # throw
      when :jump then jump(jit, ctx, asm)
      when :branchif then branchif(jit, ctx, asm)
      when :branchunless then branchunless(jit, ctx, asm)
      # branchnil
      # once
      # opt_case_dispatch
      when :opt_plus then opt_plus(jit, ctx, asm)
      when :opt_minus then opt_minus(jit, ctx, asm)
      when :opt_mult then opt_mult(jit, ctx, asm)
      when :opt_div then opt_div(jit, ctx, asm)
      when :opt_mod then opt_mod(jit, ctx, asm)
      when :opt_eq then opt_eq(jit, ctx, asm)
      when :opt_neq then opt_neq(jit, ctx, asm)
      when :opt_lt then opt_lt(jit, ctx, asm)
      when :opt_le then opt_le(jit, ctx, asm)
      when :opt_gt then opt_gt(jit, ctx, asm)
      when :opt_ge then opt_ge(jit, ctx, asm)
      when :opt_ltlt then opt_ltlt(jit, ctx, asm)
      when :opt_and then opt_and(jit, ctx, asm)
      when :opt_or then opt_or(jit, ctx, asm)
      when :opt_aref then opt_aref(jit, ctx, asm)
      # opt_aset
      # opt_aset_with
      # opt_aref_with
      when :opt_length then opt_length(jit, ctx, asm)
      when :opt_size then opt_size(jit, ctx, asm)
      when :opt_empty_p then opt_empty_p(jit, ctx, asm)
      when :opt_succ then opt_succ(jit, ctx, asm)
      when :opt_not then opt_not(jit, ctx, asm)
      when :opt_regexpmatch2 then opt_regexpmatch2(jit, ctx, asm)
      # invokebuiltin
      # opt_invokebuiltin_delegate
      # opt_invokebuiltin_delegate_leave
      when :getlocal_WC_0 then getlocal_WC_0(jit, ctx, asm)
      when :getlocal_WC_1 then getlocal_WC_1(jit, ctx, asm)
      when :setlocal_WC_0 then setlocal_WC_0(jit, ctx, asm)
      when :setlocal_WC_1 then setlocal_WC_1(jit, ctx, asm)
      when :putobject_INT2FIX_0_ then putobject_INT2FIX_0_(jit, ctx, asm)
      when :putobject_INT2FIX_1_ then putobject_INT2FIX_1_(jit, ctx, asm)
      else CantCompile
      end
    end

    private

    #
    # Insns
    #

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def nop(jit, ctx, asm)
      # Do nothing
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getlocal(jit, ctx, asm)
      idx = jit.operand(0)
      level = jit.operand(1)
      jit_getlocal_generic(jit, ctx, asm, idx:, level:)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setlocal(jit, ctx, asm)
      idx = jit.operand(0)
      level = jit.operand(1)
      jit_setlocal_generic(jit, ctx, asm, idx:, level:)
    end

    # getblockparam
    # setblockparam
    # getblockparamproxy
    # getspecial
    # setspecial

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getinstancevariable(jit, ctx, asm)
      # Specialize on a compile-time receiver, and split a block for chain guards
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      id = jit.operand(0)
      comptime_obj = jit.peek_at_self

      jit_getivar(jit, ctx, asm, comptime_obj, id)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setinstancevariable(jit, ctx, asm)
      id = jit.operand(0)
      ivc = jit.operand(1)

      # rb_vm_setinstancevariable could raise exceptions
      jit_prepare_routine_call(jit, ctx, asm)

      val_opnd = ctx.stack_pop

      asm.comment('rb_vm_setinstancevariable')
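      # The five operands below are passed in the SysV x86-64 C argument
      # registers (rdi, rsi, rdx, rcx, r8) for the call that follows.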
      asm.mov(:rdi, jit.iseq.to_i)
      asm.mov(:rsi, [CFP, C.rb_control_frame_t.offsetof(:self)])
      asm.mov(:rdx, id)
      asm.mov(:rcx, val_opnd)
      asm.mov(:r8, ivc)
      asm.call(C.rb_vm_setinstancevariable)

      KeepCompiling
    end

    # getclassvariable
    # setclassvariable

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_getconstant_path(jit, ctx, asm)
      ic = C.iseq_inline_constant_cache.new(jit.operand(0))
      idlist = ic.segments

      # See vm_ic_hit_p(). The same conditions are checked in yjit_constant_ic_update().
      ice = ic.entry
      if ice.nil?
        # In this case, leave a block that unconditionally side exits
        # for the interpreter to invalidate.
        asm.incr_counter(:optgetconst_not_cached)
        return CantCompile
      end

      # Make sure there is an exit for this block as the interpreter might want
      # to invalidate this block from yjit_constant_ic_update().
      Invariants.ensure_block_entry_exit(jit, cause: 'opt_getconstant_path')

      if ice.ic_cref # with cref
        # Not supported yet
        asm.incr_counter(:optgetconst_cref)
        return CantCompile
      else # without cref
        # TODO: implement this
        # Optimize for single ractor mode.
        # if !assume_single_ractor_mode(jit, ocb)
        #   return CantCompile
        # end

        # Invalidate output code on any constant writes associated with
        # constants referenced within the current block.
        #assume_stable_constant_names(jit, ocb, idlist);

        putobject(jit, ctx, asm, val: ice.value)
      end

      jump_to_next_insn(jit, ctx, asm)
      EndBlock
    end

    # getconstant
    # setconstant
    # getglobal
    # setglobal

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putnil(jit, ctx, asm)
      putobject(jit, ctx, asm, val: Qnil)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putself(jit, ctx, asm)
      stack_top = ctx.stack_push
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject(jit, ctx, asm, val: jit.operand(0))
      # Push it to the stack
      stack_top = ctx.stack_push
      if asm.imm32?(val)
        asm.mov(stack_top, val)
      else # 64-bit immediates can't be directly written to memory
        asm.mov(:rax, val)
        asm.mov(stack_top, :rax)
      end
      # TODO: GC offsets?

      KeepCompiling
    end

    # putspecialobject
    # putstring
    # concatstrings
    # anytostring
    # toregexp
    # intern
    # newarray
    # newarraykwsplat

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def duparray(jit, ctx, asm)
      ary = jit.operand(0)

      # Save the PC and SP because we are allocating
      jit_prepare_routine_call(jit, ctx, asm)

      # call rb_ary_resurrect(VALUE ary);
      asm.comment('call rb_ary_resurrect')
      asm.mov(C_ARGS[0], ary)
      asm.call(C.rb_ary_resurrect)

      stack_ret = ctx.stack_push
      asm.mov(stack_ret, C_RET)

      KeepCompiling
    end

    # duphash

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def expandarray(jit, ctx, asm)
      # Both arguments are rb_num_t which is unsigned
      num = jit.operand(0)
      flag = jit.operand(1)

      # If this instruction has the splat flag, then bail out.
      if flag & 0x01 != 0
        asm.incr_counter(:expandarray_splat)
        return CantCompile
      end

      # If this instruction has the postarg flag, then bail out.
      if flag & 0x02 != 0
        asm.incr_counter(:expandarray_postarg)
        return CantCompile
      end

      side_exit = side_exit(jit, ctx)

      array_opnd = ctx.stack_pop(1)

      # num is the number of requested values. If there aren't enough in the
      # array then we're going to push on nils.
      # TODO: implement this

      # Move the array from the stack and check that it's an array.
      asm.mov(:rax, array_opnd)
      guard_object_is_heap(asm, :rax, counted_exit(side_exit, :expandarray_not_array))
      guard_object_is_array(asm, :rax, :rcx, counted_exit(side_exit, :expandarray_not_array))

      # If we don't actually want any values, then just return.
      if num == 0
        return KeepCompiling
      end

      jit_array_len(asm, :rax, :rcx)

      # Only handle the case where the number of values in the array is greater
      # than or equal to the number of values requested.
      asm.cmp(:rcx, num)
      asm.jl(counted_exit(side_exit, :expandarray_rhs_too_small))

      # Conditionally load the address of the heap array into :rcx.
      # (struct RArray *)(obj)->as.heap.ptr
      #asm.mov(:rax, array_opnd)
      asm.mov(:rcx, [:rax, C.RBasic.offsetof(:flags)])
      asm.test(:rcx, C.RARRAY_EMBED_FLAG)
      asm.mov(:rcx, [:rax, C.RArray.offsetof(:as, :heap, :ptr)])

      # Load the address of the embedded array into :rax.
      # (struct RArray *)(obj)->as.ary
      asm.lea(:rax, [:rax, C.RArray.offsetof(:as, :ary)])
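
      # If the embed flag was set (nonzero test result above), take the embedded
      # buffer address in :rax instead of the heap pointer already in :rcx.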
      asm.cmovnz(:rcx, :rax)

      # Loop backward through the array and push each element onto the stack.
      (num - 1).downto(0).each do |i|
        top = ctx.stack_push
        asm.mov(:rax, [:rcx, i * C.VALUE.size])
        asm.mov(top, :rax)
      end

      KeepCompiling
    end

    # concatarray
    # splatarray
    # newhash
    # newrange

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def pop(jit, ctx, asm)
      ctx.stack_pop
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def dup(jit, ctx, asm)
      val1 = ctx.stack_opnd(0)
      val2 = ctx.stack_push
      asm.mov(:rax, val1)
      asm.mov(val2, :rax)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def dupn(jit, ctx, asm)
      n = jit.operand(0)

      # In practice, this seems to be used only for n == 2
      if n != 2
        return CantCompile
      end

      opnd1 = ctx.stack_opnd(1)
      opnd0 = ctx.stack_opnd(0)

      dst1 = ctx.stack_push
      asm.mov(:rax, opnd1)
      asm.mov(dst1, :rax)

      dst0 = ctx.stack_push
      asm.mov(:rax, opnd0)
      asm.mov(dst0, :rax)

      KeepCompiling
    end

    # swap
    # opt_reverse
    # topn

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setn(jit, ctx, asm)
      n = jit.operand(0)

      top_val = ctx.stack_pop(0)
      dst_opnd = ctx.stack_opnd(n)
      asm.mov(:rax, top_val)
      asm.mov(dst_opnd, :rax)

      KeepCompiling
    end

    # adjuststack
    # defined
    # checkmatch
    # checkkeyword
    # checktype
    # defineclass
    # definemethod
    # definesmethod
    # send

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
    def opt_send_without_block(jit, ctx, asm)
      cd = C.rb_call_data.new(jit.operand(0))
      jit_call_method(jit, ctx, asm, cd)
    end

    # objtostring
    # opt_str_freeze

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_nil_p(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # opt_str_uminus
    # opt_newarray_max
    # opt_newarray_min
    # invokesuper
    # invokeblock

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def leave(jit, ctx, asm)
      assert_equal(ctx.stack_size, 1)

      jit_check_ints(jit, ctx, asm)

      asm.comment('pop stack frame')
      asm.lea(:rax, [CFP, C.rb_control_frame_t.size])
      asm.mov(CFP, :rax)
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], :rax)

      # Return a value (for compile_leave_exit)
      ret_opnd = ctx.stack_pop
      asm.mov(:rax, ret_opnd)

      # Set caller's SP and push a value to its stack (for JIT)
      asm.mov(SP, [CFP, C.rb_control_frame_t.offsetof(:sp)]) # Note: SP is in the position after popping a receiver and arguments
      asm.mov([SP], :rax)

      # Jump to cfp->jit_return
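      # CFP was advanced to the caller's frame above, so -size points back into
      # the frame we just popped to fetch its jit_return address.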
      asm.jmp([CFP, -C.rb_control_frame_t.size + C.rb_control_frame_t.offsetof(:jit_return)])

      EndBlock
    end

    # throw

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jump(jit, ctx, asm)
      # Check for interrupts, but only on backward branches that may create loops
      jump_offset = jit.operand(0, signed: true)
      if jump_offset < 0
        jit_check_ints(jit, ctx, asm)
      end

      pc = jit.pc + C.VALUE.size * (jit.insn.len + jump_offset)
      stub_next_block(jit.iseq, pc, ctx, asm)
      EndBlock
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def branchif(jit, ctx, asm)
      # Check for interrupts, but only on backward branches that may create loops
      jump_offset = jit.operand(0, signed: true)
      if jump_offset < 0
        jit_check_ints(jit, ctx, asm)
      end

      # TODO: skip check for known truthy

      # This `test` sets ZF only for Qnil and Qfalse, which lets jz jump.
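      # (Qfalse is 0 and Qnil & ~Qnil is 0, while every other VALUE keeps at
      # least one bit set under this mask.)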
      val = ctx.stack_pop
      asm.test(val, ~Qnil)

      # Set stubs
      branch_stub = BranchStub.new(
        iseq: jit.iseq,
        shape: Default,
        target0: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * (jit.insn.len + jump_offset)), # branch target
        target1: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * jit.insn.len), # fallthrough
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.target1.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, false)
        @ocb.write(ocb_asm)
      end

      # Jump to target0 on jnz
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment("branchif #{branch_stub.shape}")
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.jnz(branch_stub.target0.address)
            branch_asm.jmp(branch_stub.target1.address)
          in Next0
            branch_asm.jz(branch_stub.target1.address)
          in Next1
            branch_asm.jnz(branch_stub.target0.address)
          end
        end
      end
      branch_stub.compile.call(asm)

      EndBlock
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def branchunless(jit, ctx, asm)
      # Check for interrupts, but only on backward branches that may create loops
      jump_offset = jit.operand(0, signed: true)
      if jump_offset < 0
        jit_check_ints(jit, ctx, asm)
      end

      # TODO: skip check for known truthy

      # This `test` sets ZF only for Qnil and Qfalse, which lets jz jump.
      val = ctx.stack_pop
      asm.test(val, ~Qnil)

      # Set stubs
      branch_stub = BranchStub.new(
        iseq: jit.iseq,
        shape: Default,
        target0: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * (jit.insn.len + jump_offset)), # branch target
        target1: BranchTarget.new(ctx:, pc: jit.pc + C.VALUE.size * jit.insn.len), # fallthrough
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.target1.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, false)
        @ocb.write(ocb_asm)
      end

      # Jump to target0 on jz
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment("branchunless #{branch_stub.shape}")
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.jz(branch_stub.target0.address)
            branch_asm.jmp(branch_stub.target1.address)
          in Next0
            branch_asm.jnz(branch_stub.target1.address)
          in Next1
            branch_asm.jz(branch_stub.target0.address)
          end
        end
      end
      branch_stub.compile.call(asm)

      EndBlock
    end

    # branchnil
    # once
    # opt_case_dispatch

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_plus(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_PLUS)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)
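
        # Fixnums are encoded as (2 * n + 1), so untagging one operand (sub 1)
        # and adding the other yields the correctly tagged sum; jo catches overflow.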
        asm.mov(:rax, recv_opnd)
        asm.sub(:rax, 1) # untag
        asm.mov(:rcx, obj_opnd)
        asm.add(:rax, :rcx)
        asm.jo(side_exit)

        dst_opnd = ctx.stack_push
        asm.mov(dst_opnd, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_minus(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_MINUS)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.mov(:rax, recv_opnd)
        asm.mov(:rcx, obj_opnd)
        asm.sub(:rax, :rcx)
        asm.jo(side_exit)
        asm.add(:rax, 1) # re-tag

        dst_opnd = ctx.stack_push
        asm.mov(dst_opnd, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_mult(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_div(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_mod(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      if two_fixnums_on_stack?(jit)
        # Create a side-exit to fall back to the interpreter
        # Note: we generate the side-exit before popping operands from the stack
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_MOD)
          return CantCompile
        end

        # Check that both operands are fixnums
        guard_two_fixnums(jit, ctx, asm, side_exit)

        # Get the operands and destination from the stack
        arg1 = ctx.stack_pop(1)
        arg0 = ctx.stack_pop(1)

        # Check for arg0 % 0 (compare against the tagged VALUE for 0, not raw 0)
        asm.cmp(arg1, C.to_value(0))
        asm.je(side_exit)

        # Call rb_fix_mod_fix(VALUE recv, VALUE obj)
        asm.mov(C_ARGS[0], arg0)
        asm.mov(C_ARGS[1], arg1)
        asm.call(C.rb_fix_mod_fix)

        # Push the return value onto the stack
        stack_ret = ctx.stack_push
        asm.mov(stack_ret, C_RET)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_eq(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      if jit_equality_specialized(jit, ctx, asm)
        jump_to_next_insn(jit, ctx, asm)
        EndBlock
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_neq(jit, ctx, asm)
      # opt_neq is passed two rb_call_data as arguments:
      # first for ==, second for !=
      neq_cd = C.rb_call_data.new(jit.operand(1))
      jit_call_method(jit, ctx, asm, neq_cd)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_lt(jit, ctx, asm)
      jit_fixnum_cmp(jit, ctx, asm, opcode: :cmovl, bop: C.BOP_LT)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_le(jit, ctx, asm)
      jit_fixnum_cmp(jit, ctx, asm, opcode: :cmovle, bop: C.BOP_LE)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_gt(jit, ctx, asm)
      jit_fixnum_cmp(jit, ctx, asm, opcode: :cmovg, bop: C.BOP_GT)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_ge(jit, ctx, asm)
      jit_fixnum_cmp(jit, ctx, asm, opcode: :cmovge, bop: C.BOP_GE)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_ltlt(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_and(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      if two_fixnums_on_stack?(jit)
        # Create a side-exit to fall back to the interpreter
        # Note: we generate the side-exit before popping operands from the stack
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_AND)
          return CantCompile
        end

        # Check that both operands are fixnums
        guard_two_fixnums(jit, ctx, asm, side_exit)

        # Get the operands and destination from the stack
        arg1 = ctx.stack_pop(1)
        arg0 = ctx.stack_pop(1)

        asm.comment('bitwise and')
        asm.mov(:rax, arg0)
        asm.and(:rax, arg1)

        # Push the return value onto the stack
        dst = ctx.stack_push
        asm.mov(dst, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_or(jit, ctx, asm)
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      if two_fixnums_on_stack?(jit)
        # Create a side-exit to fall back to the interpreter
        # Note: we generate the side-exit before popping operands from the stack
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_OR)
          return CantCompile
        end

        # Check that both operands are fixnums
        guard_two_fixnums(jit, ctx, asm, side_exit)

        # Get the operands and destination from the stack
        asm.comment('bitwise or')
        arg1 = ctx.stack_pop(1)
        arg0 = ctx.stack_pop(1)

        # Do the bitwise or arg0 | arg1
        asm.mov(:rax, arg0)
        asm.or(:rax, arg1)

        # Push the return value onto the stack
        dst = ctx.stack_push
        asm.mov(dst, :rax)

        KeepCompiling
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_aref(jit, ctx, asm)
      cd = C.rb_call_data.new(jit.operand(0))
      argc = C.vm_ci_argc(cd.ci)

      if argc != 1
        asm.incr_counter(:optaref_argc_not_one)
        return CantCompile
      end

      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      side_exit = side_exit(jit, ctx)

      if comptime_recv.class == Array && fixnum?(comptime_obj)
        unless Invariants.assume_bop_not_redefined(jit, C.ARRAY_REDEFINED_OP_FLAG, C.BOP_AREF)
          return CantCompile
        end

        idx_opnd = ctx.stack_opnd(0)
        recv_opnd = ctx.stack_opnd(1)

        not_array_exit = counted_exit(side_exit, :optaref_recv_not_array)
        if jit_guard_known_class(jit, ctx, asm, comptime_recv.class, recv_opnd, comptime_recv, not_array_exit) == CantCompile
          return CantCompile
        end

        # Bail if idx is not a FIXNUM
        asm.mov(:rax, idx_opnd)
        asm.test(:rax, C.RUBY_FIXNUM_FLAG)
        asm.jz(counted_exit(side_exit, :optaref_arg_not_fixnum))

        # Call VALUE rb_ary_entry_internal(VALUE ary, long offset).
        # It never raises or allocates, so we don't need to write to cfp->pc.
        asm.sar(:rax, 1) # Convert fixnum to int
        asm.mov(C_ARGS[0], recv_opnd)
        asm.mov(C_ARGS[1], :rax)
        asm.call(C.rb_ary_entry_internal)

        # Pop the argument and the receiver
        ctx.stack_pop(2)

        # Push the return value onto the stack
        stack_ret = ctx.stack_push
        asm.mov(stack_ret, C_RET)

        # Let guard chains share the same successor
        jump_to_next_insn(jit, ctx, asm)
        EndBlock
      elsif comptime_recv.class == Hash
        unless Invariants.assume_bop_not_redefined(jit, C.HASH_REDEFINED_OP_FLAG, C.BOP_AREF)
          return CantCompile
        end

        recv_opnd = ctx.stack_opnd(1)

        # Guard that the receiver is a Hash
        not_hash_exit = counted_exit(side_exit, :optaref_recv_not_hash)
        if jit_guard_known_class(jit, ctx, asm, comptime_recv.class, recv_opnd, comptime_recv, not_hash_exit) == CantCompile
          return CantCompile
        end

        # Prepare to call rb_hash_aref(). It might call #hash on the key.
        jit_prepare_routine_call(jit, ctx, asm)

        asm.comment('call rb_hash_aref')
        key_opnd = ctx.stack_opnd(0)
        recv_opnd = ctx.stack_opnd(1)
        asm.mov(:rdi, recv_opnd)
        asm.mov(:rsi, key_opnd)
        asm.call(C.rb_hash_aref)

        # Pop the key and the receiver
        ctx.stack_pop(2)

        stack_ret = ctx.stack_push
        asm.mov(stack_ret, C_RET)

        # Let guard chains share the same successor
        jump_to_next_insn(jit, ctx, asm)
        EndBlock
      else
        opt_send_without_block(jit, ctx, asm)
      end
    end

    # opt_aset
    # opt_aset_with
    # opt_aref_with

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_length(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_size(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_empty_p(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_succ(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_not(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def opt_regexpmatch2(jit, ctx, asm)
      opt_send_without_block(jit, ctx, asm)
    end

    # invokebuiltin
    # opt_invokebuiltin_delegate
    # opt_invokebuiltin_delegate_leave

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getlocal_WC_0(jit, ctx, asm)
      # Get operands
      idx = jit.operand(0)
      level = 0

      # Get EP
      asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:ep)])

      # Get a local variable
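      # Locals live below the EP in memory, hence the negative offset.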
      asm.mov(:rax, [:rax, -idx * C.VALUE.size])

      # Push it to the stack
      stack_top = ctx.stack_push
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def getlocal_WC_1(jit, ctx, asm)
      idx = jit.operand(0)
      jit_getlocal_generic(jit, ctx, asm, idx:, level: 1)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setlocal_WC_0(jit, ctx, asm)
      slot_idx = jit.operand(0)
      local_idx = slot_to_local_idx(jit.iseq, slot_idx)

      # Load environment pointer EP (level 0) from CFP
      ep_reg = :rax
      jit_get_ep(asm, 0, reg: ep_reg)

      # Write barriers may be required when VM_ENV_FLAG_WB_REQUIRED is set, however write barriers
      # only affect heap objects being written. If we know an immediate value is being written we
      # can skip this check.

      # flags & VM_ENV_FLAG_WB_REQUIRED
      flags_opnd = [ep_reg, C.VALUE.size * C.VM_ENV_DATA_INDEX_FLAGS]
      asm.test(flags_opnd, C.VM_ENV_FLAG_WB_REQUIRED)

      # Create a side-exit to fall back to the interpreter
      side_exit = side_exit(jit, ctx)

      # if (flags & VM_ENV_FLAG_WB_REQUIRED) != 0
      asm.jnz(side_exit)

      # Pop the value to write from the stack
      stack_top = ctx.stack_pop(1)

      # Write the value at the environment pointer
      asm.mov(:rcx, stack_top)
      asm.mov([ep_reg, -8 * slot_idx], :rcx)

      KeepCompiling
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def setlocal_WC_1(jit, ctx, asm)
      idx = jit.operand(0)
      jit_setlocal_generic(jit, ctx, asm, idx:, level: 1)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_0_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(0))
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def putobject_INT2FIX_1_(jit, ctx, asm)
      putobject(jit, ctx, asm, val: C.to_value(1))
    end

    #
    # Helpers
    #

    def jit_getlocal_generic(jit, ctx, asm, idx:, level:)
      # Load environment pointer EP at level
      ep_reg = :rax
      jit_get_ep(asm, level, reg: ep_reg)

      # Get a local variable
      asm.mov(:rax, [ep_reg, -idx * C.VALUE.size])

      # Push it to the stack
      stack_top = ctx.stack_push
      asm.mov(stack_top, :rax)
      KeepCompiling
    end

    def jit_setlocal_generic(jit, ctx, asm, idx:, level:)
      # Load environment pointer EP at level
      ep_reg = :rax
      jit_get_ep(asm, level, reg: ep_reg)

      # Write barriers may be required when VM_ENV_FLAG_WB_REQUIRED is set, however write barriers
      # only affect heap objects being written. If we know an immediate value is being written we
      # can skip this check.

      # flags & VM_ENV_FLAG_WB_REQUIRED
      flags_opnd = [ep_reg, C.VALUE.size * C.VM_ENV_DATA_INDEX_FLAGS]
      asm.test(flags_opnd, C.VM_ENV_FLAG_WB_REQUIRED)

      # Create a side-exit to fall back to the interpreter
      side_exit = side_exit(jit, ctx)

      # if (flags & VM_ENV_FLAG_WB_REQUIRED) != 0
      asm.jnz(side_exit)

      # Pop the value to write from the stack
      stack_top = ctx.stack_pop(1)

      # Write the value at the environment pointer
      asm.mov(:rcx, stack_top)
      asm.mov([ep_reg, -(C.VALUE.size * idx)], :rcx)

      KeepCompiling
    end

    # Compute the index of a local variable from its slot index
    def slot_to_local_idx(iseq, slot_idx)
      # Layout illustration
      # This is an array of VALUE
      #                                           | VM_ENV_DATA_SIZE |
      #                                           v                  v
      # low addr <+-------+-------+-------+-------+------------------+
      #           |local 0|local 1|  ...  |local n|       ....       |
      #           +-------+-------+-------+-------+------------------+
      #           ^       ^                       ^                  ^
      #           +-------+---local_table_size----+         cfp->ep--+
      #                   |                                          |
      #                   +------------------slot_idx----------------+
      #
      # See usages of local_var_name() from iseq.c for similar calculation.
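      # For example, with VM_ENV_DATA_SIZE = 3, local_table_size = 2 and
      # slot_idx = 4: op = 1, so this returns local index 0.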

      local_table_size = iseq.body.local_table_size
      op = slot_idx - C.VM_ENV_DATA_SIZE
      local_table_size - op - 1
    end

    # @param asm [RubyVM::MJIT::Assembler]
    def guard_object_is_heap(asm, object_opnd, side_exit)
      asm.comment('guard object is heap')
      # Test that the object is not an immediate
      asm.test(object_opnd, C.RUBY_IMMEDIATE_MASK)
      asm.jnz(side_exit)

      # Test that the object is not false
      asm.cmp(object_opnd, Qfalse)
      asm.je(side_exit)
    end

    # @param asm [RubyVM::MJIT::Assembler]
    def guard_object_is_array(asm, object_reg, flags_reg, side_exit)
      asm.comment('guard object is array')
      # Pull out the type mask
      asm.mov(flags_reg, [object_reg, C.RBasic.offsetof(:flags)])
      asm.and(flags_reg, C.RUBY_T_MASK)

      # Compare the result with T_ARRAY
      asm.cmp(flags_reg, C.RUBY_T_ARRAY)
      asm.jne(side_exit)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_chain_guard(opcode, jit, ctx, asm, side_exit, limit: 10)
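      # On guard failure, jump to a stub that recompiles this block from the
      # same PC with chain_depth incremented; once `limit` is reached, guard
      # failures jump straight to the side exit instead.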
      opcode => :je | :jne | :jnz | :jz

      if ctx.chain_depth < limit
        deeper = ctx.dup
        deeper.chain_depth += 1

        branch_stub = BranchStub.new(
          iseq: jit.iseq,
          shape: Default,
          target0: BranchTarget.new(ctx: deeper, pc: jit.pc),
        )
        branch_stub.target0.address = Assembler.new.then do |ocb_asm|
          @exit_compiler.compile_branch_stub(deeper, ocb_asm, branch_stub, true)
          @ocb.write(ocb_asm)
        end
        branch_stub.compile = proc do |branch_asm|
          # Not using `asm.comment` here since it's usually put before cmp/test before this.
          branch_asm.stub(branch_stub) do
            case branch_stub.shape
            in Default
              branch_asm.public_send(opcode, branch_stub.target0.address)
            end
          end
        end
        branch_stub.compile.call(asm)
      else
        asm.public_send(opcode, side_exit)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_guard_known_class(jit, ctx, asm, known_klass, obj_opnd, comptime_obj, side_exit, limit: 5)
      # Only memory operand is supported for now
      assert_equal(true, obj_opnd.is_a?(Array))

      if known_klass == NilClass
        asm.comment('guard object is nil')
        asm.cmp(obj_opnd, Qnil)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      elsif known_klass == TrueClass
        asm.comment('guard object is true')
        asm.cmp(obj_opnd, Qtrue)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      elsif known_klass == FalseClass
        asm.comment('guard object is false')
        asm.cmp(obj_opnd, Qfalse)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      elsif known_klass == Integer && fixnum?(comptime_obj)
        asm.comment('guard object is fixnum')
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        jit_chain_guard(:jz, jit, ctx, asm, side_exit, limit:)
      elsif known_klass == Symbol
        asm.incr_counter(:send_guard_symbol)
        return CantCompile
      elsif known_klass == Float
        asm.incr_counter(:send_guard_float)
        return CantCompile
      elsif known_klass.singleton_class?
        asm.comment('guard known object with singleton class')
        asm.mov(:rax, C.to_value(comptime_obj))
        asm.cmp(obj_opnd, :rax)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      else
        # Load memory to a register
        asm.mov(:rax, obj_opnd)
        obj_opnd = :rax

        # Check that the receiver is a heap object
        # Note: if we get here, the class doesn't have immediate instances.
        asm.comment('guard not immediate')
        asm.test(obj_opnd, C.RUBY_IMMEDIATE_MASK)
        jit_chain_guard(:jnz, jit, ctx, asm, side_exit, limit:)
        asm.cmp(obj_opnd, Qfalse)
        jit_chain_guard(:je, jit, ctx, asm, side_exit, limit:)

        # Bail if receiver class is different from known_klass
        klass_opnd = [obj_opnd, C.RBasic.offsetof(:klass)]
        asm.comment('guard known class')
        asm.mov(:rcx, C.to_value(known_klass))
        asm.cmp(klass_opnd, :rcx)
        jit_chain_guard(:jne, jit, ctx, asm, side_exit, limit:)
      end
    end

    # @param jit [RubyVM::MJIT::JITState]
    def two_fixnums_on_stack?(jit)
      comptime_recv = jit.peek_at_stack(1)
      comptime_arg = jit.peek_at_stack(0)
      return fixnum?(comptime_recv) && fixnum?(comptime_arg)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def guard_two_fixnums(jit, ctx, asm, side_exit)
      # Get stack operands without popping them
      arg1 = ctx.stack_opnd(0)
      arg0 = ctx.stack_opnd(1)

      asm.comment('guard arg0 fixnum')
      asm.test(arg0, C.RUBY_FIXNUM_FLAG)
      jit_chain_guard(:jz, jit, ctx, asm, side_exit)
      # TODO: upgrade type, and skip the check when possible

      asm.comment('guard arg1 fixnum')
      asm.test(arg1, C.RUBY_FIXNUM_FLAG)
      jit_chain_guard(:jz, jit, ctx, asm, side_exit)
      # TODO: upgrade type, and skip the check when possible
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_fixnum_cmp(jit, ctx, asm, opcode:, bop:)
      opcode => :cmovl | :cmovle | :cmovg | :cmovge

      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      comptime_recv = jit.peek_at_stack(1)
      comptime_obj = jit.peek_at_stack(0)

      if fixnum?(comptime_recv) && fixnum?(comptime_obj)
        # Generate a side exit before popping operands
        side_exit = side_exit(jit, ctx)

        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, bop)
          return CantCompile
        end

        obj_opnd = ctx.stack_pop
        recv_opnd = ctx.stack_pop

        asm.comment('guard recv is fixnum') # TODO: skip this with type information
        asm.test(recv_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.comment('guard obj is fixnum') # TODO: skip this with type information
        asm.test(obj_opnd, C.RUBY_FIXNUM_FLAG)
        asm.jz(side_exit)

        asm.mov(:rax, obj_opnd)
        asm.cmp(recv_opnd, :rax)
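        # Materialize Qfalse, then conditionally overwrite it with Qtrue using
        # the requested cmov based on the comparison above.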
|
|
|
|
asm.mov(:rax, Qfalse)
|
|
|
|
asm.mov(:rcx, Qtrue)
|
|
|
|
asm.public_send(opcode, :rax, :rcx)
|
|
|
|
|
|
|
|
dst_opnd = ctx.stack_push
|
|
|
|
asm.mov(dst_opnd, :rax)
|
|
|
|
|
|
|
|
KeepCompiling
|
|
|
|
else
|
|
|
|
opt_send_without_block(jit, ctx, asm)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2023-02-13 23:57:40 -08:00
|
|
|
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_equality_specialized(jit, ctx, asm)
      # Create a side-exit to fall back to the interpreter
      side_exit = side_exit(jit, ctx)

      a_opnd = ctx.stack_opnd(1)
      b_opnd = ctx.stack_opnd(0)

      comptime_a = jit.peek_at_stack(1)
      comptime_b = jit.peek_at_stack(0)

      if two_fixnums_on_stack?(jit)
        unless Invariants.assume_bop_not_redefined(jit, C.INTEGER_REDEFINED_OP_FLAG, C.BOP_EQ)
          return false
        end

        guard_two_fixnums(jit, ctx, asm, side_exit)

        asm.comment('check fixnum equality')
        asm.mov(:rax, a_opnd)
        asm.mov(:rcx, b_opnd)
        asm.cmp(:rax, :rcx)
        asm.mov(:rax, Qfalse)
        asm.mov(:rcx, Qtrue)
        asm.cmove(:rax, :rcx)

        # Push the output on the stack
        ctx.stack_pop(2)
        dst = ctx.stack_push
        asm.mov(dst, :rax)

        true
      elsif comptime_a.class == String && comptime_b.class == String
        unless Invariants.assume_bop_not_redefined(jit, C.STRING_REDEFINED_OP_FLAG, C.BOP_EQ)
          # if overridden, emit the generic version
          return false
        end

        # Guard that a is a String
        if jit_guard_known_class(jit, ctx, asm, comptime_a.class, a_opnd, comptime_a, side_exit) == CantCompile
          return false
        end

        equal_label = asm.new_label(:equal)
        ret_label = asm.new_label(:ret)

        # If they are equal by identity, return true
        asm.mov(:rax, a_opnd)
        asm.mov(:rcx, b_opnd)
        asm.cmp(:rax, :rcx)
        asm.je(equal_label)

        # Otherwise guard that b is a T_STRING (from type info) or String (from runtime guard)
        # Note: any T_STRING is valid here, but we check for a ::String for simplicity
        # To pass a mutable static variable (rb_cString) requires an unsafe block
        if jit_guard_known_class(jit, ctx, asm, comptime_b.class, b_opnd, comptime_b, side_exit) == CantCompile
          return false
        end

        asm.comment('call rb_str_eql_internal')
        asm.mov(C_ARGS[0], a_opnd)
        asm.mov(C_ARGS[1], b_opnd)
        asm.call(C.rb_str_eql_internal)

        # Push the output on the stack
        ctx.stack_pop(2)
        dst = ctx.stack_push
        asm.mov(dst, C_RET)
        asm.jmp(ret_label)

        asm.write_label(equal_label)
        asm.mov(dst, Qtrue)

        asm.write_label(ret_label)

        true
      else
        false
      end
    end

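    # Note (illustrative): the identity check above lets two references to the
    # same String object skip the C call entirely; distinct strings fall through
    # to rb_str_eql_internal, whose boolean VALUE result is pushed as-is.
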
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_prepare_routine_call(jit, ctx, asm)
      jit_save_pc(jit, asm)
      jit_save_sp(jit, ctx, asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_save_pc(jit, asm, comment: 'save PC to CFP')
      next_pc = jit.pc + jit.insn.len * C.VALUE.size # Use the next one for backtrace and side exits
      asm.comment(comment)
      asm.mov(:rax, next_pc)
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:pc)], :rax)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_save_sp(jit, ctx, asm)
      if ctx.sp_offset != 0
        asm.comment('save SP to CFP')
        asm.lea(SP, ctx.sp_opnd)
        asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
        ctx.sp_offset = 0
      end
    end

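    # Note (illustrative): stack pushes inside a block only bump ctx.sp_offset;
    # cfp->sp is written back lazily. E.g. after two pushes ctx.sp_offset == 2,
    # and the next jit_prepare_routine_call goes through jit_save_sp, which emits
    # a single lea + mov and resets the offset so later code is relative to the new SP.
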
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jump_to_next_insn(jit, ctx, asm)
      reset_depth = ctx.dup
      reset_depth.chain_depth = 0

      next_pc = jit.pc + jit.insn.len * C.VALUE.size
      stub_next_block(jit.iseq, next_pc, reset_depth, asm, comment: 'jump_to_next_insn')
    end

    # rb_vm_check_ints
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_check_ints(jit, ctx, asm)
      asm.comment('RUBY_VM_CHECK_INTS(ec)')
      asm.mov(:eax, [EC, C.rb_execution_context_t.offsetof(:interrupt_flag)])
      asm.test(:eax, :eax)
      asm.jnz(side_exit(jit, ctx))
    end

    # vm_get_ep
    # @param asm [RubyVM::MJIT::Assembler]
    # @param level [Integer] number of environments to walk up from the current EP
    # @param reg [Symbol] register that receives the resulting EP
    def jit_get_ep(asm, level, reg:)
      asm.mov(reg, [CFP, C.rb_control_frame_t.offsetof(:ep)])
      level.times do
        # GET_PREV_EP: ep[VM_ENV_DATA_INDEX_SPECVAL] & ~0x03
        asm.mov(reg, [reg, C.VALUE.size * C.VM_ENV_DATA_INDEX_SPECVAL])
        asm.and(reg, ~0x03)
      end
    end

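    # Note (illustrative): a `getlocal idx, 1` reads a local of the enclosing
    # scope, so it can be lowered with jit_get_ep(asm, 1, reg: :rax): one
    # GET_PREV_EP hop (load ep[VM_ENV_DATA_INDEX_SPECVAL], mask off the tag bits)
    # before indexing off the resulting EP.
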
    # vm_getivar
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_getivar(jit, ctx, asm, comptime_obj, ivar_id, obj_opnd = nil)
      side_exit = side_exit(jit, ctx)
      starting_ctx = ctx.dup # copy for jit_chain_guard

      # Guard not special const
      if C.SPECIAL_CONST_P(comptime_obj)
        asm.incr_counter(:getivar_special_const)
        return CantCompile
      end

      if obj_opnd.nil? # getivar
        asm.mov(:rax, [CFP, C.rb_control_frame_t.offsetof(:self)])
      else # attr_reader
        asm.mov(:rax, obj_opnd)
      end
      guard_object_is_heap(asm, :rax, counted_exit(side_exit, :getivar_not_heap))

      case C.BUILTIN_TYPE(comptime_obj)
      when C.T_OBJECT
        # This is the only supported case for now (ROBJECT_IVPTR)
      else
        asm.incr_counter(:getivar_not_t_object)
        return CantCompile
      end

      shape_id = C.rb_shape_get_shape_id(comptime_obj)
      if shape_id == C.OBJ_TOO_COMPLEX_SHAPE_ID
        asm.incr_counter(:getivar_too_complex)
        return CantCompile
      end

      asm.comment('guard shape')
      asm.cmp(DwordPtr[:rax, C.rb_shape_id_offset], shape_id)
      jit_chain_guard(:jne, jit, starting_ctx, asm, counted_exit(side_exit, :getivar_megamorphic))

      index = C.rb_shape_get_iv_index(shape_id, ivar_id)
      if index
        asm.comment('ROBJECT_IVPTR')
        if C.FL_TEST_RAW(comptime_obj, C.ROBJECT_EMBED)
          # Access embedded array
          asm.mov(:rax, [:rax, C.RObject.offsetof(:as, :ary) + (index * C.VALUE.size)])
        else
          # Pull out an ivar table on heap
          asm.mov(:rax, [:rax, C.RObject.offsetof(:as, :heap, :ivptr)])
          # Read the table
          asm.mov(:rax, [:rax, index * C.VALUE.size])
        end
        val_opnd = :rax
      else
        val_opnd = Qnil
      end

      if obj_opnd
        ctx.stack_pop # pop receiver for attr_reader
      end
      stack_opnd = ctx.stack_push
      asm.mov(stack_opnd, val_opnd)

      # Let guard chains share the same successor
      jump_to_next_insn(jit, ctx, asm)
      EndBlock
    end

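    # Note (illustrative): since the shape_id of the compile-time receiver pins
    # down its ivar layout, rb_shape_get_iv_index runs at compile time; the
    # emitted code only re-checks the shape_id (cmp + chain guard) and then reads
    # a fixed slot, either embedded in the RObject or via its heap ivptr table.
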
    # vm_call_method (vm_sendish -> vm_call_general -> vm_call_method)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    # @param cd `RubyVM::MJIT::CPointer::Struct_rb_call_data`
    def jit_call_method(jit, ctx, asm, cd)
      ci = cd.ci
      argc = C.vm_ci_argc(ci)
      mid = C.vm_ci_mid(ci)
      flags = C.vm_ci_flag(ci)

      # Specialize on a compile-time receiver, and split a block for chain guards
      unless jit.at_current_insn?
        defer_compilation(jit, ctx, asm)
        return EndBlock
      end

      # Generate a side exit
      side_exit = side_exit(jit, ctx)

      # Calculate a receiver index
      if flags & C.VM_CALL_KW_SPLAT != 0
        # recv_index calculation may not work for this
        asm.incr_counter(:send_kw_splat)
        return CantCompile
      end
      recv_index = argc # TODO: +1 for VM_CALL_ARGS_BLOCKARG

      # Get a compile-time receiver and its class
      comptime_recv = jit.peek_at_stack(recv_index)
      comptime_recv_klass = C.rb_class_of(comptime_recv)

      # Guard the receiver class (part of vm_search_method_fastpath)
      recv_opnd = ctx.stack_opnd(recv_index)
      megamorphic_exit = counted_exit(side_exit, :send_klass_megamorphic)
      if jit_guard_known_class(jit, ctx, asm, comptime_recv_klass, recv_opnd, comptime_recv, megamorphic_exit) == CantCompile
        return CantCompile
      end

      # Do method lookup (vm_cc_cme(cc) != NULL)
      cme = C.rb_callable_method_entry(comptime_recv_klass, mid)
      if cme.nil?
        asm.incr_counter(:send_missing_cme)
        return CantCompile # We don't support vm_call_method_name
      end

      # The main check of vm_call_method before vm_call_method_each_type
      case C.METHOD_ENTRY_VISI(cme)
      when C.METHOD_VISI_PUBLIC
        # You can always call public methods
      when C.METHOD_VISI_PRIVATE
        # Allow only callsites without a receiver
        if flags & C.VM_CALL_FCALL == 0
          asm.incr_counter(:send_private)
          return CantCompile
        end
      when C.METHOD_VISI_PROTECTED
        asm.incr_counter(:send_protected)
        return CantCompile # TODO: support this
      else
        # TODO: Change them to a constant and use case-in instead
        raise 'unreachable'
      end

      # Invalidate on redefinition (part of vm_search_method_fastpath)
      Invariants.assume_method_lookup_stable(jit, cme)

      jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme, comptime_recv, recv_opnd)
    end

    # vm_call_method_each_type
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_method_each_type(jit, ctx, asm, ci, argc, flags, cme, comptime_recv, recv_opnd)
      case cme.def.type
      when C.VM_METHOD_TYPE_ISEQ
        jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
      # when C.VM_METHOD_TYPE_NOTIMPLEMENTED
      when C.VM_METHOD_TYPE_CFUNC
        jit_call_cfunc(jit, ctx, asm, ci, cme, flags, argc)
      when C.VM_METHOD_TYPE_ATTRSET
        asm.incr_counter(:send_attrset)
        return CantCompile
      when C.VM_METHOD_TYPE_IVAR
        jit_call_ivar(jit, ctx, asm, ci, cme, flags, argc, comptime_recv, recv_opnd)
      # when C.VM_METHOD_TYPE_MISSING
      when C.VM_METHOD_TYPE_BMETHOD
        asm.incr_counter(:send_bmethod)
        return CantCompile
      when C.VM_METHOD_TYPE_ALIAS
        asm.incr_counter(:send_alias)
        return CantCompile
      when C.VM_METHOD_TYPE_OPTIMIZED
        asm.incr_counter(:send_optimized)
        return CantCompile
      # when C.VM_METHOD_TYPE_UNDEF
      when C.VM_METHOD_TYPE_ZSUPER
        asm.incr_counter(:send_zsuper)
        return CantCompile
      when C.VM_METHOD_TYPE_REFINED
        asm.incr_counter(:send_refined)
        return CantCompile
      else
        asm.incr_counter(:send_not_implemented_type)
        return CantCompile
      end
    end

    # vm_call_iseq_setup
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_iseq_setup(jit, ctx, asm, ci, cme, flags, argc)
      iseq = def_iseq_ptr(cme.def)
      opt_pc = jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
      if opt_pc == CantCompile
        # We hit some unsupported path of vm_callee_setup_arg
        return CantCompile
      end

      if flags & C.VM_CALL_TAILCALL != 0
        # We don't support vm_call_iseq_setup_tailcall
        asm.incr_counter(:send_tailcall)
        return CantCompile
      end
      jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
    end

    # vm_call_iseq_setup_normal (vm_call_iseq_setup_2 -> vm_call_iseq_setup_normal)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_iseq_setup_normal(jit, ctx, asm, ci, cme, flags, argc, iseq)
      # Save caller SP and PC before pushing a callee frame for backtrace and side exits
      asm.comment('save SP to caller CFP')
      # Not setting the SP register here; this cfp->sp will be copied to SP by the leave insn.
      sp_index = -(1 + argc) # Pop receiver and arguments for side exits # TODO: subtract one more for VM_CALL_ARGS_BLOCKARG
      asm.lea(:rax, ctx.sp_opnd(C.VALUE.size * sp_index))
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], :rax)
      jit_save_pc(jit, asm, comment: 'save PC to caller CFP')

      frame_type = C.VM_FRAME_MAGIC_METHOD | C.VM_ENV_FLAG_LOCAL
      jit_push_frame(
        jit, ctx, asm, ci, cme, flags, argc, frame_type,
        iseq: iseq,
        local_size: iseq.body.local_table_size - iseq.body.param.size,
        stack_max: iseq.body.stack_max,
      )

      # Jump to a stub for the callee ISEQ
      callee_ctx = Context.new
      stub_next_block(iseq, iseq.body.iseq_encoded.to_i, callee_ctx, asm)

      EndBlock
    end

    # vm_call_cfunc
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_cfunc(jit, ctx, asm, ci, cme, flags, argc)
      if jit_caller_setup_arg(jit, ctx, asm, flags) == CantCompile
        return CantCompile
      end
      if jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags) == CantCompile
        return CantCompile
      end

      jit_call_cfunc_with_frame(jit, ctx, asm, ci, cme, flags, argc)
    end

    # jit_call_cfunc_with_frame
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_cfunc_with_frame(jit, ctx, asm, ci, cme, flags, argc)
      cfunc = cme.def.body.cfunc

      if argc + 1 > 6
        asm.incr_counter(:send_cfunc_too_many_args)
        return CantCompile
      end

      frame_type = C.VM_FRAME_MAGIC_CFUNC | C.VM_FRAME_FLAG_CFRAME | C.VM_ENV_FLAG_LOCAL
      if flags & C.VM_CALL_KW_SPLAT != 0
        frame_type |= C.VM_FRAME_FLAG_CFRAME_KW
      end

      # EXEC_EVENT_HOOK: RUBY_EVENT_C_CALL and RUBY_EVENT_C_RETURN
      if C.rb_mjit_global_events & (C.RUBY_EVENT_C_CALL | C.RUBY_EVENT_C_RETURN) != 0
        asm.incr_counter(:send_c_tracing)
        return CantCompile
      end

      # rb_check_arity
      if cfunc.argc >= 0 && argc != cfunc.argc
        asm.incr_counter(:send_arity)
        return CantCompile
      end
      if cfunc.argc == -2
        asm.incr_counter(:send_cfunc_ruby_array_varg)
        return CantCompile
      end

      # Check interrupts before SP motion to safely side-exit with the original SP.
      jit_check_ints(jit, ctx, asm)

      # Save caller SP and PC before pushing a callee frame for backtrace and side exits
      asm.comment('save SP to caller CFP')
      sp_index = -(1 + argc) # Pop receiver and arguments for side exits # TODO: subtract one more for VM_CALL_ARGS_BLOCKARG
      asm.lea(SP, ctx.sp_opnd(C.VALUE.size * sp_index))
      asm.mov([CFP, C.rb_control_frame_t.offsetof(:sp)], SP)
      ctx.sp_offset = -sp_index
      jit_save_pc(jit, asm, comment: 'save PC to caller CFP')

      # Push a callee frame. SP register and ctx are not modified inside this.
      jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, frame_type)

      asm.comment('call C function')
      case cfunc.argc
      in (0..) # Non-variadic method
        # Push receiver and args
        (1 + argc).times do |i|
          asm.mov(C_ARGS[i], ctx.stack_opnd(argc - i)) # TODO: +1 for VM_CALL_ARGS_BLOCKARG
        end
      in -1 # Variadic method: rb_f_puts(int argc, VALUE *argv, VALUE recv)
        asm.mov(C_ARGS[0], argc)
        asm.lea(C_ARGS[1], ctx.stack_opnd(argc - 1)) # argv
        asm.mov(C_ARGS[2], ctx.stack_opnd(argc)) # recv
      end
      asm.mov(:rax, cfunc.func)
      asm.call(:rax) # TODO: use rel32 if close enough
      ctx.stack_pop(1 + argc)

      Invariants.record_global_inval_patch(asm, @full_cfunc_return)

      asm.comment('push the return value')
      stack_ret = ctx.stack_push
      asm.mov(stack_ret, C_RET)

      asm.comment('pop the stack frame')
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], CFP)

      # Let guard chains share the same successor (ctx.sp_offset == 1)
      assert_equal(1, ctx.sp_offset)
      jump_to_next_insn(jit, ctx, asm)
      EndBlock
    end

    # vm_call_ivar
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_call_ivar(jit, ctx, asm, ci, cme, flags, argc, comptime_recv, recv_opnd)
      if flags & C.VM_CALL_ARGS_SPLAT != 0
        asm.incr_counter(:send_ivar_splat)
        return CantCompile
      end

      if argc != 0
        asm.incr_counter(:send_arity)
        return CantCompile
      end

      if flags & C.VM_CALL_OPT_SEND != 0
        asm.incr_counter(:send_ivar_opt_send)
        return CantCompile
      end

      ivar_id = cme.def.body.attr.id

      # Block arguments are not supported yet
      if flags & C.VM_CALL_ARGS_BLOCKARG != 0
        asm.incr_counter(:send_ivar_blockarg)
        return CantCompile
      end

      jit_getivar(jit, ctx, asm, comptime_recv, ivar_id, recv_opnd)
    end

    # vm_push_frame
    #
    # Frame structure:
    # | args | locals | cme/cref | block_handler/prev EP | frame type (EP here) | stack bottom (SP here)
    #
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_push_frame(jit, ctx, asm, ci, cme, flags, argc, frame_type, iseq: nil, local_size: 0, stack_max: 0)
      # CHECK_VM_STACK_OVERFLOW0: next_cfp <= sp + (local_size + stack_max)
      asm.comment('stack overflow check')
      asm.lea(:rax, ctx.sp_opnd(C.rb_control_frame_t.size + C.VALUE.size * (local_size + stack_max)))
      asm.cmp(CFP, :rax)
      asm.jbe(counted_exit(side_exit(jit, ctx), :send_stackoverflow))

      local_size.times do |i|
        asm.comment('set local variables') if i == 0
        local_index = ctx.sp_offset + i
        asm.mov([SP, C.VALUE.size * local_index], Qnil)
      end

      asm.comment('set up EP with managing data')
      ep_offset = ctx.sp_offset + local_size + 2
      asm.mov(:rax, cme.to_i)
      asm.mov([SP, C.VALUE.size * (ep_offset - 2)], :rax)
      asm.mov([SP, C.VALUE.size * (ep_offset - 1)], C.VM_BLOCK_HANDLER_NONE)
      asm.mov([SP, C.VALUE.size * (ep_offset - 0)], frame_type)

      asm.comment('set up new frame')
      cfp_offset = -C.rb_control_frame_t.size # callee CFP
      # For ISEQ, JIT code will set it as needed. However, C func needs 0 there for svar frame detection.
      if iseq.nil?
        asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:pc)], 0)
      end
      asm.mov(:rax, iseq.to_i)
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:iseq)], :rax)
      self_index = ctx.sp_offset - (1 + argc) # TODO: +1 for VM_CALL_ARGS_BLOCKARG
      asm.mov(:rax, [SP, C.VALUE.size * self_index])
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:self)], :rax)
      asm.lea(:rax, [SP, C.VALUE.size * ep_offset])
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:ep)], :rax)
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:block_code)], 0)
      # Update SP register only for ISEQ calls. SP-relative operations should be done above this.
      sp_reg = iseq ? SP : :rax
      asm.lea(sp_reg, [SP, C.VALUE.size * (ctx.sp_offset + local_size + 3)])
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:sp)], sp_reg)
      asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:__bp__)], sp_reg) # TODO: get rid of this!!

      # cfp->jit_return is used only for ISEQs
      if iseq
        # Stub cfp->jit_return
        return_ctx = ctx.dup
        return_ctx.stack_size -= argc # Pop args # TODO: subtract 1 more for VM_CALL_ARGS_BLOCKARG
        return_ctx.sp_offset = 1 # SP is in the position after popping a receiver and arguments
        branch_stub = BranchStub.new(
          iseq: jit.iseq,
          shape: Default,
          target0: BranchTarget.new(ctx: return_ctx, pc: jit.pc + jit.insn.len * C.VALUE.size),
        )
        branch_stub.target0.address = Assembler.new.then do |ocb_asm|
          @exit_compiler.compile_branch_stub(return_ctx, ocb_asm, branch_stub, true)
          @ocb.write(ocb_asm)
        end
        branch_stub.compile = proc do |branch_asm|
          branch_asm.comment('set jit_return to callee CFP')
          branch_asm.stub(branch_stub) do
            case branch_stub.shape
            in Default
              branch_asm.mov(:rax, branch_stub.target0.address)
              branch_asm.mov([CFP, cfp_offset + C.rb_control_frame_t.offsetof(:jit_return)], :rax)
            end
          end
        end
        branch_stub.compile.call(asm)
      end

      asm.comment('switch to callee CFP')
      # Update CFP register only for ISEQ calls
      cfp_reg = iseq ? CFP : :rax
      asm.lea(cfp_reg, [CFP, cfp_offset])
      asm.mov([EC, C.rb_execution_context_t.offsetof(:cfp)], cfp_reg)
    end

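    # Note (illustrative): for an ISEQ call with argc == 1 and local_size == 0,
    # ep_offset == ctx.sp_offset + 2, so the three stores above write cme,
    # block_handler (VM_BLOCK_HANDLER_NONE), and the frame type into the slots
    # just past the pushed receiver and argument, and the callee EP points at the
    # frame-type slot, matching the "Frame structure" diagram above.
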
    # vm_callee_setup_arg: Set up args and return opt_pc (or CantCompile)
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_callee_setup_arg(jit, ctx, asm, ci, flags, iseq)
      if flags & C.VM_CALL_KW_SPLAT == 0
        if C.rb_simple_iseq_p(iseq)
          if jit_caller_setup_arg(jit, ctx, asm, flags) == CantCompile
            return CantCompile
          end
          if jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags) == CantCompile
            return CantCompile
          end

          if C.vm_ci_argc(ci) != iseq.body.param.lead_num
            # argument_arity_error
            return CantCompile
          end

          return 0
        else
          # We don't support the remaining `else if`s yet.
          asm.incr_counter(:send_iseq_not_simple)
          return CantCompile
        end
      end

      # We don't support setup_parameters_complex
      asm.incr_counter(:send_iseq_kw_splat)
      return CantCompile
    end

    # CALLER_SETUP_ARG: Return CantCompile if not supported
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_caller_setup_arg(jit, ctx, asm, flags)
      if flags & C.VM_CALL_ARGS_SPLAT != 0
        # We don't support vm_caller_setup_arg_splat
        asm.incr_counter(:send_args_splat)
        return CantCompile
      end
      if flags & (C.VM_CALL_KWARG | C.VM_CALL_KW_SPLAT) != 0
        # We don't support keyword args either
        asm.incr_counter(:send_kwarg)
        return CantCompile
      end
    end

    # CALLER_REMOVE_EMPTY_KW_SPLAT: Return CantCompile if not supported
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def jit_caller_remove_empty_kw_splat(jit, ctx, asm, flags)
      if (flags & C.VM_CALL_KW_SPLAT) > 0
        # We don't support removing the last Hash argument
        asm.incr_counter(:send_kw_splat)
        return CantCompile
      end
    end

    # Generate RARRAY_LEN, leaving the length in len_reg. Passing a register for
    # array_reg reduces memory accesses; passing a memory operand saves a register.
    def jit_array_len(asm, array_reg, len_reg)
      asm.comment('get array length for embedded or heap')

      # Pull out the embed flag to check if it's an embedded array.
      asm.mov(len_reg, [array_reg, C.RBasic.offsetof(:flags)])

      # Get the length of the array
      asm.and(len_reg, C.RARRAY_EMBED_LEN_MASK)
      asm.sar(len_reg, C.RARRAY_EMBED_LEN_SHIFT)

      # Conditionally move the length of the heap array
      asm.test([array_reg, C.RBasic.offsetof(:flags)], C.RARRAY_EMBED_FLAG)

      # Select the array length value
      asm.cmovz(len_reg, [array_reg, C.RArray.offsetof(:as, :heap, :len)])
    end

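    # Note (illustrative): short arrays are embedded, so their length lives in
    # the RBasic flags bits selected by RARRAY_EMBED_LEN_MASK/SHIFT; longer
    # arrays keep it in as.heap.len. The cmovz picks the heap length whenever
    # RARRAY_EMBED_FLAG is clear, so callers get a branchless length read.
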
    def assert_equal(left, right)
      if left != right
        raise "'#{left.inspect}' was not '#{right.inspect}'"
      end
    end

    def fixnum?(obj)
      flag = C.RUBY_FIXNUM_FLAG
      (C.to_value(obj) & flag) == flag
    end

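    # Note (illustrative): Fixnums are tagged, roughly VALUE = (n << 1) | 1, so
    # C.to_value(1) has the RUBY_FIXNUM_FLAG bit set and fixnum?(1) is true,
    # while heap objects like Strings are pointer-aligned VALUEs with that bit clear.
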
    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    # @param asm [RubyVM::MJIT::Assembler]
    def defer_compilation(jit, ctx, asm)
      # Make a stub to compile the current insn
      stub_next_block(jit.iseq, jit.pc, ctx, asm, comment: 'defer_compilation')
    end

    def stub_next_block(iseq, pc, ctx, asm, comment: 'stub_next_block')
      branch_stub = BranchStub.new(
        iseq:,
        shape: Default,
        target0: BranchTarget.new(ctx:, pc:),
      )
      branch_stub.target0.address = Assembler.new.then do |ocb_asm|
        @exit_compiler.compile_branch_stub(ctx, ocb_asm, branch_stub, true)
        @ocb.write(ocb_asm)
      end
      branch_stub.compile = proc do |branch_asm|
        branch_asm.comment(comment)
        branch_asm.stub(branch_stub) do
          case branch_stub.shape
          in Default
            branch_asm.jmp(branch_stub.target0.address)
          in Next0
            # Just write the block without a jump
          end
        end
      end
      branch_stub.compile.call(asm)
    end

    # @param jit [RubyVM::MJIT::JITState]
    # @param ctx [RubyVM::MJIT::Context]
    def side_exit(jit, ctx)
      if side_exit = jit.side_exits[jit.pc]
        return side_exit
      end
      asm = Assembler.new
      @exit_compiler.compile_side_exit(jit, ctx, asm)
      jit.side_exits[jit.pc] = @ocb.write(asm)
    end

    def counted_exit(side_exit, name)
      asm = Assembler.new
      asm.incr_counter(name)
      asm.jmp(side_exit)
      @ocb.write(asm)
    end

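    # Note (illustrative): counted_exit wraps an existing side exit with a stats
    # counter, e.g. counted_exit(side_exit(jit, ctx), :getivar_not_heap) emits
    # incr_counter + jmp into the outlined code block, recording why compiled
    # code bailed out without touching the fast path.
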
    def def_iseq_ptr(cme_def)
      C.rb_iseq_check(cme_def.body.iseq.iseqptr)
    end

    def to_value(obj)
      @gc_refs << obj
      C.to_value(obj)
    end
  end
end
|