Diffstat (limited to 'yjit/src/codegen.rs')
-rw-r--r--  yjit/src/codegen.rs | 64
1 file changed, 40 insertions(+), 24 deletions(-)
diff --git a/yjit/src/codegen.rs b/yjit/src/codegen.rs
index e9c17cb537..177bdfe088 100644
--- a/yjit/src/codegen.rs
+++ b/yjit/src/codegen.rs
@@ -19,6 +19,7 @@ use std::ffi::CStr;
use std::mem::{self, size_of};
use std::os::raw::{c_int};
use std::ptr;
+use std::rc::Rc;
use std::slice;
pub use crate::virtualmem::CodePtr;
@@ -619,38 +620,36 @@ fn gen_leave_exit(ocb: &mut OutlinedCb) -> CodePtr {
}
// Generate a runtime guard that ensures the PC is at the expected
-// instruction index in the iseq, otherwise takes a side-exit.
+// instruction index in the iseq, otherwise takes an entry stub
+// that generates another check and entry.
// This is to handle the situation of optional parameters.
// When a function with optional parameters is called, the entry
// PC for the method isn't necessarily 0.
-fn gen_pc_guard(asm: &mut Assembler, iseq: IseqPtr, insn_idx: u16) {
+pub fn gen_entry_chain_guard(
+ asm: &mut Assembler,
+ ocb: &mut OutlinedCb,
+ iseq: IseqPtr,
+ insn_idx: u16,
+) -> Option<PendingEntryRef> {
+ let entry = new_pending_entry();
+ let stub_addr = gen_entry_stub(entry.uninit_entry.as_ptr() as usize, ocb)?;
+
let pc_opnd = Opnd::mem(64, CFP, RUBY_OFFSET_CFP_PC);
let expected_pc = unsafe { rb_iseq_pc_at_idx(iseq, insn_idx.into()) };
let expected_pc_opnd = Opnd::const_ptr(expected_pc as *const u8);
+ asm.comment("guard expected PC");
asm.cmp(pc_opnd, expected_pc_opnd);
- let pc_match = asm.new_label("pc_match");
- asm.je(pc_match);
-
- // We're not starting at the first PC, so we need to exit.
- gen_counter_incr!(asm, leave_start_pc_non_zero);
-
- asm.cpop_into(SP);
- asm.cpop_into(EC);
- asm.cpop_into(CFP);
-
- asm.frame_teardown();
-
- asm.cret(Qundef.into());
-
- // PC should match the expected insn_idx
- asm.write_label(pc_match);
+ asm.mark_entry_start(&entry);
+ asm.jne(stub_addr.into());
+ asm.mark_entry_end(&entry);
+ return Some(entry);
}
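
An aside on the pattern above: new_pending_entry() hands the assembler a reference-counted PendingEntry so that mark_entry_start()/mark_entry_end() can record where the guard's code lands, and the prologue later takes back sole ownership before committing the entry to the ISEQ payload (see the Rc::try_unwrap call further down in this diff). The stand-alone Rust sketch below illustrates that shared-then-unique lifecycle only; the struct fields and the addresses are assumptions for illustration, not the real PendingEntry from core.rs.

use std::cell::Cell;
use std::rc::Rc;

// Illustrative stand-in for core.rs's PendingEntry (fields are assumptions).
struct PendingEntry {
    start_addr: Cell<Option<usize>>, // set by something like mark_entry_start()
    end_addr: Cell<Option<usize>>,   // set by something like mark_entry_end()
}

fn new_pending_entry() -> Rc<PendingEntry> {
    Rc::new(PendingEntry { start_addr: Cell::new(None), end_addr: Cell::new(None) })
}

fn main() {
    let entry = new_pending_entry();
    // While assembling, clones of the Rc record the guard's start/end addresses.
    entry.start_addr.set(Some(0x1000));
    entry.end_addr.set(Some(0x1010));
    // After compilation no other clone remains, so the prologue can take sole
    // ownership and commit the entry (mirrors the Rc::try_unwrap call below).
    let entry = Rc::try_unwrap(entry).ok().expect("PendingEntry should be unique");
    assert_eq!(entry.end_addr.get(), Some(0x1010));
}
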
/// Compile an interpreter entry block to be inserted into an iseq
/// Returns None if compilation fails.
-pub fn gen_entry_prologue(cb: &mut CodeBlock, iseq: IseqPtr, insn_idx: u16) -> Option<CodePtr> {
+pub fn gen_entry_prologue(cb: &mut CodeBlock, ocb: &mut OutlinedCb, iseq: IseqPtr, insn_idx: u16) -> Option<CodePtr> {
let code_ptr = cb.get_write_ptr();
let mut asm = Assembler::new();
@@ -685,10 +684,13 @@ pub fn gen_entry_prologue(cb: &mut CodeBlock, iseq: IseqPtr, insn_idx: u16) -> O
// different location depending on the optional parameters. If an iseq
// has optional parameters, we'll add a runtime check that the PC we've
// compiled for is the same PC that the interpreter wants us to run with.
- // If they don't match, then we'll take a side exit.
- if unsafe { get_iseq_flags_has_opt(iseq) } {
- gen_pc_guard(&mut asm, iseq, insn_idx);
- }
+ // If they don't match, then we'll jump to an entry stub and generate
+ // another PC check and entry there.
+ let pending_entry = if unsafe { get_iseq_flags_has_opt(iseq) } {
+ Some(gen_entry_chain_guard(&mut asm, ocb, iseq, insn_idx)?)
+ } else {
+ None
+ };
asm.compile(cb);
@@ -700,6 +702,12 @@ pub fn gen_entry_prologue(cb: &mut CodeBlock, iseq: IseqPtr, insn_idx: u16) -> O
for page in cb.addrs_to_pages(code_ptr, cb.get_write_ptr()) {
iseq_payload.pages.insert(page);
}
+ // Write an entry to the heap and push it to the ISEQ
+ if let Some(pending_entry) = pending_entry {
+ let pending_entry = Rc::try_unwrap(pending_entry)
+ .ok().expect("PendingEntry should be unique");
+ iseq_payload.entries.push(pending_entry.into_entry());
+ }
Some(code_ptr)
}
}
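
A note on the Some(gen_entry_chain_guard(...)?) line above: the ? operator propagates a None from gen_entry_chain_guard (for example, when the entry stub cannot be allocated) out of gen_entry_prologue itself, so a failed guard aborts the whole prologue rather than leaving a half-built entry behind. A minimal, self-contained Rust sketch of that control flow, with made-up names standing in for the real functions:

// Stand-in for gen_entry_chain_guard: may fail and return None.
fn guard(ok: bool) -> Option<u32> {
    if ok { Some(41) } else { None }
}

// Stand-in for gen_entry_prologue: only guards when `has_opt` is true.
fn prologue(has_opt: bool, guard_ok: bool) -> Option<Option<u32>> {
    let pending_entry = if has_opt {
        Some(guard(guard_ok)? + 1) // `?` returns None from prologue() itself
    } else {
        None // no optional parameters, no guard needed
    };
    Some(pending_entry)
}

fn main() {
    assert_eq!(prologue(true, true), Some(Some(42)));  // guard emitted
    assert_eq!(prologue(true, false), None);           // stub allocation failed
    assert_eq!(prologue(false, false), Some(None));    // no guard needed
}
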
@@ -7864,6 +7872,9 @@ pub struct CodegenGlobals {
// For servicing branch stubs
branch_stub_hit_trampoline: CodePtr,
+ // For servicing entry stubs
+ entry_stub_hit_trampoline: CodePtr,
+
// Code for full logic of returning from C method and exiting to the interpreter
outline_full_cfunc_return_pos: CodePtr,
@@ -7901,7 +7912,6 @@ impl CodegenGlobals {
#[cfg(not(test))]
let (mut cb, mut ocb) = {
use std::cell::RefCell;
- use std::rc::Rc;
let virt_block: *mut u8 = unsafe { rb_yjit_reserve_addr_space(mem_size as u32) };
@@ -7949,6 +7959,7 @@ impl CodegenGlobals {
let stub_exit_code = gen_code_for_exit_from_stub(&mut ocb);
let branch_stub_hit_trampoline = gen_branch_stub_hit_trampoline(&mut ocb);
+ let entry_stub_hit_trampoline = gen_entry_stub_hit_trampoline(&mut ocb);
// Generate full exit code for C func
let cfunc_exit_code = gen_full_cfunc_return(&mut ocb);
@@ -7967,6 +7978,7 @@ impl CodegenGlobals {
stub_exit_code: stub_exit_code,
outline_full_cfunc_return_pos: cfunc_exit_code,
branch_stub_hit_trampoline,
+ entry_stub_hit_trampoline,
global_inval_patches: Vec::new(),
method_codegen_table: HashMap::new(),
ocb_pages,
@@ -8105,6 +8117,10 @@ impl CodegenGlobals {
CodegenGlobals::get_instance().branch_stub_hit_trampoline
}
+ pub fn get_entry_stub_hit_trampoline() -> CodePtr {
+ CodegenGlobals::get_instance().entry_stub_hit_trampoline
+ }
+
pub fn look_up_codegen_method(method_serial: usize) -> Option<MethodGenFn> {
let table = &CodegenGlobals::get_instance().method_codegen_table;
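
Finally, on the new entry_stub_hit_trampoline field and its getter: like the existing branch_stub_hit_trampoline, it lets every entry stub stay tiny by sharing one routine that does the heavy lifting (saving state and calling back into the compiler) when a stub is hit for the first time. The self-contained sketch below illustrates only that shared-trampoline idea; the function names and the HashMap are stand-ins for illustration, not YJIT's actual stub machinery.

use std::collections::HashMap;

// Stand-in for the shared trampoline whose address is kept in CodegenGlobals:
// every entry stub forwards just its own identity here.
fn entry_stub_hit(entry_id: usize, compiled: &mut HashMap<usize, String>) -> String {
    // In YJIT this is roughly where the compiler would lazily generate real
    // entry code for this entry and patch the stub to jump to it.
    compiled
        .entry(entry_id)
        .or_insert_with(|| format!("entry code for entry #{}", entry_id))
        .clone()
}

fn main() {
    let mut compiled = HashMap::new();
    // Two different stubs "hitting" the same shared routine:
    println!("{}", entry_stub_hit(0, &mut compiled));
    println!("{}", entry_stub_hit(1, &mut compiled));
    // A second hit on entry 0 reuses the code generated the first time.
    println!("{}", entry_stub_hit(0, &mut compiled));
}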